lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
src/an_ok_binary_tree/src/lib.rs
shen-jinhao/Rust-With-Linked-Lists
924b6fe896192d1e3fc4757080027ae12baad28a
use an_ok_stack::List as Stack; use an_unsafe_queue::List as Queue; use std::fmt::Debug; #[derive(Clone)] pub struct BinaryTree<T> { root: Link<T>, } type Link<T> = Option<Box<Node<T>>>; #[derive(Eq, PartialEq, Clone)] struct Node<T> { elem: T, left: Link<T>, right: Link<T>, } impl<T> Node<T> { pub fn new(elem: T) -> Self { Self { elem, left: None, right: None, } } } impl<T: Debug + PartialEq + Clone> BinaryTree<T> { pub fn new(val: &[T], invalid: T) -> Self { let mut index: usize = 0; Self { root: Self::create(val, &invalid, &mut index) } } fn create(val: &[T], invalid: &T, index: &mut usize) -> Link<T> { let mut new_root: Link<T> = None; let cur = *index; if val[cur] != *invalid { new_root = Some(Box::new(Node::new(val[cur].clone()))); *index += 1; if let Some(node) = new_root.as_mut() { node.left = Self::create(val, invalid, index); } *index += 1; if let Some(node) = new_root.as_mut() { node.right = Self::create(val, invalid, index); } } new_root } pub fn prev_orer(&self) -> Vec<T>{ let mut res = Vec::new(); Self::prev_order_help(&self.root, &mut res); res } fn prev_order_help(root: &Link<T>, res: &mut Vec<T>){ if root.is_none() { return; } let node = root.as_ref().unwrap(); res.push(node.elem.clone()); Self::prev_order_help(&node.left, res); Self::prev_order_help(&node.right, res); } pub fn prev_order_no_r(&self) -> Vec<T>{ let mut res = Vec::new(); let mut stack = Stack::new(); let mut cur = self.root.as_ref(); while cur.is_some() || !stack.is_empty() { while cur.is_some() { let node = cur.unwrap(); res.push(node.elem.clone()); stack.push(node); cur = node.left.as_ref(); } cur = stack.pop().and_then(|node| { node.right.as_ref() }) } res } pub fn in_order(&self) -> Vec<T> { let mut res = Vec::new(); Self::in_order_help(&self.root, &mut res); res } fn in_order_help(root: &Link<T>, res: &mut Vec<T>) { if root.is_none() { return; } let node = root.as_ref().unwrap(); Self::in_order_help(&node.left, res); res.push(node.elem.clone()); Self::in_order_help(&node.right, 
res); } pub fn in_order_no_r(&self) -> Vec<T>{ let mut res = Vec::new(); let mut stack = Stack::new(); let mut cur = self.root.as_ref(); while cur.is_some() || !stack.is_empty() { while cur.is_some() { let node = cur.unwrap(); stack.push(node); cur = node.left.as_ref(); } cur = stack.pop().and_then(|node| { res.push(node.elem.clone()); node.right.as_ref() }) } res } pub fn post_order(&self) -> Vec<T>{ let mut res = Vec::new(); Self::post_order_help(&self.root, &mut res); res } fn post_order_help(root: &Link<T>, res: &mut Vec<T>) { if root.is_none() { return; } let node = root.as_ref().unwrap(); Self::post_order_help(&node.left, res); Self::post_order_help(&node.right, res); res.push(node.elem.clone()); } pub fn post_order_no_r(&self) -> Vec<T>{ let mut res = Vec::new(); let mut stack = Stack::new(); let mut cur = self.root.as_ref(); let mut prev: Option<&Box<Node<T>>> = None; while cur.is_some() || !stack.is_empty() { while cur.is_some() { let node = cur.unwrap(); stack.push(node); cur = node.left.as_ref(); } let top = stack.peek().unwrap(); if top.right.is_none() || top.right.as_ref() == prev { res.push(top.elem.clone()); prev = Some(top); let _ = stack.pop(); } else { cur = top.right.as_ref(); } } res } pub fn level_order(&self) -> Vec<T>{ let mut res = Vec::new(); let mut queue = Queue::new(); if self.root.is_some() { queue.push(self.root.as_ref().unwrap()); } while !queue.is_empty() { if let Some(node) = queue.pop() { res.push(node.elem.clone()); if node.left.is_some() { queue.push(node.left.as_ref().unwrap()); } if node.right.is_some() { queue.push(node.right.as_ref().unwrap()); } } } res } pub fn tree_node_size(&self) -> usize { let mut size: usize = 0; Self::tree_node_size_help(&self.root, &mut size); size } fn tree_node_size_help(root: &Link<T>, size: &mut usize) { if root.is_none() { return; } Self::tree_node_size_help(&root.as_ref().unwrap().left, size); *size += 1; Self::tree_node_size_help(&root.as_ref().unwrap().right, size); } pub fn 
tree_leaf_size(&self) -> usize { Self::tree_leaf_size_help(&self.root) } fn tree_leaf_size_help(root: &Link<T>) -> usize{ if root.is_none() { return 0; } if root.as_ref().unwrap().left.is_none() && root.as_ref().unwrap().right.is_none() { return 1; } let left = &root.as_ref().unwrap().left; let right = &root.as_ref().unwrap().right; Self::tree_leaf_size_help(left) + Self::tree_leaf_size_help(right) } pub fn tree_height(&self) -> usize { Self::tree_height_help(&self.root) } fn tree_height_help(root: &Link<T>) -> usize { if root.is_none() { return 0; } let left_height = Self::tree_height_help(&root.as_ref().unwrap().left); let right_height = Self::tree_height_help(&root.as_ref().unwrap().right); return if left_height > right_height { left_height + 1 } else { right_height + 1 } } pub fn get_level_node_size(&self, level: usize) -> Option<usize> { let height = self.tree_height(); if level > height || level == 0 { return None; } Some(Self::get_level_node_size_help(&self.root, level)) } fn get_level_node_size_help(root: &Link<T>, level: usize) -> usize{ if root.is_none() { return 0; } if level == 1 { return 1; } let left = &root.as_ref().unwrap().left; let right = &root.as_ref().unwrap().right; Self::get_level_node_size_help(left, level - 1) + Self::get_level_node_size_help(right, level - 1) } pub fn find(&self, key: T) -> bool { Self::find_help(&self.root, key) } fn find_help(root: &Link<T>, key: T) -> bool { if root.is_none() { return false; } if root.as_ref().unwrap().elem == key { return true; } let left = &root.as_ref().unwrap().left; let right = &root.as_ref().unwrap().right; let res = Self::find_help(left, key.clone()) || Self::find_help(right, key.clone()); if res { return res; } false } pub fn is_complete_tree(&self) -> bool { let mut queue = Queue::new(); if self.root.is_some() { queue.push(self.root.as_ref().unwrap()); } let mut flag = true; while !queue.is_empty() { let head = queue.pop().unwrap(); if head.left.is_some() { if !flag { return false; } 
queue.push(head.left.as_ref().unwrap()); } else { flag = false; } if head.right.is_some() { if !flag { return false; } queue.push(head.right.as_ref().unwrap()); } else { flag = false; } } true } pub fn destroy_tree(self) { Self::destroy_tree_help(self.root); } fn destroy_tree_help(root: Link<T>) { if root.is_none() { return; } let cur = root.unwrap(); Self::destroy_tree_help(cur.left); Self::destroy_tree_help(cur.right); } } #[cfg(test)] mod tests { use crate::BinaryTree; #[test] fn basic() { let array = ['A', 'B', '#', 'D', '#', '#', 'C' ,'#', '#']; let tree = BinaryTree::new(&array, '#'); assert_eq!(tree.prev_orer(), vec!['A', 'B', 'D', 'C']); assert_eq!(tree.prev_order_no_r(), vec!['A', 'B', 'D', 'C']); assert_eq!(tree.in_order(), vec!['B', 'D', 'A', 'C']); assert_eq!(tree.in_order_no_r(), vec!['B', 'D', 'A', 'C']); assert_eq!(tree.post_order(), vec!['D', 'B', 'C', 'A']); assert_eq!(tree.post_order_no_r(), vec!['D', 'B', 'C', 'A']); assert_eq!(tree.level_order(), vec!['A', 'B', 'C', 'D']); let array = ["Alian".to_string(), "Bob".to_string(), "no".to_string() , "David".to_string(), "no".to_string(), "no".to_string(), "Clion".to_string() ,"no".to_string(), "no".to_string()]; let tree = BinaryTree::new(&array, "no".to_string()); assert_eq!(tree.prev_orer(), vec!["Alian", "Bob", "David", "Clion"]); assert_eq!(tree.prev_order_no_r(), vec!["Alian", "Bob", "David", "Clion"]); assert_eq!(tree.in_order(), vec!["Bob", "David", "Alian", "Clion"]); assert_eq!(tree.in_order_no_r(), vec!["Bob", "David", "Alian", "Clion"]); assert_eq!(tree.post_order(), vec!["David", "Bob", "Clion", "Alian"]); assert_eq!(tree.post_order_no_r(), vec!["David", "Bob", "Clion", "Alian"]); assert_eq!(tree.level_order(), vec!["Alian", "Bob", "Clion", "David"]); } /* 1 / \ 2 3 / / \ 4 5 6 */ #[test] fn size_test() { let array = [1, 2, 4,i32::MIN, i32::MIN, i32::MIN ,3, 5, i32::MIN, i32::MIN, 6, i32::MIN, i32::MIN]; let tree = BinaryTree::new(&array, i32::MIN); assert_eq!(tree.prev_order_no_r(), 
vec![1, 2, 4, 3, 5, 6]); assert_eq!(tree.in_order_no_r(), vec![4,2,1,5,3,6]); assert_eq!(tree.post_order_no_r(), vec![4,2,5,6,3,1]); assert_eq!(tree.level_order(), vec![1,2,3,4,5,6]); assert_eq!(tree.tree_node_size(), 6); assert_eq!(tree.tree_leaf_size(), 3); assert_eq!(tree.tree_height(), 3); assert_eq!(tree.get_level_node_size(0), None); assert_eq!(tree.get_level_node_size(1), Some(1)); assert_eq!(tree.get_level_node_size(2), Some(2)); assert_eq!(tree.get_level_node_size(3), Some(3)); assert_eq!(tree.get_level_node_size(4), None); assert_eq!(tree.find(0), false); assert_eq!(tree.find(1), true); assert_eq!(tree.find(2), true); assert_eq!(tree.find(3), true); assert_eq!(tree.find(4), true); assert_eq!(tree.find(5), true); assert_eq!(tree.find(6), true); assert_eq!(tree.find(7), false); assert_eq!(tree.is_complete_tree(), false); } #[test] fn is_complete_test() { let array = [1, 2, 4,i32::MIN, i32::MIN, 5, i32::MIN, i32::MIN ,3, 6, i32::MIN, i32::MIN, 7, i32::MIN, i32::MIN]; let tree = BinaryTree::new(&array, i32::MIN); assert_eq!(tree.is_complete_tree(), true); tree.destroy_tree(); } }
use an_ok_stack::List as Stack; use an_unsafe_queue::List as Queue; use std::fmt::Debug; #[derive(Clone)] pub struct BinaryTree<T> { root: Link<T>, } type Link<T> = Option<Box<Node<T>>>; #[derive(Eq, PartialEq, Clone)] struct Node<T> { elem: T, left: Link<T>, right: Link<T>, } impl<T> Node<
()); } let mut flag = true; while !queue.is_empty() { let head = queue.pop().unwrap(); if head.left.is_some() { if !flag { return false; } queue.push(head.left.as_ref().unwrap()); } else { flag = false; } if head.right.is_some() { if !flag { return false; } queue.push(head.right.as_ref().unwrap()); } else { flag = false; } } true } pub fn destroy_tree(self) { Self::destroy_tree_help(self.root); } fn destroy_tree_help(root: Link<T>) { if root.is_none() { return; } let cur = root.unwrap(); Self::destroy_tree_help(cur.left); Self::destroy_tree_help(cur.right); } } #[cfg(test)] mod tests { use crate::BinaryTree; #[test] fn basic() { let array = ['A', 'B', '#', 'D', '#', '#', 'C' ,'#', '#']; let tree = BinaryTree::new(&array, '#'); assert_eq!(tree.prev_orer(), vec!['A', 'B', 'D', 'C']); assert_eq!(tree.prev_order_no_r(), vec!['A', 'B', 'D', 'C']); assert_eq!(tree.in_order(), vec!['B', 'D', 'A', 'C']); assert_eq!(tree.in_order_no_r(), vec!['B', 'D', 'A', 'C']); assert_eq!(tree.post_order(), vec!['D', 'B', 'C', 'A']); assert_eq!(tree.post_order_no_r(), vec!['D', 'B', 'C', 'A']); assert_eq!(tree.level_order(), vec!['A', 'B', 'C', 'D']); let array = ["Alian".to_string(), "Bob".to_string(), "no".to_string() , "David".to_string(), "no".to_string(), "no".to_string(), "Clion".to_string() ,"no".to_string(), "no".to_string()]; let tree = BinaryTree::new(&array, "no".to_string()); assert_eq!(tree.prev_orer(), vec!["Alian", "Bob", "David", "Clion"]); assert_eq!(tree.prev_order_no_r(), vec!["Alian", "Bob", "David", "Clion"]); assert_eq!(tree.in_order(), vec!["Bob", "David", "Alian", "Clion"]); assert_eq!(tree.in_order_no_r(), vec!["Bob", "David", "Alian", "Clion"]); assert_eq!(tree.post_order(), vec!["David", "Bob", "Clion", "Alian"]); assert_eq!(tree.post_order_no_r(), vec!["David", "Bob", "Clion", "Alian"]); assert_eq!(tree.level_order(), vec!["Alian", "Bob", "Clion", "David"]); } /* 1 / \ 2 3 / / \ 4 5 6 */ #[test] fn size_test() { let array = [1, 2, 4,i32::MIN, i32::MIN, 
i32::MIN ,3, 5, i32::MIN, i32::MIN, 6, i32::MIN, i32::MIN]; let tree = BinaryTree::new(&array, i32::MIN); assert_eq!(tree.prev_order_no_r(), vec![1, 2, 4, 3, 5, 6]); assert_eq!(tree.in_order_no_r(), vec![4,2,1,5,3,6]); assert_eq!(tree.post_order_no_r(), vec![4,2,5,6,3,1]); assert_eq!(tree.level_order(), vec![1,2,3,4,5,6]); assert_eq!(tree.tree_node_size(), 6); assert_eq!(tree.tree_leaf_size(), 3); assert_eq!(tree.tree_height(), 3); assert_eq!(tree.get_level_node_size(0), None); assert_eq!(tree.get_level_node_size(1), Some(1)); assert_eq!(tree.get_level_node_size(2), Some(2)); assert_eq!(tree.get_level_node_size(3), Some(3)); assert_eq!(tree.get_level_node_size(4), None); assert_eq!(tree.find(0), false); assert_eq!(tree.find(1), true); assert_eq!(tree.find(2), true); assert_eq!(tree.find(3), true); assert_eq!(tree.find(4), true); assert_eq!(tree.find(5), true); assert_eq!(tree.find(6), true); assert_eq!(tree.find(7), false); assert_eq!(tree.is_complete_tree(), false); } #[test] fn is_complete_test() { let array = [1, 2, 4,i32::MIN, i32::MIN, 5, i32::MIN, i32::MIN ,3, 6, i32::MIN, i32::MIN, 7, i32::MIN, i32::MIN]; let tree = BinaryTree::new(&array, i32::MIN); assert_eq!(tree.is_complete_tree(), true); tree.destroy_tree(); } }
T> { pub fn new(elem: T) -> Self { Self { elem, left: None, right: None, } } } impl<T: Debug + PartialEq + Clone> BinaryTree<T> { pub fn new(val: &[T], invalid: T) -> Self { let mut index: usize = 0; Self { root: Self::create(val, &invalid, &mut index) } } fn create(val: &[T], invalid: &T, index: &mut usize) -> Link<T> { let mut new_root: Link<T> = None; let cur = *index; if val[cur] != *invalid { new_root = Some(Box::new(Node::new(val[cur].clone()))); *index += 1; if let Some(node) = new_root.as_mut() { node.left = Self::create(val, invalid, index); } *index += 1; if let Some(node) = new_root.as_mut() { node.right = Self::create(val, invalid, index); } } new_root } pub fn prev_orer(&self) -> Vec<T>{ let mut res = Vec::new(); Self::prev_order_help(&self.root, &mut res); res } fn prev_order_help(root: &Link<T>, res: &mut Vec<T>){ if root.is_none() { return; } let node = root.as_ref().unwrap(); res.push(node.elem.clone()); Self::prev_order_help(&node.left, res); Self::prev_order_help(&node.right, res); } pub fn prev_order_no_r(&self) -> Vec<T>{ let mut res = Vec::new(); let mut stack = Stack::new(); let mut cur = self.root.as_ref(); while cur.is_some() || !stack.is_empty() { while cur.is_some() { let node = cur.unwrap(); res.push(node.elem.clone()); stack.push(node); cur = node.left.as_ref(); } cur = stack.pop().and_then(|node| { node.right.as_ref() }) } res } pub fn in_order(&self) -> Vec<T> { let mut res = Vec::new(); Self::in_order_help(&self.root, &mut res); res } fn in_order_help(root: &Link<T>, res: &mut Vec<T>) { if root.is_none() { return; } let node = root.as_ref().unwrap(); Self::in_order_help(&node.left, res); res.push(node.elem.clone()); Self::in_order_help(&node.right, res); } pub fn in_order_no_r(&self) -> Vec<T>{ let mut res = Vec::new(); let mut stack = Stack::new(); let mut cur = self.root.as_ref(); while cur.is_some() || !stack.is_empty() { while cur.is_some() { let node = cur.unwrap(); stack.push(node); cur = node.left.as_ref(); } cur = 
stack.pop().and_then(|node| { res.push(node.elem.clone()); node.right.as_ref() }) } res } pub fn post_order(&self) -> Vec<T>{ let mut res = Vec::new(); Self::post_order_help(&self.root, &mut res); res } fn post_order_help(root: &Link<T>, res: &mut Vec<T>) { if root.is_none() { return; } let node = root.as_ref().unwrap(); Self::post_order_help(&node.left, res); Self::post_order_help(&node.right, res); res.push(node.elem.clone()); } pub fn post_order_no_r(&self) -> Vec<T>{ let mut res = Vec::new(); let mut stack = Stack::new(); let mut cur = self.root.as_ref(); let mut prev: Option<&Box<Node<T>>> = None; while cur.is_some() || !stack.is_empty() { while cur.is_some() { let node = cur.unwrap(); stack.push(node); cur = node.left.as_ref(); } let top = stack.peek().unwrap(); if top.right.is_none() || top.right.as_ref() == prev { res.push(top.elem.clone()); prev = Some(top); let _ = stack.pop(); } else { cur = top.right.as_ref(); } } res } pub fn level_order(&self) -> Vec<T>{ let mut res = Vec::new(); let mut queue = Queue::new(); if self.root.is_some() { queue.push(self.root.as_ref().unwrap()); } while !queue.is_empty() { if let Some(node) = queue.pop() { res.push(node.elem.clone()); if node.left.is_some() { queue.push(node.left.as_ref().unwrap()); } if node.right.is_some() { queue.push(node.right.as_ref().unwrap()); } } } res } pub fn tree_node_size(&self) -> usize { let mut size: usize = 0; Self::tree_node_size_help(&self.root, &mut size); size } fn tree_node_size_help(root: &Link<T>, size: &mut usize) { if root.is_none() { return; } Self::tree_node_size_help(&root.as_ref().unwrap().left, size); *size += 1; Self::tree_node_size_help(&root.as_ref().unwrap().right, size); } pub fn tree_leaf_size(&self) -> usize { Self::tree_leaf_size_help(&self.root) } fn tree_leaf_size_help(root: &Link<T>) -> usize{ if root.is_none() { return 0; } if root.as_ref().unwrap().left.is_none() && root.as_ref().unwrap().right.is_none() { return 1; } let left = &root.as_ref().unwrap().left; let 
right = &root.as_ref().unwrap().right; Self::tree_leaf_size_help(left) + Self::tree_leaf_size_help(right) } pub fn tree_height(&self) -> usize { Self::tree_height_help(&self.root) } fn tree_height_help(root: &Link<T>) -> usize { if root.is_none() { return 0; } let left_height = Self::tree_height_help(&root.as_ref().unwrap().left); let right_height = Self::tree_height_help(&root.as_ref().unwrap().right); return if left_height > right_height { left_height + 1 } else { right_height + 1 } } pub fn get_level_node_size(&self, level: usize) -> Option<usize> { let height = self.tree_height(); if level > height || level == 0 { return None; } Some(Self::get_level_node_size_help(&self.root, level)) } fn get_level_node_size_help(root: &Link<T>, level: usize) -> usize{ if root.is_none() { return 0; } if level == 1 { return 1; } let left = &root.as_ref().unwrap().left; let right = &root.as_ref().unwrap().right; Self::get_level_node_size_help(left, level - 1) + Self::get_level_node_size_help(right, level - 1) } pub fn find(&self, key: T) -> bool { Self::find_help(&self.root, key) } fn find_help(root: &Link<T>, key: T) -> bool { if root.is_none() { return false; } if root.as_ref().unwrap().elem == key { return true; } let left = &root.as_ref().unwrap().left; let right = &root.as_ref().unwrap().right; let res = Self::find_help(left, key.clone()) || Self::find_help(right, key.clone()); if res { return res; } false } pub fn is_complete_tree(&self) -> bool { let mut queue = Queue::new(); if self.root.is_some() { queue.push(self.root.as_ref().unwrap
random
[ { "content": "struct Node {\n\n elem: i32,\n\n next: Link,\n\n}\n\n\n", "file_path": "src/a_bad_stack/src/lib.rs", "rank": 0, "score": 126590.78467837354 }, { "content": "struct Node<T> {\n\n elem: T,\n\n next: Link<T>,\n\n}\n\n\n\nimpl<T> List<T> {\n\n pub fn new() -> Self {\n\n List {\n\n head: std::ptr::null_mut(),\n\n tail: std::ptr::null_mut(),\n\n }\n\n }\n\n\n\n pub fn push(&mut self, elem: T) {\n\n unsafe {\n\n let new_tail = Box::into_raw(Box::new(Node {\n\n elem,\n\n next: std::ptr::null_mut(),\n\n }));\n\n if !self.tail.is_null() {\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 1, "score": 120792.40226254502 }, { "content": "struct Node<T> {\n\n elem: T,\n\n next: Link<T>,\n\n}\n\n\n\nimpl<T> List<T> {\n\n pub fn new() -> Self {\n\n List {\n\n head: None,\n\n }\n\n }\n\n\n\n pub fn push(&mut self, elem: T) {\n\n let new_node = Box::new(Node {\n\n elem,\n\n next: self.head.take(),\n\n });\n\n self.head = Some(new_node);\n\n }\n\n\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 2, "score": 120574.44810382854 }, { "content": "struct Node<T> {\n\n elem: T,\n\n next: Link<T>,\n\n}\n\n\n\nimpl<T> List<T> {\n\n pub fn new() -> Self {\n\n List {\n\n head: None,\n\n }\n\n }\n\n\n\n pub fn prepend(&self, elem: T) -> List<T>{\n\n let new_node = Rc::new(Node {\n\n elem,\n\n next: self.head.clone(),\n\n });\n\n List {\n\n head: Some(new_node)\n\n }\n", "file_path": "src/a_persistent_stack/src/lib.rs", "rank": 3, "score": 120574.44810382854 }, { "content": "type Link<T> = *mut Node<T>;\n\n\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 4, "score": 107242.29856947143 }, { "content": "type Link<T> = Option<Rc<Node<T>>>;\n\n\n", "file_path": "src/a_persistent_stack/src/lib.rs", "rank": 5, "score": 103064.70369839028 }, { "content": "type Link<T> = Option<Box<Node<T>>>;\n\n\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 6, "score": 103064.70369839028 }, { "content": "struct Node<T> {\n\n elem: T,\n\n next: 
Link<T>,\n\n}\n\n\n\nimpl<T> Stack<T> {\n\n pub fn new() -> Self {\n\n Stack {\n\n head: None,\n\n }\n\n }\n\n\n\n pub fn push(&mut self, elem: T) {\n\n let new_node = Box::new(Node {\n\n elem,\n\n next: None,\n\n });\n\n self.push_node(new_node);\n\n }\n\n\n", "file_path": "src/silly1/src/lib.rs", "rank": 7, "score": 90742.06868130618 }, { "content": "struct Node<T> {\n\n next: Option<NonNull<Node<T>>>,\n\n prev: Option<NonNull<Node<T>>>,\n\n elem: T,\n\n}\n\n\n\nimpl<T> Node<T> {\n\n fn new(elem: T) -> Self {\n\n Node {\n\n next: None,\n\n prev: None,\n\n elem,\n\n }\n\n }\n\n}\n\n\n\nimpl<T> List<T> {\n\n #[inline]\n\n fn push_front_node(&mut self, mut node: Box<Node<T>>) {\n\n unsafe {\n", "file_path": "src/an_ok_nonnull_deque/src/lib.rs", "rank": 9, "score": 86483.01157860423 }, { "content": "struct Node<T> {\n\n elem: T,\n\n next: Option<Box<Node<T>>>,\n\n prev: *mut Node<T>,\n\n}\n\n\n\nimpl<T> Node<T> {\n\n pub fn new(elem: T) -> Self {\n\n Node {\n\n elem,\n\n next: None,\n\n prev: std::ptr::null_mut(),\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Default> Default for Node<T> {\n\n fn default() -> Self {\n\n Node::new(T::default())\n\n }\n", "file_path": "src/an_ok_unsafe_deque/src/lib.rs", "rank": 10, "score": 86483.01157860423 }, { "content": "struct Node<T> {\n\n elem: T,\n\n next: Link<T>,\n\n prev: Link<T>,\n\n}\n\n\n\nimpl<T> Node<T> {\n\n pub fn new(elem: T) -> Rc<RefCell<Node<T>>> {\n\n Rc::new(RefCell::new(Node {\n\n elem,\n\n next: None,\n\n prev: None,\n\n }))\n\n }\n\n}\n\n\n\nimpl<T> List<T> {\n\n pub fn new() -> Self {\n\n List {\n\n head: None,\n", "file_path": "src/a_bad_safe_deque/src/lib.rs", "rank": 11, "score": 86483.01157860423 }, { "content": "type Link<T> = Option<Box<Node<T>>>;\n\n\n", "file_path": "src/silly1/src/lib.rs", "rank": 12, "score": 76518.45673847955 }, { "content": "type Link<T> = Option<Rc<RefCell<Node<T>>>>;\n\n\n", "file_path": "src/a_bad_safe_deque/src/lib.rs", "rank": 14, "score": 69020.73481402088 }, { "content": "fn 
to_string<K: PartialOrd + ToString, V: ToString>(node: &Link<K, V>) -> String {\n\n match node {\n\n None => \"Ø\".to_string(),\n\n Some(box_node) => box_node.to_string(),\n\n }\n\n}", "file_path": "src/an_ok_avl_tree/src/node.rs", "rank": 15, "score": 38390.95144850264 }, { "content": " Some(node.elem)\n\n }\n\n\n\n }\n\n }\n\n\n\n pub fn peek(&self) -> Option<&T> {\n\n unsafe {\n\n self.head.as_ref().map(|node| {\n\n &node.elem\n\n })\n\n }\n\n }\n\n\n\n pub fn peek_mut(&mut self) -> Option<&mut T> {\n\n unsafe {\n\n self.head.as_mut().map(|node| {\n\n &mut node.elem\n\n })\n\n }\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 16, "score": 34576.367068477724 }, { "content": "pub struct IterMut<'a, T> {\n\n next: Option<&'a mut Node<T>>,\n\n}\n\n\n\nimpl<'a, T> Iterator for IterMut<'a, T> {\n\n type Item = &'a mut T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n unsafe {\n\n self.next.take().map(|node| {\n\n self.next = node.next.as_mut();\n\n &mut node.elem\n\n })\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::List;\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 17, "score": 34575.32809379802 }, { "content": " }\n\n}\n\n\n\npub struct Iter<'a, T> {\n\n next: Option<&'a Node<T>>,\n\n}\n\n\n\nimpl<'a, T> Iterator for Iter<'a, T> {\n\n type Item = &'a T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n unsafe {\n\n self.next.map(|node| {\n\n self.next = node.next.as_ref();\n\n &node.elem\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 18, "score": 34573.80864941828 }, { "content": "\n\npub struct List<T> {\n\n head: Link<T>,\n\n tail: *mut Node<T>,\n\n}\n\n\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 19, "score": 34570.79575031648 }, { "content": " (*self.tail).next = new_tail;\n\n }\n\n else {\n\n self.head = new_tail;\n\n }\n\n self.tail = new_tail;\n\n }\n\n }\n\n\n\n pub fn pop(&mut self) -> Option<T> {\n\n unsafe {\n\n if self.head.is_null() {\n\n 
None\n\n }\n\n else {\n\n let node = Box::from_raw(self.head);\n\n self.head = node.next;\n\n if self.head.is_null() {\n\n self.tail = std::ptr::null_mut();\n\n }\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 20, "score": 34569.917573883155 }, { "content": " *elem = 4\n\n });\n\n iter.next().map(|elem| {\n\n *elem = 5\n\n });\n\n\n\n iter.next().map(|elem| {\n\n *elem = 6\n\n });\n\n\n\n iter.next().map(|elem| {\n\n *elem = 7\n\n });\n\n\n\n let mut iter = list.iter_mut();\n\n assert_eq!(iter.next(), Some(&mut 4));\n\n assert_eq!(iter.next(), Some(&mut 5));\n\n assert_eq!(iter.next(), Some(&mut 6));\n\n assert_eq!(iter.next(), None);\n\n }\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 21, "score": 34568.8519436564 }, { "content": " }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.head.is_null()\n\n }\n\n\n\n pub fn into_iter(self) -> IntoIter<T> {\n\n IntoIter(self)\n\n }\n\n\n\n pub fn iter(&self) -> Iter<'_, T> {\n\n unsafe {\n\n Iter {\n\n next: self.head.as_ref(),\n\n }\n\n }\n\n }\n\n\n\n pub fn iter_mut(&mut self) -> IterMut<'_, T> {\n\n unsafe {\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 22, "score": 34568.17456483619 }, { "content": " for elem in list.iter_mut() {\n\n *elem *= 100;\n\n }\n\n\n\n let mut iter = list.iter();\n\n assert_eq!(iter.next(), Some(&400));\n\n assert_eq!(iter.next(), Some(&500));\n\n assert_eq!(iter.next(), Some(&600));\n\n assert_eq!(iter.next(), None);\n\n assert_eq!(iter.next(), None);\n\n\n\n assert!(list.pop() == Some(400));\n\n list.peek_mut().map(|x| *x *= 10);\n\n assert!(list.peek() == Some(&5000));\n\n list.push(7);\n\n\n\n // Drop it on the ground and let the dtor exercise itself\n\n }\n\n}", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 23, "score": 34567.351620208945 }, { "content": " IterMut {\n\n next: self.head.as_mut(),\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T> Drop for List<T> {\n\n fn drop(&mut self) {\n\n while let Some(_) = self.pop() {}\n\n }\n\n}\n\n\n\npub 
struct IntoIter<T>(List<T>);\n\n\n\nimpl<T> Iterator for IntoIter<T> {\n\n type Item = T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.0.pop()\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 24, "score": 34566.51340888881 }, { "content": "\n\n #[test]\n\n fn basics() {\n\n let mut list = List::new();\n\n assert!(list.is_empty());\n\n\n\n // Check empty list behaves right\n\n assert_eq!(list.pop(), None);\n\n\n\n // Populate list\n\n list.push(1);\n\n list.push(2);\n\n list.push(3);\n\n\n\n assert!(!list.is_empty());\n\n\n\n // Check normal removal\n\n assert_eq!(list.pop(), Some(1));\n\n assert_eq!(list.pop(), Some(2));\n\n\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 25, "score": 34566.43226169577 }, { "content": " let mut list = List::new();\n\n list.push(1);\n\n list.push(2);\n\n list.push(3);\n\n let mut iter = list.iter();\n\n assert_eq!(iter.next(), Some(&1));\n\n assert_eq!(iter.next(), Some(&2));\n\n assert_eq!(iter.next(), Some(&3));\n\n\n\n assert_eq!(list.peek(), Some(&1));\n\n }\n\n\n\n #[test]\n\n fn iter_mut() {\n\n let mut list = List::new();\n\n list.push(1);\n\n list.push(2);\n\n list.push(3);\n\n let mut iter = list.iter_mut();\n\n iter.next().map(|elem| {\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 26, "score": 34566.23785231501 }, { "content": " // Push some more just to make sure nothing's corrupted\n\n list.push(4);\n\n list.push(5);\n\n\n\n // Check normal removal\n\n assert_eq!(list.pop(), Some(3));\n\n assert_eq!(list.pop(), Some(4));\n\n\n\n // Check exhaustion\n\n assert_eq!(list.pop(), Some(5));\n\n assert_eq!(list.pop(), None);\n\n\n\n // Check the exhaustion case fixed the pointer right\n\n list.push(6);\n\n list.push(7);\n\n\n\n // Check normal removal\n\n assert_eq!(list.pop(), Some(6));\n\n assert_eq!(list.pop(), Some(7));\n\n assert_eq!(list.pop(), None);\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 27, "score": 34566.07550170776 }, { "content": "\n\n 
assert!(list.is_empty());\n\n }\n\n\n\n #[test]\n\n fn peek() {\n\n let mut list = List::new();\n\n assert_eq!(list.peek(), None);\n\n assert_eq!(list.peek_mut(), None);\n\n list.push(1); list.push(2); list.push(3);\n\n\n\n assert_eq!(list.peek(), Some(&1));\n\n assert_eq!(list.peek_mut(), Some(&mut 1));\n\n }\n\n\n\n #[test]\n\n fn peek_mut() {\n\n let mut list = List::new();\n\n assert_eq!(list.peek(), None);\n\n assert_eq!(list.peek_mut(), None);\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 28, "score": 34563.89369759983 }, { "content": " });\n\n assert_eq!(list.peek_mut(), Some(&mut 11));\n\n }\n\n\n\n #[test]\n\n fn into_iter() {\n\n let mut list = List::new();\n\n list.push(1);\n\n list.push(2);\n\n list.push(3);\n\n\n\n let mut iter = list.into_iter();\n\n assert_eq!(iter.next(), Some(1));\n\n assert_eq!(iter.next(), Some(2));\n\n assert_eq!(iter.next(), Some(3));\n\n assert_eq!(iter.next(), None);\n\n }\n\n\n\n #[test]\n\n fn iter() {\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 29, "score": 34563.89369759983 }, { "content": " list.push(\"jack\".to_string());\n\n list.push(\"marry\".to_string());\n\n list.push(\"hello\".to_string());\n\n assert_eq!(list.peek(), Some(&String::from(\"jack\")));\n\n assert_eq!(list.peek_mut(), Some(&mut String::from(\"jack\")));\n\n\n\n list.peek_mut().map(|value| {\n\n value.push_str(\" hello\");\n\n });\n\n\n\n assert_eq!(list.peek(), Some(&String::from(\"jack hello\")));\n\n assert_eq!(list.pop(), Some(String::from(\"jack hello\")));\n\n\n\n let mut list = List::new();\n\n list.push(1);\n\n list.push(2);\n\n list.push(3);\n\n assert_eq!(list.peek_mut(), Some(&mut 1));\n\n list.peek_mut().map(|value| {\n\n *value = 11;\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 30, "score": 34563.89369759983 }, { "content": "\n\n #[test]\n\n fn miri_food() {\n\n let mut list = List::new();\n\n\n\n list.push(1);\n\n list.push(2);\n\n list.push(3);\n\n\n\n assert!(list.pop() == Some(1));\n\n 
list.push(4);\n\n assert!(list.pop() == Some(2));\n\n list.push(5);\n\n\n\n assert!(list.peek() == Some(&3));\n\n list.push(6);\n\n list.peek_mut().map(|x| *x *= 10);\n\n assert!(list.peek() == Some(&30));\n\n assert!(list.pop() == Some(30));\n\n\n", "file_path": "src/an_unsafe_queue/src/lib.rs", "rank": 31, "score": 34563.89369759983 }, { "content": " }\n\n\n\n pub fn tail(&self) -> List<T> {\n\n let next = self.head.as_ref().and_then(|node| {\n\n node.next.clone()\n\n });\n\n List {\n\n head: next\n\n }\n\n }\n\n\n\n pub fn head(&self) -> Option<&T> {\n\n self.head.as_ref().map(|node| {\n\n &node.elem\n\n })\n\n }\n\n}\n\n\n\npub struct Iter<'a, T> {\n\n next: Option<&'a Node<T>>,\n", "file_path": "src/a_persistent_stack/src/lib.rs", "rank": 32, "score": 34344.450008782485 }, { "content": " pub fn pop(&mut self) -> Option<T> {\n\n self.head.take().map(|node| {\n\n self.head = node.next;\n\n node.elem\n\n })\n\n }\n\n\n\n pub fn peek(&self) -> Option<&T> {\n\n match &self.head {\n\n None => {\n\n None\n\n }\n\n Some(node) => {\n\n Some(&node.elem)\n\n }\n\n }\n\n // self.head.as_ref().map(|node|{\n\n // &node.elem\n\n // })\n\n }\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 33, "score": 34342.79629929413 }, { "content": "\n\n pub fn peek_mut(&mut self) -> Option<&mut T> {\n\n match &mut self.head {\n\n None => {\n\n None\n\n }\n\n Some(node) => {\n\n Some(&mut node.elem)\n\n }\n\n }\n\n\n\n // self.head.as_mut().map(|node| {\n\n // &mut node.elem\n\n // })\n\n }\n\n\n\n pub fn is_empty(&self) -> bool{\n\n self.head.is_none()\n\n }\n\n}\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 34, "score": 34342.14003256076 }, { "content": "}\n\n\n\nimpl<T> List<T> {\n\n pub fn iter(&self) -> Iter<'_, T> {\n\n Iter {\n\n next: self.head.as_deref(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, T> Iterator for Iter<'a, T> {\n\n type Item = &'a T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.next.take().map(|node| {\n\n self.next = node.next.as_deref();\n\n 
&node.elem\n\n })\n\n }\n\n}\n", "file_path": "src/a_persistent_stack/src/lib.rs", "rank": 35, "score": 34339.542297587075 }, { "content": "\n\nimpl<'a, T> Iterator for Iter<'a, T> {\n\n type Item = &'a T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.next.map(|node| {\n\n //self.next = node.next.as_ref().map(|node|&**node);\n\n //self.next = node.next.as_ref().map::<&Node<T>, _>(|node| &node);\n\n self.next = node.next.as_deref();\n\n &node.elem\n\n })\n\n }\n\n}\n\n\n\n//实现IterMut迭代器\n\npub struct IterMut<'a, T> {\n\n next: Option<&'a mut Node<T>>\n\n}\n\n\n\nimpl<T> List<T> {\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 36, "score": 34339.280830058575 }, { "content": " pub fn iter_mut(&mut self) -> IterMut<'_, T> {\n\n IterMut {\n\n next: self.head.as_deref_mut()\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, T> Iterator for IterMut<'a, T> {\n\n type Item = &'a mut T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.next.take().map(|node| {\n\n self.next = node.next.as_deref_mut();\n\n &mut node.elem\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 37, "score": 34338.87386283817 }, { "content": "use std::mem;\n\n\n\npub struct List {\n\n head: Link,\n\n}\n\n\n", "file_path": "src/a_bad_stack/src/lib.rs", "rank": 38, "score": 34337.79025111796 }, { "content": " type Item = T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.0.pop()\n\n }\n\n}\n\n\n\n//实现Iter迭代器\n\npub struct Iter<'a, T> {\n\n next: Option<&'a Node<T>>,\n\n}\n\n\n\nimpl<T> List<T> {\n\n pub fn iter(&self) -> Iter<'_, T> {\n\n Iter {\n\n next: self.head.as_ref().map(|node| &**node)\n\n //next: self.head.as_deref()\n\n }\n\n }\n\n}\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 39, "score": 34337.48166504198 }, { "content": "use std::rc::Rc;\n\n\n\npub struct List<T> {\n\n head: Link<T>,\n\n}\n\n\n", "file_path": "src/a_persistent_stack/src/lib.rs", "rank": 40, "score": 34337.45454615887 }, { 
"content": "\n\nimpl<T> Drop for List<T> {\n\n fn drop(&mut self) {\n\n let mut head = self.head.take();\n\n while let Some(node) = head {\n\n if let Ok(mut node) = Rc::try_unwrap(node) {\n\n head = node.next.take()\n\n }\n\n else {\n\n break;\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::List;\n\n\n\n #[test]\n", "file_path": "src/a_persistent_stack/src/lib.rs", "rank": 41, "score": 34337.29910537873 }, { "content": " match mem::replace(&mut self.head, Link::Empty) {\n\n Link::Empty => {\n\n None\n\n }\n\n Link::More(node) => {\n\n self.head = node.next;\n\n Some(node.elem)\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for List {\n\n fn drop(&mut self) {\n\n let mut cur_link = mem::replace(&mut self.head, Link::Empty);\n\n while let Link::More(mut boxed_node) = cur_link {\n\n cur_link = mem::replace(&mut boxed_node.next, Link::Empty);\n\n }\n\n }\n\n}\n", "file_path": "src/a_bad_stack/src/lib.rs", "rank": 42, "score": 34336.914498340586 }, { "content": "\n\nimpl<T> Drop for List<T> {\n\n fn drop(&mut self) {\n\n let mut cur_link = self.head.take();\n\n while let Some(mut box_node) = cur_link {\n\n cur_link = box_node.next.take();\n\n }\n\n }\n\n}\n\n\n\n//实现IntoIter迭代器\n\npub struct IntoIter<T>(List<T>);\n\n\n\nimpl<T> List<T> {\n\n pub fn into_iter(self) -> IntoIter<T> {\n\n IntoIter(self)\n\n }\n\n}\n\n\n\nimpl<T> Iterator for IntoIter<T> {\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 43, "score": 34336.60518722587 }, { "content": " use crate::List;\n\n\n\n #[test]\n\n fn basics() {\n\n let mut list = List::new();\n\n assert!(list.is_empty());\n\n\n\n // Check empty list behaves right\n\n assert_eq!(list.pop(), None);\n\n\n\n // Populate list\n\n list.push(1);\n\n list.push(2);\n\n list.push(3);\n\n\n\n assert!(!list.is_empty());\n\n\n\n // Check normal removal\n\n assert_eq!(list.pop(), Some(3));\n\n assert_eq!(list.pop(), Some(2));\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 44, "score": 34335.22678237157 }, { 
"content": " *elem = 5\n\n });\n\n\n\n iter.next().map(|elem| {\n\n *elem = 6\n\n });\n\n\n\n iter.next().map(|elem| {\n\n *elem = 7\n\n });\n\n\n\n let mut iter = list.iter_mut();\n\n assert_eq!(iter.next(), Some(&mut 4));\n\n assert_eq!(iter.next(), Some(&mut 5));\n\n assert_eq!(iter.next(), Some(&mut 6));\n\n assert_eq!(iter.next(), None);\n\n }\n\n}\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 45, "score": 34335.0741698371 }, { "content": " list.push(3);\n\n let mut iter = list.iter();\n\n assert_eq!(iter.next(), Some(&3));\n\n assert_eq!(iter.next(), Some(&2));\n\n assert_eq!(iter.next(), Some(&1));\n\n\n\n assert_eq!(list.peek(), Some(&3));\n\n }\n\n\n\n #[test]\n\n fn iter_mut() {\n\n let mut list = List::new();\n\n list.push(1);\n\n list.push(2);\n\n list.push(3);\n\n let mut iter = list.iter_mut();\n\n iter.next().map(|elem| {\n\n *elem = 4\n\n });\n\n iter.next().map(|elem| {\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 46, "score": 34334.26950600032 }, { "content": "\n\npub struct List<T> {\n\n head: Link<T>,\n\n}\n\n\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 47, "score": 34333.97142163207 }, { "content": "\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::List;\n\n\n\n #[test]\n\n fn basics() {\n\n let mut list = List::new();\n\n assert_eq!(list.pop(), None);\n\n\n\n list.push(1);\n\n list.push(2);\n\n list.push(3);\n\n\n\n assert_eq!(list.pop(), Some(3));\n\n assert_eq!(list.pop(), Some(2));\n\n\n\n list.push(4);\n", "file_path": "src/a_bad_stack/src/lib.rs", "rank": 48, "score": 34333.03385838668 }, { "content": " assert_eq!(list.peek(), Some(&String::from(\"hello world\")));\n\n assert_eq!(list.pop(), Some(String::from(\"hello world\")));\n\n\n\n // let mut list = List::new();\n\n // assert_eq!(list.peek(), None);\n\n // assert_eq!(list.peek_mut(), None);\n\n // list.push(1); list.push(2); list.push(3);\n\n //\n\n // assert_eq!(list.peek(), Some(&3));\n\n // assert_eq!(list.peek_mut(), Some(&mut 3));\n\n\n\n // // 
这里使用了模式匹配,只是将结果的elem复制到了value中,所以并没有改变链表中的值\n\n // list.peek_mut().map(|&mut mut value| {\n\n // value = 13;\n\n // println!(\"value = {}\", value);\n\n // });\n\n //\n\n // assert_eq!(list.peek(), Some(&3));\n\n // assert_eq!(list.pop(), Some(3));\n\n }\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 49, "score": 34332.30544194474 }, { "content": "\n\n assert_eq!(list.peek(), Some(&3));\n\n assert_eq!(list.peek_mut(), Some(&mut 3));\n\n }\n\n\n\n #[test]\n\n fn peek_mut() {\n\n let mut list = List::new();\n\n assert_eq!(list.peek(), None);\n\n assert_eq!(list.peek_mut(), None);\n\n list.push(\"jack\".to_string());\n\n list.push(\"marry\".to_string());\n\n list.push(\"hello\".to_string());\n\n assert_eq!(list.peek(), Some(&String::from(\"hello\")));\n\n assert_eq!(list.peek_mut(), Some(&mut String::from(\"hello\")));\n\n\n\n list.peek_mut().map(|value| {\n\n value.push_str(\" world\");\n\n });\n\n\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 50, "score": 34330.309656696576 }, { "content": "\n\n #[test]\n\n fn into_iter() {\n\n let mut list = List::new();\n\n list.push(1);\n\n list.push(2);\n\n list.push(3);\n\n\n\n let mut iter = list.into_iter();\n\n assert_eq!(iter.next(), Some(3));\n\n assert_eq!(iter.next(), Some(2));\n\n assert_eq!(iter.next(), Some(1));\n\n assert_eq!(iter.next(), None);\n\n }\n\n\n\n #[test]\n\n fn iter() {\n\n let mut list = List::new();\n\n list.push(1);\n\n list.push(2);\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 51, "score": 34330.309656696576 }, { "content": " list.push(5);\n\n\n\n assert_eq!(list.pop(), Some(5));\n\n assert_eq!(list.pop(), Some(4));\n\n\n\n assert_eq!(list.pop(), Some(1));\n\n assert_eq!(list.pop(), None);\n\n }\n\n}\n", "file_path": "src/a_bad_stack/src/lib.rs", "rank": 52, "score": 34330.309656696576 }, { "content": "\n\n #[test]\n\n fn iter() {\n\n let list = List::new().prepend(1).prepend(2).prepend(3);\n\n let mut iter = list.iter();\n\n assert_eq!(iter.next(), Some(&3));\n\n 
assert_eq!(iter.next(), Some(&2));\n\n assert_eq!(iter.next(), Some(&1));\n\n }\n\n}\n", "file_path": "src/a_persistent_stack/src/lib.rs", "rank": 53, "score": 34330.309656696576 }, { "content": " fn basics() {\n\n let list = List::new();\n\n assert_eq!(list.head(), None);\n\n\n\n let list = list.prepend(1).prepend(2).prepend(3);\n\n assert_eq!(list.head(), Some(&3));\n\n\n\n let list = list.tail();\n\n assert_eq!(list.head(), Some(&2));\n\n\n\n let list = list.tail();\n\n assert_eq!(list.head(), Some(&1));\n\n\n\n let list = list.tail();\n\n assert_eq!(list.head(), None);\n\n\n\n // Make sure empty tail works\n\n let list = list.tail();\n\n assert_eq!(list.head(), None);\n\n }\n", "file_path": "src/a_persistent_stack/src/lib.rs", "rank": 54, "score": 34330.309656696576 }, { "content": "\n\n // Push some more just to make sure nothing's corrupted\n\n list.push(4);\n\n list.push(5);\n\n\n\n // Check normal removal\n\n assert_eq!(list.pop(), Some(5));\n\n assert_eq!(list.pop(), Some(4));\n\n\n\n // Check exhaustion\n\n assert_eq!(list.pop(), Some(1));\n\n assert_eq!(list.pop(), None);\n\n }\n\n\n\n #[test]\n\n fn peek() {\n\n let mut list = List::new();\n\n assert_eq!(list.peek(), None);\n\n assert_eq!(list.peek_mut(), None);\n\n list.push(1); list.push(2); list.push(3);\n", "file_path": "src/an_ok_stack/src/lib.rs", "rank": 55, "score": 34330.309656696576 }, { "content": " }\n\n\n\n pub fn level_order(&self, buf: &mut Vec<T>) {\n\n let mut queue = VecDeque::new();\n\n queue.push_back(self);\n\n while !queue.is_empty() {\n\n if let Some(node) = queue.pop_front() {\n\n for k in &node.keys {\n\n buf.push(k.clone());\n\n }\n\n for c in &node.children {\n\n queue.push_back(c);\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/an_ok_btree/src/node.rs", "rank": 56, "score": 33461.381818589776 }, { "content": " //let mut right_child = &mut self.children[index + 1];\n\n let left_child = &mut self.children[i];\n\n\n\n left_child.keys.push(self.keys[i].clone());\n\n 
left_child.keys.append(&mut right_child.keys);\n\n\n\n left_child.children.append(&mut right_child.children);\n\n self.keys.remove(i);\n\n self.children.remove(i + 1);\n\n if self.key_len() == 0 {\n\n *self = std::mem::replace(&mut self.children[i], Node::new(self.degree, None, None));\n\n }\n\n }\n\n\n\n pub fn search(&self, key: &T) -> Option<(&Node<T>, usize)> {\n\n let mut i = 0;\n\n while i < self.key_len() && *key > self.keys[i] {\n\n i += 1;\n\n }\n\n if i < self.key_len() && *key == self.keys[i] {\n", "file_path": "src/an_ok_btree/src/node.rs", "rank": 57, "score": 33461.26522874265 }, { "content": "use std::collections::VecDeque;\n\nuse std::convert::TryFrom;\n\nuse std::fmt::{Debug, Formatter};\n\n\n\npub struct Node<T> {\n\n keys: Vec<T>,\n\n children: Vec<Node<T>>,\n\n degree: usize,\n\n}\n\n\n\nimpl<T: Debug> Debug for Node<T> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n f.debug_tuple(\"BNode\")\n\n .field(&self.keys)\n\n .field(&self.children)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl<T: PartialOrd + Clone + Debug> Node<T> {\n", "file_path": "src/an_ok_btree/src/node.rs", "rank": 58, "score": 33459.08634725227 }, { "content": " pub fn is_full_keys(&self) -> bool {\n\n let max_keys = 2 * self.degree - 1;\n\n self.key_len() == max_keys\n\n }\n\n\n\n pub fn insert_child(&mut self, index: usize, elem: Node<T>) {\n\n self.children.insert(index, elem);\n\n }\n\n\n\n pub fn get_child(&self, index: usize) -> &Node<T> {\n\n &self.children[index]\n\n }\n\n\n\n pub fn insert_key(&mut self, index: usize, elem: T) {\n\n self.keys.insert(index, elem);\n\n }\n\n\n\n pub fn get_key(&self, index: usize) -> &T {\n\n &self.keys[index]\n\n }\n", "file_path": "src/an_ok_btree/src/node.rs", "rank": 59, "score": 33458.28256418382 }, { "content": " } else {\n\n None\n\n };\n\n let new_child_node = Node::new(self.degree, Some(right_keys), right_child);\n\n self.insert_key(child_index, middle_key);\n\n self.insert_child(child_index + 1, new_child_node);\n\n 
}\n\n\n\n // 辅助的递归过程,将关键字key递归插入当前节点,调用时满足:\n\n // 1.当前节点self必须是非满的\n\n // 2.递归调用时如果发现孩子节点是满节点,则调用split_child()进行拆分\n\n pub fn insert_non_full(&mut self, key: T) {\n\n let mut i = isize::try_from(self.key_len()).ok().unwrap() - 1;\n\n while i >= 0 && self.keys[i as usize] >= key {\n\n i -= 1;\n\n }\n\n let mut u_index = usize::try_from(i + 1).ok().unwrap();\n\n if self.is_leaf() {\n\n self.insert_key(u_index, key);\n\n } else {\n", "file_path": "src/an_ok_btree/src/node.rs", "rank": 60, "score": 33455.15038619615 }, { "content": " Some((self, i))\n\n } else if self.is_leaf() {\n\n None\n\n } else {\n\n self.children[i].search(key)\n\n }\n\n }\n\n\n\n pub fn in_order(&self, buf: &mut Vec<T>) {\n\n if self.is_leaf() {\n\n for x in &self.keys {\n\n buf.push(x.clone());\n\n }\n\n return;\n\n }\n\n for i in 0..self.key_len() {\n\n self.children[i].in_order(buf);\n\n buf.push(self.keys[i].clone());\n\n }\n\n self.children[self.key_len()].in_order(buf);\n", "file_path": "src/an_ok_btree/src/node.rs", "rank": 61, "score": 33453.54400333617 }, { "content": " pub fn new(degree: usize, _key: Option<Vec<T>>, _child: Option<Vec<Node<T>>>) -> Self {\n\n let new_key = match _key {\n\n None => Vec::with_capacity(2 * degree - 1),\n\n Some(key) => key,\n\n };\n\n let new_child = match _child {\n\n None => Vec::with_capacity(2 * degree),\n\n Some(child) => child,\n\n };\n\n Node {\n\n keys: new_key,\n\n children: new_child,\n\n degree,\n\n }\n\n }\n\n\n\n pub fn is_leaf(&self) -> bool {\n\n self.children.is_empty()\n\n }\n\n\n", "file_path": "src/an_ok_btree/src/node.rs", "rank": 62, "score": 33453.24876303391 }, { "content": "\n\n pub fn key_len(&self) -> usize {\n\n self.keys.len()\n\n }\n\n\n\n pub fn children_len(&self) -> usize {\n\n self.children.len()\n\n }\n\n\n\n // 分裂B树中child_index指向的孩子节点,输入必须满足:\n\n // 1.当前节点self是非满的内部节点,且不能是叶子节点\n\n // 2.下标为child_index的孩子是满节点\n\n pub fn split_child(&mut self, child_index: usize) {\n\n let mid_key_index = (2 * self.degree - 1) / 2;\n\n let 
child = &mut self.children[child_index];\n\n let right_keys = child.keys.split_off(mid_key_index + 1);\n\n let middle_key = child.keys.pop().unwrap();\n\n\n\n let right_child = if !child.is_leaf() {\n\n Some(child.children.split_off(mid_key_index + 1))\n", "file_path": "src/an_ok_btree/src/node.rs", "rank": 63, "score": 33452.85407356802 }, { "content": " //case 2\n\n if self.children[i].key_len() >= t {\n\n //case a\n\n let max_key = self.children[i].max_key();\n\n self.keys[i] = max_key.clone();\n\n return self.children[i].delete(max_key);\n\n } else if self.children[i + 1].key_len() >= t {\n\n //case b\n\n let min_key = self.children[i + 1].min_key();\n\n self.keys[i] = min_key.clone();\n\n return self.children[i + 1].delete(min_key);\n\n } else {\n\n // case c\n\n self.unionchild(i);\n\n return self.delete(key);\n\n }\n\n } else if self.children[i].key_len() == t - 1 {\n\n //case 3\n\n if i >= 1 && self.children[i - 1].key_len() >= t {\n\n // a_left\n", "file_path": "src/an_ok_btree/src/node.rs", "rank": 64, "score": 33452.84410636587 }, { "content": " if self.children[u_index].is_full_keys() {\n\n self.split_child(u_index);\n\n if self.keys[u_index] < key {\n\n u_index += 1;\n\n }\n\n }\n\n self.children[u_index].insert_non_full(key);\n\n }\n\n }\n\n\n\n pub fn max_key(&self) -> T {\n\n let mut cur = self;\n\n while !cur.is_leaf() {\n\n cur = &cur.children[cur.key_len()];\n\n }\n\n cur.keys[cur.key_len() - 1].clone()\n\n }\n\n\n\n pub fn min_key(&self) -> T {\n\n let mut cur = self;\n", "file_path": "src/an_ok_btree/src/node.rs", "rank": 65, "score": 33452.75063150354 }, { "content": " // 再将self.children[i - 1]的某个关键字升至self,最后将self.children[i - 1]相应的孩子移到self.children[i]\n\n pub fn roright(&mut self, i: usize) {\n\n self.children[i].keys.insert(0, self.keys[i - 1].clone());\n\n if !self.children[i - 1].children.is_empty() {\n\n let new_child = self.children[i - 1].children.pop().unwrap();\n\n self.children[i].children.insert(0, new_child);\n\n }\n\n\n\n 
self.keys[i - 1] = self.children[i - 1].keys.pop().unwrap();\n\n }\n\n\n\n // 设当前节点的index指向的key关键字记为k, 当前节点前于k的子节点记为y,当前节点后于k的子节点记为z。\n\n // 该函数将k和z全部合并进y。这样当前节点就失去了k和指向z的指针。\n\n // 输入必须保证:y和z必须存在且y和z的关键字个数都等于t - 1\n\n // 调用完成后y的关键字个数为2t-1\n\n pub fn unionchild(&mut self, i: usize) {\n\n let mut right_child = std::mem::replace(\n\n &mut self.children[i + 1],\n\n Node::new(self.degree, None, None),\n\n );\n", "file_path": "src/an_ok_btree/src/node.rs", "rank": 66, "score": 33452.69304112929 }, { "content": " // 此时self.children[i + 1]至少有t个关键字,则将self的一个关键字降至self.children[i]中,\n\n // 再将self.children[i + 1]的一个关键字升至self,最后将self.children[i + 1]相应的孩子移到self.children[i]\n\n pub fn roleft(&mut self, i: usize) {\n\n self.children[i].keys.push(self.keys[i].clone());\n\n if !self.children[i + 1].children.is_empty() {\n\n let new_child = std::mem::replace(\n\n &mut self.children[i + 1].children[0],\n\n Node::new(self.degree, None, None),\n\n );\n\n self.children[i].children.push(new_child);\n\n self.children[i + 1].children.remove(0);\n\n }\n\n\n\n self.keys[i] = self.children[i + 1].keys[0].clone();\n\n self.children[i + 1].keys.remove(0);\n\n }\n\n\n\n // 调用条件:self.children[i]只有t-1个关键字,但是他的左兄弟self.children[i - 1]存在且至少有t个关键字\n\n // 待删除节点存在于子树self.children[i]中,但是self.children[i]只有t-1个关键字key,直接删除就不满足B树的结构\n\n // 此时self.children[i - 1]至少有t个关键字,则将self的某个关键字降至self.children[i]中,\n", "file_path": "src/an_ok_btree/src/node.rs", "rank": 67, "score": 33452.475363536025 }, { "content": " while !cur.is_leaf() {\n\n cur = &cur.children[0];\n\n }\n\n cur.keys[0].clone()\n\n }\n\n\n\n pub fn delete(&mut self, key: T) {\n\n let t = self.degree; // 除了根节点外每个节点必须至少有t-1个关键字key\n\n let mut i = 0;\n\n while i < self.key_len() && self.keys[i] < key {\n\n i += 1;\n\n }\n\n if self.is_leaf() {\n\n //case 1\n\n if i < self.key_len() && key == self.keys[i] {\n\n self.keys.remove(i);\n\n }\n\n return;\n\n }\n\n if i < self.key_len() && key == self.keys[i] {\n", "file_path": "src/an_ok_btree/src/node.rs", 
"rank": 68, "score": 33452.45068983862 }, { "content": " self.roright(i);\n\n return self.children[i].delete(key);\n\n } else if i + 1 < self.children_len() && self.children[i + 1].key_len() >= t {\n\n // a_right\n\n self.roleft(i);\n\n return self.children[i].delete(key);\n\n } else {\n\n // b\n\n if i >= self.key_len() {\n\n i -= 1;\n\n }\n\n self.unionchild(i);\n\n return self.delete(key);\n\n }\n\n }\n\n self.children[i].delete(key)\n\n }\n\n\n\n // 调用条件:self.children[i]只有t-1个关键字,但是他的右兄弟self.children[i + 1]存在且至少有t个关键字\n\n // 待删除节点存在于子树self.children[i]中,但是self.children[i]只有t-1个关键字key,直接删除就不满足B树的结构\n", "file_path": "src/an_ok_btree/src/node.rs", "rank": 69, "score": 33449.434756526396 }, { "content": "}\n\n\n\nimpl<'a, T> Iterator for Iter<'a, T> {\n\n type Item = &'a T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.next.map(|node| {\n\n self.next = node.prev.as_deref();\n\n &node.data\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::List;\n\n\n\n #[test]\n\n fn it_works() {\n\n List::push(None, 3, |list| {\n", "file_path": "src/stack_allocated_list/src/lib.rs", "rank": 70, "score": 33148.58958594427 }, { "content": "pub struct List<'a, T> {\n\n pub data: T,\n\n pub prev: Option<&'a List<'a, T>>,\n\n}\n\n\n\nimpl<'a, T> List<'a, T> {\n\n pub fn push<U>(prev: Option<&'a List<'a, T>>, data: T, callback: impl FnOnce(& List<'a, T>) -> U) -> U{\n\n let list = List {data, prev};\n\n callback(&list)\n\n }\n\n\n\n pub fn iter(&'a self) -> Iter<'a, T> {\n\n Iter {\n\n next: Some(self)\n\n }\n\n }\n\n}\n\n\n\npub struct Iter<'a, T> {\n\n next: Option<&'a List<'a, T>>,\n", "file_path": "src/stack_allocated_list/src/lib.rs", "rank": 71, "score": 33146.216744499936 }, { "content": " #[test]\n\n fn cell() {\n\n use std::cell::Cell;\n\n\n\n List::push(None, Cell::new(3), |list| {\n\n List::push(Some(list), Cell::new(5), |list| {\n\n List::push(Some(list), Cell::new(13), |list| {\n\n // Multiply every value in the list by 10\n\n for val in 
list.iter() {\n\n val.set(val.get() * 10)\n\n }\n\n\n\n let mut vals = list.iter();\n\n assert_eq!(vals.next().unwrap().get(), 130);\n\n assert_eq!(vals.next().unwrap().get(), 50);\n\n assert_eq!(vals.next().unwrap().get(), 30);\n\n assert_eq!(vals.next(), None);\n\n assert_eq!(vals.next(), None);\n\n })\n\n })\n\n })\n\n }\n\n}\n", "file_path": "src/stack_allocated_list/src/lib.rs", "rank": 72, "score": 33143.97288118148 }, { "content": "enum Link {\n\n Empty,\n\n More(Box<Node>),\n\n}\n\n\n\nimpl List {\n\n pub fn new() -> Self {\n\n List {\n\n head: Link::Empty,\n\n }\n\n }\n\n pub fn push(&mut self, elem: i32) {\n\n let new_node = Box::new(Node {\n\n elem,\n\n next: mem::replace(&mut self.head, Link::Empty),\n\n });\n\n self.head = Link::More(new_node);\n\n }\n\n\n\n pub fn pop(&mut self) -> Option<i32> {\n", "file_path": "src/a_bad_stack/src/lib.rs", "rank": 73, "score": 33141.97454834404 }, { "content": " println!(\"{}\", list.data);\n\n List::push(Some(list), 13, |list| {\n\n println!(\"{}\", list.data);\n\n })\n\n })\n\n }\n\n\n\n #[test]\n\n fn elegance() {\n\n List::push(None, 3, |list| {\n\n assert_eq!(list.iter().copied().sum::<i32>(), 3);\n\n List::push(Some(list), 5, |list| {\n\n assert_eq!(list.iter().copied().sum::<i32>(), 5 + 3);\n\n List::push(Some(list), 13, |list| {\n\n assert_eq!(list.iter().copied().sum::<i32>(), 13 + 5 + 3);\n\n })\n\n })\n\n })\n\n }\n\n\n", "file_path": "src/stack_allocated_list/src/lib.rs", "rank": 74, "score": 33141.97454834404 }, { "content": " buf.push(node.key.clone());\n\n Self::in_order(&node.right, buf);\n\n }\n\n }\n\n\n\n // 后序遍历\n\n pub fn post_order(root: &Link<K, V>, buf: &mut Vec<K>) {\n\n if let Some(node) = root {\n\n Self::post_order(&node.left, buf);\n\n Self::post_order(&node.right, buf);\n\n buf.push(node.key.clone());\n\n }\n\n }\n\n\n\n // 层序遍历\n\n pub fn level_order(root: &Link<K, V>, buf: &mut Vec<K>) {\n\n let mut queue = VecDeque::new();\n\n if let Some(node) = root {\n\n queue.push_back(node);\n\n 
}\n", "file_path": "src/an_ok_avl_tree/src/node.rs", "rank": 75, "score": 32311.87421656803 }, { "content": " while !queue.is_empty() {\n\n if let Some(node) = queue.pop_front() {\n\n buf.push(node.get_key().unwrap().clone());\n\n if !node.left().is_nil() {\n\n queue.push_back(node.left());\n\n }\n\n if !node.right().is_nil() {\n\n queue.push_back(node.right());\n\n }\n\n }\n\n }\n\n }\n\n\n\n /// 返回以该节点为根的树高\n\n pub fn height(&self) -> usize {\n\n if self.is_nil() {\n\n return 0;\n\n }\n\n let left_height = self.left().height();\n\n let right_height = self.right().height();\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 76, "score": 32311.770539690922 }, { "content": " while !queue.is_empty() {\n\n if let Some(node) = queue.pop_front() {\n\n buf.push(node.key.clone());\n\n if let Some(left) = node.left.as_ref() {\n\n queue.push_back(left);\n\n }\n\n if let Some(right) = node.right.as_ref() {\n\n queue.push_back(right);\n\n }\n\n }\n\n }\n\n }\n\n\n\n // 返回查找的键值对的不可变借用\n\n pub fn search_pair(&self, key: &K,) -> Option<(&K, &V)> {\n\n if self.key < *key {\n\n self.right\n\n .as_ref()\n\n .and_then(|right| right.search_pair(key))\n\n } else if self.key > *key {\n", "file_path": "src/an_ok_avl_tree/src/node.rs", "rank": 77, "score": 32310.875878861654 }, { "content": " } else if self.key > *key {\n\n self.left.as_ref().and_then(|left| left.predecessor(key))\n\n } else {\n\n self.left.as_ref().map(|left| left.max_pair())\n\n }\n\n }\n\n\n\n // 前序遍历\n\n pub fn prev_order(root: &Link<K, V>, buf: &mut Vec<K>) {\n\n if let Some(node) = root {\n\n buf.push(node.key.clone());\n\n Self::prev_order(&node.left, buf);\n\n Self::prev_order(&node.right, buf);\n\n }\n\n }\n\n\n\n // 中序遍历\n\n pub fn in_order(root: &Link<K, V>, buf: &mut Vec<K>) {\n\n if let Some(node) = root {\n\n Self::in_order(&node.left, buf);\n", "file_path": "src/an_ok_avl_tree/src/node.rs", "rank": 78, "score": 32309.09526115749 }, { "content": "use std::cmp::{max, Ordering};\n\nuse 
std::collections::VecDeque;\n\nuse std::fmt::Debug;\n\nuse std::ptr::NonNull;\n\n\n\n#[derive(Copy, Clone, PartialEq, Debug)]\n\npub enum Color {\n\n Red,\n\n Black,\n\n}\n\n\n\npub struct Node<K, V> {\n\n pub key: K,\n\n pub value: V,\n\n pub left: Option<NonNull<Node<K, V>>>,\n\n pub right: Option<NonNull<Node<K, V>>>,\n\n pub parent: Option<NonNull<Node<K, V>>>,\n\n pub color: Color,\n\n}\n\n\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 79, "score": 32307.188406737456 }, { "content": " parent: None,\n\n color: Color::Black,\n\n });\n\n let nil = NonNull::from(Box::leak(nil_node));\n\n let root_node = Node::new(2, 2, nil);\n\n let left_node = Node::new(1, 1, nil);\n\n let mut right_node = Node::new(3, 3, nil);\n\n let right_right_node = Node::new(4, 4, nil);\n\n let mut root_query = NodeQuery::new(Some(root_node), nil);\n\n let mut left_query = NodeQuery::new(Some(left_node), nil);\n\n let mut right_query = NodeQuery::new(Some(right_node), nil);\n\n let mut rr_query = NodeQuery::new(Some(right_right_node), nil);\n\n root_query.set_left(Some(left_node));\n\n root_query.set_right(Some(right_node));\n\n rr_query.set_parent(Some(right_node));\n\n unsafe {\n\n right_node.as_mut().right = Some(right_right_node);\n\n }\n\n root_query.set_color(Color::Black);\n\n left_query.set_color(Color::Black);\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 80, "score": 32306.44916988941 }, { "content": " right_query.set_color(Color::Black);\n\n // 设置完成\n\n assert!(!root_query.is_red());\n\n assert!(root_query.is_some());\n\n assert!(left_query.is_left_child());\n\n assert!(right_query.is_right_child());\n\n assert!(!root_query.is_left_child());\n\n assert!(!root_query.is_right_child());\n\n assert!(!left_query.grandparent().is_left_child());\n\n\n\n assert_eq!(root_query.left().node, Some(left_node));\n\n assert_eq!(root_query.right().node, Some(right_node));\n\n assert_eq!(left_query.left().node, Some(nil));\n\n assert_eq!(left_query.right().node, 
Some(nil));\n\n\n\n assert_eq!(root_query.left().color(), Some(Color::Black));\n\n assert_eq!(root_query.right().color(), Some(Color::Black));\n\n assert_eq!(left_query.left().color(), Some(Color::Black));\n\n\n\n assert_eq!(root_query.get_key(), Some(&2));\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 81, "score": 32306.44610964912 }, { "content": "use std::cmp::max;\n\nuse std::collections::VecDeque;\n\n\n\npub type Link<K, V> = Option<Box<Node<K, V>>>;\n\n\n\npub struct Node<K, V> {\n\n key: K, //键\n\n value: V, //值\n\n height: u32, //树高\n\n left: Link<K, V>,\n\n right: Link<K, V>,\n\n}\n\n\n\nimpl<K: PartialOrd + Clone, V> Node<K, V> {\n\n\n\n pub fn new(key: K, value: V) -> Self {\n\n Node {\n\n key,\n\n value,\n\n height: 1,\n", "file_path": "src/an_ok_avl_tree/src/node.rs", "rank": 82, "score": 32306.299137936363 }, { "content": " return None;\n\n }\n\n self.node\n\n .map(|node| unsafe { (&node.as_ref().key, &node.as_ref().value) })\n\n }\n\n\n\n /// 返回节点的左子树\n\n pub fn left(&self) -> Self {\n\n let left = self.inner().and_then(|node| unsafe { node.as_ref().left });\n\n Self::new(left, self.nil)\n\n }\n\n\n\n /// 返回节点的右子树\n\n pub fn right(&self) -> Self {\n\n let right = self.inner().and_then(|node| unsafe { node.as_ref().right });\n\n Self::new(right, self.nil)\n\n }\n\n\n\n /// 返回节点的双亲\n\n pub fn parent(&self) -> Self {\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 83, "score": 32306.1619584803 }, { "content": " assert_eq!(root_query.left().get_key(), Some(&1));\n\n assert_eq!(root_query.right().get_key(), Some(&3));\n\n assert_eq!(left_query.left().get_key(), None);\n\n\n\n assert_eq!(rr_query.grandparent().node, Some(root_node));\n\n assert_eq!(left_query.grandparent().node, Some(nil));\n\n assert_eq!(root_query.grandparent().node, None);\n\n\n\n assert_eq!(rr_query.uncle().node, Some(left_node));\n\n assert_eq!(left_query.uncle().node, None);\n\n assert_eq!(right_query.uncle().node, None);\n\n 
assert_eq!(root_query.uncle().node, None);\n\n\n\n assert_eq!(root_query.brother().node, None);\n\n assert_eq!(left_query.brother().node, right_query.node);\n\n assert_eq!(right_query.brother().node, left_query.node);\n\n assert_eq!(rr_query.brother().node, Some(nil));\n\n\n\n assert_eq!(left_query.parent().node, Some(root_node));\n\n assert_eq!(right_query.parent().node, Some(root_node));\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 84, "score": 32305.687476125328 }, { "content": " buf.push(self.get_key().unwrap().clone());\n\n self.right().in_order(buf);\n\n }\n\n }\n\n\n\n /// 后序遍历\n\n pub fn post_order(&self, buf: &mut Vec<K>) {\n\n if !self.is_nil() {\n\n self.left().post_order(buf);\n\n self.right().post_order(buf);\n\n buf.push(self.get_key().unwrap().clone());\n\n }\n\n }\n\n\n\n /// 层序遍历\n\n pub fn level_order(&self, buf: &mut Vec<K>) {\n\n let mut queue = VecDeque::new();\n\n if !self.is_nil() {\n\n queue.push_back(self.clone());\n\n }\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 85, "score": 32305.499066350076 }, { "content": " //对当前节点进行一次左旋操作,返回旋转后的根节点\n\n fn left_rotate(mut self) -> Box<Node<K, V>> {\n\n let mut new_root = self.right.take().expect(\"AVL broken\");\n\n self.right = new_root.left.take();\n\n self.update_height();\n\n new_root.left = Some(Box::new(self));\n\n new_root.update_height();\n\n new_root\n\n }\n\n\n\n //对当前节点进行一次右旋操作,返回旋转后的根节点\n\n fn right_rotate(mut self) -> Box<Node<K, V>> {\n\n let mut new_root = self.left.take().expect(\"AVL broken\");\n\n self.left = new_root.right.take();\n\n self.update_height();\n\n new_root.right = Some(Box::new(self));\n\n new_root.update_height();\n\n new_root\n\n }\n\n\n", "file_path": "src/an_ok_avl_tree/src/node.rs", "rank": 86, "score": 32305.20289741384 }, { "content": " true\n\n }\n\n\n\n // 判断是否为AVL树\n\n pub fn is_avl_tree(root: &Link<K, V>) -> bool {\n\n match root {\n\n None => true,\n\n Some(node) => {\n\n if !node.is_avl_node() {\n\n return false;\n\n }\n\n 
Self::is_avl_tree(&node.left) && Self::is_avl_tree(&node.right)\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<K: PartialOrd + ToString, V: ToString> ToString for Node<K, V> {\n\n fn to_string(&self) -> String {\n\n format!(\n\n \"[K: {}, V: {}, L: {}, R: {}]\",\n\n self.key.to_string(),\n\n self.value.to_string(),\n\n to_string(&self.left),\n\n to_string(&self.right)\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/an_ok_avl_tree/src/node.rs", "rank": 87, "score": 32304.915119975594 }, { "content": " assert_eq!(root_query.parent().node, Some(nil));\n\n\n\n rr_query.set_entry(5, 5);\n\n assert_eq!(right_query.right().get_key(), Some(&5));\n\n\n\n assert_eq!(root_query.minimum().node, Some(left_node));\n\n assert_eq!(left_query.minimum().node, Some(left_node));\n\n assert_eq!(root_query.maximum().node, Some(right_right_node));\n\n assert_eq!(right_query.maximum().node, Some(right_right_node));\n\n assert_eq!(rr_query.maximum().node, Some(right_right_node));\n\n assert_eq!(rr_query.minimum().node, Some(right_right_node));\n\n\n\n assert_eq!(root_query.to_string(), String::from(\"[K: 2, V: 2, C: Black L: [K: 1, V: 1, C: Black L: Ø, R: Ø], R: [K: 3, V: 3, C: Black L: Ø, R: [K: 5, V: 5, C: Red L: Ø, R: Ø]]]\"));\n\n\n\n assert_eq!(root_query.height(), 3);\n\n assert_eq!(left_query.height(), 1);\n\n assert_eq!(right_query.height(), 2);\n\n assert_eq!(rr_query.height(), 1);\n\n }\n\n\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 88, "score": 32304.836820135737 }, { "content": " let parent = self\n\n .inner()\n\n .and_then(|node| unsafe { node.as_ref().parent });\n\n Self::new(parent, self.nil)\n\n }\n\n\n\n /// 返回节点的叔叔\n\n pub fn uncle(&self) -> Self {\n\n if self.parent().is_left_child() {\n\n self.grandparent().right()\n\n } else {\n\n self.grandparent().left()\n\n }\n\n }\n\n\n\n /// 返回节点的兄弟\n\n pub fn brother(&self) -> Self {\n\n if self.is_left_child() {\n\n self.parent().right()\n\n } else {\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 89, "score": 
32304.614599607492 }, { "content": " let mut new_root = min;\n\n new_root.right = remain_tree;\n\n new_root.left = Some(Box::new(left));\n\n new_root.update_node()\n\n }\n\n\n\n //删除当前节点,重构二叉树,并返回新的根节点\n\n fn delete_root(mut self) -> Link<K, V> {\n\n // AVL树删除节点的三种情况(包括二叉搜索树),AVL树的删除还要多一步旋转操作\n\n // 1.如果是叶子节点,则直接删除\n\n // 2.如果待删除节点只有左子树或只有右子树,删除该节点,然后将左子树或右子树移动到该节点\n\n // 3.如果待删除节点左右子树都有,就选取右子树中最小的节点代替待删除节点的位置(或者取左子树中最大节点代替也可以)。\n\n match (self.left.take(), self.right.take()) {\n\n (None, None) => None,\n\n (Some(left), None) => Some(left),\n\n (None, Some(right)) => Some(right),\n\n (Some(left), Some(right)) => Some(Self::combine_two_subtrees(*left, *right)),\n\n }\n\n }\n\n\n", "file_path": "src/an_ok_avl_tree/src/node.rs", "rank": 90, "score": 32303.786532087026 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::node::{Color, Node, NodeQuery};\n\n use std::ptr::NonNull;\n\n\n\n #[test]\n\n fn basic() {\n\n /*\n\n 2(b)\n\n / \\\n\n 1(b) 3(b)\n\n \\\n\n 4(r)\n\n */\n\n let nil_node = Box::new(Node {\n\n key: 0,\n\n value: 0,\n\n left: None,\n\n right: None,\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 91, "score": 32303.285940829683 }, { "content": " let mut cur = self.right();\n\n while !cur.right().is_nil() {\n\n cur = cur.right();\n\n }\n\n Self::new(cur.inner(), cur.nil)\n\n }\n\n\n\n /// 前序遍历\n\n pub fn pre_order(&self, buf: &mut Vec<K>) {\n\n if !self.is_nil() {\n\n buf.push(self.get_key().unwrap().clone());\n\n self.left().pre_order(buf);\n\n self.right().pre_order(buf);\n\n }\n\n }\n\n\n\n /// 中序遍历\n\n pub fn in_order(&self, buf: &mut Vec<K>) {\n\n if !self.is_nil() {\n\n self.left().in_order(buf);\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 92, "score": 32302.409745274028 }, { "content": " //保持左侧平衡。传入的self是一颗不平衡的树,左子树比右子树高2\n\n fn left_balance(mut self) -> Box<Node<K, V>> {\n\n let left = self.left.take().expect(\"AVL broken\");\n\n if Self::height(&left.left) < Self::height(&left.right) {\n\n let rotated = 
left.left_rotate();\n\n self.left = Some(rotated);\n\n self.update_height();\n\n } else {\n\n self.left = Some(left);\n\n }\n\n self.right_rotate()\n\n }\n\n\n\n //保持右侧平衡。传入的self是一颗不平衡的树,右子树比左子树高2\n\n fn right_balance(mut self) -> Box<Node<K, V>> {\n\n let right = self.right.take().expect(\"AVL broken\");\n\n if Self::height(&right.left) > Self::height(&right.right) {\n\n let rotated = right.right_rotate();\n\n self.right = Some(rotated);\n\n self.update_height();\n", "file_path": "src/an_ok_avl_tree/src/node.rs", "rank": 93, "score": 32301.978008795773 }, { "content": " left: None,\n\n right: None,\n\n }\n\n }\n\n\n\n // 判断当前节点是否为叶子节点\n\n fn is_leaf(&self) -> bool {\n\n self.left.is_none() && self.right.is_none()\n\n }\n\n\n\n // 得到当前节点的高度\n\n fn height(node: &Link<K, V>) -> u32 {\n\n node.as_ref().map_or(0, |node| node.height)\n\n }\n\n\n\n // 更新当前节点的高度\n\n fn update_height(&mut self) {\n\n self.height = max(Self::height(&self.left), Self::height(&self.right)) + 1;\n\n }\n\n\n", "file_path": "src/an_ok_avl_tree/src/node.rs", "rank": 94, "score": 32301.83939805584 }, { "content": " pub fn set_color(&mut self, color: Color) {\n\n if let Some(mut node) = self.inner() {\n\n unsafe { node.as_mut().color = color }\n\n }\n\n }\n\n\n\n /// 设置节点的左子树\n\n pub fn set_left(&mut self, left_node: Option<NonNull<Node<K, V>>>) {\n\n if let Some(mut node) = self.inner() {\n\n unsafe {\n\n node.as_mut().left = left_node;\n\n }\n\n if let Some(mut node) = left_node {\n\n unsafe {\n\n node.as_mut().parent = self.inner();\n\n }\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 95, "score": 32301.614481685498 }, { "content": " //删除节点key,并保持改树仍为AVL树,返回的新生成的树的根节点\n\n pub fn delete(mut self, key: K) -> Link<K, V> {\n\n if self.key < key {\n\n if let Some(succ) = self.right.take() {\n\n self.right = succ.delete(key);\n\n return Some(self.update_node());\n\n }\n\n } else if self.key > key {\n\n if let Some(succ) = self.left.take() {\n\n self.left = 
succ.delete(key);\n\n return Some(self.update_node());\n\n }\n\n } else {\n\n return self.delete_root();\n\n }\n\n // 没有找到待删除节点则直接返回\n\n Some(Box::new(self))\n\n }\n\n\n\n // 返回第一个大于key的键值对,key可以不存在树中\n", "file_path": "src/an_ok_avl_tree/src/node.rs", "rank": 96, "score": 32301.564275013356 }, { "content": " /// 设置节点的右子树\n\n pub fn set_right(&mut self, right_node: Option<NonNull<Node<K, V>>>) {\n\n if let Some(mut node) = self.inner() {\n\n unsafe {\n\n node.as_mut().right = right_node;\n\n }\n\n if let Some(mut node) = right_node {\n\n unsafe {\n\n node.as_mut().parent = self.inner();\n\n }\n\n }\n\n }\n\n }\n\n\n\n /// 设置节点的双亲\n\n pub fn set_parent(&mut self, parent_node: Option<NonNull<Node<K, V>>>) {\n\n if let Some(mut node) = self.inner() {\n\n unsafe {\n\n node.as_mut().parent = parent_node;\n\n }\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 97, "score": 32301.417499139603 }, { "content": " max(left_height, right_height) + 1\n\n }\n\n}\n\n\n\nimpl<K: Clone + PartialOrd + ToString, V: ToString> ToString for NodeQuery<K, V> {\n\n fn to_string(&self) -> String {\n\n if self.is_nil() {\n\n \"Ø\".to_string()\n\n } else {\n\n format!(\n\n \"[K: {}, V: {}, C: {:?} L: {}, R: {}]\",\n\n self.get_key().unwrap().to_string(),\n\n self.get_value().unwrap().to_string(),\n\n self.color().unwrap(),\n\n self.left().to_string(),\n\n self.right().to_string(),\n\n )\n\n }\n\n }\n\n}\n", "file_path": "src/an_unsafe_rb_tree/src/node.rs", "rank": 98, "score": 32301.129778455917 }, { "content": " //找出当前树中值最小的节点,返回元组:(除去最小节点后剩下的树,最小节点)\n\n fn remove_min(mut self) -> (Link<K, V>, Box<Node<K, V>>) {\n\n match self.left.take() {\n\n Some(left) => {\n\n let (new_left, min) = left.remove_min();\n\n self.left = new_left;\n\n (Some(self.update_node()), min)\n\n }\n\n None => (self.right.take(), Box::new(self)),\n\n }\n\n }\n\n\n\n //将两棵子树合并为一棵,合并后仍然满足AVL树的规则,返回新生成树的根节点\n\n fn combine_two_subtrees(\n\n left: Node<K, V>,\n\n right: Node<K, V>,\n\n ) -> Box<Node<K, V>> {\n\n 
// 得到右子树中最小的节点和去除最小节点后剩余的树\n\n let (remain_tree, min) = right.remove_min();\n\n // 最小节点作为两个子树的新根节点\n", "file_path": "src/an_ok_avl_tree/src/node.rs", "rank": 99, "score": 32301.107296458395 } ]
Rust
server/prisma-rs/query-engine/native-bridge/src/error.rs
otrebu/prisma
298be5c919119847bb8d102d6b16672edd06b2c5
use crate::protobuf; use connector::error::{ConnectorError, NodeSelectorInfo}; use failure::{Error, Fail}; use prisma_models::DomainError; use prost::DecodeError; use serde_json; #[derive(Debug, Fail)] pub enum BridgeError { #[fail(display = "Error in connector.")] ConnectorError(ConnectorError), #[fail(display = "Error in domain logic.")] DomainError(DomainError), #[fail(display = "Error decoding Protobuf input.")] ProtobufDecodeError(Error), #[fail(display = "Error decoding JSON input.")] JsonDecodeError(Error), #[fail(display = "Error decoding JSON input.")] InvalidConnectionArguments(&'static str), } impl From<ConnectorError> for BridgeError { fn from(e: ConnectorError) -> BridgeError { BridgeError::ConnectorError(e) } } impl From<DomainError> for BridgeError { fn from(e: DomainError) -> BridgeError { BridgeError::DomainError(e) } } impl From<DecodeError> for BridgeError { fn from(e: DecodeError) -> BridgeError { BridgeError::ProtobufDecodeError(e.into()) } } impl From<serde_json::error::Error> for BridgeError { fn from(e: serde_json::error::Error) -> BridgeError { BridgeError::JsonDecodeError(e.into()) } } impl From<NodeSelectorInfo> for protobuf::prisma::NodeSelector { fn from(info: NodeSelectorInfo) -> Self { Self { model_name: info.model, field_name: info.field, value: info.value.into(), } } } impl From<BridgeError> for protobuf::prisma::error::Value { fn from(error: BridgeError) -> protobuf::prisma::error::Value { match error { BridgeError::ConnectorError(e @ ConnectorError::ConnectionError(_)) => { protobuf::prisma::error::Value::ConnectionError(format!("{}", e)) } BridgeError::ConnectorError(e @ ConnectorError::QueryError(_)) => { protobuf::prisma::error::Value::QueryError(format!("{}", e)) } BridgeError::ConnectorError(e @ ConnectorError::InvalidConnectionArguments) => { protobuf::prisma::error::Value::QueryError(format!("{}", e)) } BridgeError::ConnectorError(ConnectorError::FieldCannotBeNull { field }) => { 
protobuf::prisma::error::Value::FieldCannotBeNull(field) } BridgeError::ConnectorError(ConnectorError::UniqueConstraintViolation { field_name }) => { protobuf::prisma::error::Value::UniqueConstraintViolation(field_name) } BridgeError::ConnectorError(ConnectorError::RelationViolation { relation_name, model_a_name, model_b_name, }) => { let error = protobuf::prisma::RelationViolationError { relation_name, model_a_name, model_b_name, }; protobuf::prisma::error::Value::RelationViolation(error) } BridgeError::ConnectorError(ConnectorError::NodeNotFoundForWhere(info)) => { let node_selector = protobuf::prisma::NodeSelector { model_name: info.model, field_name: info.field, value: info.value.into(), }; protobuf::prisma::error::Value::NodeNotFoundForWhere(node_selector) } BridgeError::ConnectorError(ConnectorError::NodesNotConnected { relation_name, parent_name, parent_where, child_name, child_where, }) => { let error = protobuf::prisma::NodesNotConnectedError { relation_name: relation_name, parent_name: parent_name, parent_where: parent_where.map(protobuf::prisma::NodeSelector::from), child_name: child_name, child_where: child_where.map(protobuf::prisma::NodeSelector::from), }; protobuf::prisma::error::Value::NodesNotConnected(error) } e @ BridgeError::ProtobufDecodeError(_) => { protobuf::prisma::error::Value::ProtobufDecodeError(format!("{}", e)) } e @ BridgeError::JsonDecodeError(_) => protobuf::prisma::error::Value::JsonDecodeError(format!("{}", e)), e @ BridgeError::DomainError(_) => protobuf::prisma::error::Value::InvalidInputError(format!("{}", e)), e => protobuf::prisma::error::Value::InvalidInputError(format!("{}", e)), } } }
use crate::protobuf; use connector::error::{ConnectorError, NodeSelectorInfo}; use failure::{Error, Fail}; use prisma_models::DomainError; use prost::DecodeError; use serde_json; #[derive(Debug, Fail)] pub enum BridgeError { #[fail(display = "Error in connector.")] ConnectorError(ConnectorError), #[fail(display = "Error in domain logic.")] DomainError(DomainError), #[fail(display = "Error decoding Protobuf input.")] ProtobufDecodeError(Error), #[fail(display = "Error decoding JSON input.")] JsonDecodeError(Error), #[fail(display = "Error decoding JSON input.")] InvalidConnectionArguments(&'static str), } impl From<ConnectorError> for BridgeError { fn from(e: ConnectorError) -> BridgeError { BridgeError::ConnectorError(e) } } impl From<DomainError> for BridgeError { fn from(e: DomainError) -> BridgeError { BridgeError::DomainError(e) } } impl From<DecodeError> for BridgeError { fn from(e: DecodeError) -> BridgeError { BridgeError::ProtobufDecodeError(e.into()) } } impl From<serde_json::error::Error> for BridgeError { fn from(e: serde_json::error::Error) -> BridgeError { BridgeError::JsonDecodeError(e.into()) } } impl From<NodeSelectorInfo> for protobuf::prisma::NodeSelector { fn from(info: NodeSelectorInfo) -> Self { Self { model_name: info.model, field_name: info.field, value: info.value.into(), } } } impl From<BridgeError> for protobuf::prisma::error::Value { fn from(error: BridgeError) -> protobuf::prisma::error::Value { match error { BridgeError::ConnectorError(e @ ConnectorError::ConnectionError(_)) => { protobuf::prisma::error::Value::ConnectionError(format!("{}", e)) } BridgeError::ConnectorError(e @ ConnectorError::QueryError(_)) => { protobuf::prisma::error::Value::QueryError(format!("{}", e)) } BridgeError::ConnectorError(e @ ConnectorError::InvalidConnectionArguments) => { protobuf::prisma::error::Value::QueryError(format!("{}", e)) } BridgeError::ConnectorError(ConnectorError::FieldCannotBeNull { field }) => { 
protobuf::prisma::error::Value::FieldCannotBeNull(field) } BridgeError::ConnectorError(ConnectorError::UniqueConstraintViolation { field_name }) => { protobuf::prisma::error::Value::UniqueConstraintViolation(field_name) } BridgeError::ConnectorError(ConnectorError::RelationViolation { relation_name, model_a_name, model_b_name, }) => { let error = protobuf::prisma::RelationViolationError { relation_name, model_a_name, model_b_name, }; protobuf::prisma::error::Value::RelationViolation(error) } BridgeError::ConnectorError(ConnectorError::NodeNotFoundForWhere(info)) => {
protobuf::prisma::error::Value::NodeNotFoundForWhere(node_selector) } BridgeError::ConnectorError(ConnectorError::NodesNotConnected { relation_name, parent_name, parent_where, child_name, child_where, }) => { let error = protobuf::prisma::NodesNotConnectedError { relation_name: relation_name, parent_name: parent_name, parent_where: parent_where.map(protobuf::prisma::NodeSelector::from), child_name: child_name, child_where: child_where.map(protobuf::prisma::NodeSelector::from), }; protobuf::prisma::error::Value::NodesNotConnected(error) } e @ BridgeError::ProtobufDecodeError(_) => { protobuf::prisma::error::Value::ProtobufDecodeError(format!("{}", e)) } e @ BridgeError::JsonDecodeError(_) => protobuf::prisma::error::Value::JsonDecodeError(format!("{}", e)), e @ BridgeError::DomainError(_) => protobuf::prisma::error::Value::InvalidInputError(format!("{}", e)), e => protobuf::prisma::error::Value::InvalidInputError(format!("{}", e)), } } }
let node_selector = protobuf::prisma::NodeSelector { model_name: info.model, field_name: info.field, value: info.value.into(), };
assignment_statement
[ { "content": "pub fn parse_and_validate(input: &str) -> dml::Schema {\n\n let ast = datamodel::parser::parse(&String::from(input)).expect(\"Unable to parse datamodel.\");\n\n let validator = datamodel::validator::Validator::new();\n\n validator.validate(&ast).expect(\"Validation error\")\n\n}\n", "file_path": "server/prisma-rs/libs/datamodel/tests/common.rs", "rank": 0, "score": 395172.0336140658 }, { "content": "// TODO: swap this out with connector loader and do not hard code associated type\n\npub fn connector() -> Box<MigrationConnector<DatabaseMigrationStep = impl DatabaseMigrationStepExt>> {\n\n let file_path = dbg!(file!());\n\n let file_name = dbg!(Path::new(file_path).file_stem().unwrap().to_str().unwrap());\n\n Box::new(SqlMigrationConnector::new(file_name.to_string()))\n\n}\n", "file_path": "server/prisma-rs/migration-engine/core/tests/test_harness.rs", "rank": 1, "score": 378117.46034724975 }, { "content": "fn assert_symmetric_serde(json: &str, expected: MigrationStep) {\n\n let serde_value: serde_json::Value = serde_json::from_str(&json).expect(\"The provided input was invalid json.\");\n\n let deserialized: MigrationStep = serde_json::from_str(&json).expect(\"Deserialization failed.\");\n\n let serialized_again = serde_json::to_value(&deserialized).expect(\"Serialization failed\");\n\n assert_eq!(\n\n deserialized, expected,\n\n \"The provided json could not be serialized into the expected struct.\"\n\n );\n\n assert_eq!(\n\n serialized_again, serde_value,\n\n \"Reserializing did not produce the original json input.\"\n\n );\n\n}\n", "file_path": "server/prisma-rs/migration-engine/connectors/migration-connector/tests/steps_tests.rs", "rank": 2, "score": 364215.4382981436 }, { "content": " def migrationValueForField(field: ScalarField): GCValue = field.typeIdentifier match {\n\n case _ if field.defaultValue.isDefined => field.defaultValue.get\n\n case TypeIdentifier.String => StringGCValue(\"\")\n\n case TypeIdentifier.Boolean => 
BooleanGCValue(false)\n\n case TypeIdentifier.Int => IntGCValue(0)\n\n case TypeIdentifier.Float => FloatGCValue(0.0)\n\n case TypeIdentifier.DateTime => DateTimeGCValue(new DateTime(\"1970-01-01T00:00:00Z\"))\n\n case TypeIdentifier.Json => JsonGCValue(Json.parse(\"{}\"))\n\n case TypeIdentifier.Enum => EnumGCValue(field.enum.get.values.head)\n\n case TypeIdentifier.Cuid => StringIdGCValue(\"DefaultCUIDMigrationValue\")\n\n case TypeIdentifier.UUID => UuidGCValue.parse_!(\"550e8400-e29b-11d4-a716-446655440000\")\n\n }\n\n}\n", "file_path": "server/connectors/deploy-connector/src/main/scala/com/prisma/deploy/connector/MigrationValueGenerator.scala", "rank": 3, "score": 360425.7511580728 }, { "content": "/// Create a json envelope\n\nfn json_envelope(id: &str, map: serde_json::Map<String, Value>) -> Value {\n\n let mut envelope = JsonMap::new();\n\n envelope.insert(id.to_owned(), Value::Object(map));\n\n Value::Object(envelope)\n\n}\n", "file_path": "server/prisma-rs/query-engine/prisma/src/req_handlers/graphql.rs", "rank": 4, "score": 348559.1893635392 }, { "content": " def enumReads[E <: Enumeration](enum: E): Reads[E#Value] = {\n\n case JsString(s) =>\n\n try {\n\n JsSuccess(enum.withName(s))\n\n } catch {\n\n case _: NoSuchElementException => JsError(s\"Enumeration expected of type: '${enum.getClass}', but it does not appear to contain the value: '$s'\")\n\n }\n\n case _ => JsError(\"String value expected\")\n\n }\n\n\n\n implicit def enumWrites[E <: Enumeration]: Writes[E#Value] = (v: E#Value) => JsString(v.toString)\n\n\n\n implicit def enumFormat[E <: Enumeration](enum: E): Format[E#Value] = {\n\n Format(EnumUtils.enumReads(enum), EnumUtils.enumWrites)\n\n }\n\n }\n\n\n\n implicit val onDeleteEnumTypeFormat = EnumUtils.enumFormat(OnDelete)\n\n\n\n implicit val createEnumFormat = Json.format[CreateEnum]\n", "file_path": "server/shared-models/src/main/scala/com/prisma/shared/models/MigrationStepsJsonFormatter.scala", "rank": 5, "score": 346914.10406506236 }, { 
"content": "pub fn field_to_dmmf(field: &dml::Field) -> Field {\n\n Field {\n\n name: field.name.clone(),\n\n kind: get_field_kind(field),\n\n dbName: field.database_name.clone(),\n\n arity: get_field_arity(field),\n\n isUnique: field.is_unique,\n\n field_type: get_field_type(field),\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/libs/datamodel/src/dmmf/mod.rs", "rank": 6, "score": 343160.6464704115 }, { "content": "// Whole datamodel parsing\n\npub fn parse(datamodel_string: &str) -> Result<Schema, ParserError> {\n\n let mut datamodel_result = PrismaDatamodelParser::parse(Rule::datamodel, datamodel_string);\n\n\n\n match datamodel_result {\n\n Ok(mut datamodel_wrapped) => {\n\n let datamodel = datamodel_wrapped.next().unwrap();\n\n let mut models: Vec<ModelOrEnum> = vec![];\n\n\n\n match_children! { datamodel, current,\n\n Rule::model_declaration => models.push(ModelOrEnum::Model(parse_model(&current))),\n\n Rule::enum_declaration => models.push(ModelOrEnum::Enum(parse_enum(&current))),\n\n Rule::EOI => {},\n\n _ => panic!(\"Encounterd impossible datamodel declaration during parsing: {:?}\", current.tokens())\n\n }\n\n\n\n Ok(Schema {\n\n models,\n\n comments: vec![],\n\n })\n\n }\n\n Err(err) => match err.location {\n\n pest::error::InputLocation::Pos(pos) => Err(ParserError::new(\"Error during parsing\", &Span::new(pos, pos))),\n\n pest::error::InputLocation::Span((from, to)) => {\n\n Err(ParserError::new(\"Error during parsing\", &Span::new(from, to)))\n\n }\n\n },\n\n }\n\n}\n", "file_path": "server/prisma-rs/libs/datamodel/src/ast/parser/mod.rs", "rank": 7, "score": 337244.1644156775 }, { "content": "pub fn enum_to_dmmf(en: &dml::Enum) -> Enum {\n\n Enum {\n\n name: en.name.clone(),\n\n values: en.values.clone(),\n\n isEnum: true,\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/libs/datamodel/src/dmmf/mod.rs", "rank": 8, "score": 331127.1774501811 }, { "content": "pub fn find_relation_field(project: ProjectRef, model: String, field: String) -> 
Arc<RelationField> {\n\n project\n\n .internal_data_model()\n\n .find_model(&model)\n\n .unwrap()\n\n .fields()\n\n .find_from_relation_fields(&field)\n\n .unwrap()\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 9, "score": 328069.39834440104 }, { "content": "pub fn serialize(resp: ResponseSet) -> Value {\n\n let mut map = Map::new();\n\n\n\n // Error workaround\n\n if let Response::Error(err) = resp.first().unwrap() {\n\n map.insert(\n\n \"errors\".into(),\n\n Value::Array(vec![envelope!(\"error\".into(), Value::String(err.to_string()))]),\n\n );\n\n } else {\n\n let vals: Vec<Value> = resp\n\n .into_iter()\n\n .map(|res| match res {\n\n Response::Data(name, Item::List(list)) => envelope!(name, Value::Array(serialize_list(list))),\n\n Response::Data(name, Item::Map(_parent, map)) => envelope!(name, Value::Object(serialize_map(map))),\n\n _ => unreachable!(),\n\n })\n\n .collect();\n\n\n\n map.insert(\n", "file_path": "server/prisma-rs/query-engine/prisma/src/serializer/json.rs", "rank": 10, "score": 319315.7407617271 }, { "content": "pub fn convert_list_args(proto: crate::protobuf::prisma::PrismaArgs) -> Vec<(String, PrismaListValue)> {\n\n let mut result = vec![];\n\n for arg in proto.args {\n\n let value: PrismaListValue = arg.value.into();\n\n let tuple = (arg.key, value);\n\n result.push(tuple)\n\n }\n\n result\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 11, "score": 318185.3710868293 }, { "content": " def mapForCreateCase(field: ScalarField): InputType[Any] = field.isRequired && field.defaultValue.isEmpty match {\n\n case true if field.isId =>\n\n (field.behaviour, field.typeIdentifier) match {\n\n case (Some(IdBehaviour(IdStrategy.Auto, _)) | None, TypeIdentifier.UUID | TypeIdentifier.Cuid) => SchemaBuilderUtils.mapToOptionalInputType(field)\n\n case (Some(IdBehaviour(IdStrategy.None, _)), TypeIdentifier.UUID | TypeIdentifier.Cuid) => 
SchemaBuilderUtils.mapToRequiredInputType(field)\n\n case _ => sys.error(\"Should not happen.\")\n\n }\n\n case true => SchemaBuilderUtils.mapToRequiredInputType(field)\n\n case false => SchemaBuilderUtils.mapToOptionalInputType(field)\n\n }\n\n}\n", "file_path": "server/servers/api/src/main/scala/com/prisma/api/schema/InputTypesBuilder.scala", "rank": 12, "score": 316337.78608163603 }, { "content": "pub fn into_model_query_arguments(model: ModelRef, args: QueryArguments) -> connector::QueryArguments {\n\n connector::QueryArguments {\n\n skip: args.skip,\n\n after: args.after.map(|x| x.into()),\n\n first: args.first,\n\n before: args.before.map(|x| x.into()),\n\n last: args.last,\n\n filter: args.filter.map(|x| x.into_filter(model.clone())),\n\n order_by: args.order_by.map(|x| order_by::into_model_order_by(model.clone(), x)),\n\n }\n\n}\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/query_arguments.rs", "rank": 13, "score": 314683.701280631 }, { "content": " def readEnumGCValue(enum: Enum)(json: JsValue): JsResult[EnumGCValue] = {\n\n json.validate[JsString] match {\n\n case JsSuccess(json, _) if enum.values.contains(json.value) => JsSuccess(EnumGCValue(json.value))\n\n case JsSuccess(json, _) => JsError(s\"${json.value} is not a valid value for Enum ${enum.name}\")\n\n case e: JsError => e\n\n }\n\n }\n\n\n", "file_path": "server/servers/api/src/main/scala/com/prisma/api/import_export/GCValueJsonFormatter.scala", "rank": 14, "score": 313738.88125038845 }, { "content": "fn jsonToJdbcParameter(json: &serde_json::Value) -> Result<JdbcParameter> {\n\n match json {\n\n &serde_json::Value::Object(ref map) => jsonObjectToJdbcParameter(map),\n\n x => Err(DriverError::GenericError(format!(\n\n \"{} is not a valid value for a JdbcParameter\",\n\n x\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "server/libs/jdbc-native-rs/src/jdbc_params.rs", "rank": 15, "score": 309632.6062812727 }, { "content": "fn is_scalar(field: &Field) -> bool {\n\n match 
field.field_type {\n\n FieldType::Base(_) => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/database_schema_calculator.rs", "rank": 16, "score": 306630.19427313167 }, { "content": "pub fn new_builtin_enum_directives() -> DirectiveListValidator<dml::Enum> {\n\n let mut validator = DirectiveListValidator::<dml::Enum> {\n\n known_directives: HashMap::new(),\n\n };\n\n\n\n // Adds are missing\n\n\n\n return validator;\n\n}\n", "file_path": "server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/mod.rs", "rank": 17, "score": 304643.6478798603 }, { "content": "pub fn new_builtin_field_directives() -> DirectiveListValidator<dml::Field> {\n\n let mut validator = DirectiveListValidator::<dml::Field> {\n\n known_directives: HashMap::new(),\n\n };\n\n\n\n validator.add(Box::new(db::DbDirectiveValidator {}));\n\n validator.add(Box::new(primary::PrimaryDirectiveValidator {}));\n\n validator.add(Box::new(scalarlist::ScalarListDirectiveValidator {}));\n\n validator.add(Box::new(sequence::SequenceDirectiveValidator {}));\n\n validator.add(Box::new(unique::UniqueDirectiveValidator {}));\n\n validator.add(Box::new(default::DefaultDirectiveValidator {}));\n\n validator.add(Box::new(relation::RelationDirectiveValidator {}));\n\n validator.add(Box::new(ondelete::OnDeleteDirectiveValidator {}));\n\n\n\n return validator;\n\n}\n\n\n", "file_path": "server/prisma-rs/libs/datamodel/src/dml/validator/directive/builtin/mod.rs", "rank": 18, "score": 304432.3835991507 }, { "content": "fn scalar_type(field: &Field) -> &ScalarType {\n\n match &field.field_type {\n\n FieldType::Base(ref scalar) => scalar,\n\n x => panic!(format!(\n\n \"only scalar types are suported here. 
Type is {:?} on field {}\",\n\n x, field.name\n\n )),\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/database_schema_calculator.rs", "rank": 19, "score": 302916.15546585416 }, { "content": "/// Expects a NULL terminated UTF-8 string behind the pointer.\n\n/// Use if the memory is not owned by Rust, for example when working on JVM owned memory.\n\npub fn to_str<'a>(pointer: *const c_char) -> &'a str {\n\n unsafe {\n\n CStr::from_ptr(pointer).to_str().unwrap()\n\n }\n\n}\n\n\n", "file_path": "server/libs/jwt-native-rs/src/ffi_utils.rs", "rank": 20, "score": 297897.932624549 }, { "content": " def jsonToBson(json: JsValue): BsonValue = json match {\n\n case JsString(v) => BsonString(v)\n\n case JsBoolean(v) => BsonBoolean(v)\n\n case JsNumber(v) => BsonNumber(v.toDouble)\n\n case JsNull => BsonNull()\n\n case JsArray(v) => BsonArray(v.map(x => jsonToBson(x)))\n\n case v: JsObject => jsObjectToBson(v)\n\n case x => sys.error(s\"$x not supported here\")\n\n }\n\n\n", "file_path": "server/libs/mongo-utils/src/main/scala/com/prisma/utils/mongo/JsonBsonConversion.scala", "rank": 21, "score": 297811.2580365112 }, { "content": "fn related_type(field: &Field) -> Option<String> {\n\n match &field.field_type {\n\n FieldType::Relation(relation_info) => {\n\n let RelationInfo {\n\n to,\n\n to_field,\n\n name,\n\n on_delete,\n\n } = relation_info;\n\n Some(to.to_string())\n\n }\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/database_schema_calculator.rs", "rank": 22, "score": 297101.0548963919 }, { "content": "/// Disconnect a record from the parent.\n\n///\n\n/// The following cases will lead to a relation violation error:\n\n///\n\n/// | p is a list | p is required | c is list | c is required |\n\n/// | ----------- | ------------- | --------- | ------------- |\n\n/// | false | true | false | true |\n\n/// | false | true | false | false |\n\n/// | 
false | false | false | true |\n\n/// | true | false | false | true |\n\n/// | false | true | true | false |\n\npub fn disconnect(\n\n conn: &mut Transaction,\n\n parent_id: &GraphqlId,\n\n actions: &NestedActions,\n\n node_selector: &Option<NodeSelector>,\n\n) -> SqlResult<()> {\n\n if let Some((select, check)) = actions.required_check(parent_id)? {\n\n let ids = conn.select_ids(select)?;\n\n check.call_box(ids.into_iter().next().is_some())?\n\n }\n\n\n\n match node_selector {\n\n None => {\n\n let (select, check) = actions.ensure_parent_is_connected(parent_id);\n\n\n\n let ids = conn.select_ids(select)?;\n\n check.call_box(ids.into_iter().next().is_some())?;\n\n\n\n conn.write(actions.removal_by_parent(parent_id))?;\n\n }\n", "file_path": "server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/relation.rs", "rank": 23, "score": 295787.51503894833 }, { "content": "/// Connect a record to the parent.\n\n///\n\n/// When nested with a create, will have special behaviour in some cases:\n\n///\n\n/// | action | p is a list | p is required | c is list | c is required |\n\n/// | ------------------------------------ | ----------- | ------------- | --------- | ------------- |\n\n/// | relation violation | false | true | false | true |\n\n/// | check if connected to another parent | false | true | false | false |\n\n///\n\n/// When nesting to an action that is not a create:\n\n///\n\n/// | action | p is a list | p is required | c is list | c is required |\n\n/// | ------------------------------------ | ----------- | ------------- | --------- | ------------- |\n\n/// | relation violation | false | true | false | true |\n\n/// | check if connected to another parent | false | true | false | false |\n\n/// | check if parent has another child | false | true | false | false |\n\n///\n\n/// If none of the checks fail, the record will be disconnected to the\n\n/// previous relation before connecting to the given parent.\n\npub fn connect(\n\n 
conn: &mut Transaction,\n\n parent_id: &GraphqlId,\n\n actions: &NestedActions,\n\n node_selector: &NodeSelector,\n\n relation_field: RelationFieldRef,\n\n) -> SqlResult<()> {\n\n if let Some((select, check)) = actions.required_check(parent_id)? {\n\n let ids = conn.select_ids(select)?;\n\n check.call_box(ids.into_iter().next().is_some())?\n\n }\n\n\n\n let child_id = conn.find_id(node_selector)?;\n\n\n\n if let Some(query) = actions.parent_removal(parent_id) {\n\n conn.write(query)?;\n\n }\n\n\n\n if let Some(query) = actions.child_removal(&child_id) {\n\n conn.write(query)?;\n\n }\n\n\n\n let relation_query = MutationBuilder::create_relation(relation_field, parent_id, &child_id);\n\n conn.write(relation_query)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/relation.rs", "rank": 24, "score": 295786.8783542557 }, { "content": "/// Connects multiple records into the parent. Rules from `execute_connect`\n\n/// apply.\n\npub fn set(\n\n conn: &mut Transaction,\n\n parent_id: &GraphqlId,\n\n actions: &NestedActions,\n\n node_selectors: &Vec<NodeSelector>,\n\n relation_field: RelationFieldRef,\n\n) -> SqlResult<()> {\n\n if let Some((select, check)) = actions.required_check(parent_id)? 
{\n\n let ids = conn.select_ids(select)?;\n\n check.call_box(ids.into_iter().next().is_some())?\n\n }\n\n\n\n conn.write(actions.removal_by_parent(parent_id))?;\n\n\n\n for selector in node_selectors {\n\n let child_id = conn.find_id(selector)?;\n\n\n\n if !relation_field.is_list {\n\n conn.write(actions.removal_by_child(&child_id))?;\n\n }\n\n\n\n let relation_query = MutationBuilder::create_relation(Arc::clone(&relation_field), parent_id, &child_id);\n\n conn.write(relation_query)?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/relation.rs", "rank": 25, "score": 295781.3308501868 }, { "content": "/// A nested delete that removes one item related to the given `parent_id`.\n\n/// If no `RecordFinder` is given, will delete the first item from the\n\n/// table.\n\n///\n\n/// Errors thrown from domain violations:\n\n///\n\n/// - Violating any relations where the deleted record is required\n\n/// - If the deleted record is not connected to the parent\n\n/// - The record does not exist\n\npub fn execute_nested(\n\n conn: &mut Transaction,\n\n parent_id: &GraphqlId,\n\n actions: &NestedActions,\n\n node_selector: &Option<NodeSelector>,\n\n relation_field: RelationFieldRef,\n\n) -> SqlResult<()> {\n\n if let Some(ref node_selector) = node_selector {\n\n conn.find_id(node_selector)?;\n\n };\n\n\n\n let child_id = conn\n\n .find_id_by_parent(Arc::clone(&relation_field), parent_id, node_selector)\n\n .map_err(|e| match e {\n\n SqlError::NodesNotConnected {\n\n relation_name,\n\n parent_name,\n\n parent_where: _,\n\n child_name,\n\n child_where,\n", "file_path": "server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/delete.rs", "rank": 26, "score": 293678.79541520664 }, { "content": "fn toJdbcParametersInner(json: &serde_json::Value) -> Result<Vec<JdbcParameter>> {\n\n match json {\n\n serde_json::Value::Array(elements) => {\n\n let x: 
Result<Vec<JdbcParameter>> = elements.iter().map(jsonToJdbcParameter).collect();\n\n x\n\n },\n\n\n\n json => Err(DriverError::GenericError(String::from(format!(\n\n \"provided json was not an array: {}\",\n\n json\n\n )))),\n\n }\n\n}\n\n\n", "file_path": "server/libs/jdbc-native-rs/src/jdbc_params.rs", "rank": 27, "score": 293568.6194496756 }, { "content": " def dotPath(path: String, field: Field): String = (path, field) match {\n\n case (\"\", rf: RelationField) => rf.dbName\n\n case (path, rf: RelationField) => path + \".\" + rf.dbName\n\n case (\"\", sf: ScalarField) => if (sf.isId) \"_id\" else sf.dbName\n\n case (path, sf: ScalarField) => path + \".\" + (if (sf.isId) \"_id\" else sf.dbName)\n\n }\n\n\n", "file_path": "server/connectors/api-connector-mongo/src/main/scala/com/prisma/api/connector/mongo/extensions/MongoExtensions.scala", "rank": 28, "score": 292078.7539862059 }, { "content": "pub fn get_env(key: &str) -> PrismaResult<String> {\n\n env::var(key)\n\n .map_err(|_| PrismaError::ConfigurationError(format!(\"Environment variable {} required but not found\", key)))\n\n}\n", "file_path": "server/prisma-rs/query-engine/prisma/src/utilities.rs", "rank": 29, "score": 291746.4052432028 }, { "content": "/// Removes nested items matching to filter, or if no filter is given, all\n\n/// nested items related to the given `parent_id`. 
An error will be thrown\n\n/// if any deleted record is required in a model.\n\npub fn execute_nested(\n\n conn: &mut Transaction,\n\n parent_id: &GraphqlId,\n\n filter: &Option<Filter>,\n\n relation_field: RelationFieldRef,\n\n) -> SqlResult<usize> {\n\n let ids = conn.filter_ids_by_parents(Arc::clone(&relation_field), vec![parent_id], filter.clone())?;\n\n let count = ids.len();\n\n\n\n if count == 0 {\n\n return Ok(count);\n\n }\n\n\n\n let ids: Vec<&GraphqlId> = ids.iter().map(|id| &*id).collect();\n\n let model = relation_field.model();\n\n\n\n DeleteActions::check_relation_violations(model, ids.as_slice(), |select| {\n\n let ids = conn.select_ids(select)?;\n\n Ok(ids.into_iter().next())\n\n })?;\n\n\n\n for delete in MutationBuilder::delete_many(relation_field.related_model(), ids.as_slice()) {\n\n conn.delete(delete)?;\n\n }\n\n\n\n Ok(count)\n\n}\n", "file_path": "server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/delete_many.rs", "rank": 30, "score": 291612.45990328374 }, { "content": " def readLeafGCValueForField(field: ScalarField)(json: JsValue): JsResult[LeafGCValue] = {\n\n field.typeIdentifier match {\n\n case TypeIdentifier.String => json.validate[StringGCValue]\n\n case TypeIdentifier.Cuid => json.validate[StringIdGCValue]\n\n case TypeIdentifier.UUID => json.validate[UuidGCValue]\n\n case TypeIdentifier.Enum => readEnumGCValue(field.enum.get)(json)\n\n case TypeIdentifier.DateTime => json.validate[DateTimeGCValue]\n\n case TypeIdentifier.Boolean => json.validate[BooleanGCValue]\n\n case TypeIdentifier.Int => json.validate[IntGCValue]\n\n case TypeIdentifier.Float => json.validate[FloatGCValue]\n\n case TypeIdentifier.Json => json.validate[JsonGCValue]\n\n }\n\n }\n\n\n", "file_path": "server/servers/api/src/main/scala/com/prisma/api/import_export/GCValueJsonFormatter.scala", "rank": 31, "score": 290576.9560067177 }, { "content": "fn id_field(model: &Model) -> &Field {\n\n 
model.fields().next().clone().unwrap()\n\n}\n\n\n", "file_path": "server/prisma-rs/migration-engine/connectors/sql-migration-connector/src/database_schema_calculator.rs", "rank": 32, "score": 289467.22649875976 }, { "content": "pub fn convert_mutaction(\n\n m: crate::protobuf::prisma::DatabaseMutaction,\n\n project: ProjectRef,\n\n) -> TopLevelDatabaseMutaction {\n\n use crate::protobuf::prisma::database_mutaction;\n\n match m.type_.unwrap() {\n\n database_mutaction::Type::Create(x) => convert_create_envelope(x, project),\n\n database_mutaction::Type::Update(x) => convert_update_envelope(x, project),\n\n database_mutaction::Type::Upsert(x) => convert_upsert(x, project),\n\n database_mutaction::Type::Delete(x) => convert_delete(x, project),\n\n database_mutaction::Type::Reset(x) => convert_reset(x, project),\n\n database_mutaction::Type::DeleteNodes(x) => convert_delete_nodes(x, project),\n\n database_mutaction::Type::UpdateNodes(x) => convert_update_nodes(x, project),\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 33, "score": 286365.74220827664 }, { "content": "/// Creates a new root record and any associated list records to the database.\n\npub fn execute<S>(\n\n conn: &mut Transaction,\n\n model: ModelRef,\n\n non_list_args: &PrismaArgs,\n\n list_args: &[(S, PrismaListValue)],\n\n) -> SqlResult<GraphqlId>\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n let (insert, returned_id) = MutationBuilder::create_node(Arc::clone(&model), non_list_args.clone());\n\n let last_id = conn.insert(insert)?;\n\n\n\n let id = match returned_id {\n\n Some(id) => id,\n\n None => last_id.unwrap(),\n\n };\n\n\n\n for (field_name, list_value) in list_args {\n\n let field = model.fields().find_from_scalar(field_name.as_ref()).unwrap();\n\n let table = field.scalar_list_table();\n\n\n\n if let Some(insert) = MutationBuilder::create_scalar_list_value(table.table(), &list_value, &id) {\n\n conn.insert(insert)?;\n\n }\n\n }\n\n\n\n 
Ok(id)\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/create.rs", "rank": 34, "score": 285471.1004238653 }, { "content": "/// Updates one record and any associated list record in the database.\n\npub fn execute<S>(\n\n conn: &mut Transaction,\n\n node_selector: &NodeSelector,\n\n non_list_args: &PrismaArgs,\n\n list_args: &[(S, PrismaListValue)],\n\n) -> SqlResult<GraphqlId>\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n let model = node_selector.field.model();\n\n let id = conn.find_id(node_selector)?;\n\n\n\n if let Some(update) = MutationBuilder::update_one(Arc::clone(&model), &id, non_list_args)? {\n\n conn.update(update)?;\n\n }\n\n\n\n update_list_args(conn, &[id.clone()], Arc::clone(&model), list_args)?;\n\n\n\n Ok(id)\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/update.rs", "rank": 35, "score": 285471.1004238653 }, { "content": "pub fn convert_create_envelope(\n\n m: crate::protobuf::prisma::CreateNode,\n\n project: ProjectRef,\n\n) -> TopLevelDatabaseMutaction {\n\n TopLevelDatabaseMutaction::CreateNode(convert_create(m, project))\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 36, "score": 283959.29196859646 }, { "content": "pub fn convert_nested_mutactions(\n\n m: crate::protobuf::prisma::NestedMutactions,\n\n project: ProjectRef,\n\n) -> NestedMutactions {\n\n NestedMutactions {\n\n creates: m\n\n .creates\n\n .into_iter()\n\n .map(|m| convert_nested_create(m, Arc::clone(&project)))\n\n .collect(),\n\n updates: m\n\n .updates\n\n .into_iter()\n\n .map(|m| convert_nested_update(m, Arc::clone(&project)))\n\n .collect(),\n\n upserts: m\n\n .upserts\n\n .into_iter()\n\n .map(|m| convert_nested_upsert(m, Arc::clone(&project)))\n\n .collect(),\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 37, "score": 
283959.29196859646 }, { "content": "pub fn convert_update_envelope(\n\n m: crate::protobuf::prisma::UpdateNode,\n\n project: ProjectRef,\n\n) -> TopLevelDatabaseMutaction {\n\n TopLevelDatabaseMutaction::UpdateNode(convert_update(m, project))\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 38, "score": 283959.29196859646 }, { "content": "pub fn convert_nested_disconnect(\n\n m: crate::protobuf::prisma::NestedDisconnect,\n\n project: ProjectRef,\n\n) -> NestedDisconnect {\n\n let relation_field = project\n\n .internal_data_model()\n\n .find_model(&m.model_name)\n\n .unwrap()\n\n .fields()\n\n .find_from_relation_fields(&m.field_name)\n\n .unwrap();\n\n\n\n NestedDisconnect {\n\n relation_field: relation_field,\n\n where_: m.where_.map(|w| convert_node_select(w, project)),\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 39, "score": 283959.29196859646 }, { "content": " def readListGCValue(field: ScalarField)(json: JsValue): JsResult[ListGCValue] = {\n\n require(field.isList)\n\n for {\n\n jsArray <- json.validate[JsArray]\n\n gcValueResults = jsArray.value.map(element => readLeafGCValueForField(field)(element)).toVector\n\n gcValues <- sequenceJsResult(gcValueResults)\n\n } yield ListGCValue(gcValues)\n\n }\n\n\n", "file_path": "server/servers/api/src/main/scala/com/prisma/api/import_export/GCValueJsonFormatter.scala", "rank": 40, "score": 283601.3352566606 }, { "content": "/// Updates a nested item related to the parent, including any associated\n\n/// list values.\n\npub fn execute_nested<S>(\n\n conn: &mut Transaction,\n\n parent_id: &GraphqlId,\n\n node_selector: &Option<NodeSelector>,\n\n relation_field: RelationFieldRef,\n\n non_list_args: &PrismaArgs,\n\n list_args: &[(S, PrismaListValue)],\n\n) -> SqlResult<GraphqlId>\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n if let Some(ref node_selector) = node_selector {\n\n 
conn.find_id(node_selector)?;\n\n };\n\n\n\n let id = conn.find_id_by_parent(Arc::clone(&relation_field), parent_id, node_selector)?;\n\n let node_selector = NodeSelector::from((relation_field.related_model().fields().id(), id));\n\n\n\n execute(conn, &node_selector, non_list_args, list_args)\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/update.rs", "rank": 41, "score": 283410.9565231906 }, { "content": "/// Updates every record and any associated list records in the database\n\n/// matching the `Filter`.\n\n///\n\n/// Returns the number of updated items, if successful.\n\npub fn execute<S>(\n\n conn: &mut Transaction,\n\n model: ModelRef,\n\n filter: &Filter,\n\n non_list_args: &PrismaArgs,\n\n list_args: &[(S, PrismaListValue)],\n\n) -> SqlResult<usize>\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n let ids = conn.filter_ids(Arc::clone(&model), filter.clone())?;\n\n let count = ids.len();\n\n\n\n if count == 0 {\n\n return Ok(count);\n\n }\n\n\n\n let updates = {\n\n let ids: Vec<&GraphqlId> = ids.iter().map(|id| &*id).collect();\n\n MutationBuilder::update_many(Arc::clone(&model), ids.as_slice(), non_list_args)?\n", "file_path": "server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/update_many.rs", "rank": 42, "score": 283410.9470923132 }, { "content": "/// Creates a new nested item related to a parent, including any associated\n\n/// list values, and is connected with the `parent_id` to the parent record.\n\npub fn execute_nested<S>(\n\n conn: &mut Transaction,\n\n parent_id: &GraphqlId,\n\n actions: &NestedActions,\n\n relation_field: RelationFieldRef,\n\n non_list_args: &PrismaArgs,\n\n list_args: &[(S, PrismaListValue)],\n\n) -> SqlResult<GraphqlId>\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n if let Some((select, check)) = actions.required_check(parent_id)? 
{\n\n let ids = conn.select_ids(select)?;\n\n check.call_box(ids.into_iter().next().is_some())?\n\n };\n\n\n\n if let Some(query) = actions.parent_removal(parent_id) {\n\n conn.write(query)?;\n\n }\n\n\n", "file_path": "server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/create.rs", "rank": 43, "score": 283410.79806292086 }, { "content": " def existsDuplicateValueByModelAndField(model: Model, field: ScalarField): Future[Boolean] = {\n\n clientDatabase\n\n .getDatabase(database)\n\n .getCollection(model.dbName)\n\n .aggregate(\n\n Seq(\n\n `match`(notEqual(field.dbName, null)),\n\n group(s\"$$${field.dbName}\", sum(\"count\", 1)),\n\n `match`(gt(\"count\", 1)),\n\n mongoProjection(include(\"_id\")),\n\n limit(1)\n\n )\n\n )\n\n .toFuture()\n\n .map(_.nonEmpty)\n\n }\n\n\n\n override def enumValueIsInUse(models: Vector[Model], enumName: String, value: String): Future[Boolean] = {\n\n// val query = MongoDeployDatabaseQueryBuilder.enumValueIsInUse(project.id, models, enumName, value)\n\n Future.successful(false)\n\n }\n\n\n\n}\n", "file_path": "server/connectors/deploy-connector-mongo/src/main/scala/com/prisma/deploy/connector/mongo/impl/MongoClientDbQueries.scala", "rank": 44, "score": 282693.47047001764 }, { "content": "pub fn toJdbcParameters(str: &String) -> Result<Vec<JdbcParameter>> {\n\n let json = serde_json::from_str::<serde_json::Value>(&*str)?;\n\n toJdbcParametersInner(&json)\n\n}\n\n\n", "file_path": "server/libs/jdbc-native-rs/src/jdbc_params.rs", "rank": 45, "score": 282545.0448096552 }, { "content": " def generateFilterForFieldAndId(relationField: RelationField, id: IdGCValue) = relationField.isList match {\n\n case true => ScalarListFilter(relationField.model.dummyField(relationField), ListContains(id))\n\n case false => ScalarFilter(relationField.model.dummyField(relationField), Equals(id))\n\n }\n\n\n\n}\n", "file_path": 
"server/connectors/api-connector-mongo/src/main/scala/com/prisma/api/connector/mongo/database/NodeSingleQueries.scala", "rank": 46, "score": 282345.73835252965 }, { "content": "pub fn convert_prisma_args(proto: crate::protobuf::prisma::PrismaArgs) -> PrismaArgs {\n\n let mut result = PrismaArgs::default();\n\n for arg in proto.args {\n\n result.insert(arg.key, arg.value);\n\n }\n\n result\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 47, "score": 281813.2431968806 }, { "content": "pub fn convert_nested_update_nodes(\n\n m: crate::protobuf::prisma::NestedUpdateNodes,\n\n project: ProjectRef,\n\n) -> NestedUpdateNodes {\n\n let relation_field = find_relation_field(Arc::clone(&project), m.model_name, m.field_name);\n\n NestedUpdateNodes {\n\n relation_field: Arc::clone(&relation_field),\n\n filter: m.filter.map(|f| f.into_filter(relation_field.related_model())),\n\n non_list_args: convert_prisma_args(m.non_list_args),\n\n list_args: convert_list_args(m.list_args),\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 48, "score": 281615.6001056251 }, { "content": "pub fn convert_nested_delete_nodes(\n\n m: crate::protobuf::prisma::NestedDeleteNodes,\n\n project: ProjectRef,\n\n) -> NestedDeleteNodes {\n\n let relation_field = find_relation_field(project, m.model_name, m.field_name);\n\n NestedDeleteNodes {\n\n relation_field: Arc::clone(&relation_field),\n\n filter: m.filter.map(|f| f.into_filter(relation_field.related_model())),\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 49, "score": 281615.6001056251 }, { "content": "/// Updates nested items matching to filter, or if no filter is given, all\n\n/// nested items related to the given `parent_id`.\n\npub fn execute_nested<S>(\n\n conn: &mut Transaction,\n\n parent_id: &GraphqlId,\n\n filter: &Option<Filter>,\n\n relation_field: 
RelationFieldRef,\n\n non_list_args: &PrismaArgs,\n\n list_args: &[(S, PrismaListValue)],\n\n) -> SqlResult<usize>\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n let ids = conn.filter_ids_by_parents(Arc::clone(&relation_field), vec![parent_id], filter.clone())?;\n\n let count = ids.len();\n\n\n\n if count == 0 {\n\n return Ok(count);\n\n }\n\n\n\n let updates = {\n\n let ids: Vec<&GraphqlId> = ids.iter().map(|id| &*id).collect();\n", "file_path": "server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/update_many.rs", "rank": 50, "score": 281390.6135884525 }, { "content": "/// Updates list args related to the given records.\n\npub fn update_list_args<S>(\n\n conn: &mut Transaction,\n\n ids: &[GraphqlId],\n\n model: ModelRef,\n\n list_args: &[(S, PrismaListValue)],\n\n) -> SqlResult<()>\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n for (field_name, list_value) in list_args {\n\n let field = model.fields().find_from_scalar(field_name.as_ref()).unwrap();\n\n let table = field.scalar_list_table();\n\n let (deletes, inserts) = MutationBuilder::update_scalar_list_values(&table, &list_value, ids.to_vec());\n\n\n\n for delete in deletes {\n\n conn.delete(delete)?;\n\n }\n\n\n\n for insert in inserts {\n\n conn.insert(insert)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "server/prisma-rs/query-engine/connectors/sql-connector/src/transactional/mutaction_executor/update.rs", "rank": 51, "score": 281383.8622660738 }, { "content": " def defaultHandler(error: Error.Value): Throwable = error match {\n\n case Error.Value.FieldCannotBeNull(err) => FieldCannotBeNull(err)\n\n case Error.Value.ConnectionError(str) => ConnectionError(str)\n\n case Error.Value.InvalidInputError(str) => InvalidInputError(str)\n\n case Error.Value.JsonDecodeError(str) => JsonDecodeError(str)\n\n case Error.Value.NoResultsError(str) => NoResultError(str)\n\n case Error.Value.ProtobufDecodeError(str) => ProtobufDecodeError(str)\n\n case Error.Value.QueryError(str) => 
QueryError(str)\n\n case Error.Value.InvalidConnectionArguments(str) => InvalidConnectionArguments(str)\n\n case Error.Value.UniqueConstraintViolation(str) => UniqueConstraintViolation(str)\n\n case Error.Value.InternalServerError(msg) => new NativeError(msg)\n\n case Error.Value.Empty => sys.error(\"Empty RPC response error value\")\n\n case Error.Value.RelationViolation(err) => RelationViolation(err.relationName, err.modelAName, err.modelBName)\n\n case Error.Value.NodeNotFoundForWhere(err) =>\n\n NodeNotFoundForWhere(err.modelName, err.fieldName, toGcValue(err.value.prismaValue))\n\n case Error.Value.NodesNotConnected(err) => NodesNotConnected(\n\n err.relationName,\n\n err.parentName,\n\n err.parentWhere.map(w => NodeSelectorInfo(w.modelName, w.fieldName, toGcValue(w.value.prismaValue))),\n\n err.childName,\n", "file_path": "server/libs/prisma-rs-binding/src/main/scala/com/prisma/rs/NativeBinding.scala", "rank": 52, "score": 280926.21724275814 }, { "content": " def reads(json: JsValue): JsResult[DateTime] = json match {\n\n case JsString(s) =>\n\n try {\n\n JsSuccess(formatter.parseDateTime(s))\n\n } catch {\n\n case t: Throwable => error(s)\n\n }\n\n case _ =>\n\n error(json.toString())\n\n }\n\n\n", "file_path": "server/libs/json-utils/src/main/scala/com/prisma/utils/json/JsonUtils.scala", "rank": 53, "score": 280236.8607702394 }, { "content": "fn http_handler((json, req): (Json<Option<GraphQlBody>>, HttpRequest<Arc<RequestContext>>)) -> impl Responder {\n\n let request_context = req.state();\n\n let req: PrismaRequest<GraphQlBody> = PrismaRequest {\n\n body: json.clone().unwrap(),\n\n path: req.path().into(),\n\n headers: req\n\n .headers()\n\n .iter()\n\n .map(|(k, v)| (format!(\"{}\", k), v.to_str().unwrap().into()))\n\n .collect(),\n\n };\n\n\n\n let result = request_context\n\n .graphql_request_handler\n\n .handle(req, &request_context.context);\n\n\n\n serde_json::to_string(&result)\n\n}\n\n\n", "file_path": 
"server/prisma-rs/query-engine/prisma/src/main.rs", "rank": 54, "score": 278280.8627990412 }, { "content": "pub fn convert_mutaction_result(result: DatabaseMutactionResult) -> crate::protobuf::prisma::DatabaseMutactionResult {\n\n use crate::protobuf::prisma::database_mutaction_result;\n\n\n\n match result.typ {\n\n DatabaseMutactionResultType::Create => {\n\n let result = crate::protobuf::prisma::IdResult { id: result.id().into() };\n\n let typ = database_mutaction_result::Type::Create(result);\n\n\n\n crate::protobuf::prisma::DatabaseMutactionResult { type_: Some(typ) }\n\n }\n\n DatabaseMutactionResultType::Update => {\n\n let result = crate::protobuf::prisma::IdResult { id: result.id().into() };\n\n let typ = database_mutaction_result::Type::Update(result);\n\n\n\n crate::protobuf::prisma::DatabaseMutactionResult { type_: Some(typ) }\n\n }\n\n DatabaseMutactionResultType::Delete => {\n\n let result = crate::protobuf::prisma::NodeResult::from(result.node().clone());\n\n let typ = database_mutaction_result::Type::Delete(result);\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 55, "score": 278220.52611682727 }, { "content": " // TODO: do4gr should think about whether Import/Export works with custom names for id fields. I think it does but that should be validated.\n\n def parseIdGCValue(input: JsObject, model: Model): IdGCValue = model.idField_!.typeIdentifier match {\n\n case TypeIdentifier.UUID => UuidGCValue.parse_!(input.value(\"id\").as[String])\n\n case TypeIdentifier.Cuid => StringIdGCValue(input.value(\"id\").as[String])\n\n case x => sys.error(\"TypeIdentifier not yet supported in Import as ID. 
\" + x)\n\n }\n\n\n\n private def convertToImportRelation(json: JsValue): ImportRelation = {\n\n val array = json.as[JsArray]\n\n val leftJsObject = array.value.head.as[JsObject]\n\n val rightJsObject = array.value.last.as[JsObject]\n\n val leftModel = project.schema.getModelByName_!(leftJsObject(\"_typeName\").as[String])\n\n val rightModel = project.schema.getModelByName_!(rightJsObject(\"_typeName\").as[String])\n\n val leftId = parseIdGCValue(leftJsObject, leftModel)\n\n val rightId = parseIdGCValue(rightJsObject, rightModel)\n\n val left = ImportRelationSide(ImportIdentifier(leftModel.name, leftId), leftJsObject.value.get(\"fieldName\").flatMap(_.asOpt[String]))\n\n val right = ImportRelationSide(ImportIdentifier(rightModel.name, rightId), rightJsObject.value.get(\"fieldName\").flatMap(_.asOpt[String]))\n\n\n\n ImportRelation(left, right)\n\n }\n\n\n", "file_path": "server/servers/api/src/main/scala/com/prisma/api/import_export/BulkImport.scala", "rank": 56, "score": 277240.1016236509 }, { "content": " def nestedDeleteInputField(field: RelationField): Option[InputField[Any]] = (field.isList, field.isRequired) match {\n\n case (true, _) => whereInputField(field, name = \"delete\")\n\n case (false, false) => Some(InputField[Any](\"delete\", OptionInputType(BooleanType)))\n\n case (false, true) => None\n\n }\n\n\n", "file_path": "server/servers/api/src/main/scala/com/prisma/api/schema/InputTypesBuilder.scala", "rank": 57, "score": 275708.7166295123 }, { "content": " def nestedConnectInputField(field: RelationField): Option[InputField[Any]] = field.relatedModel_!.isEmbedded match {\n\n case true => None\n\n case false => whereInputField(field, name = \"connect\")\n\n }\n\n\n", "file_path": "server/servers/api/src/main/scala/com/prisma/api/schema/InputTypesBuilder.scala", "rank": 58, "score": 273441.3286137384 }, { "content": " def forGCValue(model: Model, field: ScalarField, value: GCValue) = NodeSelector(model, field, value)\n\n}\n\n\n\ncase class 
NodeSelector(model: Model, field: ScalarField, fieldGCValue: GCValue) {\n\n require(field.isUnique, s\"NodeSelectors may be only instantiated for unique fields! ${field.name} on ${model.name} is not unique.\")\n\n lazy val value = fieldGCValue.value\n\n lazy val fieldName = field.name\n\n}\n\n\n", "file_path": "server/connectors/api-connector/src/main/scala/com/prisma/api/connector/NodeSelector.scala", "rank": 59, "score": 273136.0192049347 }, { "content": " def failed(error: Throwable) = FailedAction(error)\n\n}\n\n\n\ncase class MapAction[A, B](source: MongoAction[A], fn: A => B) extends MongoAction[B]\n\ncase class FlatMapAction[A, B](source: MongoAction[A], fn: A => MongoAction[B]) extends MongoAction[B]\n\n\n\ncase class SimpleMongoAction[+A](fn: MongoDatabase => Future[A]) extends MongoAction[A]\n\n\n\ncase class SequenceAction[A](actions: Vector[MongoAction[A]]) extends MongoAction[Vector[A]]\n\ncase class SuccessAction[A](value: A) extends MongoAction[A]\n\ncase class FailedAction(error: Throwable) extends MongoAction[Nothing]\n\ncase class AsTryAction[A](action: MongoAction[A]) extends MongoAction[Try[A]]\n", "file_path": "server/connectors/api-connector-mongo/src/main/scala/com/prisma/api/connector/mongo/database/MongoActionsBuilder.scala", "rank": 60, "score": 272402.6712575052 }, { "content": "pub fn toJdbcParameterList(str: &String) -> Result<Vec<Vec<JdbcParameter>>> {\n\n match serde_json::from_str::<serde_json::Value>(&*str) {\n\n Ok(serde_json::Value::Array(elements)) => elements.iter().map(toJdbcParametersInner).collect(),\n\n Ok(json) => Err(DriverError::GenericError(String::from(format!(\n\n \"provided json was not an array of arrays: {}\",\n\n json\n\n )))),\n\n Err(e) => Err(DriverError::GenericError(String::from(format!(\n\n \"json parsing failed: {}\",\n\n e\n\n )))),\n\n }\n\n}\n\n\n", "file_path": "server/libs/jdbc-native-rs/src/jdbc_params.rs", "rank": 61, "score": 272071.0643535321 }, { "content": "pub fn into_model_order_by(model: 
ModelRef, ord: crate::protobuf::prisma::OrderBy) -> OrderBy {\n\n let field = model.fields().find_from_scalar(&ord.scalar_field).unwrap();\n\n\n\n OrderBy {\n\n field: field,\n\n sort_order: ord.sort_order().into(),\n\n }\n\n}\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/order_by.rs", "rank": 62, "score": 271319.39541906817 }, { "content": " def fail = sys.error(\"This JSON Formatter always fails.\")\n\n }\n\n\n", "file_path": "server/shared-models/src/main/scala/com/prisma/shared/models/ProjectJsonFormatter.scala", "rank": 63, "score": 270848.3491218694 }, { "content": "pub fn str_vec_from_bytes<'a>(raw: *const c_char, num_elements: i64) -> Vec<&'a str> {\n\n let mut vec: Vec<&str> = Vec::with_capacity(num_elements as usize);\n\n let mut offset = 0; // Start scanning at 0\n\n unsafe {\n\n for i in 0..num_elements {\n\n let ptr = { raw.offset(offset as isize) };\n\n let s = to_str(ptr);\n\n\n\n offset += s.len() + 1; // Include NULL termination\n\n vec.push(s)\n\n }\n\n }\n\n\n\n vec\n\n}\n", "file_path": "server/libs/jwt-native-rs/src/ffi_utils.rs", "rank": 64, "score": 270602.40659521153 }, { "content": " def nestedSetInputField(field: RelationField): Option[InputField[Any]] = (field.relatedModel_!.isEmbedded, field.isList) match {\n\n case (true, _) => None\n\n case (false, true) => whereInputField(field, name = \"set\")\n\n case (false, false) => None\n\n }\n\n\n", "file_path": "server/servers/api/src/main/scala/com/prisma/api/schema/InputTypesBuilder.scala", "rank": 65, "score": 269949.31807973405 }, { "content": "pub fn convert_update(m: crate::protobuf::prisma::UpdateNode, project: ProjectRef) -> UpdateNode {\n\n UpdateNode {\n\n where_: convert_node_select(m.where_, Arc::clone(&project)),\n\n non_list_args: convert_prisma_args(m.non_list_args),\n\n list_args: convert_list_args(m.list_args),\n\n nested_mutactions: convert_nested_mutactions(m.nested, Arc::clone(&project)),\n\n }\n\n}\n\n\n", "file_path": 
"server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 66, "score": 269504.03420261655 }, { "content": "pub fn convert_create(m: crate::protobuf::prisma::CreateNode, project: ProjectRef) -> CreateNode {\n\n let model = project.internal_data_model().find_model(&m.model_name).unwrap();\n\n CreateNode {\n\n model: model,\n\n non_list_args: convert_prisma_args(m.non_list_args),\n\n list_args: convert_list_args(m.list_args),\n\n nested_mutactions: convert_nested_mutactions(m.nested, Arc::clone(&project)),\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 67, "score": 269504.03420261655 }, { "content": " def fromSingleJsValue(jsValue: JsValue, field: ScalarField): GCValue = jsValue match {\n\n case JsString(x) if field.typeIdentifier == TypeIdentifier.DateTime => DateTimeGCValue(new DateTime(x))\n\n case JsString(x) => StringGCValue(x)\n\n case JsNumber(x) if field.typeIdentifier == TypeIdentifier.Int => IntGCValue(x.toInt)\n\n case JsNumber(x) if field.typeIdentifier == TypeIdentifier.Float => FloatGCValue(x.toDouble)\n\n case JsBoolean(x) if field.typeIdentifier == TypeIdentifier.Boolean => BooleanGCValue(x)\n\n case _ => sys.error(\"Unhandled JsValue\")\n\n }\n\n\n\n val res = model.scalarNonListFields.map { field =>\n\n val gCValue: JsLookupResult = json \\ field.name\n\n val asOption = gCValue.toOption\n\n val converted = asOption match {\n\n case None => NullGCValue\n\n case Some(JsNull) => NullGCValue\n\n case Some(JsString(x)) if field.typeIdentifier == TypeIdentifier.DateTime => DateTimeGCValue(new DateTime(x))\n\n case Some(JsString(x)) => StringGCValue(x)\n\n case Some(JsNumber(x)) if field.typeIdentifier == TypeIdentifier.Int => IntGCValue(x.toInt)\n\n case Some(JsNumber(x)) if field.typeIdentifier == TypeIdentifier.Float => FloatGCValue(x.toDouble)\n\n case Some(JsBoolean(x)) => BooleanGCValue(x)\n", "file_path": 
"server/servers/api/src/main/scala/com/prisma/util/coolArgs/GcConverters.scala", "rank": 68, "score": 268989.0579129202 }, { "content": "pub fn convert_nested_connect(m: crate::protobuf::prisma::NestedConnect, project: ProjectRef) -> NestedConnect {\n\n let relation_field = project\n\n .internal_data_model()\n\n .find_model(&m.model_name)\n\n .unwrap()\n\n .fields()\n\n .find_from_relation_fields(&m.field_name)\n\n .unwrap();\n\n\n\n NestedConnect {\n\n relation_field: relation_field,\n\n where_: convert_node_select(m.where_, project),\n\n top_is_create: m.top_is_create,\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 69, "score": 267726.67833901476 }, { "content": "pub fn convert_nested_set(m: crate::protobuf::prisma::NestedSet, project: ProjectRef) -> NestedSet {\n\n let relation_field = project\n\n .internal_data_model()\n\n .find_model(&m.model_name)\n\n .unwrap()\n\n .fields()\n\n .find_from_relation_fields(&m.field_name)\n\n .unwrap();\n\n\n\n NestedSet {\n\n relation_field: relation_field,\n\n wheres: m\n\n .wheres\n\n .into_iter()\n\n .map(|w| convert_node_select(w, Arc::clone(&project)))\n\n .collect(),\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 70, "score": 267726.67833901476 }, { "content": " def enumValueIsInUse(models: Vector[Model], enumName: String, value: String): Future[Boolean]\n\n}\n\n\n", "file_path": "server/connectors/deploy-connector/src/main/scala/com/prisma/deploy/connector/DeployConnector.scala", "rank": 71, "score": 266985.9352906897 }, { "content": " def nestedDisconnectInputField(field: RelationField): Option[InputField[Any]] = (field.relatedModel_!.isEmbedded, field.isList, field.isRequired) match {\n\n case (true, _, _) => None\n\n case (false, true, _) => whereInputField(field, name = \"disconnect\")\n\n case (false, false, false) => Some(InputField[Any](\"disconnect\", 
OptionInputType(BooleanType)))\n\n case (false, false, true) => None\n\n }\n\n\n", "file_path": "server/servers/api/src/main/scala/com/prisma/api/schema/InputTypesBuilder.scala", "rank": 72, "score": 266390.3735021672 }, { "content": "pub fn convert_reset(_: crate::protobuf::prisma::ResetData, project: ProjectRef) -> TopLevelDatabaseMutaction {\n\n let mutaction = ResetData { project };\n\n TopLevelDatabaseMutaction::ResetData(mutaction)\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 73, "score": 265986.110224989 }, { "content": "pub fn convert_upsert(m: crate::protobuf::prisma::UpsertNode, project: ProjectRef) -> TopLevelDatabaseMutaction {\n\n let upsert_node = UpsertNode {\n\n where_: convert_node_select(m.where_, Arc::clone(&project)),\n\n create: convert_create(m.create, Arc::clone(&project)),\n\n update: convert_update(m.update, project),\n\n };\n\n TopLevelDatabaseMutaction::UpsertNode(upsert_node)\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 74, "score": 265986.110224989 }, { "content": "pub fn convert_node_select(selector: crate::protobuf::prisma::NodeSelector, project: ProjectRef) -> NodeSelector {\n\n let model = project.internal_data_model().find_model(&selector.model_name).unwrap();\n\n let field = model.fields().find_from_scalar(&selector.field_name).unwrap();\n\n let value: PrismaValue = selector.value.into();\n\n NodeSelector { field, value }\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 75, "score": 265986.110224989 }, { "content": "pub fn convert_delete(m: crate::protobuf::prisma::DeleteNode, project: ProjectRef) -> TopLevelDatabaseMutaction {\n\n let delete_node = DeleteNode {\n\n where_: convert_node_select(m.where_, project),\n\n };\n\n TopLevelDatabaseMutaction::DeleteNode(delete_node)\n\n}\n\n\n", "file_path": 
"server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 76, "score": 265986.110224989 }, { "content": "fn jsonObjectToJdbcParameter(map: &serde_json::Map<String, serde_json::Value>) -> Result<JdbcParameter> {\n\n let discriminator = parseDiscriminator(map.get(\"discriminator\").unwrap().as_str().unwrap())?;\n\n let value = map.get(\"value\").unwrap();\n\n\n\n match (discriminator, value) {\n\n (JdbcParameterType::Int, &serde_json::Value::Number(ref n)) => Ok(JdbcParameter::Int(MagicInt {\n\n value: n.as_i64().unwrap(),\n\n underlying: RefCell::new(None),\n\n })),\n\n (JdbcParameterType::String, &serde_json::Value::String(ref s)) => Ok(JdbcParameter::String(s.to_string())),\n\n (JdbcParameterType::Boolean, &serde_json::Value::Bool(b)) => Ok(JdbcParameter::Boolean(b)),\n\n (JdbcParameterType::Null, &serde_json::Value::Null) => Ok(JdbcParameter::Null),\n\n (JdbcParameterType::Double, &serde_json::Value::Number(ref n)) => Ok(JdbcParameter::Double(MagicFloat {\n\n value: n.as_f64().unwrap(),\n\n underlying: RefCell::new(None),\n\n })),\n\n (JdbcParameterType::DateTime, x @ &serde_json::Value::Object(_)) => {\n\n let date: MagicDateTime = serde_json::from_value(x.clone())?;\n\n let dateTime = Utc.ymd(date.year, date.month, date.day).and_hms_milli(date.hour, date.minute, date.seconds, date.millis);\n\n Ok(JdbcParameter::DateTime(dateTime))\n\n },\n\n (JdbcParameterType::Long, &serde_json::Value::Number(ref n)) => Ok(JdbcParameter::Long(n.as_i64().unwrap())),\n\n (JdbcParameterType::UUID, &serde_json::Value::String(ref uuid)) => Ok(JdbcParameter::UUID(Uuid::parse_str(uuid)?)),\n\n (d, v) => Err(DriverError::GenericError(format!(\"Invalid combination: {:?} value {}\", d, v)))\n\n }\n\n}\n\n\n", "file_path": "server/libs/jdbc-native-rs/src/jdbc_params.rs", "rank": 77, "score": 264736.33246673545 }, { "content": "pub fn str_vec_from_pointers<'a>(raw: *const *const c_char, num_elements: i64) -> Vec<&'a str> {\n\n let mut vec: Vec<&str> = 
Vec::with_capacity(num_elements as usize);\n\n let mut offset = 0; // Start scanning at 0\n\n unsafe {\n\n for i in 0..num_elements {\n\n let ptr: *const c_char = raw.offset(i as isize).read();\n\n let s = to_str(ptr);\n\n\n\n vec.push(s);\n\n }\n\n }\n\n\n\n vec\n\n}\n\n\n", "file_path": "server/libs/jwt-native-rs/src/ffi_utils.rs", "rank": 78, "score": 264633.14390963555 }, { "content": " def apply(field: Field, bison: BsonValue, selectedFields: Option[SelectedFields] = None): GCValue = {\n\n (field.isList, field.isRelation) match {\n\n case (true, false) if bison.isArray =>\n\n val arrayValues: mutable.Seq[BsonValue] = bison.asArray().getValues.asScala\n\n ListGCValue(arrayValues.map(v => apply(field.typeIdentifier, v)).toVector)\n\n\n\n case (false, false) =>\n\n apply(field.typeIdentifier, bison)\n\n\n\n case (true, true) if bison.isArray =>\n\n val arrayValues: mutable.Seq[BsonValue] = bison.asArray().getValues.asScala\n\n ListGCValue(arrayValues.map(v => DocumentToRoot(field.asInstanceOf[RelationField].relatedModel_!, v.asDocument(), selectedFields)).toVector)\n\n\n\n case (false, true) =>\n\n bison match {\n\n case _: BsonNull => NullGCValue\n\n case x => DocumentToRoot(field.asInstanceOf[RelationField].relatedModel_!, x.asDocument(), selectedFields)\n\n }\n\n }\n\n }\n\n\n", "file_path": "server/connectors/api-connector-mongo/src/main/scala/com/prisma/api/connector/mongo/extensions/MongoExtensions.scala", "rank": 79, "score": 264296.8245556033 }, { "content": "pub fn convert_nested_create(m: crate::protobuf::prisma::NestedCreateNode, project: ProjectRef) -> NestedCreateNode {\n\n let relation_field = find_relation_field(Arc::clone(&project), m.model_name, m.field_name);\n\n\n\n NestedCreateNode {\n\n relation_field: relation_field,\n\n non_list_args: convert_prisma_args(m.non_list_args),\n\n list_args: convert_list_args(m.list_args),\n\n top_is_create: m.top_is_create,\n\n nested_mutactions: convert_nested_mutactions(m.nested, Arc::clone(&project)),\n\n 
}\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 80, "score": 264281.1649095309 }, { "content": "pub fn convert_delete_nodes(m: crate::protobuf::prisma::DeleteNodes, project: ProjectRef) -> TopLevelDatabaseMutaction {\n\n let model = project.internal_data_model().find_model(&m.model_name).unwrap();\n\n let delete_nodes = DeleteNodes {\n\n model: Arc::clone(&model),\n\n filter: m.filter.into_filter(model),\n\n };\n\n TopLevelDatabaseMutaction::DeleteNodes(delete_nodes)\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 81, "score": 264281.1649095309 }, { "content": "pub fn convert_nested_update(m: crate::protobuf::prisma::NestedUpdateNode, project: ProjectRef) -> NestedUpdateNode {\n\n let relation_field = find_relation_field(Arc::clone(&project), m.model_name, m.field_name);\n\n NestedUpdateNode {\n\n relation_field: relation_field,\n\n where_: m.where_.map(|w| convert_node_select(w, Arc::clone(&project))),\n\n non_list_args: convert_prisma_args(m.non_list_args),\n\n list_args: convert_list_args(m.list_args),\n\n nested_mutactions: convert_nested_mutactions(m.nested, Arc::clone(&project)),\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 82, "score": 264281.1649095309 }, { "content": "pub fn convert_nested_delete(m: crate::protobuf::prisma::NestedDeleteNode, project: ProjectRef) -> NestedDeleteNode {\n\n NestedDeleteNode {\n\n relation_field: find_relation_field(Arc::clone(&project), m.model_name, m.field_name),\n\n where_: m.where_.map(|w| convert_node_select(w, project)),\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 83, "score": 264281.1649095309 }, { "content": "pub fn convert_update_nodes(m: crate::protobuf::prisma::UpdateNodes, project: ProjectRef) -> TopLevelDatabaseMutaction {\n\n let model = 
project.internal_data_model().find_model(&m.model_name).unwrap();\n\n let update_nodes = UpdateNodes {\n\n model: Arc::clone(&model),\n\n filter: m.filter.into_filter(model),\n\n non_list_args: convert_prisma_args(m.non_list_args),\n\n list_args: convert_list_args(m.list_args),\n\n };\n\n TopLevelDatabaseMutaction::UpdateNodes(update_nodes)\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 84, "score": 264281.1649095309 }, { "content": "pub fn convert_nested_upsert(m: crate::protobuf::prisma::NestedUpsertNode, project: ProjectRef) -> NestedUpsertNode {\n\n let relation_field = find_relation_field(Arc::clone(&project), m.model_name, m.field_name);\n\n NestedUpsertNode {\n\n relation_field: relation_field,\n\n where_: m.where_.map(|w| convert_node_select(w, Arc::clone(&project))),\n\n create: convert_nested_create(m.create, Arc::clone(&project)),\n\n update: convert_nested_update(m.update, Arc::clone(&project)),\n\n }\n\n}\n\n\n", "file_path": "server/prisma-rs/query-engine/native-bridge/src/protobuf/mutaction.rs", "rank": 85, "score": 264281.1649095309 }, { "content": " def errorIfNodeIsInRelation(parentId: IdGCValue, field: RelationField)(implicit ec: ExecutionContext): DBIO[Unit] = {\n\n errorIfNodesAreInRelation(Vector(parentId), field)\n\n }\n\n\n", "file_path": "server/connectors/api-connector-jdbc/src/main/scala/com/prisma/api/connector/jdbc/database/ValidationActions.scala", "rank": 86, "score": 263228.92714254203 }, { "content": " def errorIfNodesAreInRelation(parentIds: Vector[IdGCValue], field: RelationField)(implicit ec: ExecutionContext): DBIO[Unit] = {\n\n val query = sql\n\n .select(relationColumn(field.relatedField))\n\n .from(relationTable(field.relation))\n\n .where(\n\n relationColumn(field.relatedField).in(placeHolders(parentIds)),\n\n relationColumn(field).isNotNull\n\n )\n\n\n\n queryToDBIO(query)(\n\n setParams = pp => parentIds.foreach(pp.setGcValue),\n\n readResult = rs => if (rs.next) 
throw RequiredRelationWouldBeViolated(field.relation)\n\n )\n\n }\n\n}\n", "file_path": "server/connectors/api-connector-jdbc/src/main/scala/com/prisma/api/connector/jdbc/database/ValidationActions.scala", "rank": 87, "score": 259894.3820052464 }, { "content": " def deleteField(field: ScalarField) = {\n\n if (field.isScalarList) {\n\n DeleteScalarListTable(project, field.model, field)\n\n } else {\n\n DeleteColumn(project, field.model, field)\n\n }\n\n }\n\n\n", "file_path": "server/connectors/deploy-connector/src/main/scala/com/prisma/deploy/connector/MigrationStepMapperImpl.scala", "rank": 88, "score": 259376.7927051646 }, { "content": " def createField(field: ScalarField) = {\n\n if (field.isScalarList) {\n\n createScalarListField(field)\n\n } else {\n\n CreateColumn(project, field.model, field)\n\n }\n\n }\n\n\n", "file_path": "server/connectors/deploy-connector/src/main/scala/com/prisma/deploy/connector/MigrationStepMapperImpl.scala", "rank": 89, "score": 259376.7927051646 }, { "content": " def getIDAtPath(parentField: RelationField, path: Path): Option[IdGCValue] = PrismaNode.getNodeAtPath(Some(this), path.segments) match {\n\n case None =>\n\n None\n\n\n\n case Some(n) =>\n\n n.data.map.get(parentField.name) match {\n\n case Some(x: StringIdGCValue) => Some(x)\n\n case _ => None\n\n }\n\n }\n\n}\n\n\n", "file_path": "server/connectors/api-connector/src/main/scala/com/prisma/api/connector/PrismaNode.scala", "rank": 90, "score": 258596.18732465553 }, { "content": "/// Loads the config\n\npub fn load() -> Result<PrismaConfig, CommonError> {\n\n let config: String = match env::var(\"PRISMA_CONFIG\") {\n\n Ok(c) => c,\n\n Err(_) => match find_config_path() {\n\n Some(path) => {\n\n let mut f = File::open(path)?;\n\n let mut contents = String::new();\n\n f.read_to_string(&mut contents)?;\n\n contents\n\n }\n\n None => return Err(CommonError::ConfigurationError(\"Unable to find Prisma config\".into())),\n\n },\n\n };\n\n\n\n let config = 
substitute_env_vars(config)?;\n\n Ok(serde_yaml::from_str(&config.replace(\"\\\\n\", \"\\n\")).expect(\"Unable to parse YML config.\"))\n\n}\n\n\n", "file_path": "server/prisma-rs/libs/prisma-common/src/config/mod.rs", "rank": 91, "score": 258019.38852480066 }, { "content": " def createScalarListField(field: ScalarField) = CreateScalarListTable(project, field.model, field)\n\n\n", "file_path": "server/connectors/deploy-connector/src/main/scala/com/prisma/deploy/connector/MigrationStepMapperImpl.scala", "rank": 92, "score": 257147.87722228153 }, { "content": " def getFieldValue(field: ScalarField): Option[GCValue] = rootGCMap.get(field.name)\n", "file_path": "server/connectors/api-connector/src/main/scala/com/prisma/api/connector/PrismaArgs.scala", "rank": 93, "score": 257073.53978273756 }, { "content": " def existsNullByModelAndField(model: Model, field: Field): Future[Boolean] = {\n\n// val query = field match {\n\n// case f: ScalarField => MongoDeployDatabaseQueryBuilder.existsNullByModelAndScalarField(project.id, model.name, f.name)\n\n// case f: RelationField => MongoDeployDatabaseQueryBuilder.existsNullByModelAndRelationField(project.id, model.name, f)\n\n// }\n\n Future.successful(false)\n\n }\n\n\n", "file_path": "server/connectors/deploy-connector-mongo/src/main/scala/com/prisma/deploy/connector/mongo/impl/MongoClientDbQueries.scala", "rank": 94, "score": 251684.2606955831 }, { "content": " def enumValueIsInUse(projectId: String, models: Vector[Model], enumName: String, value: String) = ???\n\n}\n", "file_path": "server/connectors/deploy-connector-mongo/src/main/scala/com/prisma/deploy/connector/mongo/database/MongoDeployDatabaseQueryBuilder.scala", "rank": 95, "score": 248959.2432117046 }, { "content": "#[test]\n\nfn DeleteField_must_work() {\n\n let json = r#\"{\"stepType\":\"DeleteField\",\"model\":\"Blog\",\"name\":\"title\"}\"#;\n\n let expected_struct = MigrationStep::DeleteField(DeleteField {\n\n model: \"Blog\".to_string(),\n\n name: 
\"title\".to_string(),\n\n });\n\n assert_symmetric_serde(json, expected_struct);\n\n}\n\n\n\n// #[test]\n\n// fn CreateEnum_must_work() {\n\n// let json = r#\"{\"stepType\":\"CreateEnum\",\"name\":\"BlogCategory\",\"values\":[\"Politics\",\"Tech\"]}\"#;\n\n// let expected_struct = MigrationStep::CreateEnum(CreateEnum {\n\n// name: \"BlogCategory\".to_string(),\n\n// values: vec![\"Politics\".to_string(), \"Tech\".to_string()],\n\n// });\n\n// assert_symmetric_serde(json, expected_struct);\n\n// }\n\n\n\n// #[test]\n", "file_path": "server/prisma-rs/migration-engine/connectors/migration-connector/tests/steps_tests.rs", "rank": 96, "score": 248333.84390180162 }, { "content": "#[test]\n\nfn resolve_enum_field() {\n\n let dml = r#\"\n\n model User {\n\n email: String\n\n role: Role\n\n }\n\n\n\n enum Role {\n\n ADMIN\n\n USER\n\n PRO\n\n }\n\n \"#;\n\n\n\n let schema = parse_and_validate(dml);\n\n let user_model = schema.assert_has_model(\"User\");\n\n user_model\n\n .assert_has_field(\"email\")\n\n .assert_base_type(&dml::ScalarType::String);\n\n user_model.assert_has_field(\"role\").assert_enum_type(\"Role\");\n\n\n\n let role_enum = schema.assert_has_enum(\"Role\");\n\n role_enum.assert_has_value(\"ADMIN\");\n\n role_enum.assert_has_value(\"PRO\");\n\n role_enum.assert_has_value(\"USER\");\n\n}\n", "file_path": "server/prisma-rs/libs/datamodel/tests/relations.rs", "rank": 97, "score": 246473.42954267672 }, { "content": "#[test]\n\nfn minimal_CreateField_must_work() {\n\n let json =\n\n r#\"{\"stepType\":\"CreateField\",\"model\":\"Blog\",\"name\":\"title\",\"type\":{\"Base\":\"String\"},\"arity\":\"required\"}\"#;\n\n let expected_struct = MigrationStep::CreateField(CreateField {\n\n model: \"Blog\".to_string(),\n\n name: \"title\".to_string(),\n\n tpe: FieldType::Base(ScalarType::String),\n\n arity: FieldArity::Required,\n\n db_name: None,\n\n is_created_at: None,\n\n is_updated_at: None,\n\n id: None,\n\n default: None,\n\n scalar_list: None,\n\n });\n\n 
assert_symmetric_serde(json, expected_struct);\n\n}\n\n\n\n// TODO: bring back once we have decided on field behavious\n", "file_path": "server/prisma-rs/migration-engine/connectors/migration-connector/tests/steps_tests.rs", "rank": 98, "score": 246220.93527153454 }, { "content": "#[test]\n\nfn full_CreateField_must_work() {\n\n let json = r#\"{\n\n \"stepType\":\"CreateField\",\n\n \"model\":\"Blog\",\n\n \"name\":\"title\",\n\n \"type\":{\"Base\":\"String\"},\n\n \"arity\":\"optional\",\n\n \"dbName\":\"blog\",\n\n \"isCreatedAt\":true,\n\n \"isUpdatedAt\":true,\n\n \"default\":{\"String\":\"default\"},\n\n \"scalarList\": \"Embedded\"\n\n }\"#;\n\n let expected_struct = MigrationStep::CreateField(CreateField {\n\n model: \"Blog\".to_string(),\n\n name: \"title\".to_string(),\n\n tpe: FieldType::Base(ScalarType::String),\n\n arity: FieldArity::Optional,\n\n db_name: Some(\"blog\".to_string()),\n\n is_created_at: Some(true),\n\n is_updated_at: Some(true),\n\n id: None, // TODO: adapt once added to CreateField\n\n default: Some(Value::String(\"default\".to_string())),\n\n scalar_list: Some(ScalarListStrategy::Embedded),\n\n });\n\n\n\n assert_symmetric_serde(json, expected_struct);\n\n}\n\n\n", "file_path": "server/prisma-rs/migration-engine/connectors/migration-connector/tests/steps_tests.rs", "rank": 99, "score": 246220.93527153454 } ]
Rust
main/src/devices/stepper.rs
Alexander89/Stepper-Feedback-Driver
4e698750d75d79ec77b9845374cc9ebf741c4422
use atsamd_hal::{ delay::Delay, gpio::v2::{Pin, PushPullOutput, PA04, PA10, PA11}, prelude::_atsamd_hal_embedded_hal_digital_v2_OutputPin, }; use embedded_hal::digital::v2::PinState; use utils::time::{Microseconds, U32Ext}; use crate::settings::{DT_MIN_U32, STEPS_PER_RESOLUTION_I16, STEPS_PER_RESOLUTION_I16_HALF}; pub enum Direction { CW, CCW, } pub enum NextStepperAction { Idle, DirectionChanged(Direction), StepRequired(i8), } pub struct Stepper { enable: Pin<PA04, PushPullOutput>, direction: Pin<PA10, PushPullOutput>, step: Pin<PA11, PushPullOutput>, state: PinState, turn_cw: bool, current_direction_cw: bool, target_step: i32, current_step: i32, real_step: i32, last_mag_val: i16, filtered_dt_per_step: u32, } impl Stepper { pub fn init( enable: Pin<PA04, PushPullOutput>, direction: Pin<PA10, PushPullOutput>, step: Pin<PA11, PushPullOutput>, ) -> Self { let mut motor = Self { enable, direction, step, state: PinState::Low, turn_cw: true, current_direction_cw: true, target_step: 0, current_step: 0, real_step: 0, last_mag_val: 0, filtered_dt_per_step: 0, }; motor.direction.set_high(); motor.disable(); motor.cw(); motor } pub fn enable(&mut self) { self.enable.set_low(); } pub fn disable(&mut self) { self.enable.set_high(); } pub fn cw(&mut self) { self.turn_cw = true; } pub fn ccw(&mut self) { self.turn_cw = false; } pub fn do_step(&mut self) { if self.turn_cw { self.target_step += 1; } else { self.target_step -= 1; } } pub fn init_stepper(&mut self, start_value: i16) { self.last_mag_val = start_value; } pub fn poll_next_action(&mut self) -> NextStepperAction { if self.current_step == self.target_step { NextStepperAction::Idle } else if self.current_step > self.target_step && self.current_direction_cw { NextStepperAction::DirectionChanged(Direction::CCW) } else if self.current_step < self.target_step && !self.current_direction_cw { NextStepperAction::DirectionChanged(Direction::CW) } else { if self.current_direction_cw { NextStepperAction::StepRequired(1) } else { 
NextStepperAction::StepRequired(-1) } } } pub fn execute(&mut self, req: NextStepperAction) -> bool { match req { NextStepperAction::Idle => false, NextStepperAction::DirectionChanged(Direction::CW) => { self.direction.set_high(); self.current_direction_cw = true; true } NextStepperAction::DirectionChanged(Direction::CCW) => { self.direction.set_low(); self.current_direction_cw = false; true } NextStepperAction::StepRequired(i) => { self.current_step += i as i32; self.state = !self.state; self.step.set_high(); cortex_m::asm::nop(); cortex_m::asm::nop(); cortex_m::asm::nop(); cortex_m::asm::nop(); cortex_m::asm::nop(); self.step.set_low(); (self.current_step - self.real_step).abs() < 3 } } } pub fn update_angle( &mut self, mag_val: i16, dt: Microseconds, motor_dt: Microseconds, ) -> Option<Microseconds> { let mmove = STEPS_PER_RESOLUTION_I16_HALF - self.last_mag_val; let mut new_val = mag_val + mmove; if new_val < 0 { new_val += STEPS_PER_RESOLUTION_I16; } else if new_val > STEPS_PER_RESOLUTION_I16 { new_val -= STEPS_PER_RESOLUTION_I16; } let mut dif: i32 = (STEPS_PER_RESOLUTION_I16_HALF - new_val) as i32; self.real_step -= dif; let dt_per_step = if dif == 0 { DT_MIN_U32 } else { (dt.0 / dif.abs() as u32).min(DT_MIN_U32) }; self.filtered_dt_per_step = ((self.filtered_dt_per_step + dt_per_step) / 2).min(DT_MIN_U32); let stuck = motor_dt.0 < self.filtered_dt_per_step - 1280; let dif = self.current_step - self.real_step; match dif.abs() { 1 => { } x if x > 1 => self.current_step -= (dif) / 2, _ => (), } self.last_mag_val = mag_val; if stuck { Some(self.filtered_dt_per_step.us()) } else { None } } }
use atsamd_hal::{ delay::Delay, gpio::v2::{Pin, PushPullOutput, PA04, PA10, PA11}, prelude::_atsamd_hal_embedded_hal_digital_v2_OutputPin, }; use embedded_hal::digital::v2::PinState; use utils::time::{Microseconds, U32Ext}; use crate::settings::{DT_MIN_U32, STEPS_PER_RESOLUTION_I16, STEPS_PER_RESOLUTION_I16_HALF}; pub enum Direction { CW, CCW, } pub enum NextStepperAction { Idle, DirectionChanged(Direction), StepRequired(i8), } pub struct Stepper { enable: Pin<PA04, PushPullOutput>, direction: Pin<PA10, PushPullOutput>, step: Pin<PA11, PushPullOutput>, state: PinState, turn_cw: bool, current_direction_cw: bool, target_step: i32, current_step: i32, real_step: i32, last_mag_val: i16, filtered_dt_per_step: u32, } impl Stepper { pub fn init( enable: Pin<PA04, PushPullOutput>, direction: Pin<PA10, PushPullOutput>, step: Pin<PA11, PushPullOutput>, ) -> Self { let mut motor = Self { enable, direction, step, state: PinState::Low, turn_cw: true, current_direction_cw: true, target_step: 0, current_step: 0, real_step: 0, last_mag_val: 0, filtered_dt_per_step: 0, }; motor.direction.set_high(); motor.disable(); motor.cw(); motor } pub fn enable(&mut self) { self.enable.set_low(); } pub fn disable(&mut self) { self.enable.set_high(); } pub fn cw(&mut self) { self.turn_cw = true; } pub fn ccw(&mut self) { self.turn_cw = false; } pub fn do_step(&mut self) { if self.turn_cw { self.target_step += 1; } else { self.target_step -= 1; } } pub fn init_stepper(&mut self, start_value: i16) { self.last_mag_val = start_value; } pub fn poll_next_action(&mut self) -> NextStepperAction { if self.current_step == self.target_step { NextStepperAction::Idle } else if self.current_step > self.target_step && self.current_direction_cw { NextStepperAction::DirectionChanged(Direction::CCW) } else if self.current_step < self.target_step && !self.current_direction_cw { NextStepperAction::DirectionChanged(Direction::CW) } else { if self.current_direction_cw { NextStepperAction::StepRequired(1) } else { 
NextStepperAction::StepRequired(-1) } } } pub fn execute(&mut self, req: NextStepperAction) -> bool { match req { NextStepperAction::Idle => false, NextStepperAction::DirectionChanged(Direction::CW) => { self.direction.set_high(); self.current_direction_cw = true; true } NextStepperAction::DirectionChanged(Direction::CCW) => { self.direction.set_low(); self.current_direction_cw = false; true } NextStepperAction::StepRequired(i) => { self.current_step += i as i32; self.state = !self.state; self.step.set_high(); cortex_m::asm::nop(); cortex_m::asm::nop(); cortex_m::asm::nop(); cortex_m::asm::nop(); cortex_m::asm::nop(); self.step.set_low(); (self.current_step - self.real_step).abs() < 3 } } } pub fn update_angle( &mut self, mag_val: i16, dt: Microseconds, motor_dt: Microseconds, ) -> Option<Microseconds> { let mmove = STEPS_PER_RESOLUTION_I16_HALF - self.last_mag_val; let mut new_val = mag_val + mmove; if new_val < 0 { new_val += STEPS_PER_RESOLUTION_I16; } else if new_val > STEPS_PER_RESOLUTION_I16 { new_val -= STEPS_PER_RESOLUTION_I16; } let mut dif: i32 = (STEPS_PER_RESOLUTION_I16_HALF - new_val) as i32; self.real_step -= dif;
self.filtered_dt_per_step = ((self.filtered_dt_per_step + dt_per_step) / 2).min(DT_MIN_U32); let stuck = motor_dt.0 < self.filtered_dt_per_step - 1280; let dif = self.current_step - self.real_step; match dif.abs() { 1 => { } x if x > 1 => self.current_step -= (dif) / 2, _ => (), } self.last_mag_val = mag_val; if stuck { Some(self.filtered_dt_per_step.us()) } else { None } } }
let dt_per_step = if dif == 0 { DT_MIN_U32 } else { (dt.0 / dif.abs() as u32).min(DT_MIN_U32) };
assignment_statement
[ { "content": "fn enabled_changed(state: bool) {\n\n unsafe { HARDWARE.as_mut() }.map(|hw| {\n\n if state {\n\n hw.stepper_enable();\n\n hw.led1.on()\n\n } else {\n\n hw.stepper_disable();\n\n hw.led1.off()\n\n }\n\n });\n\n}\n", "file_path": "main/src/main.rs", "rank": 0, "score": 118374.58225127938 }, { "content": "fn step_changed(state: bool) {\n\n unsafe { HARDWARE.as_mut() }.map(|hw| {\n\n hw.stepper_step();\n\n if state {\n\n hw.led2.on()\n\n } else {\n\n hw.led2.off()\n\n }\n\n });\n\n}\n\n\n", "file_path": "main/src/main.rs", "rank": 1, "score": 118374.58225127938 }, { "content": "pub fn init(\n\n cs: &CriticalSection,\n\n clocks: &mut GenericClockController,\n\n nvic: &mut NVIC,\n\n pm: &mut pac::PM,\n\n eic: pac::EIC,\n\n) {\n\n // not supported by chip\n\n // let is_configured = EIC_SETUP.swap(true, core::sync::atomic::Ordering::SeqCst);\n\n\n\n // wrapping it at lest in a critical section.\n\n let is_configured = cortex_m::interrupt::free(|_| {\n\n let is_configured = EIC_SETUP.load(Ordering::SeqCst);\n\n EIC_SETUP.store(true, Ordering::SeqCst);\n\n is_configured\n\n });\n\n\n\n if !is_configured {\n\n // define clock generator 2 and connect it to the 8Mhz OSC\n\n let gclk2 = clocks\n", "file_path": "main/src/devices/ext_int_pin.rs", "rank": 2, "score": 97646.27712272338 }, { "content": "fn dir_changed(state: bool) {\n\n unsafe { HARDWARE.as_mut() }.map(|hw| {\n\n if state {\n\n hw.stepper_cw();\n\n hw.led0.on()\n\n } else {\n\n hw.stepper_ccw();\n\n hw.led0.off()\n\n }\n\n });\n\n}\n\n\n", "file_path": "main/src/main.rs", "rank": 3, "score": 96503.3360805542 }, { "content": "fn init(hw: &mut Devices) {\n\n hw.led0.on();\n\n hw.delay(100.ms());\n\n\n\n hw.poll_magnet_sensor_setup();\n\n\n\n hw.led0.off();\n\n hw.delay(400.ms());\n\n\n\n while hw.magnet_sensor.magnitude < 1000 {\n\n hw.led1.on();\n\n hw.led2.off();\n\n hw.delay(400.ms());\n\n hw.led1.off();\n\n hw.led2.on();\n\n hw.delay(400.ms());\n\n\n\n hw.poll_magnet_sensor_setup();\n\n }\n\n 
hw.led0.off();\n", "file_path": "main/src/main.rs", "rank": 4, "score": 94467.14318276984 }, { "content": "fn get_pin_state(in0: bool, shift: u8) -> bool {\n\n let p = unsafe { &*pac::PORT::ptr() };\n\n if in0 {\n\n (p.in0.read().in_().bits() & (1 << shift)) != 0\n\n } else {\n\n (p.in1.read().in_().bits() & (1 << shift)) != 0\n\n }\n\n}\n\n\n\n\n\nmacro_rules! eip {\n\n (\n\n $Pad:ident, \n\n $num:expr,\n\n $is_in0:expr\n\n ) => {\n\npaste::item! {\n\n impl ExtIntPin<pin_v2::$Pad> {\n\n pub fn enable<M: PinMode>(pin: Pin<pin_v2::$Pad, M>, cb: fn(bool) -> ()) -> Self {\n\n let mut extint = extInt::[<ExtInt $num>]::new(pin.into_pull_down_interrupt());\n", "file_path": "main/src/devices/ext_int_pin.rs", "rank": 5, "score": 76423.6298125239 }, { "content": "/// Extension trait that adds convenience methods to the `u32` type\n\npub trait U32Ext {\n\n /// Wrap in `Bps`\n\n fn bps(self) -> Bps;\n\n\n\n /// Wrap in `Hertz`\n\n fn hz(self) -> Hertz;\n\n\n\n /// Wrap in `KiloHertz`\n\n fn khz(self) -> KiloHertz;\n\n\n\n /// Wrap in `MegaHertz`\n\n fn mhz(self) -> MegaHertz;\n\n\n\n /// Wrap in `Seconds`\n\n fn s(self) -> Seconds;\n\n\n\n /// Wrap in `Milliseconds`\n\n fn ms(self) -> Milliseconds;\n\n\n\n /// Wrap in `Microseconds`\n", "file_path": "utils/src/time.rs", "rank": 6, "score": 73683.51354016896 }, { "content": "fn _debug_timer(_d_t: u32) {\n\n #[cfg(feature = \"serial\")]\n\n {\n\n unsafe { HARDWARE.as_mut() }.map(|hw| {\n\n let _ = hw.serial_write_num(hw.get_step() as usize);\n\n // let _ = hw.serial_write(b\" \");\n\n // let _ = hw.serial_write_num(hw.magnet_sensor.magnitude as usize);\n\n let _ = hw.serial_write(b\"\\r\\n\");\n\n });\n\n }\n\n}\n\n\n", "file_path": "main/src/main.rs", "rank": 7, "score": 65087.91751073139 }, { "content": "pub fn num_length(n: usize) -> usize {\n\n if n > 9 {\n\n num_length(n / 10) + 1\n\n } else {\n\n 1\n\n }\n\n}\n\n\n", "file_path": "utils/src/lib.rs", "rank": 8, "score": 56668.75778144675 }, { "content": "pub fn 
num_to_string(num: usize) -> (usize, [u8; 10]) {\n\n let lng = num_length(num);\n\n let mut value = num;\n\n\n\n let mut buf = [0; 10];\n\n for i in 0..lng {\n\n let dig = value % 10;\n\n value /= 10;\n\n\n\n let idx = (lng - i) as usize - 1;\n\n buf[idx] = b'0' + dig as u8;\n\n }\n\n (lng, buf)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn num_length_test() {\n", "file_path": "utils/src/lib.rs", "rank": 9, "score": 51041.92979331965 }, { "content": "enum ReadPhase {\n\n Query,\n\n Read1,\n\n Read2,\n\n}\n\n\n\npub struct MagnetSensor {\n\n status: u8,\n\n pub detected: bool,\n\n low: bool,\n\n heigh: bool,\n\n pub raw_angle: u16,\n\n pub agc: u8,\n\n pub magnitude: u16,\n\n\n\n read_phase: ReadPhase,\n\n read_phase_buf: u16,\n\n}\n\n\n\nimpl MagnetSensor {\n", "file_path": "main/src/devices/magnet_sensor.rs", "rank": 10, "score": 38540.81529472124 }, { "content": "/// This is a helper trait to make it easier to make most of the\n\n/// TimerCounter impl generic. It doesn't make too much sense to\n\n/// to try to implement this trait outside of this module.\n\npub trait Count32 {\n\n fn count_32(&self) -> &COUNT32;\n\n}\n\n\n\nimpl<TC> CountDown for TimerCounter32<TC>\n\nwhere\n\n TC: Count32,\n\n{\n\n type Time = Nanoseconds;\n\n\n\n fn start<T>(&mut self, timeout: T)\n\n where\n\n T: Into<Self::Time>,\n\n {\n\n let params = TimerParams::new_us(timeout, self.freq.0);\n\n let divider = params.divider;\n\n let cycles = params.cycles;\n\n\n\n let count = self.tc.count_32();\n\n\n", "file_path": "main/src/devices/tc32.rs", "rank": 11, "score": 36210.2990450168 }, { "content": "fn run() -> ! 
{\n\n let hardware = unsafe {\n\n cortex_m::interrupt::free(|cs| {\n\n HARDWARE = Some(Devices::init(cs));\n\n HARDWARE.as_mut().unwrap()\n\n })\n\n };\n\n\n\n init(hardware);\n\n //hardware.stepper_enable();\n\n hardware.delay_us(100_000.us());\n\n\n\n let f = |t: f32| SLOPE * t + DT_MIN;\n\n let g = |dt: f32| (dt - DT_MIN) / SLOPE;\n\n let h = |dt: f32| g(dt) + dt;\n\n let calc_dt_min = |dt: f32| f(h(dt.min(DT_MIN).max(DT_MAX))).max(DT_MAX);\n\n\n\n let mut sensor_poll_delay = 0.us();\n\n let mut sensor_poll_duration = 0.us();\n\n let sensor_poll_timeout = 1_000.us();\n", "file_path": "main/src/main.rs", "rank": 12, "score": 33485.0710210453 }, { "content": "#[interrupt]\n\nfn RTC() {\n\n let hwo = unsafe { HARDWARE.as_mut() };\n\n hwo.map(|hw| {\n\n hw.handle_rtc_overflow();\n\n });\n\n}\n\n// #[interrupt]\n\n// fn TC3() {\n\n// unsafe {\n\n// HARDWARE.as_mut().map(|hw| {\n\n// hw.led1.toggle();\n\n// });\n\n// };\n\n// }\n\n\n\n// #[interrupt]\n\n// fn TC3() {\n\n// let p = unsafe { pac::TC3::ptr().as_ref() }.unwrap();\n\n// p.count16().intflag.modify(|_, w| w.ovf().set_bit());\n\n// step_timer(0);\n", "file_path": "main/src/main.rs", "rank": 13, "score": 33485.0710210453 }, { "content": "#[cfg(feature = \"serial\")]\n\n#[interrupt]\n\nfn USB() {\n\n let hwo = unsafe { HARDWARE.as_mut() };\n\n hwo.map(|hw| {\n\n hw.led2.on();\n\n hw.poll_serial();\n\n hw.serial_read_poll();\n\n });\n\n}\n\n\n", "file_path": "main/src/main.rs", "rank": 14, "score": 33485.0710210453 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n run()\n\n}\n", "file_path": "main/src/main.rs", "rank": 15, "score": 33485.0710210453 }, { "content": "#[interrupt]\n\nfn EIC() {\n\n let hwo = unsafe { HARDWARE.as_mut() };\n\n hwo.map(|hw| {\n\n hw.handle_eic();\n\n });\n\n}\n\n\n", "file_path": "main/src/main.rs", "rank": 16, "score": 33485.0710210453 }, { "content": "fn blink<A, B, C>(\n\n count: u8,\n\n led0: &mut Led<A>,\n\n led1: &mut Led<B>,\n\n led2: &mut Led<C>,\n\n delay: &mut Delay,\n\n) where\n\n A: atsamd_hal::gpio::PinId,\n\n B: atsamd_hal::gpio::PinId,\n\n C: atsamd_hal::gpio::PinId,\n\n{\n\n for i in 0..count {\n\n led0.on();\n\n led1.on();\n\n led2.on();\n\n delay.delay_ms(200u16);\n\n\n\n led0.off();\n\n led1.off();\n\n led2.off();\n\n delay.delay_ms(200u16);\n\n }\n\n delay.delay_ms(800u16);\n\n}\n", "file_path": "main/src/devices/mod.rs", "rank": 17, "score": 27528.52484726551 }, { "content": "#[cfg(not(test))]\n\n#[inline(never)]\n\n#[panic_handler]\n\nfn panic(_info: &core::panic::PanicInfo) -> ! {\n\n use cortex_m::asm::nop;\n\n unsafe {\n\n let hw = HARDWARE.as_mut().unwrap();\n\n\n\n loop {\n\n for _ in 0..0xffff {\n\n nop();\n\n }\n\n hw.led0.off();\n\n hw.led1.off();\n\n\n\n for _ in 0..0xffff {\n\n nop();\n\n }\n\n hw.led0.on();\n\n hw.led1.on();\n\n }\n\n }\n\n}\n", "file_path": "main/src/main.rs", "rank": 18, "score": 25212.13859912147 }, { "content": "# Stepper Motor driver\n\n\n\nWith I²C A5600 feedback loop running on a small scale Seeed Studio Seeeduino XIAO.\n\n\n\nThe AS5600 is used to verify that the triggered step is executed and the step motor is in the desired position.\n\n\n\n## Features \n\n\n\n- 1250Hz max speed\n\n- not inferring I²C connection to AS5600\n\n- ramp-up to avoid overdrive\n\n- Stuck detection to ramp-up motor again\n\n\n\n## Config\n\n\n\n- max speed \n\n- min speed (no ramp-up)\n\n- ramp-up slope\n\n- steps per evolution\n\n\n\n## Todo / nice to have\n\n\n\n- Test with 48Mhz\n\n- Test all calculations for edge-cases\n\n- better panic 
handling\n\n\n\n## Hardware: Seeeduino XIAO\n\n\n\nThis project should (!!) work with all cortex_m0+ MPUs I used the [Seeed Studio Seeeduino XIAO](http://wiki.seeedstudio.com/Seeeduino-XIAO/) for this project.\n\nI run it on 8Mhz you could go up to 48Mhz if you need more speed (not tested)\n\n\n\nI used the AS5600 to read the motor position.\n\n\n\n3D printable mount for the sensor to the motor: (coming soon, update to V3 desired)\n\n\n\n\n\n## Development / Setup\n\n\n\n### Prerequisites\n\n\n\n- Install the cross compile toolchain `rustup target add thumbv6m-none-eabi`\n\n- Install the [cargo-hf2 tool](https://crates.io/crates/cargo-hf2) however your\n\n platform requires\n\n\n\n### Uploading the software\n\n\n\n- Be in the project directory\n\n- Put your device in bootloader mode by bridging the `RST` pads _twice_ in\n\n quick succession. The orange LED will pulse when the device is in bootloader\n\n mode.\n\n- Build and upload in one step: `cargo hf2 --release`\n\n - Note that if you're using an older `cargo-hf2` that you'll need to specify\n\n the VID/PID when flashing: `cargo hf2 --vid 0x2886 --pid 0x002f --release`\n\n\n\nCheck out [the\n\nrepository](https://github.com/atsamd-rs/atsamd/tree/master/boards/xiao_m0/examples)\n\nfor examples.\n\n\n\n\n\n### Debugging manually - openOCD + GDB\n\n\n", "file_path": "README.md", "rank": 29, "score": 16261.976858012586 }, { "content": "#### start arm gdb\n\n\n\nYou can use gdb to debug your chip. First instal the arm gdb version `arm-none-eabi-gdb`.\n\n\n\nIf you load gdb with the source including the symbols you can run step by step through your code.\n\n\n\nThere are to options. \n\n1. load the whole bin into gdb `arm-none-eabi-gdb target/thumbv6m-none-eabi/debug/stepper`\n\n2. 
load teh symboles only `arm-none-eabi-gdb -q target/thumbv6m-none-eabi/debug/stepper`\n\n\n\nafter launching gdb, you have to connet to the target like so:\n\n\n\n```sh\n\n> arm-none-eabi-gdb target/thumbv6m-none-eabi/debug/stepper\n\n> target extended-remote :3333\n\n```\n\n\n\n### Use VS-Code extension\n\n\n\nI used the vscode extension: marus25.cortex-debug . If openOCD and Arm-GDB is installed, it will start automatically the required services and connect to your MCU.\n\n\n\n#### Setup:\n\n\n\n- add a config to the `.vscode/launch.json` file in the project\n\n\n\n```json\n\n\"configurations\": [\n\n {\n\n \"name\": \"Remote Cortex_m\",\n\n \"type\": \"cortex-debug\",\n\n \"request\": \"launch\",\n\n \"cwd\": \"${workspaceRoot}\",\n\n \"executable\": \"${workspaceRoot}/target/thumbv6m-none-eabi/debug/stepper\",\n\n \"servertype\": \"openocd\",\n\n \"runToMain\": true,\n\n \"gdbpath\": \"arm-none-eabi-gdb\",\n\n \"configFiles\": [\n\n \"interface/ftdi/ft232h-module-swd.cfg\",\n\n \"target/at91samdXX.cfg\"\n\n ],\n\n }\n\n]\n\n``` \n\n\n\n#### Hint:\n\n\n\n1. If you double click the On/Off button, you could turn your board into the bootloader mode.\n\n2. restart your debugging session after deploying a new firmware to fix the debug symbols.\n\n3. use `--release` if you like to test the performance of your code.\n\n\n\n## Any questions:\n\n\n\ndon't be afraid to Open an issue.\n", "file_path": "README.md", "rank": 30, "score": 16261.415474478043 }, { "content": "#### OpenOCD Interface\n\n\n\nFind your debuging module in the list of interfaces in the interface folder. (e.g.: fedora 35 `/usr/share/openocd/scripts/interface/`). \n\n\n\nTested with the FTDI FT232H USB module: `interface/ftdi/ft232h-module-swd.cfg`\n\n\n\nIf you don't find your module, you could create a new definition or modify an existing one to your module.\n\n\n\n#### OpenOCD target\n\n\n\nFind your target in the targets list in the target folder. 
(e.g.: fedora 35 `/usr/share/openocd/scripts/target/`). \n\n\n\nTested with the seeeduino XIAO cortex_m0+ module: `target/at91samdXX.cfg`\n\n\n\nIf you don't find your target, you could create a new definition or modify an existing one to your target.\n\n\n\n\n\n#### start openOCD\n\n\n\nstart openOCD with your interface and the target.\n\n\n\n`openOCD -f <interface> -d <target>`\n\n\n\ne.g.\n\n```sh\n\n> openocd -f interface/ftdi/ft232h-module-swd.cfg -f target/at91samdXX.cfg\n\n```\n\n\n\nyour expected output looks like this:\n\n\n\n```\n\n○ → openocd -f interface/ftdi/ft232h-module-swd.cfg -f target/at91samdXX.cfg\n\nOpen On-Chip Debugger 0.11.0\n\nLicensed under GNU GPL v2\n\nFor bug reports, read\n\n\thttp://openocd.org/doc/doxygen/bugs.html\n\nInfo : FTDI SWD mode enabled\n\nswd\n\nInfo : Listening on port 6666 for tcl connections\n\nInfo : Listening on port 4444 for telnet connections\n\nInfo : clock speed 400 kHz\n\nInfo : SWD DPIDR 0x0bc11477\n\nInfo : at91samd.cpu: hardware has 4 breakpoints, 2 watchpoints\n\nInfo : starting gdb server for at91samd.cpu on 3333\n\nInfo : Listening on port 3333 for gdb connections\n\n```\n\n\n\nIf you get something else, check your wiring. 
I missed the second line with the 470R resistor to the SWDIO pin at the first try.\n\n\n\n#### telnet\n\n\n\nYou could use telnet to control the board.\n\n\n\ne.g.:\n\n\n\n- switch to bootloader mode: `reset {enter} {up} {enter}`\n\n- halt: `halt`\n\n\n", "file_path": "README.md", "rank": 31, "score": 16257.161851234298 }, { "content": " {\n\n self.serial.serial_write_len(bytes, len);\n\n self.delay_us((15 * (len as u32)).us());\n\n self.poll_serial();\n\n }\n\n }\n\n}\n\n\n\nimpl Devices {\n\n pub fn init_stepper(&mut self, start_value: i16) {\n\n self.stepper.init_stepper(start_value)\n\n }\n\n pub fn stepper_step(&mut self) {\n\n self.stepper.do_step();\n\n }\n\n pub fn stepper_enable(&mut self) {\n\n self.stepper.enable();\n\n }\n\n pub fn stepper_disable(&mut self) {\n\n self.stepper.disable();\n", "file_path": "main/src/devices/mod.rs", "rank": 32, "score": 21.857599442417044 }, { "content": " }\n\n pub fn stepper_cw(&mut self) {\n\n self.stepper.cw();\n\n }\n\n pub fn stepper_ccw(&mut self) {\n\n self.stepper.ccw();\n\n }\n\n pub fn poll_stepper(&mut self) -> stepper::NextStepperAction {\n\n self.stepper.poll_next_action()\n\n }\n\n pub fn execute_stepper(&mut self, req: stepper::NextStepperAction) -> bool {\n\n self.stepper.execute(req)\n\n }\n\n}\n\n\n\nimpl Devices {\n\n pub fn init(cs: &CriticalSection) -> Self {\n\n let mut peripherals = Peripherals::take().unwrap();\n\n let mut core = CorePeripherals::take().unwrap();\n\n let mut clocks = GenericClockController::with_internal_8mhz(\n", "file_path": "main/src/devices/mod.rs", "rank": 33, "score": 21.775603304440374 }, { "content": " motor_dt: Microseconds,\n\n ) -> (bool, Option<Microseconds>) {\n\n if let Ok((true)) = self.magnet_sensor.stepwise_read(&mut self.i2c) {\n\n if self.magnet_sensor.detected {\n\n return (\n\n true,\n\n self.stepper.update_angle(self.get_step(), dt, motor_dt),\n\n );\n\n }\n\n }\n\n (false, None)\n\n }\n\n\n\n pub fn poll_magnet_sensor(\n\n &mut self,\n\n dt: 
Microseconds,\n\n motor_dt: Microseconds,\n\n ) -> Option<Microseconds> {\n\n self.query_magnet_sensor();\n\n self.read_magnet_sensor_result(dt, motor_dt)\n", "file_path": "main/src/devices/mod.rs", "rank": 34, "score": 20.619136640771497 }, { "content": "use utils::time::Microseconds;\n\n\n\npub const STEPS_PER_RESOLUTION_I16: i16 = 200;\n\npub const STEPS_PER_RESOLUTION_I16_HALF: i16 = 100;\n\npub const STEPS_PER_RESOLUTION_I16_QUARTER: i32 = 50;\n\n\n\npub const EXECUTE_STEPPER_TIME_CONSUMPTIONS_US: Microseconds = Microseconds(50); // measured in debugger :-(\n\n\n\npub const SLOPE_DELTA_T: f32 = 1_500_000f32; // us to ramp up (1 sec)\n\npub const SLOPE: f32 = (DT_MAX - DT_MIN) as f32 / SLOPE_DELTA_T;\n\npub const DT_MIN: f32 = 1_000_000.0 / 200.0; // 200 steps per sec\n\npub const DT_MIN_U32: u32 = DT_MIN as u32;\n\npub const DT_MAX: f32 = 1_000_000.0 / 1250.0; // 950 steps per sec\n", "file_path": "main/src/settings.rs", "rank": 35, "score": 20.596344405153037 }, { "content": "\n\n pub fn query_magnet_sensor(&mut self) {\n\n self.magnet_sensor.query(&mut self.i2c);\n\n }\n\n\n\n pub fn read_magnet_sensor_result(\n\n &mut self,\n\n dt: Microseconds,\n\n motor_dt: Microseconds,\n\n ) -> Option<Microseconds> {\n\n self.magnet_sensor.read(&mut self.i2c);\n\n if self.magnet_sensor.detected {\n\n return self.stepper.update_angle(self.get_step(), dt, motor_dt);\n\n }\n\n None\n\n }\n\n\n\n pub fn stepwise_read(\n\n &mut self,\n\n dt: Microseconds,\n", "file_path": "main/src/devices/mod.rs", "rank": 36, "score": 17.87799389262971 }, { "content": " sensor_poll_delay = 0.us();\n\n }\n\n\n\n // poll motor for next step?\n\n match hardware.poll_stepper() {\n\n // nothing to do, just poll again and again and again\n\n devices::NextStepperAction::Idle => {}\n\n\n\n // hardware requires a little delay after changing the direction\n\n req @ devices::NextStepperAction::DirectionChanged(_) => {\n\n hardware.execute_stepper(req);\n\n // @WARNING - Program waits here // no 
polling in this time\n\n hardware.delay_us(150.us());\n\n\n\n // drop the speed to 0 steps/sec, to ramp up again.\n\n // 0xFFFF_FFFF ~~~ 1/0 // (1/V)\n\n last_dt = 100_000.us();\n\n }\n\n // if a step is required, check if we have to wait before we can do it\n\n req @ devices::NextStepperAction::StepRequired(_) => {\n", "file_path": "main/src/main.rs", "rank": 37, "score": 15.701939978438705 }, { "content": "\n\n pub led0: Led<PA17>,\n\n pub led1: Led<PA18>,\n\n pub led2: Led<PA19>,\n\n\n\n dir: ExtIntPin<PB09>, // pin 7\n\n step: ExtIntPin<PA05>, // pin 9\n\n enable: ExtIntPin<PA06>, // pin 10\n\n\n\n timer_0_buffer: u32,\n\n timer_1_buffer: u32,\n\n}\n\n\n\nimpl Devices {\n\n pub fn get_step(&self) -> i16 {\n\n let r = ((self.magnet_sensor.raw_angle as i32 * STEPS_PER_RESOLUTION_I16_QUARTER + 512)\n\n / 1024) as i16;\n\n if r >= STEPS_PER_RESOLUTION_I16 {\n\n r - STEPS_PER_RESOLUTION_I16\n\n } else {\n", "file_path": "main/src/devices/mod.rs", "rank": 38, "score": 15.281298659873535 }, { "content": "use crate::settings::{STEPS_PER_RESOLUTION_I16, STEPS_PER_RESOLUTION_I16_QUARTER};\n\n\n\nuse self::{\n\n led::Led,\n\n magnet_sensor::MagnetSensor,\n\n tc32::{Count32, TimerCounter32},\n\n};\n\n\n\npub struct Devices {\n\n clocks: GenericClockController,\n\n i2c: I2c,\n\n #[cfg(feature = \"serial\")]\n\n serial: UsbSerial,\n\n stepper: Stepper,\n\n pub magnet_sensor: MagnetSensor,\n\n\n\n delay: Delay,\n\n\n\n pub rtc: Rtc<Count32Mode>,\n\n rtc_ovl: bool,\n", "file_path": "main/src/devices/mod.rs", "rank": 39, "score": 13.041971168531399 }, { "content": "use atsamd_hal::{\n\n gpio::{\n\n v2::{Pin, PushPullOutput},\n\n Output, PinId,\n\n },\n\n prelude::*,\n\n};\n\nuse embedded_hal::digital::v2::PinState;\n\n\n\npub struct Led<P: PinId> {\n\n pin: Pin<P, PushPullOutput>,\n\n state: PinState,\n\n}\n\n\n\nimpl<P: PinId> Led<P> {\n\n pub fn init(pin: Pin<P, PushPullOutput>) -> Self {\n\n Self {\n\n pin,\n\n state: PinState::Low,\n\n }\n", "file_path": 
"main/src/devices/led.rs", "rank": 40, "score": 12.736785069715683 }, { "content": " // configure ExtInt\n\n cortex_m::interrupt::free(|cs| {\n\n EIC.borrow(cs).borrow_mut().as_mut().map(|e| {\n\n extint.sense(e, Sense::BOTH);\n\n extint.filter(e, true);\n\n extint.enable_interrupt(e);\n\n });\n\n });\n\n\n\n let ext_id = $num;\n\n let state = get_pin_state($is_in0, ext_id);\n\n Self {\n\n _p: PhantomData::default(),\n\n ext_id,\n\n state,\n\n cb,\n\n }\n\n }\n\n\n\n pub fn poll(&mut self) {\n", "file_path": "main/src/devices/ext_int_pin.rs", "rank": 41, "score": 12.274768258504654 }, { "content": " }\n\n}\n\n\n\nimpl From<Hertz> for Microseconds {\n\n fn from(item: Hertz) -> Self {\n\n Microseconds(1_000_000_u32 / item.0)\n\n }\n\n}\n\n\n\nimpl From<Hertz> for Nanoseconds {\n\n fn from(item: Hertz) -> Self {\n\n Nanoseconds(1_000_000_000u32 / item.0)\n\n }\n\n}\n\n\n\nimpl From<KiloHertz> for Nanoseconds {\n\n fn from(item: KiloHertz) -> Self {\n\n Nanoseconds(1_000_000u32 / item.0)\n\n }\n\n}\n", "file_path": "utils/src/time.rs", "rank": 42, "score": 12.146774330155129 }, { "content": " fn from(item: Nanoseconds) -> Self {\n\n Hertz(1_000_000_000_u32 / item.0)\n\n }\n\n}\n\n\n\nimpl From<Microseconds> for Hertz {\n\n fn from(item: Microseconds) -> Self {\n\n Hertz(1_000_000_u32 / item.0)\n\n }\n\n}\n\n\n\nimpl From<Nanoseconds> for KiloHertz {\n\n fn from(item: Nanoseconds) -> Self {\n\n KiloHertz(1_000_000_u32 / item.0)\n\n }\n\n}\n\n\n\nimpl From<Nanoseconds> for MegaHertz {\n\n fn from(item: Nanoseconds) -> Self {\n\n MegaHertz(1_000_u32 / item.0)\n", "file_path": "utils/src/time.rs", "rank": 43, "score": 11.95549658722894 }, { "content": " }\n\n\n\n pub fn handle_rtc_overflow(&mut self) {\n\n self.rtc_ovl = true;\n\n self.rtc.set_count32(0);\n\n }\n\n\n\n pub fn handle_eic(&mut self) {\n\n // let eic = unsafe { &*pac::EIC::ptr() };\n\n // while eic.status.read().syncbusy().bit_is_set() {}\n\n\n\n self.dir.poll();\n\n self.enable.poll();\n\n 
self.step.poll();\n\n }\n\n pub fn execute_ext_int_pins(&mut self) {\n\n self.dir.execute();\n\n self.enable.execute();\n\n self.step.execute();\n\n }\n", "file_path": "main/src/devices/mod.rs", "rank": 44, "score": 11.955241360741791 }, { "content": " // get delta-time to the last step.\n\n let dt = hardware.peek_delta_us_0();\n\n\n\n // check if we are slower than the last step\n\n // execute_stepper_time_consumptions_us add some µs the execute_stepper\n\n // would roughly take.\n\n if dt + EXECUTE_STEPPER_TIME_CONSUMPTIONS_US > last_dt {\n\n hardware.execute_stepper(req);\n\n // Get delta again to get the most accurate delta between the steps\n\n last_dt = hardware.get_delta_us_0();\n\n } else {\n\n // @todo why last_dt\n\n let next_dt_min = (calc_dt_min(last_dt.0 as f32) as u32).us();\n\n\n\n if dt + EXECUTE_STEPPER_TIME_CONSUMPTIONS_US < next_dt_min {\n\n let delta_t_last_step = hardware.peek_delta_us_0();\n\n\n\n if next_dt_min > delta_t_last_step {\n\n let open_delay = next_dt_min - delta_t_last_step;\n\n\n", "file_path": "main/src/main.rs", "rank": 45, "score": 11.384514453974584 }, { "content": " main: I2C,\n\n}\n\n\n\nimpl I2c {\n\n pub fn init(\n\n clocks: &mut GenericClockController,\n\n sercom: pac::SERCOM0,\n\n pm: &mut pac::PM,\n\n sda: impl Into<Sda>,\n\n scl: impl Into<Scl>,\n\n ) -> Self {\n\n let gclk0 = clocks.gclk0();\n\n let clock = &clocks.sercom0_core(&gclk0).unwrap();\n\n let freq: Hertz = 1.mhz().into();\n\n let main = I2CMaster0::new(clock, freq, sercom, pm, sda.into(), scl.into());\n\n Self { main }\n\n }\n\n\n\n pub fn i2c_read_some(\n\n &mut self,\n", "file_path": "main/src/devices/i2c.rs", "rank": 46, "score": 10.89565019960168 }, { "content": "#![no_main]\n\n#![cfg_attr(not(test), no_std)]\n\n#![allow(deprecated)]\n\n\n\nmod devices;\n\nmod settings;\n\n\n\nuse devices::Devices;\n\nuse hal::pac::{self, interrupt};\n\n\n\nuse settings::{DT_MAX, DT_MIN, EXECUTE_STEPPER_TIME_CONSUMPTIONS_US, SLOPE};\n\nuse utils::time::U32Ext;\n\nuse 
xiao_m0::{entry, hal};\n\n\n\nstatic mut HARDWARE: Option<Devices> = None;\n\n\n\n#[entry]\n", "file_path": "main/src/main.rs", "rank": 47, "score": 10.477824178472032 }, { "content": " ) -> Result<(), I2CError> {\n\n self.i2c.i2c_read_some(address, from, count, buffer)\n\n }\n\n}\n\n\n\nimpl Devices {\n\n pub fn poll_serial(&mut self) {\n\n #[cfg(feature = \"serial\")]\n\n self.serial.poll()\n\n }\n\n pub fn serial_read_poll(&mut self) {\n\n #[cfg(feature = \"serial\")]\n\n {\n\n let res = self.serial.read_poll();\n\n if let Ok((size, buf)) = res {\n\n if size > 1 && buf[0] == b'd' {\n\n self.stepper.disable()\n\n } else if size > 1 && buf[0] == b'e' {\n\n self.stepper.enable()\n\n }\n", "file_path": "main/src/devices/mod.rs", "rank": 48, "score": 10.206009987705396 }, { "content": "\n\nimpl From<Milliseconds> for Microseconds {\n\n fn from(item: Milliseconds) -> Self {\n\n Microseconds(item.0 * 1_000)\n\n }\n\n}\n\n\n\nimpl From<Microseconds> for Nanoseconds {\n\n fn from(item: Microseconds) -> Self {\n\n Nanoseconds(item.0 * 1_000)\n\n }\n\n}\n\n\n\nimpl From<Milliseconds> for Seconds {\n\n fn from(item: Milliseconds) -> Self {\n\n Seconds(item.0 / 1_000)\n\n }\n\n}\n\n\n\nimpl From<Microseconds> for Seconds {\n", "file_path": "utils/src/time.rs", "rank": 49, "score": 10.111644705407889 }, { "content": " }\n\n\n\n pub fn on(&mut self) {\n\n self.pin.set_low();\n\n self.state = PinState::Low;\n\n }\n\n pub fn off(&mut self) {\n\n self.pin.set_high();\n\n self.state = PinState::High;\n\n }\n\n pub fn toggle(&mut self) {\n\n self.state = !self.state;\n\n self.pin.set_state(self.state);\n\n }\n\n}\n", "file_path": "main/src/devices/led.rs", "rank": 50, "score": 10.052669116809849 }, { "content": " fn from(item: Microseconds) -> Self {\n\n Seconds(item.0 / 1_000_000)\n\n }\n\n}\n\n\n\nimpl From<Microseconds> for Milliseconds {\n\n fn from(item: Microseconds) -> Self {\n\n Milliseconds(item.0 / 1_000)\n\n }\n\n}\n\n\n\nimpl From<Milliseconds> for Nanoseconds {\n\n fn 
from(item: Milliseconds) -> Self {\n\n Nanoseconds(item.0 * 1_000_000)\n\n }\n\n}\n\n\n\n// Frequency <-> Period\n\n\n\nimpl From<Nanoseconds> for Hertz {\n", "file_path": "utils/src/time.rs", "rank": 51, "score": 9.562511451567005 }, { "content": " num_to_string,\n\n time::{Microseconds, Milliseconds, U32Ext},\n\n};\n\nuse xiao_m0::hal::{\n\n clock::GenericClockController,\n\n delay::Delay,\n\n eic::pin::ExtInt1,\n\n gpio::v2::{PA02, PA05, PA06, PA07, PA09, PA17, PA18, PA19},\n\n pac::{self, interrupt, CorePeripherals, Peripherals},\n\n sercom::{v2::uart::Clock, I2CError},\n\n target_device::{tc3::COUNT32, NVIC, RTC, TC3, TC4, TC5},\n\n timer,\n\n};\n\n\n\nuse ext_int_pin::ExtIntPin;\n\nuse i2c::I2c;\n\npub use stepper::NextStepperAction;\n\nuse stepper::Stepper;\n\nuse usb_serial::UsbSerial;\n\n\n", "file_path": "main/src/devices/mod.rs", "rank": 52, "score": 9.2739617082883 }, { "content": " hw.led1.on();\n\n\n\n hw.poll_magnet_sensor(u32::MAX.us(), u32::MAX.us());\n\n let steps = hw.get_step();\n\n hw.init_stepper(steps);\n\n\n\n hw.delay(400.ms());\n\n hw.led0.on();\n\n hw.led1.off();\n\n hw.execute_ext_int_pins();\n\n\n\n hw.delay(400.ms());\n\n hw.led0.off();\n\n}\n\n\n", "file_path": "main/src/main.rs", "rank": 53, "score": 9.234408499424532 }, { "content": " pub fn init() -> Self {\n\n Self {\n\n status: 0,\n\n detected: false,\n\n low: false,\n\n heigh: false,\n\n raw_angle: 0,\n\n agc: 0,\n\n magnitude: 0,\n\n read_phase: ReadPhase::Query,\n\n read_phase_buf: 0,\n\n }\n\n }\n\n\n\n pub fn poll(&mut self, i2c: &mut I2c) -> Result<(), I2CError> {\n\n let mut buf = [0u8; 5];\n\n\n\n self.detected = false;\n\n\n\n i2c.i2c_read_some(0x36, 0x0Bu8, 3, &mut buf)?;\n", "file_path": "main/src/devices/magnet_sensor.rs", "rank": 54, "score": 9.195355214276113 }, { "content": "}\n\n\n\n// Period based\n\n\n\nimpl From<Seconds> for Milliseconds {\n\n fn from(item: Seconds) -> Self {\n\n Milliseconds(item.0 * 1_000)\n\n }\n\n}\n\nimpl From<Seconds> for Microseconds 
{\n\n fn from(item: Seconds) -> Self {\n\n Microseconds(item.0 * 1_000_000)\n\n }\n\n}\n\n\n\nimpl From<Seconds> for Nanoseconds {\n\n fn from(item: Seconds) -> Self {\n\n Nanoseconds(item.0 * 1_000_000_000)\n\n }\n\n}\n", "file_path": "utils/src/time.rs", "rank": 55, "score": 9.18491919983206 }, { "content": " }\n\n}\n\nimpl From<Milliseconds> for ats_time::Milliseconds {\n\n fn from(item: Milliseconds) -> Self {\n\n ats_time::Milliseconds(item.0)\n\n }\n\n}\n\n\n\nimpl From<Microseconds> for ats_time::Microseconds {\n\n fn from(item: Microseconds) -> Self {\n\n ats_time::Microseconds(item.0)\n\n }\n\n}\n\n\n\nimpl From<Nanoseconds> for ats_time::Nanoseconds {\n\n fn from(item: Nanoseconds) -> Self {\n\n ats_time::Nanoseconds(item.0)\n\n }\n\n}\n\n\n", "file_path": "utils/src/time.rs", "rank": 56, "score": 9.150204527418833 }, { "content": " Nanoseconds(self.0 + item.into().0)\n\n }\n\n}\n\n\n\nimpl<T> AddAssign<T> for Microseconds\n\nwhere\n\n T: Into<Microseconds>,\n\n{\n\n fn add_assign(&mut self, rhs: T) {\n\n self.0 += rhs.into().0;\n\n }\n\n}\n\n\n\nimpl<T> Sub<T> for Seconds\n\nwhere\n\n T: Into<Seconds>,\n\n{\n\n type Output = Seconds;\n\n fn sub(self, item: T) -> Self::Output {\n\n Seconds(self.0 - item.into().0)\n", "file_path": "utils/src/time.rs", "rank": 57, "score": 9.028803038854274 }, { "content": "\n\n let mut last_dt = 0.us(); // us / steps\n\n\n\n loop {\n\n sensor_poll_delay += hardware.get_delta_us_1();\n\n // poll I²C - magnet sensor.\n\n if sensor_poll_delay >= sensor_poll_timeout {\n\n sensor_poll_duration += sensor_poll_delay;\n\n // split magnet sensor write and read into two functions for two cycle runs\n\n match hardware.stepwise_read(sensor_poll_duration, last_dt) {\n\n (true, Some(expected_dt_min)) => {\n\n last_dt = expected_dt_min;\n\n\n\n sensor_poll_duration = 0.us();\n\n }\n\n (true, None) => {\n\n sensor_poll_duration = 0.us();\n\n }\n\n _ => (),\n\n }\n", "file_path": "main/src/main.rs", "rank": 58, "score": 9.008941854983881 
}, { "content": "#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub struct MegaHertz(pub u32);\n\n\n\n// Period based\n\n\n\n/// Seconds\n\n#[derive(Clone, Copy, Eq, Debug)]\n\npub struct Seconds(pub u32);\n\n\n\n/// Milliseconds\n\n#[derive(Clone, Copy, Eq, Debug)]\n\npub struct Milliseconds(pub u32);\n\n\n\n/// Microseconds\n\n#[derive(Clone, Copy, Eq, Debug)]\n\npub struct Microseconds(pub u32);\n\n\n\n/// Nanoseconds\n\n#[derive(Clone, Copy, Eq, Debug)]\n\npub struct Nanoseconds(pub u32);\n\n\n\n/// Extension trait that adds convenience methods to the `u32` type\n", "file_path": "utils/src/time.rs", "rank": 59, "score": 8.782880725964803 }, { "content": " } else {\n\n Microseconds(self.0 - o.0)\n\n }\n\n }\n\n}\n\nimpl<T> Sub<T> for Nanoseconds\n\nwhere\n\n T: Into<Nanoseconds>,\n\n{\n\n type Output = Nanoseconds;\n\n fn sub(self, item: T) -> Self::Output {\n\n Nanoseconds(self.0 - item.into().0)\n\n }\n\n}\n\n\n\nimpl<T> PartialEq<T> for Seconds\n\nwhere\n\n T: Into<Seconds> + Copy,\n\n{\n\n fn eq(&self, other: &T) -> bool {\n", "file_path": "utils/src/time.rs", "rank": 60, "score": 8.749190752096682 }, { "content": " type Output = Milliseconds;\n\n fn add(self, item: T) -> Self::Output {\n\n Milliseconds(self.0 + item.into().0)\n\n }\n\n}\n\nimpl<T> Add<T> for Microseconds\n\nwhere\n\n T: Into<Microseconds>,\n\n{\n\n type Output = Microseconds;\n\n fn add(self, item: T) -> Self::Output {\n\n Microseconds(self.0 + item.into().0)\n\n }\n\n}\n\nimpl<T> Add<T> for Nanoseconds\n\nwhere\n\n T: Into<Nanoseconds>,\n\n{\n\n type Output = Nanoseconds;\n\n fn add(self, item: T) -> Self::Output {\n", "file_path": "utils/src/time.rs", "rank": 61, "score": 8.562255373703778 }, { "content": " }\n\n}\n\nimpl<T> Sub<T> for Milliseconds\n\nwhere\n\n T: Into<Milliseconds>,\n\n{\n\n type Output = Milliseconds;\n\n fn sub(self, item: T) -> Self::Output {\n\n Milliseconds(self.0 - item.into().0)\n\n }\n\n}\n\nimpl<T> Sub<T> for Microseconds\n\nwhere\n\n T: 
Into<Microseconds>,\n\n{\n\n type Output = Microseconds;\n\n fn sub(self, item: T) -> Self::Output {\n\n let o: Microseconds = item.into();\n\n if o.0 > self.0 {\n\n 0.us()\n", "file_path": "utils/src/time.rs", "rank": 62, "score": 8.530629488022184 }, { "content": " &mut core.NVIC,\n\n );\n\n\n\n let i2c = I2c::init(\n\n &mut clocks,\n\n peripherals.SERCOM0,\n\n &mut peripherals.PM,\n\n pins.a4,\n\n pins.a5,\n\n );\n\n\n\n let stepper = Stepper::init(\n\n pins.a1.into_push_pull_output(),\n\n pins.a2.into_push_pull_output(),\n\n pins.a3.into_push_pull_output(),\n\n );\n\n\n\n let magnet_sensor = MagnetSensor::init();\n\n\n\n ext_int_pin::init(\n", "file_path": "main/src/devices/mod.rs", "rank": 63, "score": 8.342552536735779 }, { "content": "\n\nimpl UsbSerial {\n\n pub fn init(\n\n clocks: &mut GenericClockController,\n\n usb: pac::USB,\n\n pm: &mut pac::PM,\n\n dm: impl Into<UsbDm>,\n\n dp: impl Into<UsbDp>,\n\n nvic: &mut NVIC,\n\n ) -> UsbSerial {\n\n interrupt::free(|_| {\n\n let bus_allocator = unsafe {\n\n BUS_ALLOCATOR = Some(xiao_m0::usb_allocator(\n\n usb, clocks, pm, //&mut peripherals.PM,\n\n dm, //pins.usb_dm,\n\n dp, // pins.usb_dp,\n\n ));\n\n BUS_ALLOCATOR.as_mut().unwrap()\n\n };\n\n\n", "file_path": "main/src/devices/usb_serial.rs", "rank": 64, "score": 8.251790499327754 }, { "content": " rtc.reset_and_compute_prescaler::<atsamd_hal::time::Milliseconds>(64.ms().into());\n\n rtc.enable_interrupt();\n\n\n\n unsafe {\n\n // core.NVIC.set_priority(interrupt::RTC, 2);\n\n // NVIC::unmask(interrupt::RTC);\n\n }\n\n blink(2, &mut led0, &mut led1, &mut led2, &mut delay);\n\n\n\n Self {\n\n clocks,\n\n\n\n i2c,\n\n #[cfg(feature = \"serial\")]\n\n serial,\n\n stepper,\n\n magnet_sensor,\n\n\n\n led0,\n\n led1,\n", "file_path": "main/src/devices/mod.rs", "rank": 65, "score": 7.98652320261731 }, { "content": " }\n\n\n\n pub fn poll_magnet_sensor_setup(&mut self) {\n\n self.magnet_sensor.poll_setup(&mut self.i2c);\n\n }\n\n\n\n pub fn delay(&mut self, time: 
Milliseconds) {\n\n self.delay.delay_ms(time.0);\n\n }\n\n\n\n pub fn delay_us(&mut self, time: Microseconds) {\n\n self.delay.delay_us(time.0);\n\n }\n\n\n\n pub fn i2c_read_some(\n\n &mut self,\n\n address: u8,\n\n from: u8,\n\n count: usize,\n\n buffer: &mut [u8],\n", "file_path": "main/src/devices/mod.rs", "rank": 66, "score": 7.895562447494212 }, { "content": " let o: Microseconds = (*other).into();\n\n self.0 == o.0\n\n }\n\n}\n\n\n\nimpl<T> PartialOrd<T> for Microseconds\n\nwhere\n\n T: Into<Microseconds> + Copy,\n\n{\n\n fn partial_cmp(&self, other: &T) -> Option<core::cmp::Ordering> {\n\n let o: Microseconds = (*other).into();\n\n self.0.partial_cmp(&o.0)\n\n }\n\n}\n\n\n\nimpl<T> PartialEq<T> for Nanoseconds\n\nwhere\n\n T: Into<Nanoseconds> + Copy,\n\n{\n\n fn eq(&self, other: &T) -> bool {\n", "file_path": "utils/src/time.rs", "rank": 67, "score": 7.894931989474012 }, { "content": "//! Time units\n\n\n\nuse atsamd_hal::time as ats_time;\n\nuse core::ops::{Add, AddAssign, Sub};\n\n\n\n// Frequency based\n\n\n\n/// Bits per second\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub struct Bps(pub u32);\n\n\n\n/// Hertz\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub struct Hertz(pub u32);\n\n\n\n/// KiloHertz\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug)]\n\npub struct KiloHertz(pub u32);\n\n\n\n/// MegaHertz\n", "file_path": "utils/src/time.rs", "rank": 68, "score": 7.838927530470698 }, { "content": " let l = self.timer_1_buffer + v;\n\n self.timer_1_buffer = 0;\n\n l.us()\n\n }\n\n }\n\n #[inline]\n\n pub fn peek_delta_us_0(&mut self) -> Microseconds {\n\n if self.rtc_ovl {\n\n 1_000_000.us()\n\n } else {\n\n (self.timer_0_buffer + self.rtc.count32()).us()\n\n }\n\n }\n\n #[inline]\n\n pub fn peek_delta_us_1(&mut self) -> Microseconds {\n\n if self.rtc_ovl {\n\n 1_000_000.us()\n\n } else {\n\n (self.timer_1_buffer + self.rtc.count32()).us()\n\n }\n", "file_path": "main/src/devices/mod.rs", "rank": 69, "score": 7.503739670263243 }, { 
"content": "\n\nimpl From<MegaHertz> for Nanoseconds {\n\n fn from(item: MegaHertz) -> Self {\n\n Nanoseconds(1_000u32 / item.0)\n\n }\n\n}\n\n\n\nimpl<T> Add<T> for Seconds\n\nwhere\n\n T: Into<Seconds>,\n\n{\n\n type Output = Seconds;\n\n fn add(self, item: T) -> Self::Output {\n\n Seconds(self.0 + item.into().0)\n\n }\n\n}\n\nimpl<T> Add<T> for Milliseconds\n\nwhere\n\n T: Into<Milliseconds>,\n\n{\n", "file_path": "utils/src/time.rs", "rank": 70, "score": 7.501112780658661 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::time::*;\n\n\n\n #[test]\n\n fn convert_us_to_hz() {\n\n let as_us: Microseconds = 3.hz().into();\n\n assert_eq!(as_us.0, 333_333_u32);\n\n }\n\n\n\n #[test]\n\n fn convert_ms_to_us() {\n\n let as_us: Microseconds = 3.ms().into();\n\n assert_eq!(as_us.0, 3_000_u32);\n\n }\n\n\n\n #[test]\n", "file_path": "utils/src/time.rs", "rank": 71, "score": 7.4698919635397925 }, { "content": " },\n\n prelude::*,\n\n target_device::NVIC,\n\n target_device::{self, eic::RegisterBlock, CorePeripherals, Peripherals},\n\n};\n\nuse xiao_m0::{\n\n hal,\n\n pac::{self, interrupt},\n\n};\n\n\n\nstatic EIC_SETUP: AtomicBool = AtomicBool::new(false);\n\nstatic EIC: Mutex<RefCell<Option<EIC>>> = Mutex::new(RefCell::new(None));\n\npub struct ExtIntPin<I: PinId> {\n\n _p: PhantomData<I>,\n\n ext_id: u8,\n\n state: bool,\n\n cb: fn(bool) -> (),\n\n}\n", "file_path": "main/src/devices/ext_int_pin.rs", "rank": 72, "score": 7.4118787552046435 }, { "content": " &cs,\n\n &mut clocks,\n\n &mut core.NVIC,\n\n &mut peripherals.PM,\n\n peripherals.EIC,\n\n );\n\n let enable = ExtIntPin::<PA06>::enable(pins.a10, super::enabled_changed);\n\n let step = ExtIntPin::<PA05>::enable(pins.a9, super::step_changed);\n\n let dir = ExtIntPin::<PB09>::enable(pins.a7, super::dir_changed);\n\n\n\n let gclk3 = clocks\n\n .configure_gclk_divider_and_source(\n\n ClockGenId::GCLK3,\n\n 1,\n\n pac::gclk::genctrl::SRC_A::OSC8M,\n\n false,\n\n )\n\n .unwrap();\n\n let 
rtc_clock_gen = &clocks.rtc(&gclk3).unwrap();\n\n let mut rtc = Rtc::count32_mode(peripherals.RTC, rtc_clock_gen.freq(), &mut peripherals.PM);\n", "file_path": "main/src/devices/mod.rs", "rank": 73, "score": 7.376697347429328 }, { "content": " let eic = unsafe { &*pac::EIC::ptr() };\n\n if self.is_triggered(eic) {\n\n self.execute();\n\n cortex_m::interrupt::free(|_| {\n\n self.clear_flag(eic)\n\n });\n\n }\n\n }\n\n\n\n pub fn execute(&mut self) {\n\n self.state = get_pin_state($is_in0, self.ext_id);\n\n (self.cb)(self.state);\n\n }\n\n\n\n fn is_triggered(&self, eic: &RegisterBlock) -> bool {\n\n eic.intflag.read().[<extint $num>]().bit_is_set()\n\n }\n\n fn clear_flag(&self, eic: &RegisterBlock) {\n\n eic.intflag.modify(|_, w| w.[<extint $num>]().set_bit());\n\n }\n", "file_path": "main/src/devices/ext_int_pin.rs", "rank": 74, "score": 7.348909883646488 }, { "content": " // HACK jump out to the beginning of the loop is not really nice here\n\n if open_delay > 1200.us() {\n\n continue;\n\n }\n\n // @WARNING - Program waits here\n\n hardware.delay_us(open_delay);\n\n }\n\n hardware.execute_stepper(req);\n\n\n\n // set last speed to calculated v_max value to avoid inaccuracy in calculation\n\n hardware.get_delta_us_0();\n\n // HACK us calculated delta T to pretend that the result is accurate\n\n last_dt = next_dt_min;\n\n } else {\n\n if hardware.execute_stepper(req) {\n\n last_dt = hardware.get_delta_us_0();\n\n } else {\n\n last_dt = 100.ms().into();\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "main/src/main.rs", "rank": 75, "score": 7.256487214752454 }, { "content": " let o: Milliseconds = (*other).into();\n\n self.0 == o.0\n\n }\n\n}\n\n\n\nimpl<T> PartialOrd<T> for Milliseconds\n\nwhere\n\n T: Into<Milliseconds> + Copy,\n\n{\n\n fn partial_cmp(&self, other: &T) -> Option<core::cmp::Ordering> {\n\n let o: Milliseconds = (*other).into();\n\n self.0.partial_cmp(&o.0)\n\n }\n\n}\n\n\n\nimpl<T> PartialEq<T> for Microseconds\n\nwhere\n\n T: 
Into<Microseconds> + Copy,\n\n{\n\n fn eq(&self, other: &T) -> bool {\n", "file_path": "utils/src/time.rs", "rank": 76, "score": 7.249136990862376 }, { "content": " fn us(self) -> Microseconds;\n\n\n\n /// Wrap in `NanoSeconds`\n\n fn ns(self) -> Nanoseconds;\n\n}\n\n\n\nimpl U32Ext for u32 {\n\n // Frequency based\n\n\n\n fn bps(self) -> Bps {\n\n Bps(self)\n\n }\n\n\n\n fn hz(self) -> Hertz {\n\n Hertz(self)\n\n }\n\n\n\n fn khz(self) -> KiloHertz {\n\n KiloHertz(self)\n\n }\n", "file_path": "utils/src/time.rs", "rank": 77, "score": 7.17631952117008 }, { "content": "\n\n count.ctrla.modify(|_, w| {\n\n match divider {\n\n 1 => w.prescaler().div1(),\n\n 2 => w.prescaler().div2(),\n\n 4 => w.prescaler().div4(),\n\n 8 => w.prescaler().div8(),\n\n 16 => w.prescaler().div16(),\n\n 64 => w.prescaler().div64(),\n\n 256 => w.prescaler().div256(),\n\n 1024 => w.prescaler().div1024(),\n\n _ => unreachable!(),\n\n };\n\n // Enable Match Frequency Waveform generation\n\n w.enable().set_bit();\n\n w.runstdby().set_bit()\n\n });\n\n }\n\n\n\n fn wait(&mut self) -> nb::Result<(), void::Void> {\n", "file_path": "main/src/devices/tc32.rs", "rank": 78, "score": 6.878919275331013 }, { "content": "}\n\n\n\nimpl TimerCounter32<$TC>\n\n{\n\n pub fn get_count(&self) -> &COUNT32 {\n\n self.tc.count_32()\n\n }\n\n\n\n /// Configure this timer counter instance.\n\n /// The clock is obtained from the `GenericClockController` instance\n\n /// and its frequency impacts the resolution and maximum range of\n\n /// the timeout values that can be passed to the `start` method.\n\n /// Note that some hardware timer instances share the same clock\n\n /// generator instance and thus will be clocked at the same rate.\n\n pub fn $pm(clock: &clock::$clock, tc: $TC, pm: &mut PM, periodic: bool) -> Self {\n\n // this is safe because we're constrained to just the tc3 bit\n\n pm.apbcmask.modify(|_, w| w.$pm().set_bit());\n\n {\n\n let count = tc.count_32();\n\n\n", "file_path": "main/src/devices/tc32.rs", 
"rank": 79, "score": 6.831454809804731 }, { "content": "use core::marker::PhantomData;\n\nuse core::sync::atomic::AtomicBool;\n\nuse core::{cell::RefCell, sync::atomic::Ordering};\n\nuse core::borrow::BorrowMut;\n\n\n\nuse cortex_m::interrupt::{CriticalSection, Mutex};\n\nuse embedded_hal::digital::v2::PinState;\n\nuse hal::clock::GClock;\n\nuse hal::{\n\n clock::{ClockGenId, GenericClockController},\n\n eic::{\n\n pin::{self as extInt, ExternalInterrupt, Sense},\n\n EIC,\n\n },\n\n gpio::{\n\n v2::{\n\n self as pin_v2, AnyPin, DynPinId, Pin, PinId, PullDownInterrupt, PullUpInterrupt,\n\n PushPullOutput, PA01,\n\n },\n\n Output, Pa5, PinMode,\n", "file_path": "main/src/devices/ext_int_pin.rs", "rank": 80, "score": 6.769850862604255 }, { "content": " #[allow(dead_code)]\n\n pub fn poll(&mut self) {\n\n cortex_m::interrupt::free(|cs| {\n\n let mut serial = self.serial.borrow(cs).borrow_mut();\n\n let mut classes: [&mut dyn UsbClass<UsbBus>; 1] = [&mut *serial];\n\n self.bus.poll(&mut classes);\n\n })\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn read_poll(&mut self) -> core::result::Result<(usize, [u8; 100]), ()> {\n\n cortex_m::interrupt::free(|cs| {\n\n let mut buf = [0u8; 100];\n\n let mut serial = self.serial.borrow(cs).borrow_mut();\n\n match serial.read(&mut buf) {\n\n Ok(count) if count > 0 => Ok((count, buf)),\n\n _ => Err(()),\n\n }\n\n })\n\n }\n", "file_path": "main/src/devices/usb_serial.rs", "rank": 81, "score": 6.693704482835594 }, { "content": "\n\n self.read_phase = ReadPhase::Read2;\n\n Ok(false)\n\n }\n\n ReadPhase::Read2 => {\n\n let v = i2c.i2c_read_one(0x36)? 
as u16;\n\n\n\n self.raw_angle = ((self.read_phase_buf << 8) + v) & 0x0FFF;\n\n self.read_phase = ReadPhase::Query;\n\n Ok(true)\n\n }\n\n }\n\n }\n\n\n\n pub fn query(&mut self, i2c: &mut I2c) -> Result<(), I2CError> {\n\n i2c.i2c_query(0x36, 0x0Cu8)\n\n }\n\n\n\n pub fn read(&mut self, i2c: &mut I2c) -> Result<(), I2CError> {\n\n let mut buf = [0u8; 4];\n", "file_path": "main/src/devices/magnet_sensor.rs", "rank": 82, "score": 6.413856712361733 }, { "content": " peripherals.GCLK,\n\n &mut peripherals.PM,\n\n &mut peripherals.SYSCTRL,\n\n &mut peripherals.NVMCTRL,\n\n );\n\n\n\n let mut pins = xiao_m0::Pins::new(peripherals.PORT);\n\n let mut led0 = Led::init(pins.led0.into_push_pull_output());\n\n let mut led1 = Led::init(pins.led1.into_push_pull_output());\n\n let mut led2 = Led::init(pins.led2.into_push_pull_output());\n\n\n\n let mut delay = Delay::new(core.SYST, &mut clocks);\n\n\n\n #[cfg(feature = \"serial\")]\n\n let serial = UsbSerial::init(\n\n &mut clocks,\n\n peripherals.USB,\n\n &mut peripherals.PM,\n\n pins.usb_dm,\n\n pins.usb_dp,\n", "file_path": "main/src/devices/mod.rs", "rank": 83, "score": 6.156796937865577 }, { "content": "use core::cell::RefCell;\n\n\n\nuse atsamd_hal::{clock::GenericClockController, target_device::NVIC, usb::UsbBus};\n\nuse cortex_m::interrupt::{self, Mutex};\n\nuse usb_device::{\n\n class::UsbClass,\n\n class_prelude::UsbBusAllocator,\n\n device::{UsbDevice, UsbDeviceBuilder, UsbVidPid},\n\n};\n\nuse usbd_serial::{SerialPort, USB_CLASS_CDC};\n\nuse xiao_m0::{pac, UsbDm, UsbDp};\n\n\n\nstatic mut BUS_ALLOCATOR: Option<UsbBusAllocator<UsbBus>> = None;\n\n\n\npub struct UsbSerial {\n\n bus: UsbDevice<'static, UsbBus>,\n\n serial: Mutex<RefCell<SerialPort<'static, UsbBus>>>,\n\n}\n\n\n\nimpl UsbSerial {\n", "file_path": "main/src/devices/usb_serial.rs", "rank": 84, "score": 6.123999854433503 }, { "content": " Nanoseconds(self)\n\n }\n\n}\n\n\n\n// Frequency based\n\n\n\nimpl From<KiloHertz> for Hertz {\n\n fn from(item: 
KiloHertz) -> Self {\n\n Hertz(item.0 * 1_000)\n\n }\n\n}\n\n\n\nimpl From<MegaHertz> for Hertz {\n\n fn from(item: MegaHertz) -> Self {\n\n Hertz(item.0 * 1_000_000)\n\n }\n\n}\n\n\n\nimpl From<MegaHertz> for KiloHertz {\n\n fn from(item: MegaHertz) -> Self {\n", "file_path": "utils/src/time.rs", "rank": 85, "score": 6.101777348239937 }, { "content": "\n\n self.status = (buf[0] & 0b111000) >> 3;\n\n self.detected = (self.status & 0b100) != 0;\n\n self.low = (self.status & 0b10) != 0;\n\n self.heigh = (self.status & 0b1) != 0;\n\n self.raw_angle = (((buf[1] as u16) << 8) + (buf[2] as u16)) & 0x0FFF;\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn stepwise_read(&mut self, i2c: &mut I2c) -> Result<bool, I2CError> {\n\n match self.read_phase {\n\n ReadPhase::Query => {\n\n i2c.i2c_query(0x36, 0x0Cu8)?;\n\n\n\n self.read_phase = ReadPhase::Read1;\n\n Ok(false)\n\n }\n\n ReadPhase::Read1 => {\n\n self.read_phase_buf = i2c.i2c_read_one(0x36)? as u16;\n", "file_path": "main/src/devices/magnet_sensor.rs", "rank": 86, "score": 6.005472885559677 }, { "content": " KiloHertz(item.0 * 1_000)\n\n }\n\n}\n\n\n\nimpl From<Hertz> for KiloHertz {\n\n fn from(item: Hertz) -> Self {\n\n KiloHertz(item.0 / 1_000)\n\n }\n\n}\n\n\n\nimpl From<Hertz> for MegaHertz {\n\n fn from(item: Hertz) -> Self {\n\n MegaHertz(item.0 / 1_000_000)\n\n }\n\n}\n\n\n\nimpl From<KiloHertz> for MegaHertz {\n\n fn from(item: KiloHertz) -> Self {\n\n MegaHertz(item.0 / 1_000)\n\n }\n", "file_path": "utils/src/time.rs", "rank": 87, "score": 5.972606248934025 }, { "content": "impl From<Bps> for ats_time::Bps {\n\n fn from(item: Bps) -> Self {\n\n ats_time::Bps(item.0)\n\n }\n\n}\n\nimpl From<Hertz> for ats_time::Hertz {\n\n fn from(item: Hertz) -> Self {\n\n ats_time::Hertz(item.0)\n\n }\n\n}\n\n\n\nimpl From<KiloHertz> for ats_time::KiloHertz {\n\n fn from(item: KiloHertz) -> Self {\n\n ats_time::KiloHertz(item.0)\n\n }\n\n}\n\n\n\nimpl From<MegaHertz> for ats_time::MegaHertz {\n\n fn from(item: MegaHertz) -> Self {\n\n 
ats_time::MegaHertz(item.0)\n", "file_path": "utils/src/time.rs", "rank": 88, "score": 5.785832912004296 }, { "content": " r\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn get_delta_us_0(&mut self) -> Microseconds {\n\n if self.rtc_ovl {\n\n self.rtc_ovl = false;\n\n // hmmmmmm\n\n self.timer_1_buffer = 1_000_000;\n\n self.timer_0_buffer = 0;\n\n self.rtc.set_count32(0);\n\n 1_000_000.us()\n\n } else {\n\n // no swap possible\n\n let v = self.rtc.count32();\n\n self.rtc.set_count32(0);\n\n\n\n self.timer_1_buffer += v;\n\n let l = self.timer_0_buffer + v;\n", "file_path": "main/src/devices/mod.rs", "rank": 89, "score": 5.542577236515639 }, { "content": " self.timer_0_buffer = 0;\n\n l.us()\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn get_delta_us_1(&mut self) -> Microseconds {\n\n if self.rtc_ovl {\n\n self.rtc_ovl = false;\n\n // hmmmmmm\n\n self.timer_0_buffer = 1_000_000;\n\n self.timer_1_buffer = 0;\n\n self.rtc.set_count32(0);\n\n 1_000_000.us()\n\n } else {\n\n // no swap possible\n\n let v = self.rtc.count32();\n\n self.rtc.set_count32(0);\n\n\n\n self.timer_0_buffer += v;\n", "file_path": "main/src/devices/mod.rs", "rank": 90, "score": 5.468714402710116 }, { "content": "\n\n #[allow(dead_code)]\n\n pub fn serial_write(&mut self, bytes: &[u8]) {\n\n self.serial_write_len(&bytes, bytes.len())\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn serial_write_num(&mut self, num: usize) {\n\n let (len, bytes) = utils::num_to_string(num);\n\n self.serial_write_len(&bytes, len)\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn serial_write_len(&mut self, bytes: &[u8], len: usize) {\n\n cortex_m::interrupt::free(|cs| {\n\n let mut serial = self.serial.borrow(cs).borrow_mut();\n\n serial.write(&bytes[0..len]);\n\n });\n\n }\n\n}\n", "file_path": "main/src/devices/usb_serial.rs", "rank": 91, "score": 5.2586791945313935 }, { "content": " led2,\n\n\n\n delay,\n\n\n\n enable,\n\n step,\n\n dir,\n\n\n\n rtc,\n\n rtc_ovl: false,\n\n\n\n timer_0_buffer: 0,\n\n timer_1_buffer: 0,\n\n }\n\n 
}\n\n}\n\n\n", "file_path": "main/src/devices/mod.rs", "rank": 92, "score": 5.238995455431113 }, { "content": " self.led2.toggle();\n\n }\n\n }\n\n }\n\n\n\n pub fn serial_write(&mut self, bytes: &[u8]) {\n\n #[cfg(feature = \"serial\")]\n\n self.serial_write_len(&bytes, bytes.len())\n\n }\n\n\n\n pub fn serial_write_num(&mut self, num: usize) {\n\n #[cfg(feature = \"serial\")]\n\n {\n\n let (len, bytes) = num_to_string(num);\n\n self.serial_write_len(&bytes, len)\n\n }\n\n }\n\n\n\n pub fn serial_write_len(&mut self, bytes: &[u8], len: usize) {\n\n #[cfg(feature = \"serial\")]\n", "file_path": "main/src/devices/mod.rs", "rank": 93, "score": 5.191705631692926 }, { "content": "// TC5 + TC6 can be paired to make a 32-bit counter\n\n\n\n/// A generic hardware timer counter.\n\n/// The counters are exposed in 16-bit mode only.\n\n/// The hardware allows configuring the 8-bit mode\n\n/// and pairing up some instances to run in 32-bit\n\n/// mode, but that functionality is not currently\n\n/// exposed by this hal implementation.\n\n/// TimerCounter implements both the `Periodic` and\n\n/// the `CountDown` embedded_hal timer traits.\n\n/// Before a hardware timer can be used, it must first\n\n/// have a clock configured.\n\npub struct TimerCounter32<TC: Count32> {\n\n freq: Hertz,\n\n tc: TC,\n\n periodic: bool,\n\n}\n\n\n\n/// This is a helper trait to make it easier to make most of the\n\n/// TimerCounter impl generic. 
It doesn't make too much sense to\n\n/// to try to implement this trait outside of this module.\n", "file_path": "main/src/devices/tc32.rs", "rank": 94, "score": 5.039667537540431 }, { "content": " address: u8,\n\n from: u8,\n\n count: usize,\n\n buffer: &mut [u8],\n\n ) -> Result<(), I2CError> {\n\n interrupt::free(|_| self.main.write(address, &[from]))?;\n\n for i in 0..count {\n\n let mut res = [0u8];\n\n interrupt::free(|_| self.main.read(address, &mut res))?;\n\n buffer[i] = res[0];\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn i2c_query(&mut self, address: u8, from: u8) -> Result<(), I2CError> {\n\n interrupt::free(|_| self.main.write(address, &[from]))?;\n\n Ok(())\n\n }\n\n\n\n pub fn i2c_read(\n", "file_path": "main/src/devices/i2c.rs", "rank": 95, "score": 4.988946100959458 }, { "content": " let ns: Nanoseconds = 123.ns() + 1.s();\n\n assert_eq!(ns.0, 1_000_000_123_u32);\n\n }\n\n\n\n #[test]\n\n fn add_sec_to_micro() {\n\n let us: Microseconds = 123.us() + 1.s();\n\n assert_eq!(us.0, 1_000_123_u32);\n\n }\n\n\n\n #[test]\n\n fn add_sec_to_milli() {\n\n let ms: Milliseconds = 123.ms() + 1.s();\n\n assert_eq!(ms.0, 1_123_u32);\n\n }\n\n\n\n #[test]\n\n fn add_milli_to_sec() {\n\n let ms: Seconds = 1.s() + 123.ms();\n\n assert_eq!(ms.0, 1u32);\n", "file_path": "utils/src/time.rs", "rank": 96, "score": 4.935485831905721 }, { "content": " &mut self,\n\n address: u8,\n\n count: usize,\n\n buffer: &mut [u8],\n\n ) -> Result<(), I2CError> {\n\n for i in 0..count {\n\n let mut res = [0u8];\n\n interrupt::free(|_| self.main.read(address, &mut res))?;\n\n buffer[i] = res[0];\n\n }\n\n Ok(())\n\n }\n\n pub fn i2c_read_one(&mut self, address: u8) -> Result<u8, I2CError> {\n\n let mut res = [0u8];\n\n interrupt::free(|_| self.main.read(address, &mut res))?;\n\n Ok(res[0])\n\n }\n\n}\n", "file_path": "main/src/devices/i2c.rs", "rank": 97, "score": 4.7711619458941 }, { "content": " }\n\n\n\n // Test Sub\n\n #[test]\n\n fn sub_sec_to_nano() {\n\n let ns: Nanoseconds = 
1_000_123.ns() - 1.ms();\n\n assert_eq!(ns.0, 123_u32);\n\n }\n\n\n\n #[test]\n\n fn sub_sec_to_micro() {\n\n let us: Microseconds = 1_000_123.us() - 1.s();\n\n assert_eq!(us.0, 123_u32);\n\n }\n\n\n\n #[test]\n\n fn sub_sec_to_milli() {\n\n let ms: Milliseconds = 1_000.ms() - 2_345.us();\n\n assert_eq!(ms.0, 998_u32);\n\n }\n\n\n\n #[test]\n\n fn sub_milli_to_sec() {\n\n let s: Seconds = 10.s() - 1_230.ms();\n\n assert_eq!(s.0, 9u32);\n\n }\n\n}\n", "file_path": "utils/src/time.rs", "rank": 98, "score": 4.714101100296141 }, { "content": " let o: Seconds = (*other).into();\n\n self.0 == o.0\n\n }\n\n}\n\n\n\nimpl<T> PartialOrd<T> for Seconds\n\nwhere\n\n T: Into<Seconds> + Copy,\n\n{\n\n fn partial_cmp(&self, other: &T) -> Option<core::cmp::Ordering> {\n\n let o: Seconds = (*other).into();\n\n self.0.partial_cmp(&o.0)\n\n }\n\n}\n\n\n\nimpl<T> PartialEq<T> for Milliseconds\n\nwhere\n\n T: Into<Milliseconds> + Copy,\n\n{\n\n fn eq(&self, other: &T) -> bool {\n", "file_path": "utils/src/time.rs", "rank": 99, "score": 4.600197361914267 } ]
Rust
fruity_core/fruity_ecs/src/entity/entity_query/serialized/mod.rs
DoYouRockBaby/fruity_game_engine
299a8fe641efb142a551640f6d1aa4868e1ad670
use crate::entity::archetype::Archetype; use crate::entity::archetype::ArchetypeArcRwLock; use crate::entity::entity_query::serialized::params::With; use crate::entity::entity_query::serialized::params::WithEnabled; use crate::entity::entity_query::serialized::params::WithEntity; use crate::entity::entity_query::serialized::params::WithId; use crate::entity::entity_query::serialized::params::WithName; use crate::entity::entity_query::serialized::params::WithOptional; use crate::entity::entity_reference::EntityReference; use fruity_any::*; use fruity_core::convert::FruityInto; use fruity_core::introspect::FieldInfo; use fruity_core::introspect::IntrospectObject; use fruity_core::introspect::MethodCaller; use fruity_core::introspect::MethodInfo; use fruity_core::serialize::serialized::Callback; use fruity_core::serialize::serialized::SerializableObject; use fruity_core::serialize::serialized::Serialized; use fruity_core::utils::introspect::cast_introspect_mut; use fruity_core::utils::introspect::ArgumentCaster; use fruity_core::RwLock; use itertools::Itertools; use std::fmt::Debug; use std::sync::Arc; pub(crate) mod params; pub trait SerializedQueryParam: FruityAny { fn duplicate(&self) -> Box<dyn SerializedQueryParam>; fn filter_archetype(&self, archetype: &Archetype) -> bool; fn get_entity_components(&self, entity_reference: EntityReference) -> Vec<Serialized>; } #[derive(FruityAny)] pub(crate) struct SerializedQuery { pub archetypes: Arc<RwLock<Vec<ArchetypeArcRwLock>>>, pub params: Vec<Box<dyn SerializedQueryParam>>, } impl Clone for SerializedQuery { fn clone(&self) -> Self { Self { archetypes: self.archetypes.clone(), params: self .params .iter() .map(|param| param.duplicate()) .collect::<Vec<_>>(), } } } impl Debug for SerializedQuery { fn fmt( &self, _formatter: &mut std::fmt::Formatter<'_>, ) -> std::result::Result<(), std::fmt::Error> { Ok(()) } } impl SerializedQuery { pub fn with_entity(&mut self) { self.params.push(Box::new(WithEntity {})); } pub fn 
with_id(&mut self) { self.params.push(Box::new(WithId {})); } pub fn with_name(&mut self) { self.params.push(Box::new(WithName {})); } pub fn with_enabled(&mut self) { self.params.push(Box::new(WithEnabled {})); } pub fn with(&mut self, component_identifier: &str) { self.params.push(Box::new(With { identifier: component_identifier.to_string(), })); } pub fn with_optional(&mut self, component_identifier: &str) { self.params.push(Box::new(WithOptional { identifier: component_identifier.to_string(), })); } pub fn for_each(&self, callback: impl Fn(&[Serialized]) + Send + Sync) { let archetypes = self.archetypes.read(); let mut archetype_iter: Box<dyn Iterator<Item = &ArchetypeArcRwLock>> = Box::new(archetypes.iter()); for param in self.params.iter() { archetype_iter = Box::new( archetype_iter.filter(|archetype| param.filter_archetype(&archetype.read())), ); } let entities = archetype_iter .map(|archetype| archetype.iter(false)) .flatten() .collect::<Vec<_>>(); entities .into_iter() /*.par_bridge()*/ .for_each(|entity| { let serialized_params = self .params .iter() .map(|param| param.get_entity_components(entity.clone())) .multi_cartesian_product(); serialized_params.for_each(|params| callback(&params)) }); } } impl FruityInto<Serialized> for SerializedQuery { fn fruity_into(self) -> Serialized { Serialized::NativeObject(Box::new(self)) } } impl SerializableObject for SerializedQuery { fn duplicate(&self) -> Box<dyn SerializableObject> { Box::new(self.clone()) } } impl IntrospectObject for SerializedQuery { fn get_class_name(&self) -> String { "Query".to_string() } fn get_method_infos(&self) -> Vec<MethodInfo> { vec![ MethodInfo { name: "with_entity".to_string(), call: MethodCaller::Mut(Arc::new(|this, _args| { let this = cast_introspect_mut::<SerializedQuery>(this); this.with_entity(); Ok(Some(Serialized::NativeObject(this.duplicate()))) })), }, MethodInfo { name: "with_id".to_string(), call: MethodCaller::Mut(Arc::new(|this, _args| { let this = 
cast_introspect_mut::<SerializedQuery>(this); this.with_id(); Ok(Some(Serialized::NativeObject(this.duplicate()))) })), }, MethodInfo { name: "with_name".to_string(), call: MethodCaller::Mut(Arc::new(|this, _args| { let this = cast_introspect_mut::<SerializedQuery>(this); this.with_name(); Ok(Some(Serialized::NativeObject(this.duplicate()))) })), }, MethodInfo { name: "with_enabled".to_string(), call: MethodCaller::Mut(Arc::new(|this, _args| { let this = cast_introspect_mut::<SerializedQuery>(this); this.with_enabled(); Ok(Some(Serialized::NativeObject(this.duplicate()))) })), }, MethodInfo { name: "with".to_string(), call: MethodCaller::Mut(Arc::new(|this, args| { let this = cast_introspect_mut::<SerializedQuery>(this); let mut caster = ArgumentCaster::new("with", args); let arg1 = caster.cast_next::<String>()?; this.with(&arg1); Ok(Some(Serialized::NativeObject(this.duplicate()))) })), }, MethodInfo { name: "with_optional".to_string(), call: MethodCaller::Mut(Arc::new(|this, args| { let this = cast_introspect_mut::<SerializedQuery>(this); let mut caster = ArgumentCaster::new("with_optional", args); let arg1 = caster.cast_next::<String>()?; this.with_optional(&arg1); Ok(Some(Serialized::NativeObject(this.duplicate()))) })), }, MethodInfo { name: "for_each".to_string(), call: MethodCaller::Mut(Arc::new(|this, args| { let this = cast_introspect_mut::<SerializedQuery>(this); let mut caster = ArgumentCaster::new("for_each", args); let arg1 = caster.cast_next::<Callback>()?; let callback = arg1.callback; this.for_each(|args| { callback(args.to_vec()).ok(); }); Ok(None) })), }, ] } fn get_field_infos(&self) -> Vec<FieldInfo> { vec![] } }
use crate::entity::archetype::Archetype; use crate::entity::archetype::ArchetypeArcRwLock; use crate::entity::entity_query::serialized::params::With; use crate::entity::entity_query::serialized::params::WithEnabled; use crate::entity::entity_query::serialized::params::WithEntity; use crate::entity::entity_query::serialized::params::WithId; use crate::entity::entity_query::serialized::params::WithName; use crate::entity::entity_query::serialized::params::WithOptional; use crate::entity::entity_reference::EntityReference; use fruity_any::*; use fruity_core::convert::FruityInto; use fruity_core::introspect::FieldInfo; use fruity_core::introspect::IntrospectObject; use fruity_core::introspect::MethodCaller; use fruity_core::introspect::MethodInfo; use fruity_core::serialize::serialized::Callback; use fruity_core::serialize::serialized::SerializableObject; use fruity_core::serialize::serialized::Serialized; use fruity_core::utils::introspect::cast_introspect_mut; use fruity_core::utils::introspect::ArgumentCaster; use fruity_core::RwLock; use itertools::Itertools; use std::fmt::Debug; use std::sync::Arc; pub(crate) mod params; pub trait SerializedQueryParam: FruityAny { fn duplicate(&self) -> Box<dyn SerializedQueryParam>; fn filter_archetype(&self, archetype: &Archetype) -> bool; fn get_entity_components(&self, entity_reference: EntityReference) -> Vec<Serialized>; } #[derive(FruityAny)] pub(crate) struct SerializedQuery { pub archetypes: Arc<RwLock<Vec<ArchetypeArcRwLock>>>, pub params: Vec<Box<dyn SerializedQueryParam>>, } impl Clone for SerializedQuery { fn clone(&self) -> Self { Self { archetypes: self.archetypes.clone(), params: self .params .iter() .map(|param| param.duplicate()) .collect::<Vec<_>>(), } } } impl Debug for SerializedQuery { fn fmt( &self, _formatter: &mut std::fmt::Formatter<'_>, ) -> std::result::Result<(), std::fmt::Error> { Ok(()) } } impl SerializedQuery { pub fn with_entity(&mut self) { self.params.push(Box::new(WithEntity {})); } pub fn 
with_id(&mut self) { self.params.push(Box::new(WithId {})); } pub fn with_name(&mut self) { self.params.push(Box::new(WithName {})); } pub fn with_enabled(&mut self) { self.params.push(Box::new(WithEnabled {})); } pub fn with(&mut self, component_identifier: &str) { self.params.push(Box::new(With { identifier: component_identifier.to_string(), })); } pub fn with_optional(&mut self, component_identifier: &str) { self.params.push(Box::new(WithOptional { identifier: component_identifier.to_string(), })); } pub fn for_each(&self, callback: impl Fn(&[Serialized]) + Send + Sync) { let archetypes = self.archetypes.read(); let mut archetype_iter: Box<dyn Iterator<Item = &ArchetypeArcRwLock>> = Box::new(archetypes.iter()); for param in self.params.iter() { archetype_iter = Box::new( archetype_iter.filter(|archetype| param.filter_archetype(&archetype.read())), ); } let entities = archetype_iter .map(|archetype| archetype.iter(false)) .flatten() .collect::<Vec<_>>(); entities .into_iter() /*.par_bridge()*/ .for_each(|entity| { let serialized_params = self .params .iter() .map(|param| param.get_entity_components(entity.clone())) .multi_cartesian_product(); serialized_params.for_each(|params| callback(&params)) }); } } impl FruityInto<Serialized> for SerializedQuery { fn fruity_into(self) -> Serialized { Serialized::NativeObject(Box::new(self)) } } impl SerializableObject for SerializedQuery { fn duplicate(&self) -> Box<dyn SerializableObject> { Box::new(self.clone()) } } impl IntrospectObject for SerializedQuery { fn get_class_name(&self) -> String { "Query".to_string() }
fn get_field_infos(&self) -> Vec<FieldInfo> { vec![] } }
fn get_method_infos(&self) -> Vec<MethodInfo> { vec![ MethodInfo { name: "with_entity".to_string(), call: MethodCaller::Mut(Arc::new(|this, _args| { let this = cast_introspect_mut::<SerializedQuery>(this); this.with_entity(); Ok(Some(Serialized::NativeObject(this.duplicate()))) })), }, MethodInfo { name: "with_id".to_string(), call: MethodCaller::Mut(Arc::new(|this, _args| { let this = cast_introspect_mut::<SerializedQuery>(this); this.with_id(); Ok(Some(Serialized::NativeObject(this.duplicate()))) })), }, MethodInfo { name: "with_name".to_string(), call: MethodCaller::Mut(Arc::new(|this, _args| { let this = cast_introspect_mut::<SerializedQuery>(this); this.with_name(); Ok(Some(Serialized::NativeObject(this.duplicate()))) })), }, MethodInfo { name: "with_enabled".to_string(), call: MethodCaller::Mut(Arc::new(|this, _args| { let this = cast_introspect_mut::<SerializedQuery>(this); this.with_enabled(); Ok(Some(Serialized::NativeObject(this.duplicate()))) })), }, MethodInfo { name: "with".to_string(), call: MethodCaller::Mut(Arc::new(|this, args| { let this = cast_introspect_mut::<SerializedQuery>(this); let mut caster = ArgumentCaster::new("with", args); let arg1 = caster.cast_next::<String>()?; this.with(&arg1); Ok(Some(Serialized::NativeObject(this.duplicate()))) })), }, MethodInfo { name: "with_optional".to_string(), call: MethodCaller::Mut(Arc::new(|this, args| { let this = cast_introspect_mut::<SerializedQuery>(this); let mut caster = ArgumentCaster::new("with_optional", args); let arg1 = caster.cast_next::<String>()?; this.with_optional(&arg1); Ok(Some(Serialized::NativeObject(this.duplicate()))) })), }, MethodInfo { name: "for_each".to_string(), call: MethodCaller::Mut(Arc::new(|this, args| { let this = cast_introspect_mut::<SerializedQuery>(this); let mut caster = ArgumentCaster::new("for_each", args); let arg1 = caster.cast_next::<Callback>()?; let callback = arg1.callback; this.for_each(|args| { callback(args.to_vec()).ok(); }); Ok(None) })), }, ] }
function_block-full_function
[ { "content": "pub fn use_global<'a, T: Send + Sync + 'static>() -> &'a mut T {\n\n let mut globals = GLOBALS.lock();\n\n let globals = globals.get_mut(&TypeId::of::<T>()).unwrap().deref_mut();\n\n let result = globals.downcast_mut::<T>().unwrap();\n\n\n\n // TODO: Try to find a way to remove that\n\n unsafe { std::mem::transmute::<&mut T, &mut T>(result) }\n\n}\n\n\n", "file_path": "fruity_editor/src/hooks/mod.rs", "rank": 0, "score": 401849.34391077264 }, { "content": "/// A abstraction of a collection over components\n\npub trait ComponentCollection: Sync + Send {\n\n /// Get a single component by index\n\n fn get(&self, index: &usize) -> Option<&dyn Component>;\n\n\n\n /// Add components to the collection\n\n ///\n\n /// # Arguments\n\n /// * `components` - The components that will be added\n\n ///\n\n fn add_many(&mut self, components: Vec<AnyComponent>);\n\n\n\n /// Remove components from the collection\n\n ///\n\n /// # Arguments\n\n /// * `index` - The index of the first component to remove\n\n /// * `count` - The number of components that will be removed\n\n ///\n\n fn remove_many(&mut self, index: usize, count: usize) -> Vec<AnyComponent>;\n\n}\n", "file_path": "fruity_core/fruity_ecs/src/entity/archetype/component_collection.rs", "rank": 1, "score": 367768.2963347585 }, { "content": "/// Provides trait to implement a self duplication for an introspect object that can be stored in serialized\n\npub trait SerializableObject: IntrospectObject {\n\n /// Create a copy of self\n\n fn duplicate(&self) -> Box<dyn SerializableObject>;\n\n}\n\n\n\nimpl<T: IntrospectObject + ?Sized> SerializableObject for Arc<T> {\n\n fn duplicate(&self) -> Box<dyn SerializableObject> {\n\n Box::new(self.clone())\n\n }\n\n}\n\n\n\nimpl Clone for Box<dyn SerializableObject> {\n\n fn clone(&self) -> Self {\n\n self.duplicate()\n\n }\n\n}\n", "file_path": "fruity_core/src/serialize/serialized/mod.rs", "rank": 3, "score": 311878.0669125274 }, { "content": "type Callback<T> = dyn 
FnOnce(&mut T) + Send + Sync + 'static;\n\n\n", "file_path": "fruity_core/src/utils/single_thread_wrapper.rs", "rank": 4, "score": 306227.7654752013 }, { "content": "pub fn declare_global<T: Send + Sync + 'static>(value: T) {\n\n let mut globals = GLOBALS.lock();\n\n globals.insert(TypeId::of::<T>(), Box::new(value));\n\n}\n\n\n", "file_path": "fruity_editor/src/hooks/mod.rs", "rank": 5, "score": 301521.8188506087 }, { "content": "/// Trait to implement static introspection to an object\n\npub trait IntrospectObject: Debug + FruityAny {\n\n /// Return the class type name\n\n fn get_class_name(&self) -> String;\n\n\n\n /// Get a list of fields with many informations\n\n fn get_field_infos(&self) -> Vec<FieldInfo>;\n\n\n\n /// Get a list of fields with many informations\n\n fn get_method_infos(&self) -> Vec<MethodInfo>;\n\n}\n\n\n\nimpl<T: IntrospectObject + ?Sized> IntrospectObject for Box<T> {\n\n fn get_class_name(&self) -> String {\n\n self.as_ref().get_class_name()\n\n }\n\n\n\n fn get_field_infos(&self) -> Vec<FieldInfo> {\n\n self.as_ref()\n\n .get_field_infos()\n\n .into_iter()\n", "file_path": "fruity_core/src/introspect/mod.rs", "rank": 6, "score": 298720.02094647137 }, { "content": "/// The any trait\n\npub trait FruityAny: Any + Send + Sync {\n\n /// Return self as an Any ref\n\n fn as_any_ref(&self) -> &dyn Any;\n\n\n\n /// Return self as an Any mutable ref\n\n fn as_any_mut(&mut self) -> &mut dyn Any;\n\n\n\n /// Return self as an Any box\n\n fn as_any_box(self: Box<Self>) -> Box<dyn Any>;\n\n\n\n /// Return self as an Any arc\n\n fn as_any_arc(self: Arc<Self>) -> Arc<dyn Any + Send + Sync>;\n\n}\n\n\n\nimpl<T: FruityAny + ?Sized> FruityAny for Box<T> {\n\n fn as_any_ref(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn as_any_mut(&mut self) -> &mut dyn Any {\n", "file_path": "fruity_core/fruity_any/src/lib.rs", "rank": 7, "score": 297382.65661782905 }, { "content": "#[topo::nested]\n\npub fn use_memo<T: Clone + 'static, U: Clone + Eq + 'static>(\n\n mut 
data_fn: impl FnMut(U) -> T,\n\n dependency: U,\n\n) -> T {\n\n let value_state = use_state(|| data_fn(dependency.clone()));\n\n let dependency_state = use_state(|| dependency.clone());\n\n\n\n // We update the value if dependency changed\n\n if dependency != dependency_state.get() {\n\n value_state.set(data_fn(dependency.clone()));\n\n dependency_state.set(dependency);\n\n }\n\n\n\n value_state.get()\n\n}\n", "file_path": "fruity_editor/src/hooks/mod.rs", "rank": 8, "score": 287537.06298983213 }, { "content": "/// A trait that should be implement for everything that can be queried from ['EntityService']\n\npub trait QueryParam<'a> {\n\n /// The type of the query callback parameter\n\n type Item: Clone;\n\n\n\n /// A filter over the archetypes\n\n fn filter_archetype(archetype: &Archetype) -> bool;\n\n\n\n /// Does this require a read guard over the reference\n\n fn require_read() -> bool;\n\n\n\n /// Does this require a write guard over the reference\n\n fn require_write() -> bool;\n\n\n\n /// Iter over the queried components into a given entity\n\n fn iter_entity_components(\n\n entity_reference: EntityReference,\n\n entity_guard: &'a RequestedEntityGuard<'a>,\n\n ) -> Box<dyn Iterator<Item = Self::Item> + 'a>;\n\n}\n\n\n", "file_path": "fruity_core/fruity_ecs/src/entity/entity_query/mod.rs", "rank": 9, "score": 279429.3812049128 }, { "content": "/// Extract the file type from a file path\n\n///\n\n/// # Arguments\n\n/// * `file_path` - The file path\n\n///\n\npub fn get_file_type_from_path(file_path: &str) -> Option<String> {\n\n let path = Path::new(file_path);\n\n Some(path.extension()?.to_str()?.to_string())\n\n}\n", "file_path": "fruity_core/src/utils/string.rs", "rank": 10, "score": 278410.2606968208 }, { "content": "#[topo::nested]\n\npub fn do_once<F: FnMut() -> ()>(mut func: F) -> StateAccess<bool> {\n\n let has_done = use_state(|| false);\n\n if !has_done.get() {\n\n func();\n\n has_done.set(true);\n\n }\n\n has_done\n\n}\n", "file_path": 
"deps/comp_state/src/helpers.rs", "rank": 11, "score": 277444.54589284386 }, { "content": "pub fn format_function_name_from_rust_to_js(name: &str) -> String {\n\n name.to_case(Case::Camel)\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/js_value/utils.rs", "rank": 12, "score": 266197.55455019197 }, { "content": "#[inline]\n\npub fn clone<T>(dst: &mut [T], src: &[T]) -> usize\n\nwhere\n\n T: Clone,\n\n{\n\n let len = min(src.len(), dst.len());\n\n (&mut dst[..len]).clone_from_slice(&src[..len]);\n\n len\n\n}\n\n\n", "file_path": "fruity_core/src/utils/slice.rs", "rank": 13, "score": 257298.73360455746 }, { "content": "pub fn get_origin(scope: &mut v8::HandleScope) -> String {\n\n let context = scope.get_current_context();\n\n let global_object = context.global(scope);\n\n\n\n let origin_key = v8::String::new(scope, \"__origin\").unwrap();\n\n\n\n let origin_value = match global_object.get(scope, origin_key.into()) {\n\n Some(origin_value) => match v8::Local::<v8::String>::try_from(origin_value) {\n\n Ok(origin_value) => origin_value.to_rust_string_lossy(scope),\n\n Err(_) => String::default(),\n\n },\n\n None => String::default(),\n\n };\n\n\n\n origin_value\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/js_value/utils.rs", "rank": 14, "score": 256800.93303555012 }, { "content": "/// A trait that should be implemented by every resources\n\npub trait Resource: IntrospectObject + Debug {}\n\n\n\nimpl<T: Resource + ?Sized> Resource for RwLock<Box<T>> {}\n", "file_path": "fruity_core/src/resource/resource.rs", "rank": 15, "score": 255952.8594243186 }, { "content": "/// A trait that implements a function to serialize an object to a [’Serialized’]\n\npub trait Serialize {\n\n /// A function to serialize an object to a [’Serialized’]\n\n fn serialize(&self) -> Option<Serialized>;\n\n}\n\n\n", "file_path": "fruity_core/src/serialize/mod.rs", "rank": 16, "score": 255855.00819719722 }, { "content": "#[topo::nested]\n\npub fn inspect_entity(entity: 
&mut EntityReference) -> UIElement {\n\n let inspector_state = use_global::<InspectorState>();\n\n let component_search_text = use_state(|| \"\".to_string());\n\n let display_add_component_popup = use_state(|| false);\n\n\n\n let world_state = use_global::<WorldState>();\n\n let editor_component_service = world_state\n\n .resource_container\n\n .require::<EditorComponentService>();\n\n let editor_component_service = editor_component_service.read();\n\n\n\n let entity_reader = entity.read();\n\n let entity_id = entity_reader.get_entity_id();\n\n let entity_2 = entity.clone();\n\n let entity_3 = entity.clone();\n\n let head = Column {\n\n children: vec![Row {\n\n children: vec![\n\n RowItem {\n\n size: UISize::Units(50.0),\n", "file_path": "fruity_editor/src/inspect/inspect_entity.rs", "rank": 17, "score": 253974.64165210206 }, { "content": "/// An abstraction over a component, should be implemented for every component\n\npub trait Component: IntrospectObject + Debug {\n\n /// Get a collection to store this component in the archetype\n\n fn get_collection(&self) -> Box<dyn ComponentCollection>;\n\n\n\n /// Create a new component that is a clone of self\n\n fn duplicate(&self) -> Box<dyn Component>;\n\n}\n\n\n\nimpl Serialize for &dyn Component {\n\n fn serialize(&self) -> Option<Serialized> {\n\n let native_serialized =\n\n Serialized::NativeObject(Box::new(AnyComponent::from_box(self.duplicate())));\n\n let serialized = native_serialized.serialize_native_objects();\n\n Some(serialized)\n\n }\n\n}\n\n\n\n/// An container for a component without knowing the instancied type\n\n#[derive(FruityAny, Debug)]\n\npub struct AnyComponent {\n", "file_path": "fruity_core/fruity_ecs/src/component/component.rs", "rank": 18, "score": 251385.74116746138 }, { "content": "pub fn set_origin(scope: &mut v8::HandleScope, origin: &str) {\n\n let context = scope.get_current_context();\n\n let global_object = context.global(scope);\n\n\n\n let origin_key = v8::String::new(scope, 
\"__origin\").unwrap();\n\n let origin_value = v8::String::new(scope, origin).unwrap();\n\n\n\n global_object.set(scope, origin_key.into(), origin_value.into());\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/js_value/utils.rs", "rank": 19, "score": 250453.19588309847 }, { "content": "/// A trait that implements a function to deserialize an object from a [’Serialized’]\n\npub trait Deserialize {\n\n /// The deserialize ouput type\n\n type Output;\n\n\n\n /// A function to deserialize an object from a [’Serialized’]\n\n fn deserialize(\n\n serialized: &Serialized,\n\n object_factory_service: &ObjectFactoryService,\n\n ) -> Option<Self::Output>;\n\n}\n\n\n\nimpl Serialized {\n\n /// This returns an other serialized value, the difference between both is that the output converts\n\n /// the [’Serialized::NativeObject] that into [’Serialized::SerializedObject’]\n\n ///\n\n /// # Arguments\n\n /// * `object_factory` - The object factory that will instantiate the objects\n\n ///\n\n pub fn serialize_native_objects(&self) -> Serialized {\n\n if let Serialized::NativeObject(native_object) = self {\n", "file_path": "fruity_core/src/serialize/mod.rs", "rank": 20, "score": 245686.31317767577 }, { "content": "/// Deserialize a [’Serialized’] from a yaml file\n\n///\n\n/// # Arguments\n\n/// * `reader` - The read io stream\n\n///\n\npub fn deserialize_yaml(reader: &mut dyn Read) -> Option<Serialized> {\n\n let mut buffer = String::new();\n\n if let Err(err) = reader.read_to_string(&mut buffer) {\n\n log::error!(\"{}\", err.to_string());\n\n return None;\n\n }\n\n\n\n let docs = YamlLoader::load_from_str(&buffer).unwrap();\n\n let yaml = &docs[0];\n\n\n\n intern_deserialize_yaml(yaml)\n\n}\n\n\n", "file_path": "fruity_core/src/serialize/yaml.rs", "rank": 21, "score": 242688.9549753744 }, { "content": "/// Get the entity type identifier from a list of components\n\npub fn get_type_identifier_by_any(components: &[AnyComponent]) -> EntityTypeIdentifier {\n\n let 
identifier = components\n\n .iter()\n\n .map(|component| component.get_class_name())\n\n .collect::<Vec<_>>();\n\n\n\n EntityTypeIdentifier(identifier)\n\n}\n\n\n", "file_path": "fruity_core/fruity_ecs/src/entity/entity.rs", "rank": 22, "score": 231625.69629992772 }, { "content": "type SystemCallback = dyn Fn(Arc<ResourceContainer>) + Sync + Send + 'static;\n\n\n\n/// Params for a system\n\n#[derive(Debug, Clone, FruityAny, SerializableObject, IntrospectObject, InstantiableObject)]\n\npub struct SystemParams {\n\n /// The pool index\n\n pub pool_index: usize,\n\n\n\n /// If true, the system is still running while pause\n\n pub ignore_pause: bool,\n\n}\n\n\n\nimpl Default for SystemParams {\n\n fn default() -> Self {\n\n Self {\n\n pool_index: 50,\n\n ignore_pause: false,\n\n }\n\n }\n\n}\n", "file_path": "fruity_core/fruity_ecs/src/system/system_service.rs", "rank": 23, "score": 230706.60053532105 }, { "content": "/// Get the entity type identifier from a list of components\n\npub fn get_type_identifier(components: &[&dyn Component]) -> EntityTypeIdentifier {\n\n let identifier = components\n\n .iter()\n\n .map(|component| component.get_class_name())\n\n .collect::<Vec<_>>();\n\n\n\n EntityTypeIdentifier(identifier)\n\n}\n", "file_path": "fruity_core/fruity_ecs/src/entity/entity.rs", "rank": 24, "score": 227508.15004634805 }, { "content": "pub trait JsValue: Debug {\n\n fn as_v8<'a>(&mut self, scope: &mut v8::HandleScope<'a>) -> v8::Local<'a, v8::Value>;\n\n}\n", "file_path": "fruity_core/fruity_javascript/src/js_value/value.rs", "rank": 25, "score": 221434.5691142852 }, { "content": "#[topo::nested]\n\npub fn draw_element<'a>(elem: UIElement, ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n let type_id = elem.root.as_ref().type_id();\n\n\n\n if type_id == TypeId::of::<Text>() {\n\n draw_text(*elem.root.downcast::<Text>().unwrap(), ui, ctx)\n\n } else if type_id == TypeId::of::<Button>() {\n\n draw_button(*elem.root.downcast::<Button>().unwrap(), ui, ctx)\n\n } else 
if type_id == TypeId::of::<ImageButton>() {\n\n draw_image_button(*elem.root.downcast::<ImageButton>().unwrap(), ui, ctx)\n\n } else if type_id == TypeId::of::<Checkbox>() {\n\n draw_checkbox(*elem.root.downcast::<Checkbox>().unwrap(), ui, ctx)\n\n } else if type_id == TypeId::of::<FloatInput>() {\n\n draw_float_input(*elem.root.downcast::<FloatInput>().unwrap(), ui, ctx)\n\n } else if type_id == TypeId::of::<Input>() {\n\n draw_input(*elem.root.downcast::<Input>().unwrap(), ui, ctx)\n\n } else if type_id == TypeId::of::<IntegerInput>() {\n\n draw_integer_input(*elem.root.downcast::<IntegerInput>().unwrap(), ui, ctx)\n\n } else if type_id == TypeId::of::<Column>() {\n\n draw_column(*elem.root.downcast::<Column>().unwrap(), ui, ctx)\n\n } else if type_id == TypeId::of::<Row>() {\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/mod.rs", "rank": 26, "score": 218343.13107130385 }, { "content": "/// Cast an any introspect object with mutability\n\n///\n\n/// # Arguments\n\n/// * `any` - The introspect object as an any mutable reference\n\n///\n\npub fn cast_introspect_mut<T: Any>(any: &mut dyn Any) -> &mut T {\n\n any.downcast_mut::<T>().unwrap()\n\n}\n\n\n\n/// A tool that is used to cast serialized arguments, intended to be used into IntrospectMethod implementations\n\npub struct ArgumentCaster<'s> {\n\n method: &'s str,\n\n args_count: usize,\n\n iter: Enumerate<VecIntoIter<Serialized>>,\n\n last_index: usize,\n\n}\n\n\n\nimpl<'s> ArgumentCaster<'s> {\n\n /// Return an ArgumentCaster\n\n pub fn new<'a>(method: &'a str, args: Vec<Serialized>) -> ArgumentCaster<'a> {\n\n ArgumentCaster::<'a> {\n\n method,\n\n args_count: args.len(),\n\n iter: args.into_iter().enumerate(),\n\n last_index: 1,\n", "file_path": "fruity_core/src/utils/introspect.rs", "rank": 27, "score": 212325.05694929513 }, { "content": "pub fn on_selected_image(file_path: &str) {\n\n let world_state = use_global::<WorldState>();\n\n\n\n if let Some(texture) = world_state\n\n 
.resource_container\n\n .get::<dyn TextureResource>(file_path)\n\n {\n\n let inspector_state = use_global::<InspectorState>();\n\n inspector_state.select(Box::new(texture.clone()));\n\n } else {\n\n if let Err(_) = world_state\n\n .resource_container\n\n .load_resource_file(file_path, \"png\")\n\n {\n\n return;\n\n }\n\n\n\n if let Some(texture) = world_state\n\n .resource_container\n\n .get::<dyn TextureResource>(file_path)\n\n {\n\n let inspector_state = use_global::<InspectorState>();\n\n inspector_state.select(Box::new(texture.clone()));\n\n }\n\n };\n\n}\n", "file_path": "fruity_editor/fruity_editor_graphic/src/file_type/image.rs", "rank": 28, "score": 202292.14631996327 }, { "content": "pub fn on_selected_shader(file_path: &str) {\n\n let world_state = use_global::<WorldState>();\n\n\n\n if let Some(texture) = world_state\n\n .resource_container\n\n .get::<dyn ShaderResource>(file_path)\n\n {\n\n let inspector_state = use_global::<InspectorState>();\n\n inspector_state.select(Box::new(texture.clone()));\n\n } else {\n\n if let Err(_) = world_state\n\n .resource_container\n\n .load_resource_file(file_path, \"wgsl\")\n\n {\n\n return;\n\n }\n\n\n\n if let Some(texture) = world_state\n\n .resource_container\n\n .get::<dyn ShaderResource>(file_path)\n\n {\n\n let inspector_state = use_global::<InspectorState>();\n\n inspector_state.select(Box::new(texture.clone()));\n\n }\n\n };\n\n}\n", "file_path": "fruity_editor/fruity_editor_graphic/src/file_type/shader.rs", "rank": 29, "score": 202292.14631996327 }, { "content": "pub fn on_selected_js(file_path: &str) {\n\n // TODO: Display an error popup if failed\n\n edit::edit_file(file_path).unwrap();\n\n}\n", "file_path": "fruity_editor/fruity_editor_javascript/src/file_type/js.rs", "rank": 30, "score": 202292.14631996327 }, { "content": "pub fn configure_console(runtime: &mut JsRuntime) {\n\n let mut global_object = runtime.global_object();\n\n let scope = &mut runtime.handle_scope();\n\n let mut console_object = 
JsObject::new(scope);\n\n\n\n console_object.set_func(\n\n scope,\n\n \"log\",\n\n |scope: &mut v8::HandleScope,\n\n args: v8::FunctionCallbackArguments,\n\n mut _retval: v8::ReturnValue| {\n\n print_args(scope, args, |message| log::debug!(\"{}\", message));\n\n },\n\n None,\n\n );\n\n\n\n console_object.set_func(\n\n scope,\n\n \"debug\",\n\n |scope: &mut v8::HandleScope,\n", "file_path": "fruity_core/fruity_javascript/src/bridge/console.rs", "rank": 31, "score": 199953.1954633094 }, { "content": "pub fn read_mut_state_with_topo_id<T: 'static, F: FnOnce(&mut T) -> R, R>(\n\n id: topo::CallId,\n\n func: F,\n\n) -> R {\n\n let mut item = remove_state_with_topo_id::<T>(id)\n\n .expect(\"You are trying to read a type state that doesnt exist in this context!\");\n\n let read = func(&mut item);\n\n set_state_with_topo_id(item, id);\n\n read\n\n}\n\n\n", "file_path": "deps/comp_state/src/state_functions.rs", "rank": 32, "score": 198960.69642267804 }, { "content": "/// Serialize a [’Serialized’] as a yaml file\n\n///\n\n/// # Arguments\n\n/// * `reader` - The read io stream\n\n///\n\npub fn serialize_yaml(\n\n writer: &mut dyn Write,\n\n serialized: &Serialized,\n\n) -> Result<(), std::io::Error> {\n\n let yaml = intern_serialize_yaml(serialized);\n\n\n\n let mut write_buf = String::new();\n\n let mut emitter = YamlEmitter::new(&mut write_buf);\n\n emitter.dump(&yaml).unwrap();\n\n\n\n writer.write_all(write_buf.as_bytes())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "fruity_core/src/serialize/yaml.rs", "rank": 33, "score": 198746.77548357527 }, { "content": "/// Build a Settings by reading a yaml document\n\npub fn read_settings(reader: &mut dyn Read) -> Settings {\n\n let mut buffer = String::new();\n\n if let Err(err) = reader.read_to_string(&mut buffer) {\n\n log::error!(\"{}\", err.to_string());\n\n return Settings::Object(HashMap::new());\n\n }\n\n\n\n let docs = YamlLoader::load_from_str(&buffer).unwrap();\n\n let root = &docs[0];\n\n\n\n if let Some(settings) = 
build_settings_from_yaml(root) {\n\n settings\n\n } else {\n\n return Settings::Object(HashMap::new());\n\n }\n\n}\n\n\n", "file_path": "fruity_core/src/settings.rs", "rank": 34, "score": 198124.5644503595 }, { "content": "/// Trait to implement static introspection to an object\n\npub trait InstantiableObject {\n\n /// Get a constructor to instantiate an introspect object\n\n fn get_constructor() -> Constructor;\n\n}\n\n\n", "file_path": "fruity_core/src/introspect/mod.rs", "rank": 35, "score": 197526.81023072905 }, { "content": "pub fn serialize_v8<'a>(\n\n scope: &mut v8::HandleScope<'a>,\n\n value: Serialized,\n\n) -> Option<v8::Local<'a, v8::Value>> {\n\n match value {\n\n Serialized::I8(value) => Some(v8::Integer::new(scope, value as i32).into()),\n\n Serialized::I16(value) => Some(v8::Integer::new(scope, value as i32).into()),\n\n Serialized::I32(value) => Some(v8::Integer::new(scope, value).into()),\n\n Serialized::I64(value) => Some(v8::BigInt::new_from_i64(scope, value).into()),\n\n Serialized::ISize(value) => Some(v8::Integer::new(scope, value as i32).into()),\n\n Serialized::U8(value) => Some(v8::Integer::new_from_unsigned(scope, value as u32).into()),\n\n Serialized::U16(value) => Some(v8::Integer::new_from_unsigned(scope, value as u32).into()),\n\n Serialized::U32(value) => Some(v8::Integer::new_from_unsigned(scope, value).into()),\n\n Serialized::U64(value) => Some(v8::BigInt::new_from_u64(scope, value).into()),\n\n Serialized::USize(value) => {\n\n Some(v8::Integer::new_from_unsigned(scope, value as u32).into())\n\n }\n\n Serialized::F32(value) => Some(v8::Number::new(scope, value as f64).into()),\n\n Serialized::F64(value) => Some(v8::Number::new(scope, value).into()),\n\n Serialized::Bool(value) => Some(v8::Boolean::new(scope, value).into()),\n", "file_path": "fruity_core/fruity_javascript/src/serialize/serialize.rs", "rank": 36, "score": 196690.86443569948 }, { "content": "pub fn draw_profiling(ui: &mut egui::Ui, _ctx: &mut DrawContext) {\n\n 
puffin_egui::profiler_ui(ui);\n\n}\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/profiling.rs", "rank": 37, "score": 195534.12328024258 }, { "content": "#[topo::nested]\n\npub fn draw_scene(ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n // Initialize local state\n\n let world_state = use_global::<WorldState>();\n\n let center_state = use_state(|| Vector2d::default());\n\n let zoom_state = use_state(|| 4.0 as f32);\n\n\n\n // Get available dimensions\n\n let rect = ui.available_rect_before_wrap();\n\n let width = (ui.available_width() / ui.input().physical_pixel_size()) as u32;\n\n let height = (ui.available_height() / ui.input().physical_pixel_size()) as u32;\n\n let ratio = ui.available_width() / ui.available_height();\n\n\n\n // Update viewport properties\n\n {\n\n let graphic_service = world_state\n\n .resource_container\n\n .require::<dyn GraphicService>();\n\n let graphic_service = graphic_service.read();\n\n\n\n graphic_service.set_viewport_offset(rect.left() as u32 * 2, rect.top() as u32 * 2);\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/scene.rs", "rank": 38, "score": 195534.12328024258 }, { "content": "pub trait UIWidget: Any {\n\n fn elem(self) -> UIElement;\n\n}\n\n\n\npub struct UIElement {\n\n pub root: Box<dyn Any>,\n\n}\n\n\n\nimpl Default for UIElement {\n\n fn default() -> Self {\n\n Empty {}.elem()\n\n }\n\n}\n", "file_path": "fruity_editor/src/ui_element/mod.rs", "rank": 39, "score": 192202.18993995828 }, { "content": "pub fn draw_entity_line(\n\n entity: EntityReference,\n\n all_entities: &Vec<EntityReference>,\n\n entity_service: ResourceReference<EntityService>,\n\n) -> UIElement {\n\n let entity_2 = entity.clone();\n\n let entity_3 = entity.clone();\n\n let entity_reader = entity.read();\n\n let entity_id = entity_reader.get_entity_id();\n\n\n\n let children = all_entities\n\n .iter()\n\n .filter(|entity| {\n\n if let Some(parent) = entity.read().read_single_component::<Parent>() 
{\n\n if let Some(parent_id) = *parent.parent_id {\n\n parent_id == entity_id\n\n } else {\n\n false\n\n }\n\n } else {\n", "file_path": "fruity_editor/fruity_editor_hierarchy/src/components/entity/entity_list.rs", "rank": 40, "score": 191081.3834575919 }, { "content": "pub trait CloneState<T>\n\nwhere\n\n T: Clone + 'static,\n\n{\n\n fn get(&self) -> T;\n\n\n\n fn soft_get(&self) -> Option<T>;\n\n}\n\n\n\nimpl<T> CloneState<T> for StateAccess<T>\n\nwhere\n\n T: Clone + 'static,\n\n{\n\n /// returns a clone of the stored state panics if not stored.\n\n fn get(&self) -> T {\n\n clone_state_with_topo_id::<T>(self.id).expect(\"state should be present\")\n\n }\n\n\n\n fn soft_get(&self) -> Option<T> {\n\n clone_state_with_topo_id::<T>(self.id)\n\n }\n\n}\n\n\n", "file_path": "deps/comp_state/src/state_access.rs", "rank": 41, "score": 190473.65159956887 }, { "content": "pub fn push_thread_scope_stack(scope: &mut v8::HandleScope) {\n\n THREAD_SCOPE_STACK.with(|scope_stack| {\n\n let scope_stack = unsafe { &mut *scope_stack.get() };\n\n let scope =\n\n unsafe { std::mem::transmute::<&mut v8::HandleScope, &mut v8::HandleScope>(scope) };\n\n scope_stack.push(scope);\n\n });\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/thread_scope_stack.rs", "rank": 42, "score": 189185.5660219224 }, { "content": "#[inline]\n\npub fn copy<T>(dst: &mut [T], src: &[T]) -> usize\n\nwhere\n\n T: Copy,\n\n{\n\n #[cfg(feature = \"nightly\")]\n\n {\n\n dst.copy(src)\n\n }\n\n #[cfg(not(feature = \"nightly\"))]\n\n {\n\n let len = min(src.len(), dst.len());\n\n (&mut dst[..len]).copy_from_slice(&src[..len]);\n\n len\n\n }\n\n}\n\n\n\n/// Clones as many `T` as possible from `src` into `dst`, returning the number of `T` cloned. 
This\n\n/// function is short form for `dst.clone_from_slice(src)`, but accounts for if their lengths are\n\n/// unequal to avoid panics.\n\n///\n", "file_path": "fruity_core/src/utils/slice.rs", "rank": 43, "score": 188705.75212056283 }, { "content": "/// Clones the state of type T keyed to the given TopoId\n\npub fn clone_state_with_topo_id<T: 'static + Clone>(id: topo::CallId) -> Option<T> {\n\n STORE.with(|store_refcell| {\n\n store_refcell\n\n .borrow_mut()\n\n .get_state_with_topo_id::<T>(id)\n\n .cloned()\n\n })\n\n}\n\n\n", "file_path": "deps/comp_state/src/state_functions.rs", "rank": 44, "score": 188195.8766617318 }, { "content": "pub fn edit_introspect_fields(introspect_object: Box<dyn SerializableObject>) -> UIElement {\n\n let fields_edit = introspect_object\n\n .deref()\n\n .get_field_infos()\n\n .into_iter()\n\n .map(|field_info| {\n\n let field_value = (field_info.getter)(introspect_object.deref().as_any_ref());\n\n let introspect_object = introspect_object.duplicate();\n\n\n\n let name = field_info.name.clone();\n\n field_editor(\n\n &name,\n\n field_value,\n\n Box::new(move |new_value| {\n\n match &field_info.setter {\n\n SetterCaller::Const(call) => {\n\n call(introspect_object.deref().as_any_ref(), new_value)\n\n }\n\n SetterCaller::Mut(call) => {\n\n let mut introspect_object = introspect_object.duplicate();\n", "file_path": "fruity_editor/src/components/fields/mod.rs", "rank": 45, "score": 186720.77888942295 }, { "content": "pub fn draw_empty<'a>(_ui: &mut egui::Ui) {}\n\n\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/layout.rs", "rank": 46, "score": 186280.49563633977 }, { "content": "#[topo::nested]\n\npub fn draw_button<'a>(elem: Button, ui: &mut egui::Ui, _ctx: &mut DrawContext) {\n\n let response = ui.add_enabled(elem.enabled, egui::Button::new(elem.label.clone()));\n\n\n\n if response.clicked() {\n\n (elem.on_click)()\n\n }\n\n\n\n if elem.secondary_actions.len() > 0 {\n\n if response.secondary_clicked() 
{\n\n let secondary_action_state = use_global::<SecondaryActionState>();\n\n secondary_action_state.display_secondary_actions(\n\n ui,\n\n response.clone(),\n\n elem.secondary_actions.clone(),\n\n )\n\n }\n\n }\n\n\n\n // Handle drag & drop\n\n if let Some(drag_item) = &elem.drag_item {\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/input.rs", "rank": 47, "score": 186213.61275687913 }, { "content": "pub fn draw_column<'a>(elem: Column, ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n ui.with_layout(\n\n Layout::top_down(match elem.align {\n\n UIAlign::Start => Align::Min,\n\n UIAlign::Center => Align::Center,\n\n UIAlign::End => Align::Max,\n\n }),\n\n |ui| {\n\n elem.children.into_iter().for_each(|child| {\n\n draw_element(child, ui, ctx);\n\n });\n\n },\n\n );\n\n}\n\n\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/layout.rs", "rank": 48, "score": 186213.61275687913 }, { "content": "pub fn draw_text<'a>(elem: Text, ui: &mut egui::Ui, _ctx: &mut DrawContext) {\n\n ui.add(egui::Label::new(elem.text));\n\n}\n\n\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/display.rs", "rank": 49, "score": 186213.61275687913 }, { "content": "pub fn draw_scroll<'a>(elem: Scroll, ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n let scroll_area = match (elem.horizontal, elem.vertical) {\n\n (false, false) => ScrollArea::neither().auto_shrink([false; 2]),\n\n (true, false) => ScrollArea::horizontal().auto_shrink([false; 2]),\n\n (false, true) => ScrollArea::vertical().auto_shrink([false; 2]),\n\n (true, true) => ScrollArea::both().auto_shrink([false; 2]),\n\n };\n\n\n\n scroll_area.show(ui, |ui| draw_element(elem.child, ui, ctx));\n\n}\n\n\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/layout.rs", "rank": 50, "score": 186213.61275687913 }, { "content": "pub fn draw_checkbox<'a>(elem: Checkbox, ui: &mut egui::Ui, _ctx: &mut DrawContext) {\n\n let mut new_value = elem.value;\n\n 
ui.add(egui::Checkbox::new(&mut new_value, &elem.label));\n\n\n\n if new_value != elem.value {\n\n (elem.on_change)(new_value);\n\n }\n\n}\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/input.rs", "rank": 51, "score": 186213.61275687913 }, { "content": "#[topo::nested]\n\npub fn draw_popup<'a>(elem: Popup, ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n let popup_id = ui.make_persistent_id(CallId::current());\n\n\n\n let response =\n\n ui.allocate_response(egui::vec2(ui.available_size().x, 0.0), egui::Sense::click());\n\n egui::popup::popup_below_widget(ui, popup_id, &response, |ui| {\n\n draw_element(elem.content, ui, ctx)\n\n });\n\n ui.memory().open_popup(popup_id);\n\n}\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/display.rs", "rank": 52, "score": 186213.61275687913 }, { "content": "pub fn draw_row<'a>(elem: Row, ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n // Get the base available space informations\n\n let available_width = ui.available_size_before_wrap().x;\n\n let origin_pos = ui.available_rect_before_wrap().left_top();\n\n\n\n // If the size have changed or new child has been added, we the child line break should be changes\n\n let mut allocated_rect = egui::Rect::from_min_size(origin_pos, egui::Vec2::new(0.0, 0.0));\n\n let mut relative_pos = egui::Vec2::new(0.0, 0.0);\n\n let mut current_non_units_width = available_width;\n\n let mut current_line_height = 0.0;\n\n\n\n for child in elem.children.into_iter() {\n\n // Get the elem width\n\n let child_width = match child.size {\n\n UISize::Fill => current_non_units_width,\n\n UISize::FillPortion(portion) => current_non_units_width * portion,\n\n UISize::Units(units) => {\n\n current_non_units_width -= units;\n\n units\n\n }\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/layout.rs", "rank": 53, "score": 186213.61275687913 }, { "content": "#[topo::nested]\n\npub fn draw_input<'a>(elem: Input, ui: &mut egui::Ui, _ctx: &mut 
DrawContext) {\n\n let input_value = use_state(|| String::default());\n\n\n\n let mut new_value = input_value.get();\n\n let response = ui.add(egui::TextEdit::singleline(&mut new_value).hint_text(&elem.placeholder));\n\n\n\n if response.lost_focus() {\n\n (elem.on_change)(&new_value);\n\n }\n\n\n\n if response.changed() {\n\n (elem.on_edit)(&new_value);\n\n input_value.set(new_value);\n\n }\n\n\n\n if !response.has_focus() {\n\n input_value.set(elem.value);\n\n }\n\n}\n\n\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/input.rs", "rank": 54, "score": 186213.61275687913 }, { "content": "pub fn draw_collapsible<'a>(elem: Collapsible, ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n let title = elem.title.clone();\n\n let on_click = elem.on_click.clone();\n\n let response = CollapsingHeader::new(title)\n\n .selectable(true)\n\n .show(ui, |ui| draw_element(elem.child, ui, ctx));\n\n\n\n if response.header_response.clicked() {\n\n if let Some(on_click) = on_click {\n\n on_click();\n\n }\n\n }\n\n\n\n if elem.secondary_actions.len() > 0 {\n\n if response.header_response.secondary_clicked() {\n\n let secondary_action_state = use_global::<SecondaryActionState>();\n\n secondary_action_state.display_secondary_actions(\n\n ui,\n\n response.header_response.clone(),\n\n elem.secondary_actions.clone(),\n\n )\n\n }\n\n }\n\n}\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/layout.rs", "rank": 55, "score": 186213.61275687913 }, { "content": "pub fn field_editor(\n\n name: &str,\n\n value: Serialized,\n\n on_update: Box<dyn Fn(Serialized) + Send + Sync + 'static>,\n\n) -> UIElement {\n\n match value {\n\n Serialized::U8(value) => draw_editor_u8(name, Serialized::U8(value), on_update),\n\n Serialized::U16(value) => draw_editor_u16(name, Serialized::U16(value), on_update),\n\n Serialized::U32(value) => draw_editor_u32(name, Serialized::U32(value), on_update),\n\n Serialized::U64(value) => draw_editor_u64(name, 
Serialized::U64(value), on_update),\n\n Serialized::USize(value) => draw_editor_usize(name, Serialized::USize(value), on_update),\n\n Serialized::I8(value) => draw_editor_i8(name, Serialized::I8(value), on_update),\n\n Serialized::I16(value) => draw_editor_i16(name, Serialized::I16(value), on_update),\n\n Serialized::I32(value) => draw_editor_i32(name, Serialized::I32(value), on_update),\n\n Serialized::I64(value) => draw_editor_i64(name, Serialized::I64(value), on_update),\n\n Serialized::ISize(value) => draw_editor_isize(name, Serialized::ISize(value), on_update),\n\n Serialized::F32(value) => draw_editor_f32(name, Serialized::F32(value), on_update),\n\n Serialized::F64(value) => draw_editor_f64(name, Serialized::F64(value), on_update),\n\n Serialized::Bool(value) => draw_editor_bool(name, Serialized::Bool(value), on_update),\n\n Serialized::String(value) => draw_editor_string(name, Serialized::String(value), on_update),\n", "file_path": "fruity_editor/src/components/fields/mod.rs", "rank": 56, "score": 185575.86412663214 }, { "content": "pub fn entity_list_component() -> UIElement {\n\n let world_state = use_global::<WorldState>();\n\n\n\n let resource_container = world_state.resource_container.clone();\n\n let entity_service = resource_container.require::<EntityService>();\n\n let entity_service_reader = entity_service.read();\n\n\n\n let all_entities = entity_service_reader\n\n .iter_all_entities()\n\n .collect::<Vec<_>>();\n\n\n\n let root_entities = all_entities\n\n .iter()\n\n .filter(|entity| {\n\n if let Some(parent) = entity.read().read_single_component::<Parent>() {\n\n if let Some(_) = *parent.parent_id {\n\n false\n\n } else {\n\n true\n\n }\n", "file_path": "fruity_editor/fruity_editor_hierarchy/src/components/entity/entity_list.rs", "rank": 57, "score": 185125.0671534439 }, { "content": "pub fn state_exists_for_topo_id<T: 'static>(id: topo::CallId) -> bool {\n\n STORE.with(|store_refcell| 
store_refcell.borrow().state_exists_with_topo_id::<T>(id))\n\n}\n\n\n", "file_path": "deps/comp_state/src/state_functions.rs", "rank": 58, "score": 184580.73915775417 }, { "content": "pub fn draw_editor_bool(\n\n name: &str,\n\n value: Serialized,\n\n on_update: impl Fn(Serialized) + Send + Sync + 'static,\n\n) -> UIElement {\n\n let value = if let Ok(value) = bool::fruity_try_from(value) {\n\n value\n\n } else {\n\n bool::default()\n\n };\n\n\n\n Checkbox {\n\n label: name.to_string(),\n\n value: value,\n\n on_change: Arc::new(move |value| {\n\n on_update(value.fruity_into());\n\n }),\n\n }\n\n .elem()\n\n}\n\n\n", "file_path": "fruity_editor/src/components/fields/primitive.rs", "rank": 59, "score": 183867.49838070065 }, { "content": "pub fn draw_editor_string(\n\n name: &str,\n\n value: Serialized,\n\n on_update: impl Fn(Serialized) + Send + Sync + 'static,\n\n) -> UIElement {\n\n let value = if let Ok(value) = String::fruity_try_from(value) {\n\n value\n\n } else {\n\n String::default()\n\n };\n\n\n\n Row {\n\n children: vec![\n\n RowItem {\n\n size: UISize::Units(40.0),\n\n child: Text {\n\n text: name.to_string(),\n\n ..Default::default()\n\n }\n\n .elem(),\n", "file_path": "fruity_editor/src/components/fields/primitive.rs", "rank": 60, "score": 183854.235026585 }, { "content": "pub fn draw_float_input<'a>(elem: FloatInput, ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n let input = Input {\n\n value: elem.value.to_string(),\n\n placeholder: \"\".to_string(),\n\n on_change: Arc::new(move |value: &str| {\n\n if let Ok(value) = value.parse::<f64>() {\n\n (elem.on_change)(value)\n\n }\n\n }),\n\n ..Default::default()\n\n };\n\n\n\n draw_input(input, ui, ctx)\n\n}\n\n\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/input.rs", "rank": 61, "score": 183398.6888243354 }, { "content": "#[topo::nested]\n\npub fn draw_menu_bar<'a>(elem: MenuBar, _ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n 
egui::TopBottomPanel::top(\"menu_bar\").show(&ctx.platform.context(), |ui| {\n\n menu::bar(ui, |ui| {\n\n elem.children\n\n .into_iter()\n\n .for_each(|child| draw_element(child, ui, ctx));\n\n });\n\n });\n\n}\n\n\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/menu.rs", "rank": 62, "score": 183398.68882433538 }, { "content": "pub fn draw_integer_input<'a>(elem: IntegerInput, ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n let input = Input {\n\n value: elem.value.to_string(),\n\n placeholder: \"\".to_string(),\n\n on_change: Arc::new(move |value: &str| {\n\n if let Ok(value) = value.parse::<i64>() {\n\n (elem.on_change)(value)\n\n }\n\n }),\n\n ..Default::default()\n\n };\n\n\n\n draw_input(input, ui, ctx)\n\n}\n\n\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/input.rs", "rank": 63, "score": 183398.6888243354 }, { "content": "pub fn draw_pane_grid<'a>(elem: PaneGrid, _ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n // Initialize the pane grid state\n\n let panes = elem.panes.clone();\n\n let left_panes = use_state(|| {\n\n panes\n\n .into_iter()\n\n .filter(|pane| pane.default_side == UIPaneSide::Left)\n\n .collect::<Vec<_>>()\n\n });\n\n\n\n let panes = elem.panes.clone();\n\n let right_panes = use_state(|| {\n\n panes\n\n .into_iter()\n\n .filter(|pane| pane.default_side == UIPaneSide::Right)\n\n .collect::<Vec<_>>()\n\n });\n\n\n\n let panes = elem.panes.clone();\n\n let bottom_panes = use_state(|| {\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/pane.rs", "rank": 64, "score": 183398.68882433538 }, { "content": "#[topo::nested]\n\npub fn draw_menu_section<'a>(elem: MenuSection, ui: &mut egui::Ui, _ctx: &mut DrawContext) {\n\n menu::menu(ui, elem.label, {\n\n let items = elem.items;\n\n |ui| {\n\n items.into_iter().for_each({\n\n |item| {\n\n if ui.button(item.label).clicked() {\n\n (item.on_click)()\n\n }\n\n }\n\n });\n\n }\n\n });\n\n}\n", "file_path": 
"fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/menu.rs", "rank": 65, "score": 183398.68882433538 }, { "content": "#[topo::nested]\n\npub fn draw_image_button<'a>(elem: ImageButton, ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n let egui_texture_id = {\n\n let image = elem.image.read();\n\n let image = image.downcast_ref::<WgpuTextureResource>();\n\n\n\n ctx.egui_rpass.egui_texture_from_wgpu_texture(\n\n ctx.device,\n\n &image.texture,\n\n wgpu::FilterMode::Linear,\n\n )\n\n };\n\n\n\n let response = ui.add(egui::ImageButton::new(\n\n egui_texture_id,\n\n egui::Vec2::new(elem.width, elem.height),\n\n ));\n\n\n\n if response.clicked() {\n\n (elem.on_click)()\n\n }\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/input.rs", "rank": 66, "score": 183398.68882433538 }, { "content": "#[topo::nested]\n\npub fn draw_list_view<'a>(elem: ListView, ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n let scroll_area = ScrollArea::vertical().auto_shrink([false; 2]);\n\n\n\n let render_item = elem.render_item.clone();\n\n scroll_area.show(ui, |ui| {\n\n ui.vertical(|ui| {\n\n elem.items.into_iter().for_each(|item| {\n\n let item = render_item(item.deref());\n\n\n\n draw_element(item, ui, ctx)\n\n })\n\n });\n\n });\n\n}\n", "file_path": "fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/list.rs", "rank": 67, "score": 183398.68882433538 }, { "content": "pub fn draw_pane<'a>(panes: Vec<Pane>, ui: &mut egui::Ui, ctx: &mut DrawContext) {\n\n let current_tab = use_state(|| usize::default());\n\n let mut current_tab_value = current_tab.get();\n\n\n\n ui.horizontal(|ui| {\n\n panes.iter().enumerate().for_each(|(index, pane)| {\n\n ui.selectable_value(&mut current_tab_value, index, &pane.title);\n\n });\n\n });\n\n ui.end_row();\n\n current_tab.set(current_tab_value);\n\n\n\n if let Some(current_pane) = panes.get(current_tab.get()) {\n\n draw_element((current_pane.render)(), ui, ctx)\n\n }\n\n}\n", "file_path": 
"fruity_platform/pc_mac/fruity_egui_editor/src/ui_element/pane.rs", "rank": 68, "score": 182899.821274199 }, { "content": "pub fn configure_constructors(runtime: &mut JsRuntime, resource_container: Arc<ResourceContainer>) {\n\n let mut global_object = runtime.global_object();\n\n let scope = &mut runtime.handle_scope();\n\n\n\n let object_factory_service = resource_container.require::<ObjectFactoryService>();\n\n let object_factory_service_reader = object_factory_service.read();\n\n\n\n object_factory_service_reader.iter().for_each(|(key, ..)| {\n\n let mut data_fields = HashMap::new();\n\n\n\n data_fields.insert(\n\n \"object_factory_service\".to_string(),\n\n Serialized::NativeObject(Box::new(object_factory_service.clone())),\n\n );\n\n\n\n data_fields.insert(\n\n \"object_identifier\".to_string(),\n\n Serialized::String(key.clone()),\n\n );\n\n\n", "file_path": "fruity_core/fruity_javascript/src/bridge/constructors.rs", "rank": 69, "score": 181828.78149698148 }, { "content": "pub fn store_callback(\n\n scope: &mut v8::HandleScope,\n\n v8_value: v8::Local<v8::Function>,\n\n) -> CallbackIdentifier {\n\n let (storage, last_id) = get_callback_storage(scope);\n\n let last_id = last_id.value();\n\n let callback_id = (last_id + 1) as i32;\n\n let v8_callback_id = v8::Integer::new(scope, callback_id);\n\n\n\n storage.set(scope, v8_callback_id.into(), v8_value.into());\n\n\n\n set_callback_storage(scope, storage.into(), v8_callback_id.into());\n\n CallbackIdentifier(callback_id)\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/js_value/utils.rs", "rank": 70, "score": 181521.77987152836 }, { "content": "/// Called by V8 during `JsRuntime::instantiate_module`.\n\n///\n\n/// This function is made to load module files recursively\n\npub fn module_resolve_callback<'s>(\n\n context: v8::Local<'s, v8::Context>,\n\n specifier: v8::Local<'s, v8::String>,\n\n _import_assertions: v8::Local<'s, v8::FixedArray>,\n\n referrer: v8::Local<'s, v8::Module>,\n\n) -> 
Option<v8::Local<'s, v8::Module>> {\n\n let scope = &mut unsafe { v8::CallbackScope::new(context) };\n\n\n\n // Get included module path\n\n let referrer_directory = get_referrer_directory(scope, referrer);\n\n let included_module_path = get_specifier_filename(scope, specifier, &referrer_directory);\n\n\n\n // Create the module\n\n let module = match included_module_path {\n\n Ok(filepath) => match compile_module(scope, &filepath) {\n\n Ok(module) => Some(module),\n\n Err(_err) => None,\n\n },\n\n Err(_err) => None,\n\n };\n\n\n\n // Return the newly created module\n\n module\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/runtime.rs", "rank": 71, "score": 181375.6895284332 }, { "content": "pub fn pop_thread_scope_stack<'a>() -> Option<&'a mut v8::HandleScope<'a>> {\n\n let scope = THREAD_SCOPE_STACK.with(|scope_stack| {\n\n let scope_stack = unsafe { &mut *scope_stack.get() };\n\n scope_stack.pop()\n\n });\n\n\n\n scope.map(|scope| unsafe {\n\n std::mem::transmute::<&mut v8::HandleScope, &mut v8::HandleScope>(scope)\n\n })\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/thread_scope_stack.rs", "rank": 72, "score": 181079.02310289774 }, { "content": "pub fn top_thread_scope_stack<'a>() -> Option<&'a mut v8::HandleScope<'a>> {\n\n let scope = THREAD_SCOPE_STACK.with(|scope_stack| {\n\n let scope_stack = unsafe { &mut *scope_stack.get() };\n\n scope_stack.last_mut()\n\n });\n\n\n\n scope.map(|scope| unsafe {\n\n std::mem::transmute::<&mut v8::HandleScope, &mut v8::HandleScope>(scope)\n\n })\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/thread_scope_stack.rs", "rank": 73, "score": 181079.02310289774 }, { "content": "pub fn deserialize_v8<'a>(\n\n scope: &mut v8::HandleScope<'a>,\n\n v8_value: v8::Local<v8::Value>,\n\n) -> Option<Serialized> {\n\n if v8_value.is_int32() {\n\n return Some(Serialized::I32(v8_value.int32_value(scope).unwrap()));\n\n }\n\n\n\n if v8_value.is_uint32() {\n\n return 
Some(Serialized::U32(v8_value.uint32_value(scope).unwrap()));\n\n }\n\n\n\n if v8_value.is_big_int() {\n\n let big_int = v8_value.to_big_int(scope).unwrap();\n\n return Some(Serialized::I64(big_int.i64_value().0));\n\n }\n\n\n\n if v8_value.is_number() {\n\n return Some(Serialized::F64(v8_value.number_value(scope).unwrap()));\n\n }\n", "file_path": "fruity_core/fruity_javascript/src/serialize/deserialize.rs", "rank": 74, "score": 181044.6980099394 }, { "content": "pub fn inspector_component() -> UIElement {\n\n let inspector_state = use_global::<InspectorState>();\n\n inspector_state.inspect()\n\n}\n", "file_path": "fruity_editor/src/components/inspector/mod.rs", "rank": 75, "score": 178448.18501627463 }, { "content": "pub fn get_thumbnail_js(_file_path: &str) -> Option<ResourceReference<dyn TextureResource>> {\n\n let world_state = use_global::<WorldState>();\n\n\n\n world_state\n\n .resource_container\n\n .get::<dyn TextureResource>(\"Editor/Icons/js\")\n\n}\n\n\n", "file_path": "fruity_editor/fruity_editor_javascript/src/file_type/js.rs", "rank": 76, "score": 177679.5517248013 }, { "content": "pub fn get_thumbnail_image(file_path: &str) -> Option<ResourceReference<dyn TextureResource>> {\n\n let world_state = use_global::<WorldState>();\n\n\n\n if let Some(texture) = world_state\n\n .resource_container\n\n .get::<dyn TextureResource>(file_path)\n\n {\n\n Some(texture)\n\n } else {\n\n world_state\n\n .resource_container\n\n .load_resource_file(file_path, \"png\")\n\n .ok()?;\n\n\n\n world_state\n\n .resource_container\n\n .get::<dyn TextureResource>(file_path)\n\n }\n\n}\n\n\n", "file_path": "fruity_editor/fruity_editor_graphic/src/file_type/image.rs", "rank": 77, "score": 177679.5517248013 }, { "content": "pub fn get_thumbnail_shader(_file_path: &str) -> Option<ResourceReference<dyn TextureResource>> {\n\n let world_state = use_global::<WorldState>();\n\n\n\n world_state\n\n .resource_container\n\n .get::<dyn 
TextureResource>(\"Editor/Icons/shader\")\n\n}\n\n\n", "file_path": "fruity_editor/fruity_editor_graphic/src/file_type/shader.rs", "rank": 78, "score": 177679.5517248013 }, { "content": "pub fn get_stored_callback<'a>(\n\n scope: &mut v8::HandleScope<'a>,\n\n identifier: CallbackIdentifier,\n\n) -> Option<v8::Local<'a, v8::Function>> {\n\n let (storage, ..) = get_callback_storage(scope);\n\n let callback_id = v8::Integer::new(scope, identifier.0);\n\n let callback = storage.get(scope, callback_id.into())?;\n\n v8::Local::<v8::Function>::try_from(callback).ok()\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/js_value/utils.rs", "rank": 79, "score": 176844.6220701408 }, { "content": "pub fn draw_menu_component() -> UIElement {\n\n let mut children = menu_sections_component();\n\n children.append(&mut run_controls_component());\n\n\n\n MenuBar { children }.elem()\n\n}\n", "file_path": "fruity_editor/src/components/menu/mod.rs", "rank": 80, "score": 176234.3396528871 }, { "content": "/// Provides mutable access to the stored state type T.\n\n///\n\n/// Example:\n\n///\n\n/// ```\n\n/// update_state_with_topo_id::<Vec<String>>( topo::CallId::current(), |v|\n\n/// v.push(\"foo\".to_string()\n\n/// )\n\n///\n\npub fn update_state_with_topo_id<T: 'static, F: FnOnce(&mut T) -> ()>(id: topo::CallId, func: F) {\n\n let mut item = remove_state_with_topo_id::<T>(id)\n\n .expect(\"You are trying to update a type state that doesnt exist in this context!\");\n\n func(&mut item);\n\n set_state_with_topo_id(item, id);\n\n}\n\n\n", "file_path": "deps/comp_state/src/state_functions.rs", "rank": 81, "score": 176228.06634088577 }, { "content": "pub fn get_resource_container(scope: &mut v8::HandleScope) -> Option<Arc<ResourceContainer>> {\n\n let context = scope.get_current_context();\n\n let global_object = context.global(scope);\n\n let resource_container_string =\n\n v8::String::new(scope, RESOURCE_MANAGER_GLOBAL_VAR_NAME).unwrap();\n\n let resource_container_v8 = 
global_object.get(scope, resource_container_string.into())?;\n\n let resource_container_v8 = v8::Local::<v8::Object>::try_from(resource_container_v8).ok()?;\n\n let resource_container: &Box<dyn SerializableObject> =\n\n get_intern_value_from_v8_object(scope, resource_container_v8)?;\n\n let resource_container = resource_container\n\n .as_any_ref()\n\n .downcast_ref::<Arc<ResourceContainer>>()\n\n .unwrap();\n\n\n\n Some(resource_container.clone())\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/js_value/utils.rs", "rank": 82, "score": 175968.99762191984 }, { "content": "pub fn check_object_intern_identifier<'a>(\n\n scope: &mut v8::HandleScope,\n\n v8_value: v8::Local<'a, v8::Value>,\n\n identifier: &str,\n\n) -> Option<v8::Local<'a, v8::Object>> {\n\n if !v8_value.is_object() {\n\n return None;\n\n }\n\n\n\n let v8_object = v8::Local::<v8::Object>::try_from(v8_value).ok()?;\n\n if v8_object.internal_field_count() < 2 {\n\n return None;\n\n }\n\n\n\n let intern_identifier = v8_object.get_internal_field(scope, 1)?;\n\n let intern_identifier = v8::Local::<v8::String>::try_from(intern_identifier).ok()?;\n\n let intern_identifier = intern_identifier.to_rust_string_lossy(scope);\n\n\n\n if intern_identifier == identifier {\n\n Some(v8_object)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/js_value/utils.rs", "rank": 83, "score": 174730.19113122852 }, { "content": "/// Encode an object as bytes into a byte array\n\n///\n\n/// # Arguments\n\n/// * `bytes` - The bytes buffer that will be written\n\n/// * `offset` - An offset\n\n/// * `size` - Ths size of the obj that will be written\n\n/// * `obj` - The object that will be written\n\n///\n\npub fn encode_into_bytes<T>(bytes: &mut [u8], offset: usize, size: usize, obj: T) {\n\n let buffer = &mut bytes[offset..(offset + size)];\n\n\n\n let encoded = unsafe {\n\n std::slice::from_raw_parts((&obj as *const T) as *const u8, std::mem::size_of::<T>())\n\n };\n\n\n\n 
copy(buffer, encoded);\n\n}\n", "file_path": "fruity_core/src/utils/slice.rs", "rank": 84, "score": 174509.89785301348 }, { "content": "pub fn file_explorer_component() -> UIElement {\n\n let file_explorer_state = use_global::<FileExplorerState>();\n\n let files = file_explorer_state.get_files();\n\n\n\n Scroll {\n\n child: Row {\n\n children: files\n\n .into_iter()\n\n .map(|file| RowItem {\n\n size: UISize::Units(64.0),\n\n child: file_item_component(file),\n\n })\n\n .collect::<Vec<_>>(),\n\n ..Default::default()\n\n }\n\n .elem(),\n\n ..Default::default()\n\n }\n\n .elem()\n\n}\n", "file_path": "fruity_editor/src/components/file_explorer/mod.rs", "rank": 85, "score": 174097.31378629958 }, { "content": "/// Display in log an error related with introspection\n\npub fn log_introspect_error(err: &IntrospectError) {\n\n match err {\n\n IntrospectError::UnknownMethod(method) => {\n\n log::error!(\"Failed to call an unknown method named {}\", method)\n\n }\n\n IntrospectError::IncorrectArgument { method, arg_index } => {\n\n log::error!(\n\n \"Failed to call method {} cause the argument n°{} have a wrong type\",\n\n method,\n\n arg_index\n\n )\n\n }\n\n IntrospectError::WrongNumberArguments {\n\n method,\n\n have,\n\n expected,\n\n } => {\n\n log::error!(\n\n \"Failed to call method {} cause you provided {} arguments, expected {}\",\n\n method,\n", "file_path": "fruity_core/src/introspect/mod.rs", "rank": 86, "score": 173989.4966519971 }, { "content": "pub fn inject_serialized_into_v8_return_value<'a>(\n\n scope: &mut v8::HandleScope,\n\n serialized: Serialized,\n\n return_value: &mut v8::ReturnValue,\n\n) {\n\n let serialized = serialize_v8(scope, serialized);\n\n\n\n if let Some(serialized) = serialized {\n\n return_value.set(serialized.into());\n\n }\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/js_value/utils.rs", "rank": 87, "score": 172314.23149277698 }, { "content": "pub fn inject_option_serialized_into_v8_return_value<'a>(\n\n scope: &mut 
v8::HandleScope,\n\n serialized: Option<Serialized>,\n\n return_value: &mut v8::ReturnValue,\n\n) {\n\n if let Some(serialized) = serialized {\n\n inject_serialized_into_v8_return_value(scope, serialized, return_value);\n\n }\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/js_value/utils.rs", "rank": 88, "score": 170314.60487010356 }, { "content": "pub fn use_list<T, F>(initial_list_fn: F) -> ListControl<T>\n\nwhere\n\n F: FnOnce() -> Vec<T>,\n\n T: Clone,\n\n{\n\n let list_access = use_state(|| List::new(initial_list_fn()));\n\n\n\n ListControl::new(list_access)\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ListControl<T>\n\nwhere\n\n T: Clone + 'static,\n\n{\n\n list_access: StateAccess<List<T>>,\n\n}\n\n\n\nimpl<T> ListControl<T>\n\nwhere\n", "file_path": "deps/comp_state/src/list.rs", "rank": 89, "score": 168726.29099152164 }, { "content": "fn iterator_for_each_callback(\n\n scope: &mut v8::HandleScope,\n\n args: v8::FunctionCallbackArguments,\n\n mut _return_value: v8::ReturnValue,\n\n) {\n\n let this = args.this();\n\n\n\n // Get callback\n\n let callback = args.get(0);\n\n match v8::Local::<v8::Function>::try_from(callback) {\n\n Ok(callback) => {\n\n // Get next function\n\n let next_string = v8::String::new(scope, \"next\").unwrap();\n\n let next = this.get(scope, next_string.into()).unwrap();\n\n let next = v8::Local::<v8::Function>::try_from(next).unwrap();\n\n\n\n // Execute next function\n\n let next_value = next.call(scope, this.into(), &[]).unwrap();\n\n let mut next_value_object = v8::Local::<v8::Object>::try_from(next_value).unwrap();\n\n\n", "file_path": "fruity_core/fruity_javascript/src/js_value/object/iterator.rs", "rank": 90, "score": 167287.62120727196 }, { "content": "#[topo::nested]\n\npub fn use_state<T: 'static, F: FnOnce() -> T>(data_fn: F) -> StateAccess<T> {\n\n use_state_current(data_fn)\n\n}\n\n\n\n///\n\n\n", "file_path": "deps/comp_state/src/state_functions.rs", "rank": 91, "score": 165245.94693731237 }, { "content": "fn 
iterator_next_callback(\n\n scope: &mut v8::HandleScope,\n\n args: v8::FunctionCallbackArguments,\n\n mut return_value: v8::ReturnValue,\n\n) {\n\n // Get this a as an iterator\n\n let intern_value = get_intern_value_from_v8_object::<\n\n Arc<RwLock<dyn Iterator<Item = Serialized> + Send + Sync>>,\n\n >(scope, args.this());\n\n\n\n if let Some(iterator) = intern_value {\n\n // Call the function\n\n let mut iterator = iterator.write();\n\n let result = iterator.next();\n\n\n\n // Return the result\n\n //let serialized = serialize_v8(scope, &serialized);\n\n let result = match result {\n\n Some(value) => serialize_v8(scope, value),\n\n None => None,\n", "file_path": "fruity_core/fruity_javascript/src/js_value/object/iterator.rs", "rank": 92, "score": 165198.917511416 }, { "content": "pub fn get_intern_value_from_v8_object_mut<'a, T: Any>(\n\n scope: &mut v8::HandleScope,\n\n v8_object: v8::Local<'a, v8::Object>,\n\n) -> Option<&'a mut T> {\n\n let this = v8_object.get_internal_field(scope, 0)?;\n\n let internal_field = unsafe { v8::Local::<v8::External>::cast(this) };\n\n let internal_object = internal_field.value() as *mut T;\n\n unsafe { internal_object.as_mut() }\n\n}\n\n\n", "file_path": "fruity_core/fruity_javascript/src/js_value/utils.rs", "rank": 93, "score": 164244.70753594546 }, { "content": "///\n\n/// Uses the current topological id to create a new state accessor\n\n///\n\n///\n\npub fn use_state_current<T: 'static, F: FnOnce() -> T>(data_fn: F) -> StateAccess<T> {\n\n let current_id = topo::CallId::current();\n\n if !state_exists_for_topo_id::<T>(current_id) {\n\n set_state_with_topo_id::<T>(data_fn(), current_id);\n\n }\n\n mark_id_as_active(current_id);\n\n StateAccess::new(current_id)\n\n}\n\n\n", "file_path": "deps/comp_state/src/state_functions.rs", "rank": 94, "score": 163737.51857089897 }, { "content": "#[cfg(feature = \"nightly\")]\n\ntrait Cpy<T = Self>\n\nwhere\n\n T: ?Sized,\n\n{\n\n fn copy(&mut self, src: &T) -> 
usize;\n\n}\n\n\n\n#[cfg(feature = \"nightly\")]\n\ndefault impl<T> Cpy<[T]> for [T]\n\nwhere\n\n T: Copy,\n\n{\n\n #[inline]\n\n fn copy(&mut self, src: &Self) -> usize {\n\n let len = min(src.len(), self.len());\n\n (&mut self[..len]).copy_from_slice(&src[..len]);\n\n len\n\n }\n\n}\n\n\n", "file_path": "fruity_core/src/utils/slice.rs", "rank": 95, "score": 160620.47786700548 }, { "content": "fn print_args<F: Fn(&str)>(\n\n scope: &mut v8::HandleScope,\n\n args: v8::FunctionCallbackArguments,\n\n print: F,\n\n) {\n\n let mut message = String::new();\n\n for i in 0..args.length() {\n\n let arg_str = &args\n\n .get(i)\n\n .to_string(scope)\n\n .unwrap()\n\n .to_rust_string_lossy(scope);\n\n\n\n if i == 0 {\n\n message += &arg_str;\n\n } else {\n\n message += \" \";\n\n message += &arg_str;\n\n }\n\n }\n\n\n\n print(&message);\n\n}\n", "file_path": "fruity_core/fruity_javascript/src/bridge/console.rs", "rank": 96, "score": 159532.1155120486 }, { "content": "#[proc_macro_derive(InstantiableObject)]\n\npub fn derive_instantiable_object_trait(input: TokenStream) -> TokenStream {\n\n let DeriveInput { ident, .. } = parse_macro_input!(input);\n\n\n\n let output = quote! {\n\n impl fruity_core::introspect::InstantiableObject for #ident {\n\n fn get_constructor() -> fruity_core::introspect::Constructor {\n\n use fruity_core::convert::FruityInto;\n\n use fruity_core::introspect::IntrospectObject;\n\n\n\n std::sync::Arc::new(|_resource_container: std::sync::Arc<fruity_core::resource::resource_container::ResourceContainer>, mut args: Vec<fruity_core::serialize::serialized::Serialized>| {\n\n let mut new_object = #ident::default();\n\n\n\n if args.len() > 0 {\n\n let serialized = args.remove(0);\n\n let new_object_fields = new_object.get_field_infos();\n\n\n\n if let fruity_core::serialize::serialized::Serialized::SerializedObject { fields, .. 
} =\n\n serialized\n\n {\n\n fields.into_iter().for_each(|(key, value)| {\n", "file_path": "fruity_core/fruity_ecs/fruity_ecs_derive/src/lib.rs", "rank": 97, "score": 158842.14420812507 }, { "content": "#[proc_macro_derive(IntrospectObject)]\n\npub fn derive_introspect_object_trait(input: TokenStream) -> TokenStream {\n\n let DeriveInput { ident, data, .. } = parse_macro_input!(input);\n\n let struct_name = ident.to_string();\n\n\n\n let body = match data {\n\n Data::Struct(ref data) => {\n\n // Create a list with all field names,\n\n let fields: Vec<_> = match data.fields {\n\n Fields::Named(ref fields) => fields\n\n .named\n\n .iter()\n\n .map(|field| {\n\n let ty = &field.ty;\n\n match &field.ident {\n\n Some(ident) => (quote! { #ident }, quote! { #ty }),\n\n None => unimplemented!(),\n\n }\n\n })\n\n .collect(),\n\n Fields::Unnamed(ref fields) => {\n", "file_path": "fruity_core/fruity_ecs/fruity_ecs_derive/src/lib.rs", "rank": 98, "score": 158842.14420812507 }, { "content": "/// Insert an element in an hashmap that contains a vec\n\n///\n\n/// # Arguments\n\n/// * `hashmap` - The hashmap\n\n/// * `key` - The key of the value that is added\n\n/// * `value` - The value that will be inserted\n\n///\n\npub fn insert_in_hashmap_vec<K: Eq + Hash, T>(hashmap: &mut HashMap<K, Vec<T>>, key: K, value: T) {\n\n if let Some(vec) = hashmap.get_mut(&key) {\n\n vec.push(value);\n\n } else {\n\n hashmap.insert(key, vec![value]);\n\n }\n\n}\n", "file_path": "fruity_core/src/utils/collection.rs", "rank": 99, "score": 157698.16470786207 } ]
Rust
src/beatmap/mod.rs
LunarCoffee/osurate
06684a4ddef5a579903e794245d98cea5d9883c8
use std::io::BufRead; pub use crate::beatmap::parser::ParseError; use crate::beatmap::parser::Parser; mod parser; #[derive(Clone, Debug)] pub struct Beatmap { pub general_info: GeneralInfo, pub editor_info: EditorInfo, pub metadata: Metadata, pub difficulty: DifficultyInfo, pub events: Events, pub timing_points: Vec<TimingPoint>, pub colors: Option<Colors>, pub hit_objects: Vec<HitObject>, } impl Beatmap { pub fn parse(reader: impl BufRead) -> parser::Result<Beatmap> { Parser::new(reader).parse() } pub fn change_rate(&mut self, rate: f64) -> bool { let transform_f64 = |n| n / rate + 75.; let transform = |n| transform_f64(n as f64) as i32; let preview = self.general_info.preview_time; self.general_info.preview_time = if preview >= 0 { transform(preview) } else { preview }; self.metadata.diff_name += &format!(" ({}x)", rate); for mut point in &mut self.timing_points { point.time = transform_f64(point.time); if point.beat_len.is_sign_positive() { point.beat_len /= rate; } } for mut object in &mut self.hit_objects { object.time = transform(object.time); match object.params { HitObjectParams::Spinner(end_time) => object.params = HitObjectParams::Spinner(transform(end_time)), HitObjectParams::LongNote(end_time) => { let rest = match object.rest_parts[2].split_once(':') { Some((_, rest)) => rest, _ => return false, }; object.rest_parts[2] = transform(end_time).to_string() + ":" + rest; } _ => {} } } true } pub fn into_string(self) -> String { format!( "osu file format v14\n\n{}\n{}\n{}\n{}\n{}\n[TimingPoints]\n{}\n\n{}\n[HitObjects]\n{}", self.general_info.into_string(), self.editor_info.into_string(), self.metadata.into_string(), self.difficulty.into_string(), self.events.into_string(), self.timing_points.into_iter().map(|p| p.into_string()).collect::<Vec<_>>().join("\n"), self.colors.map(|c| c.into_string()).unwrap_or(String::new()), self.hit_objects.into_iter().map(|p| p.into_string()).collect::<Vec<_>>().join("\n"), ) } } #[derive(Clone, Debug)] pub struct GeneralInfo 
{ pub audio_file: String, pub preview_time: i32, rest: String, } impl GeneralInfo { fn into_string(self) -> String { format!("[General]\nAudioFilename: {}\nPreviewTime: {}\n{}", self.audio_file, self.preview_time, self.rest) } } #[derive(Clone, Debug)] pub struct EditorInfo(String); impl EditorInfo { fn into_string(self) -> String { format!("[Editor]\n{}", self.0) } } #[derive(Clone, Debug)] pub struct Metadata { pub diff_name: String, rest: String, } impl Metadata { fn into_string(self) -> String { format!("[Metadata]\nVersion:{}\n{}", self.diff_name, self.rest) } } #[derive(Clone, Debug)] pub struct DifficultyInfo(String); impl DifficultyInfo { fn into_string(self) -> String { format!("[Difficulty]\n{}", self.0) } } #[derive(Clone, Debug)] pub struct Events(String); impl Events { fn into_string(self) -> String { format!("[Events]\n{}", self.0) } } #[derive(Clone, Debug)] pub struct TimingPoint { pub time: f64, pub beat_len: f64, rest: String, } impl TimingPoint { fn into_string(self) -> String { format!("{},{},{}", self.time as i32, self.beat_len, self.rest) } } #[derive(Clone, Debug)] pub struct Colors(String); impl Colors { fn into_string(self) -> String { format!("[Colours]\n{}", self.0) } } #[derive(Clone, Debug)] pub struct HitObject { pub time: i32, pub params: HitObjectParams, rest_parts: Vec<String>, } impl HitObject { fn into_string(self) -> String { format!( "{},{},{}{}{}", self.rest_parts[0], self.time, self.rest_parts[1], self.params.into_string(), self.rest_parts[2], ) } } #[derive(Clone, Debug)] pub enum HitObjectParams { NoneUseful, Spinner(i32), LongNote(i32), } impl HitObjectParams { fn into_string(self) -> String { match self { HitObjectParams::NoneUseful | HitObjectParams::LongNote(_) => ",".to_string(), HitObjectParams::Spinner(end_time) => format!(",{},", end_time), } } }
use std::io::BufRead; pub use crate::beatmap::parser::ParseError; use crate::beatmap::parser::Parser; mod parser; #[derive(Clone, Debug)] pub struct Beatmap { pub general_info: GeneralInfo, pub editor_info: EditorInfo, pub metadata: Metadata, pub difficulty: DifficultyInfo, pub events: Events, pub timing_points: Vec<TimingPoint>, pub colors: Option<Colors>, pub hit_objects: Vec<HitObject>, } impl Beatmap { pub fn parse(reader: impl BufRead) -> parser::Result<Beatmap> { Parser::new(reader).parse() }
pub fn into_string(self) -> String { format!( "osu file format v14\n\n{}\n{}\n{}\n{}\n{}\n[TimingPoints]\n{}\n\n{}\n[HitObjects]\n{}", self.general_info.into_string(), self.editor_info.into_string(), self.metadata.into_string(), self.difficulty.into_string(), self.events.into_string(), self.timing_points.into_iter().map(|p| p.into_string()).collect::<Vec<_>>().join("\n"), self.colors.map(|c| c.into_string()).unwrap_or(String::new()), self.hit_objects.into_iter().map(|p| p.into_string()).collect::<Vec<_>>().join("\n"), ) } } #[derive(Clone, Debug)] pub struct GeneralInfo { pub audio_file: String, pub preview_time: i32, rest: String, } impl GeneralInfo { fn into_string(self) -> String { format!("[General]\nAudioFilename: {}\nPreviewTime: {}\n{}", self.audio_file, self.preview_time, self.rest) } } #[derive(Clone, Debug)] pub struct EditorInfo(String); impl EditorInfo { fn into_string(self) -> String { format!("[Editor]\n{}", self.0) } } #[derive(Clone, Debug)] pub struct Metadata { pub diff_name: String, rest: String, } impl Metadata { fn into_string(self) -> String { format!("[Metadata]\nVersion:{}\n{}", self.diff_name, self.rest) } } #[derive(Clone, Debug)] pub struct DifficultyInfo(String); impl DifficultyInfo { fn into_string(self) -> String { format!("[Difficulty]\n{}", self.0) } } #[derive(Clone, Debug)] pub struct Events(String); impl Events { fn into_string(self) -> String { format!("[Events]\n{}", self.0) } } #[derive(Clone, Debug)] pub struct TimingPoint { pub time: f64, pub beat_len: f64, rest: String, } impl TimingPoint { fn into_string(self) -> String { format!("{},{},{}", self.time as i32, self.beat_len, self.rest) } } #[derive(Clone, Debug)] pub struct Colors(String); impl Colors { fn into_string(self) -> String { format!("[Colours]\n{}", self.0) } } #[derive(Clone, Debug)] pub struct HitObject { pub time: i32, pub params: HitObjectParams, rest_parts: Vec<String>, } impl HitObject { fn into_string(self) -> String { format!( "{},{},{}{}{}", 
self.rest_parts[0], self.time, self.rest_parts[1], self.params.into_string(), self.rest_parts[2], ) } } #[derive(Clone, Debug)] pub enum HitObjectParams { NoneUseful, Spinner(i32), LongNote(i32), } impl HitObjectParams { fn into_string(self) -> String { match self { HitObjectParams::NoneUseful | HitObjectParams::LongNote(_) => ",".to_string(), HitObjectParams::Spinner(end_time) => format!(",{},", end_time), } } }
pub fn change_rate(&mut self, rate: f64) -> bool { let transform_f64 = |n| n / rate + 75.; let transform = |n| transform_f64(n as f64) as i32; let preview = self.general_info.preview_time; self.general_info.preview_time = if preview >= 0 { transform(preview) } else { preview }; self.metadata.diff_name += &format!(" ({}x)", rate); for mut point in &mut self.timing_points { point.time = transform_f64(point.time); if point.beat_len.is_sign_positive() { point.beat_len /= rate; } } for mut object in &mut self.hit_objects { object.time = transform(object.time); match object.params { HitObjectParams::Spinner(end_time) => object.params = HitObjectParams::Spinner(transform(end_time)), HitObjectParams::LongNote(end_time) => { let rest = match object.rest_parts[2].split_once(':') { Some((_, rest)) => rest, _ => return false, }; object.rest_parts[2] = transform(end_time).to_string() + ":" + rest; } _ => {} } } true }
function_block-full_function
[ { "content": "// Stretches the audio associated with the given `map` by a factor of `rate`, updating metadata.\n\npub fn stretch_beatmap_audio(map: &mut Beatmap, dir: &Path, rate: f64) -> Result<()> {\n\n let old_path = dir.join(&map.general_info.audio_file);\n\n let old_audio = File::open(&old_path).or(Err(AudioStretchError::SourceNotFound))?;\n\n\n\n // This looks like \"audio.mp3\" -> \"audio_1_2.mp3\" for a rate of 1.2.\n\n let new_path = dir.join(format!(\n\n \"{}_{}.{}\",\n\n old_path.file_stem().ok_or(AudioStretchError::InvalidSource)?.to_string_lossy(),\n\n rate.to_string().replace('.', \"_\"),\n\n old_path.extension().ok_or(AudioStretchError::InvalidSource)?.to_string_lossy(),\n\n ));\n\n let mut new_audio = File::create(&new_path).or(Err(AudioStretchError::DestinationIoError))?;\n\n stretch(old_audio, &mut new_audio, rate)?;\n\n\n\n // This should be fine, since the file name was created just above.\n\n map.general_info.audio_file = new_path.file_name().unwrap().to_str().unwrap().to_string();\n\n Ok(())\n\n}\n\n\n", "file_path": "src/audio.rs", "rank": 0, "score": 65842.45456029395 }, { "content": "pub fn run_gui() -> ! {\n\n let main_window = WindowDesc::new(make_ui)\n\n .title(\"osurate | osu! Rate Generator\")\n\n .window_size((460., 380.))\n\n .resizable(false);\n\n\n\n let data = AppData { rates_str: Arc::new(String::new()), files: vec![], status: \"[Info] started\".to_string() };\n\n AppLauncher::with_window(main_window).delegate(Delegate {}).launch(data)\n\n .unwrap_or_else(|_| util::log_fatal(\"failed to start gui\"));\n\n process::exit(0)\n\n}\n\n\n", "file_path": "src/gui.rs", "rank": 1, "score": 61261.56398089921 }, { "content": "fn make_ui() -> impl Widget<AppData> {\n\n let rates_input = TextBox::new()\n\n .with_placeholder(\"Rates (i.e. 
1.1,1.15,1.2)\")\n\n .lens(AppData::rates_str)\n\n .expand_width()\n\n .padding((6., 7., 6., 2.));\n\n\n\n let select_files_button = Button::new(\"Select Beatmap\")\n\n .on_click(|ctx, _, _| {\n\n // Opening multiple files is currently unsupported in Druid (#1067).\n\n let options = FileDialogOptions::new()\n\n .title(\"Select a beatmap to generate rates for\")\n\n .button_text(\"Select\")\n\n .allowed_types(vec![FileSpec::new(\"osu! beatmaps\", &[\"osu\"])]);\n\n ctx.submit_command(Command::new(commands::SHOW_OPEN_PANEL, options, Target::Auto));\n\n })\n\n .padding(4.);\n\n\n\n let undo_button = Button::new(\"Remove Last\")\n\n .on_click(|_, data: &mut AppData, _| { let _ = data.files.pop(); })\n", "file_path": "src/gui.rs", "rank": 2, "score": 53910.002811164915 }, { "content": "// Stretches MP3 audio read from `src` by a factor of `rate`, writing the output to `dest` as MP3 audio.\n\nfn stretch(src: impl Read, dest: &mut impl Write, rate: f64) -> Result<()> {\n\n // Decode source MP3 data into i16 PCM data.\n\n let mut decoder = Decoder::new(src);\n\n let mut frames = vec![];\n\n while let Ok(frame) = decoder.next_frame() {\n\n frames.push(frame);\n\n }\n\n match decoder.next_frame() {\n\n Err(minimp3::Error::Eof) | Err(minimp3::Error::SkippedData) => {}\n\n _ => return Err(AudioStretchError::InvalidSource),\n\n }\n\n\n\n let channels = frames[0].channels;\n\n util::verify(channels <= 2, AudioStretchError::UnsupportedChannelCount)?;\n\n let sample_rate = frames[0].sample_rate;\n\n let rate = rate * sample_rate as f64 / 44_100.;\n\n let bitrate = frames[0].bitrate;\n\n\n\n // Gather samples from each frame and resample.\n\n let samples = frames.into_iter().flat_map(|f| f.data).collect();\n", "file_path": "src/audio.rs", "rank": 3, "score": 52041.07008011952 }, { "content": "// Convenience wrapper over `util::verify` specifically for verifying parts of a beatmap.\n\nfn verify_ff(cond: bool) -> Result<()> {\n\n verify(cond, ParseError::InvalidBeatmap)\n\n}\n\n\n", 
"file_path": "src/beatmap/parser.rs", "rank": 4, "score": 51540.85059103986 }, { "content": "// Checks if `line` is a section header (i.e. \"[Metadata]\") or was the result of reaching EOF.\n\nfn is_section_header_or_eof(line: &str) -> bool {\n\n line.chars().next() == Some('[') && line.chars().last() == Some(']') || line.is_empty()\n\n}\n\n\n", "file_path": "src/beatmap/parser.rs", "rank": 5, "score": 49645.549508152675 }, { "content": "pub fn log_info<D: Display>(value: D) {\n\n println!(\"info: {}\", value);\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 6, "score": 48701.98732588651 }, { "content": "pub fn log_fatal<D: Display>(value: D) -> ! {\n\n eprintln!(\"error: {}\", value);\n\n process::exit(1)\n\n}\n", "file_path": "src/util.rs", "rank": 7, "score": 48701.98732588651 }, { "content": "// Trims the byte order mark from the start of a UTF-8 string, if present.\n\nfn trim_utf8_bom(line: String) -> Option<String> {\n\n if line.as_bytes().starts_with(b\"\\xef\\xbb\\xbf\") {\n\n String::from_utf8(line.as_bytes()[3..].to_vec()).ok()\n\n } else {\n\n Some(line.to_string())\n\n }\n\n}\n", "file_path": "src/beatmap/parser.rs", "rank": 8, "score": 47374.94835804202 }, { "content": "// Convenience wrapper over `parse` specifically for parsing required values in a beatmap.\n\nfn parse_ff<F: FromStr>(str: &str) -> Result<F> {\n\n str.parse().or(Err(ParseError::InvalidBeatmap))\n\n}\n\n\n", "file_path": "src/beatmap/parser.rs", "rank": 9, "score": 44903.21571567442 }, { "content": "// Returns a result based on whether `cond` is true. This is designed to be used with the ? 
operator, returning Err(e)\n\n// when `cond` is false, and Ok(()) otherwise.\n\npub fn verify<E>(cond: bool, e: E) -> Result<(), E> {\n\n cond.then(|| {}).ok_or(e)\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 10, "score": 44468.14637667153 }, { "content": "struct Delegate;\n\n\n\nimpl AppDelegate<AppData> for Delegate {\n\n // When the user selects a file, store it.\n\n fn command(&mut self, _: &mut DelegateCtx, _: Target, cmd: &Command, data: &mut AppData, _: &Env) -> Handled {\n\n if let Some(file_info) = cmd.get(commands::OPEN_FILE) {\n\n let path = file_info.path().to_path_buf();\n\n data.files.push(path);\n\n Handled::Yes\n\n } else {\n\n Handled::No\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/gui.rs", "rank": 11, "score": 39767.95454904638 }, { "content": "#[derive(Clone, Lens)]\n\nstruct AppData {\n\n rates_str: Arc<String>,\n\n files: Vec<PathBuf>,\n\n status: String,\n\n}\n\n\n\nimpl Data for AppData {\n\n fn same(&self, other: &Self) -> bool {\n\n self.rates_str == other.rates_str && self.files == other.files && self.status == other.status\n\n }\n\n}\n\n\n", "file_path": "src/gui.rs", "rank": 12, "score": 38591.26272155158 }, { "content": "// Generates and saves the given rate for the given beatmap.\n\nfn generate_rate(mut map: Beatmap, rate: f64, path: &PathBuf) -> Result<(), String> {\n\n let parent_dir = path.parent().unwrap_or(Path::new(\"./\"));\n\n\n\n map.change_rate(rate).then(|| {}).ok_or_else(|| \"invalid beatmap file\")?;\n\n audio::stretch_beatmap_audio(&mut map, parent_dir, rate).map_err(|e| match e {\n\n AudioStretchError::SourceNotFound => \"couldn't find mp3 file\",\n\n AudioStretchError::InvalidSource => \"couldn't parse mp3 file\",\n\n AudioStretchError::UnsupportedChannelCount => \"unsupported mp3 channel count\",\n\n AudioStretchError::LameInitializationError => \"couldn't initialize lame (is it installed?)\",\n\n AudioStretchError::LameEncodingError => \"lame mp3 encoding error\",\n\n _ => \"mp3 output i/o error\",\n\n })?;\n\n\n\n 
// New file name with the rate in the difficulty name part.\n\n let old_file_name = path.file_stem().unwrap().to_string_lossy();\n\n let name_with_rate = format!(\"{} ({}x)].osu\", &old_file_name[..old_file_name.len() - 1], rate);\n\n\n\n let new_path = parent_dir.join(name_with_rate);\n\n let mut new_file = File::create(new_path).map_err(|_| \"couldn't create new beatmap file\")?;\n\n new_file.write_all(map.into_string().as_bytes()).map_err(|_| \"couldn't write new beatmap file\")?;\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 13, "score": 38214.37425799316 }, { "content": "use std::{io, result};\n\nuse std::io::BufRead;\n\nuse std::option::NoneError;\n\nuse std::str::FromStr;\n\n\n\nuse crate::beatmap::{\n\n Beatmap, Colors, DifficultyInfo, EditorInfo, Events, GeneralInfo, HitObject, HitObjectParams, Metadata,\n\n TimingPoint,\n\n};\n\nuse crate::util;\n\nuse crate::util::verify;\n\n\n\n#[derive(Debug)]\n\npub enum ParseError {\n\n UnsupportedVersion,\n\n InvalidBeatmap,\n\n IoError,\n\n}\n\n\n\nimpl From<io::Error> for ParseError {\n", "file_path": "src/beatmap/parser.rs", "rank": 24, "score": 37070.17237648988 }, { "content": " fn from(_: io::Error) -> Self {\n\n ParseError::IoError\n\n }\n\n}\n\n\n\nimpl From<NoneError> for ParseError {\n\n fn from(_: NoneError) -> Self {\n\n ParseError::InvalidBeatmap\n\n }\n\n}\n\n\n\npub type Result<T> = result::Result<T, ParseError>;\n\n\n\npub struct Parser<R: BufRead> {\n\n reader: R,\n\n}\n\n\n\nimpl<R: BufRead> Parser<R> {\n\n pub fn new(reader: R) -> Self {\n\n Self { reader }\n", "file_path": "src/beatmap/parser.rs", "rank": 25, "score": 37064.8999994418 }, { "content": " }\n\n\n\n pub fn parse(&mut self) -> Result<Beatmap> {\n\n let header = trim_utf8_bom(self.read_line()?)?;\n\n verify_ff(header.starts_with(\"osu file format v\"))?;\n\n util::verify(&header[17..] == \"14\", ParseError::UnsupportedVersion)?;\n\n\n\n verify_ff(self.read_line()? 
== \"[General]\")?;\n\n let (general_info, next_section_header) = self.parse_general_info()?;\n\n\n\n verify_ff(next_section_header == \"[Editor]\")?;\n\n let (rest, next_section_header) = self.read_section()?;\n\n let editor_info = EditorInfo(rest);\n\n\n\n verify_ff(next_section_header == \"[Metadata]\")?;\n\n let (metadata, next_section_header) = self.parse_metadata()?;\n\n\n\n verify_ff(next_section_header == \"[Difficulty]\")?;\n\n let (rest, next_section_header) = self.read_section()?;\n\n let difficulty = DifficultyInfo(rest);\n", "file_path": "src/beatmap/parser.rs", "rank": 26, "score": 37063.046382338536 }, { "content": " Ok(Beatmap { general_info, editor_info, metadata, difficulty, events, timing_points, colors, hit_objects })\n\n }\n\n\n\n fn parse_general_info(&mut self) -> Result<(GeneralInfo, String)> {\n\n let mut audio_file = String::new();\n\n let mut preview_time = -1;\n\n let mut rest = String::new();\n\n\n\n let mut line = self.read_line()?;\n\n while !is_section_header_or_eof(&line) {\n\n let (key, value) = line.split_once(\": \")?;\n\n match key {\n\n \"AudioFilename\" => audio_file = value.to_string(),\n\n \"PreviewTime\" => preview_time = parse_ff(value)?,\n\n _ => rest += &(line + \"\\n\"),\n\n }\n\n line = self.read_line()?;\n\n }\n\n\n\n // Verify that required values were parsed.\n", "file_path": "src/beatmap/parser.rs", "rank": 27, "score": 37062.768076703695 }, { "content": "\n\n verify_ff(next_section_header == \"[Events]\")?;\n\n let (rest, next_section_header) = self.read_section()?;\n\n let events = Events(rest);\n\n\n\n verify_ff(next_section_header == \"[TimingPoints]\")?;\n\n let (timing_points, mut next_section_header) = self.parse_timing_points()?;\n\n\n\n // This section appears to be optional.\n\n let colors = if next_section_header == \"[Colours]\" {\n\n let (rest, next) = self.read_section()?;\n\n next_section_header = next;\n\n Some(Colors(rest))\n\n } else {\n\n None\n\n };\n\n\n\n verify_ff(next_section_header == 
\"[HitObjects]\")?;\n\n let hit_objects = self.parse_hit_objects()?;\n\n\n", "file_path": "src/beatmap/parser.rs", "rank": 28, "score": 37061.39284991923 }, { "content": " let mut hit_objects = vec![];\n\n\n\n let mut line = self.read_line()?;\n\n while !is_section_header_or_eof(&line) {\n\n let mut split = line.split(',');\n\n let mut rest_parts = vec![]; // See `beatmap/mod.rs`.\n\n\n\n rest_parts.push(format!(\"{},{}\", split.next()?, split.next()?));\n\n let time = parse_ff(split.next()?)?;\n\n let kind = parse_ff::<i32>(split.next()?)?;\n\n rest_parts.push(format!(\"{},{}\", kind, split.next()?));\n\n\n\n let params = if kind & (1 << 0) == 1 || kind & (1 << 1) == 2 {\n\n HitObjectParams::NoneUseful\n\n } else if kind & (1 << 3) == 8 {\n\n HitObjectParams::Spinner(parse_ff(split.next()?)?)\n\n } else if kind & (1 << 7) == 128 {\n\n let end_time = split.clone().next()?.split_once(':')?.0;\n\n HitObjectParams::LongNote(parse_ff(end_time)?)\n\n } else {\n", "file_path": "src/beatmap/parser.rs", "rank": 29, "score": 37060.397156967105 }, { "content": " verify_ff(!audio_file.is_empty())?;\n\n Ok((GeneralInfo { audio_file, preview_time, rest }, line))\n\n }\n\n\n\n fn parse_metadata(&mut self) -> Result<(Metadata, String)> {\n\n let mut diff_name = String::new();\n\n let mut rest = String::new();\n\n\n\n let mut line = self.read_line()?;\n\n while !is_section_header_or_eof(&line) {\n\n let (key, value) = line.split_once(\":\")?;\n\n match key {\n\n \"Version\" => diff_name = value.to_string(),\n\n _ => rest += &(line + \"\\n\"),\n\n }\n\n line = self.read_line()?;\n\n }\n\n\n\n // Verify that required values were parsed.\n\n verify_ff(!diff_name.is_empty())?;\n", "file_path": "src/beatmap/parser.rs", "rank": 30, "score": 37059.24243874941 }, { "content": " line = self.read_line()?;\n\n };\n\n Ok((rest, line))\n\n }\n\n\n\n // Reads a line from `reader`, discarding the newline delimiter and skipping empty lines and comments.\n\n fn read_line(&mut self) -> 
io::Result<String> {\n\n let mut buf = String::new();\n\n\n\n // Return an empty string on EOF.\n\n if self.reader.read_line(&mut buf)? == 0 {\n\n return Ok(buf);\n\n }\n\n\n\n // Skip empty lines and comments.\n\n if buf.trim().is_empty() || buf.starts_with(\"//\") {\n\n self.read_line()\n\n } else {\n\n Ok(buf.trim_end().to_string())\n\n }\n\n }\n\n}\n\n\n\n// Convenience wrapper over `util::verify` specifically for verifying parts of a beatmap.\n", "file_path": "src/beatmap/parser.rs", "rank": 31, "score": 37058.62474757569 }, { "content": " return Err(ParseError::InvalidBeatmap);\n\n };\n\n rest_parts.push(split.collect::<Vec<_>>().join(\",\"));\n\n\n\n hit_objects.push(HitObject { time, params, rest_parts });\n\n line = self.read_line()?;\n\n }\n\n\n\n // Verify that EOF has been reached.\n\n verify_ff(line.is_empty())?;\n\n Ok(hit_objects)\n\n }\n\n\n\n // Read an entire section to a string without any special parsing.\n\n fn read_section(&mut self) -> io::Result<(String, String)> {\n\n let mut rest = String::new();\n\n\n\n let mut line = self.read_line()?;\n\n while !is_section_header_or_eof(&line) {\n\n rest += &(line + \"\\n\");\n", "file_path": "src/beatmap/parser.rs", "rank": 32, "score": 37058.614623922804 }, { "content": " Ok((Metadata { diff_name, rest }, line))\n\n }\n\n\n\n fn parse_timing_points(&mut self) -> Result<(Vec<TimingPoint>, String)> {\n\n let mut timing_points = vec![];\n\n\n\n let mut line = self.read_line()?;\n\n while !is_section_header_or_eof(&line) {\n\n let split = line.splitn(3, ',').collect::<Vec<_>>();\n\n verify_ff(split.len() == 3)?;\n\n\n\n let time = parse_ff(split[0])?;\n\n let beat_len = parse_ff(split[1])?;\n\n timing_points.push(TimingPoint { time, beat_len, rest: split[2].to_string() });\n\n line = self.read_line()?;\n\n }\n\n Ok((timing_points, line))\n\n }\n\n\n\n fn parse_hit_objects(&mut self) -> Result<Vec<HitObject>> {\n", "file_path": "src/beatmap/parser.rs", "rank": 33, "score": 37058.5796572795 }, { "content": 
"fn main() {\n\n // Change help text if compiled without GUI support.\n\n let mut gui_help = \"enters gui mode\".to_string();\n\n if !cfg!(feature = \"gui\") {\n\n gui_help += \" (unavailable; recompile with `--features gui`)\"\n\n }\n\n let gui_help = gui_help.as_str();\n\n let matches = clap_app!(osurate =>\n\n (version: \"0.2.1\")\n\n (author: \"LunarCoffee <[email protected]>\")\n\n (about: \"rate generator for osu! beatmaps\")\n\n (@arg gui: -g conflicts_with[inputs rates] required_unless[inputs] gui_help)\n\n (@arg inputs: #{1, u64::MAX} requires[rates] required_unless[gui] \"sets the input .osu file(s)\")\n\n (@arg rates: -r #{1, u64::MAX} requires[inputs] \"sets the rate(s) to generate\")\n\n (help_message: \"prints help information\")\n\n (version_message: \"prints version information\")\n\n ).get_matches();\n\n\n\n if matches.is_present(\"gui\") {\n\n #[cfg(feature = \"gui\")] gui::run_gui(); // This call diverges.\n", "file_path": "src/main.rs", "rank": 34, "score": 29478.4938544873 }, { "content": "// Generates and saves the rates in `rates` for the .osu file at `path`. The returned value is the name of the map,\n\n// used for user-facing logging.\n\nfn generate_rates(path: &PathBuf, rates: &[f64]) -> Result<String, String> {\n\n let path = path.canonicalize().map_err(|_| \"couldn't find file\")?;\n\n let base_map_name = path.file_stem().ok_or_else(|| \"not a file\").map(|s| s.to_string_lossy())?;\n\n let map_file = File::open(&path).map_err(|_| \"couldn't open file\")?;\n\n let reader = BufReader::new(map_file);\n\n\n\n let map = Beatmap::parse(reader).map_err(|e| match e {\n\n ParseError::UnsupportedVersion => \"unsupported beatmap file format version\",\n\n ParseError::InvalidBeatmap => \"couldn't parse beatmap file\",\n\n _ => \"beatmap file i/o error\",\n\n })?;\n\n\n\n for rate in rates {\n\n // Since the map is mutated by `change_rate`, inaccuracies may accumulate when reverting a rate change. 
To work\n\n // around this, the beatmap is cloned for each rate.\n\n generate_rate(map.clone(), *rate, &path)?;\n\n util::log_info(format!(\"generated {}x rate of {}\", rate, base_map_name));\n\n }\n\n Ok(base_map_name.to_string())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 35, "score": 18852.630049863823 }, { "content": "// Helper function to resample a chunk of PCM samples.\n\nfn resample_chunk(samples: Vec<i16>, rate: f64) -> Vec<(i16, i16)> {\n\n let mut src = signal::from_interleaved_samples_iter::<_, [i16; 2]>(samples);\n\n let lerp = Linear::new(src.next(), src.next());\n\n src.scale_hz(lerp, rate).until_exhausted().map(|[l, r]| (l, r)).collect()\n\n}\n", "file_path": "src/audio.rs", "rank": 36, "score": 18523.521908467235 }, { "content": "// Resamples dual channel PCM `samples` by a factor of `rate` in parallel with `n_threads` worker threads.\n\nfn resample_parallel(samples: Vec<i16>, rate: f64, n_threads: usize) -> (Vec<i16>, Vec<i16>) {\n\n // Split the samples into equally sized chunks and spawn a thread to process each.\n\n let n_chunks = (samples.len() as f64 / n_threads as f64).ceil() as usize;\n\n let chunks = samples.chunks(n_chunks).map(|c| c.to_vec());\n\n let handles = chunks.map(|c| thread::spawn(move || resample_chunk(c, rate))).collect::<Vec<_>>();\n\n\n\n // Recombine the resampled chunks.\n\n handles.into_iter().map(|h| h.join().unwrap()).flatten().unzip()\n\n}\n\n\n", "file_path": "src/audio.rs", "rank": 37, "score": 16414.26898589574 }, { "content": "#![feature(available_concurrency)]\n\n#![feature(iter_intersperse)]\n\n#![feature(try_trait)]\n\n\n\nuse std::fs::File;\n\nuse std::io::{BufReader, Write};\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse clap::clap_app;\n\n\n\nuse crate::audio::AudioStretchError;\n\nuse crate::beatmap::{Beatmap, ParseError};\n\n\n\nmod audio;\n\nmod beatmap;\n\nmod gui;\n\nmod util;\n\n\n", "file_path": "src/main.rs", "rank": 38, "score": 7.425281444996753 }, { "content": "use std::{result, 
thread};\n\nuse std::fs::File;\n\nuse std::io::{Read, Write};\n\nuse std::path::Path;\n\n\n\nuse dasp::{signal, Signal};\n\nuse dasp::interpolate::linear::Linear;\n\nuse lame::Lame;\n\nuse minimp3::Decoder;\n\n\n\nuse crate::beatmap::Beatmap;\n\nuse crate::util;\n\n\n\n#[derive(Debug)]\n\npub enum AudioStretchError {\n\n SourceNotFound,\n\n InvalidSource,\n\n UnsupportedChannelCount,\n\n LameInitializationError,\n\n LameEncodingError,\n\n DestinationIoError,\n\n}\n\n\n\nimpl From<lame::Error> for AudioStretchError {\n\n fn from(_: lame::Error) -> Self {\n\n Self::LameInitializationError\n\n }\n\n}\n\n\n", "file_path": "src/audio.rs", "rank": 39, "score": 7.289329963295501 }, { "content": "#![cfg(feature = \"gui\")]\n\n\n\nuse std::path::PathBuf;\n\nuse std::process;\n\nuse std::sync::Arc;\n\n\n\nuse druid::{\n\n AppDelegate, AppLauncher, Color, Command, commands, Data, DelegateCtx, Env, FileDialogOptions, FileSpec, Handled,\n\n Lens, Target, Widget, WidgetExt, WindowDesc,\n\n};\n\nuse druid::widget::{Button, Flex, Label, LineBreaking, TextBox};\n\n\n\nuse crate::util;\n\n\n", "file_path": "src/gui.rs", "rank": 40, "score": 3.8773630274711497 }, { "content": "use std::fmt::Display;\n\nuse std::process;\n\n\n\n// Returns a result based on whether `cond` is true. This is designed to be used with the ? 
operator, returning Err(e)\n\n// when `cond` is false, and Ok(()) otherwise.\n", "file_path": "src/util.rs", "rank": 41, "score": 2.653268457554321 }, { "content": " for file in &data.files {\n\n data.status = match crate::generate_rates(file, &rates) {\n\n Err(e) => format!(\"[Error] {}\", e),\n\n Ok(map_name) => format!(\"[Info] generated rate(s) for {}\", map_name),\n\n };\n\n }\n\n })\n\n .padding(6.);\n\n\n\n let configure_label = |l: Label<AppData>| l\n\n .with_line_break_mode(LineBreaking::WordWrap)\n\n .with_text_size(12.)\n\n .background(Color::grey(0.12))\n\n .border(Color::grey(0.12), 3.)\n\n .rounded(4.)\n\n .expand_width();\n\n\n\n let selected_maps_label = configure_label(Label::dynamic(\n\n |data: &AppData, _| {\n\n let name = data.files.iter()\n", "file_path": "src/gui.rs", "rank": 42, "score": 1.7283368152900118 }, { "content": "# osurate - osu! rate generator\n\n\n\nosurate is a small command line tool (optional GUI available) written in [Rust](https://www.rust-lang.org/) for\n\ngenerating rates (speed variations) of [osu!](https://osu.ppy.sh) beatmaps. When generating rates, the audio will also\n\nbe scaled accordingly (and pitch-shifted).\n\n\n\n## Building\n\n\n\nIf you're on Windows, you can download the latest release [here](https://github.com/LunarCoffee/osurate/releases). This\n\nwill include a binary executable, a launch script that enters the GUI, as well as usage instructions. That's it!\n\n\n\nOtherwise, before building, make sure you have libmp3lame and nightly rustc (at least 1.50.0). If you want to build with GUI\n\nsupport on Linux, also have GTK+ 3 installed. To build, just clone [this repo](https://github.com/LunarCoffee/osurate)\n\nand compile with `cargo build --release`, and tack on `--features gui` if you want the GUI.\n\n\n\n## Usage\n\n\n\n```shell\n\nosurate <inputs>... 
-r <rates>\n\n\n\n# This will generate 0.85x and 0.9x rates for the specified map.\n\nosurate \"Wanderflux [Annihilation].osu\" -r 0.85 0.9\n\n\n\n# This will generate 1.1x and 1.2x rates for both specified maps.\n\nosurate \"MANIERA [Collab Another].osu\" \"Crystallized [listen].osu\" -r 1.1 1.2\n\n\n\n# This opens the GUI.\n\nosurate -g\n\n```\n\n\n\nWhen using the CLI, specify the paths of the .osu files you want to generate rates for in `inputs`, and put the `rates`\n\nyou want after. If you specify multiple files, all of the rates you specify will be generated for each file.\n\n\n\n## Performance\n\n\n\nWith an Intel i7-6700HQ on Ubuntu, it takes around 2-3 seconds to generate one rate for a 2 minute (~3 MB MP3) map, the\n\nbottleneck being the MP3 encoding with LAME.\n", "file_path": "README.md", "rank": 43, "score": 1.0840914487299576 }, { "content": " .map(|f| f.file_name().unwrap().to_string_lossy().trim_end_matches(\".osu\").to_string())\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\");\n\n format!(\"Selected map(s):\\n{}\", if name.is_empty() { \"(none)\".to_string() } else { name })\n\n }))\n\n .expand_height()\n\n .padding((6., 1., 6., 6.));\n\n\n\n let status_label = configure_label(Label::dynamic(|data: &AppData, _| data.status.to_string()))\n\n .padding((6., 2., 6., 6.));\n\n\n\n Flex::column()\n\n .with_child(rates_input)\n\n .with_child(Flex::row()\n\n .with_child(select_files_button)\n\n .with_child(undo_button)\n\n .with_child(clear_button)\n\n .with_child(generate_button))\n\n .with_flex_child(selected_maps_label, 1.)\n\n .with_child(status_label)\n\n .background(Color::grey(0.05))\n\n}\n", "file_path": "src/gui.rs", "rank": 44, "score": 1.051126596677912 } ]
Rust
all-is-cubes/benches/space_bench.rs
kpreid/all-is-cubes
81e0fb8fbd5a0557f0f9002085f4160bce37174a
use criterion::{ black_box, criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion, Throughput, }; use all_is_cubes::content::make_some_blocks; use all_is_cubes::space::{Grid, Space, SpaceTransaction}; use all_is_cubes::transaction::Transaction; pub fn space_bulk_mutation(c: &mut Criterion) { let mut group = c.benchmark_group("space-bulk-mutation"); for &mutation_size in &[1, 4, 64] { let grid = Grid::new([0, 0, 0], [mutation_size, mutation_size, mutation_size]); let bigger_grid = grid.multiply(2); let size_description = format!("{}×{}×{}", mutation_size, mutation_size, mutation_size); let mutation_volume = grid.volume(); group.throughput(Throughput::Elements(mutation_volume as u64)); group.bench_function( BenchmarkId::new("fill() entire space", &size_description), |b| { let [block] = make_some_blocks(); b.iter_batched( || Space::empty(grid), |mut space| { space.fill(space.grid(), |_| Some(&block)).unwrap(); }, BatchSize::SmallInput, ) }, ); group.bench_function( BenchmarkId::new("fill() part of space", &size_description), |b| { let [block] = make_some_blocks(); b.iter_batched( || Space::empty(bigger_grid), |mut space| { space.fill(grid, |_| Some(&block)).unwrap(); }, BatchSize::SmallInput, ) }, ); group.bench_function( BenchmarkId::new("fill_uniform() entire space", &size_description), |b| { let [block] = make_some_blocks(); b.iter_batched( || Space::empty(grid), |mut space| { space.fill_uniform(space.grid(), &block).unwrap(); }, BatchSize::SmallInput, ) }, ); group.bench_function( BenchmarkId::new("fill_uniform() part of space", &size_description), |b| { let [block] = make_some_blocks(); b.iter_batched( || Space::empty(bigger_grid), |mut space| { space.fill_uniform(grid, &block).unwrap(); }, BatchSize::SmallInput, ) }, ); group.bench_function( BenchmarkId::new("set() entire space", &size_description), |b| { let [block] = make_some_blocks(); b.iter_batched( || Space::empty(grid), |mut space| { for x in 0..mutation_size { for y in 0..mutation_size { for 
z in 0..mutation_size { space.set([x, y, z], &block).unwrap(); } } } }, BatchSize::SmallInput, ) }, ); group.bench_function( BenchmarkId::new("transaction entire space", &size_description), |b| { let [block] = make_some_blocks(); b.iter_batched( || Space::empty(grid), |mut space| { let mut txn = SpaceTransaction::default(); for x in 0..mutation_size { for y in 0..mutation_size { for z in 0..mutation_size { txn.set([x, y, z], None, Some(block.clone())).unwrap(); } } } txn.execute(&mut space).unwrap(); }, BatchSize::SmallInput, ) }, ); } group.finish(); } pub fn grid_bench(c: &mut Criterion) { let mut group = c.benchmark_group("Grid"); let grid = Grid::new([0, 0, 0], [256, 256, 256]); group.throughput(Throughput::Elements(grid.volume() as u64)); group.bench_function("Grid::interior_iter", |b| { b.iter(|| { for cube in grid.interior_iter() { black_box(cube); } }) }); group.finish(); } criterion_group!(benches, space_bulk_mutation, grid_bench); criterion_main!(benches);
use criterion::{ black_box, criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion, Throughput, }; use all_is_cubes::content::make_some_blocks; use all_is_cubes::space::{Grid, Space, SpaceTransaction}; use all_is_cubes::transac
function( BenchmarkId::new("fill_uniform() entire space", &size_description), |b| { let [block] = make_some_blocks(); b.iter_batched( || Space::empty(grid), |mut space| { space.fill_uniform(space.grid(), &block).unwrap(); }, BatchSize::SmallInput, ) }, ); group.bench_function( BenchmarkId::new("fill_uniform() part of space", &size_description), |b| { let [block] = make_some_blocks(); b.iter_batched( || Space::empty(bigger_grid), |mut space| { space.fill_uniform(grid, &block).unwrap(); }, BatchSize::SmallInput, ) }, ); group.bench_function( BenchmarkId::new("set() entire space", &size_description), |b| { let [block] = make_some_blocks(); b.iter_batched( || Space::empty(grid), |mut space| { for x in 0..mutation_size { for y in 0..mutation_size { for z in 0..mutation_size { space.set([x, y, z], &block).unwrap(); } } } }, BatchSize::SmallInput, ) }, ); group.bench_function( BenchmarkId::new("transaction entire space", &size_description), |b| { let [block] = make_some_blocks(); b.iter_batched( || Space::empty(grid), |mut space| { let mut txn = SpaceTransaction::default(); for x in 0..mutation_size { for y in 0..mutation_size { for z in 0..mutation_size { txn.set([x, y, z], None, Some(block.clone())).unwrap(); } } } txn.execute(&mut space).unwrap(); }, BatchSize::SmallInput, ) }, ); } group.finish(); } pub fn grid_bench(c: &mut Criterion) { let mut group = c.benchmark_group("Grid"); let grid = Grid::new([0, 0, 0], [256, 256, 256]); group.throughput(Throughput::Elements(grid.volume() as u64)); group.bench_function("Grid::interior_iter", |b| { b.iter(|| { for cube in grid.interior_iter() { black_box(cube); } }) }); group.finish(); } criterion_group!(benches, space_bulk_mutation, grid_bench); criterion_main!(benches);
tion::Transaction; pub fn space_bulk_mutation(c: &mut Criterion) { let mut group = c.benchmark_group("space-bulk-mutation"); for &mutation_size in &[1, 4, 64] { let grid = Grid::new([0, 0, 0], [mutation_size, mutation_size, mutation_size]); let bigger_grid = grid.multiply(2); let size_description = format!("{}×{}×{}", mutation_size, mutation_size, mutation_size); let mutation_volume = grid.volume(); group.throughput(Throughput::Elements(mutation_volume as u64)); group.bench_function( BenchmarkId::new("fill() entire space", &size_description), |b| { let [block] = make_some_blocks(); b.iter_batched( || Space::empty(grid), |mut space| { space.fill(space.grid(), |_| Some(&block)).unwrap(); }, BatchSize::SmallInput, ) }, ); group.bench_function( BenchmarkId::new("fill() part of space", &size_description), |b| { let [block] = make_some_blocks(); b.iter_batched( || Space::empty(bigger_grid), |mut space| { space.fill(grid, |_| Some(&block)).unwrap(); }, BatchSize::SmallInput, ) }, ); group.bench_
random
[]
Rust
Katalon_OrangeHRMS/Object Repository/Header_Menu/Performance/a_Manage Reviews.rs
girishbhangale416/Katalon_Docker
a3f8ada90afac9e0b71454366ca93ff8a09dffe4
<?xml version="1.0" encoding="UTF-8"?> <WebElementEntity> <description></description> <name>a_Manage Reviews</name> <tag></tag> <elementGuidId>a9d86c40-dd16-4349-9cf7-4fb26ece97a5</elementGuidId> <selectorCollection> <entry> <key>XPATH</key> <value> </entry> </selectorCollection> <selectorMethod>XPATH</selectorMethod> <useRalativeImagePath>false</useRalativeImagePath> <webElementProperties> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>tag</name> <type>Main</type> <value>a</value> </webElementProperties> <webElementProperties> <isSelected>true</isSelected> <matchCondition>equals</matchCondition> <name>href</name> <type>Main</type> <value>#</value> </webElementProperties> <webElementProperties> <isSelected>true</isSelected> <matchCondition>equals</matchCondition> <name>id</name> <type>Main</type> <value>menu_performance_ManageReviews</value> </webElementProperties> <webElementProperties> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>class</name> <type>Main</type> <value>arrow</value> </webElementProperties> <webElementProperties> <isSelected>true</isSelected> <matchCondition>equals</matchCondition> <name>text</name> <type>Main</type> <value>Manage Reviews</value> </webElementProperties> <webElementProperties> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath</name> <type>Main</type> <value>id(&quot;menu_performance_ManageReviews&quot;)</value> </webElementProperties> <webElementXpaths> <isSelected>true</isSelected> <matchCondition>equals</matchCondition> <name>xpath:attributes</name> <type>Main</type> <value> </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:idRelative</name> <type>Main</type> <value> </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:link</name> <type>Main</type> <value> </webElementXpaths> <webElementXpaths> 
<isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:neighbor</name> <type>Main</type> <value>(. </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:neighbor</name> <type>Main</type> <value>(. </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:neighbor</name> <type>Main</type> <value>(. </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:neighbor</name> <type>Main</type> <value>(. </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:href</name> <type>Main</type> <value>( </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:position</name> <type>Main</type> <value> </webElementXpaths> </WebElementEntity>
<?xml version="1.0" encoding="UTF-8"?> <WebElementEntity> <description></description> <name>a_Manage Reviews</name> <tag></tag> <elementGuidId>a9d86c40-dd16-4349-9cf7-4fb26ece97a5</elementGuidId> <selectorCollection> <entry> <key>XPATH</key> <value> </entry> </selectorCollection> <selectorMethod>XPATH</selectorMethod> <useRalativeImagePath>false</useRalativeImagePath> <webElementProperties> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name
ondition>equals</matchCondition> <name>xpath:neighbor</name> <type>Main</type> <value>(. </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:href</name> <type>Main</type> <value>( </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:position</name> <type>Main</type> <value> </webElementXpaths> </WebElementEntity>
>tag</name> <type>Main</type> <value>a</value> </webElementProperties> <webElementProperties> <isSelected>true</isSelected> <matchCondition>equals</matchCondition> <name>href</name> <type>Main</type> <value>#</value> </webElementProperties> <webElementProperties> <isSelected>true</isSelected> <matchCondition>equals</matchCondition> <name>id</name> <type>Main</type> <value>menu_performance_ManageReviews</value> </webElementProperties> <webElementProperties> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>class</name> <type>Main</type> <value>arrow</value> </webElementProperties> <webElementProperties> <isSelected>true</isSelected> <matchCondition>equals</matchCondition> <name>text</name> <type>Main</type> <value>Manage Reviews</value> </webElementProperties> <webElementProperties> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath</name> <type>Main</type> <value>id(&quot;menu_performance_ManageReviews&quot;)</value> </webElementProperties> <webElementXpaths> <isSelected>true</isSelected> <matchCondition>equals</matchCondition> <name>xpath:attributes</name> <type>Main</type> <value> </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:idRelative</name> <type>Main</type> <value> </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:link</name> <type>Main</type> <value> </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:neighbor</name> <type>Main</type> <value>(. </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:neighbor</name> <type>Main</type> <value>(. </webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchCondition>equals</matchCondition> <name>xpath:neighbor</name> <type>Main</type> <value>(. 
</webElementXpaths> <webElementXpaths> <isSelected>false</isSelected> <matchC
random
[ { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input_Middle Name_middleName</name>\n\n <tag></tag>\n\n <elementGuidId>a02d298f-e2ee-48f3-8efe-dc1f9953b202</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id='middleName']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input_Middle Name_middleName.rs", "rank": 0, "score": 38858.325952679785 }, { "content": " <value>text</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>name</name>\n\n <type>Main</type>\n\n <value>middleName</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>id</name>\n\n <type>Main</type>\n\n <value>middleName</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath</name>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input_Middle Name_middleName.rs", "rank": 1, "score": 38853.7690179005 }, { "content": " <name>xpath:neighbor</name>\n\n <type>Main</type>\n\n <value>(.//*[normalize-space(text()) and normalize-space(.)='Middle Name'])[1]/following::input[1]</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:neighbor</name>\n\n <type>Main</type>\n\n <value>(.//*[normalize-space(text()) and 
normalize-space(.)='*'])[1]/following::input[2]</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:neighbor</name>\n\n <type>Main</type>\n\n <value>(.//*[normalize-space(text()) and normalize-space(.)='*'])[2]/preceding::input[1]</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:position</name>\n\n <type>Main</type>\n\n <value>//li/ol/li[2]/input</value>\n\n </webElementXpaths>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input_Middle Name_middleName.rs", "rank": 2, "score": 38853.56468517923 }, { "content": " </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>class</name>\n\n <type>Main</type>\n\n <value>formInputText</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>maxlength</name>\n\n <type>Main</type>\n\n <value>30</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>type</name>\n\n <type>Main</type>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input_Middle Name_middleName.rs", "rank": 3, "score": 38853.45916674349 }, { "content": " <type>Main</type>\n\n <value>id(&quot;middleName&quot;)</value>\n\n </webElementProperties>\n\n <webElementXpaths>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:attributes</name>\n\n <type>Main</type>\n\n <value>//input[@id='middleName']</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:idRelative</name>\n\n <type>Main</type>\n\n 
<value>//form[@id='frmAddEmp']/fieldset/ol/li/ol/li[2]/input</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input_Middle Name_middleName.rs", "rank": 4, "score": 38853.44576597184 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>Emp_name</name>\n\n <tag></tag>\n\n <elementGuidId>c6af5fda-f905-444d-a80f-dd7f5cde3ae6</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;assignleave_txtEmployee_empName&quot;]</value>\n\n </entry>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Assigne_leave/Emp_name.rs", "rank": 5, "score": 31729.233799402602 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>Emp_name</name>\n\n <tag></tag>\n\n <elementGuidId>cf8095a7-a99c-4fb8-96e9-de5f4a0eb8f1</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;entitlements_employee_empName&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Leave/Emp_name.rs", "rank": 6, "score": 31729.233799402602 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>Type_name</name>\n\n <tag></tag>\n\n <elementGuidId>28cdd846-32ab-4e9c-a1e8-be9e53278295</elementGuidId>\n\n 
<selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;leaveType_txtLeaveTypeName&quot;]</value>\n\n </entry>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/leave_type/Type_name.rs", "rank": 7, "score": 31729.15093447045 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>Employee_Name</name>\n\n <tag></tag>\n\n <elementGuidId>09ee38af-c435-4220-8d73-0765e1832931</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//h1[text()='MFLoIg rnWwuA']</value>\n\n </entry>\n\n <entry>\n\n <key>BASIC</key>\n\n <value>//h1[text()='$(Name)']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>BASIC</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Employee_info/Employee_Name.rs", "rank": 8, "score": 31729.069547877563 }, { "content": " <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>h1</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>text</name>\n\n <type>Main</type>\n\n <value>$(Name)</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath</name>\n\n <type>Main</type>\n\n <value>//h1[text()='$(Name)']</value>\n\n </webElementProperties>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Employee_info/Employee_Name.rs", "rank": 9, "score": 31723.789574102393 }, { "content": "<?xml version=\"1.0\" 
encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input__user_name</name>\n\n <tag></tag>\n\n <elementGuidId>5c5c3e72-0729-45a5-af57-983e29fcce18</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id='user_name']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__user_name.rs", "rank": 10, "score": 30956.143075074247 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input__firstName</name>\n\n <tag></tag>\n\n <elementGuidId>5324c381-b9ca-428f-bc9e-124a56a36503</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id='firstName']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__firstName.rs", "rank": 11, "score": 30956.143075074247 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input__lastName</name>\n\n <tag></tag>\n\n <elementGuidId>2a717e54-91af-4b81-8dd0-1a3fcb868536</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id='lastName']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n 
<webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__lastName.rs", "rank": 12, "score": 30956.101502999067 }, { "content": " <value>text</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>name</name>\n\n <type>Main</type>\n\n <value>firstName</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>id</name>\n\n <type>Main</type>\n\n <value>firstName</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath</name>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__firstName.rs", "rank": 13, "score": 30951.53141065677 }, { "content": " <value>text</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>name</name>\n\n <type>Main</type>\n\n <value>lastName</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>id</name>\n\n <type>Main</type>\n\n <value>lastName</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath</name>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__lastName.rs", "rank": 14, "score": 30951.53141065677 }, { "content": " <value>text</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>name</name>\n\n <type>Main</type>\n\n 
<value>user_name</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>id</name>\n\n <type>Main</type>\n\n <value>user_name</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath</name>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__user_name.rs", "rank": 15, "score": 30951.53141065677 }, { "content": " <name>xpath:neighbor</name>\n\n <type>Main</type>\n\n <value>(.//*[normalize-space(text()) and normalize-space(.)='*'])[1]/following::input[1]</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:neighbor</name>\n\n <type>Main</type>\n\n <value>(.//*[normalize-space(text()) and normalize-space(.)='Middle Name'])[1]/preceding::input[1]</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:position</name>\n\n <type>Main</type>\n\n <value>//li/input</value>\n\n </webElementXpaths>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__firstName.rs", "rank": 16, "score": 30951.34223004046 }, { "content": " <name>xpath:neighbor</name>\n\n <type>Main</type>\n\n <value>(.//*[normalize-space(text()) and normalize-space(.)='*'])[3]/following::input[1]</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:neighbor</name>\n\n <type>Main</type>\n\n <value>(.//*[normalize-space(text()) and normalize-space(.)='*'])[4]/preceding::input[1]</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:position</name>\n\n <type>Main</type>\n\n 
<value>//li[5]/input</value>\n\n </webElementXpaths>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__user_name.rs", "rank": 17, "score": 30951.297417695885 }, { "content": " <name>xpath:neighbor</name>\n\n <type>Main</type>\n\n <value>(.//*[normalize-space(text()) and normalize-space(.)='*'])[2]/following::input[1]</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:neighbor</name>\n\n <type>Main</type>\n\n <value>(.//*[normalize-space(text()) and normalize-space(.)='Employee Id'])[1]/preceding::input[1]</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:neighbor</name>\n\n <type>Main</type>\n\n <value>(.//*[normalize-space(text()) and normalize-space(.)='Photograph'])[1]/preceding::input[2]</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:position</name>\n\n <type>Main</type>\n\n <value>//li/ol/li[3]/input</value>\n\n </webElementXpaths>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__lastName.rs", "rank": 18, "score": 30951.266557438328 }, { "content": " <type>Main</type>\n\n <value>id(&quot;user_name&quot;)</value>\n\n </webElementProperties>\n\n <webElementXpaths>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:attributes</name>\n\n <type>Main</type>\n\n <value>//input[@id='user_name']</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:idRelative</name>\n\n <type>Main</type>\n\n <value>//form[@id='frmAddEmp']/fieldset/ol/li[5]/input</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n 
<matchCondition>equals</matchCondition>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__user_name.rs", "rank": 19, "score": 30951.230166006546 }, { "content": " </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>class</name>\n\n <type>Main</type>\n\n <value>formInputText</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>maxlength</name>\n\n <type>Main</type>\n\n <value>30</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>type</name>\n\n <type>Main</type>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__lastName.rs", "rank": 20, "score": 30951.221559499758 }, { "content": " </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>class</name>\n\n <type>Main</type>\n\n <value>formInputText</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>maxlength</name>\n\n <type>Main</type>\n\n <value>30</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>type</name>\n\n <type>Main</type>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__firstName.rs", "rank": 21, "score": 30951.221559499758 }, { "content": " </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>class</name>\n\n <type>Main</type>\n\n <value>formInputText</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n 
<matchCondition>equals</matchCondition>\n\n <name>maxlength</name>\n\n <type>Main</type>\n\n <value>40</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>type</name>\n\n <type>Main</type>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__user_name.rs", "rank": 22, "score": 30951.221559499758 }, { "content": " <type>Main</type>\n\n <value>id(&quot;firstName&quot;)</value>\n\n </webElementProperties>\n\n <webElementXpaths>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:attributes</name>\n\n <type>Main</type>\n\n <value>//input[@id='firstName']</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:idRelative</name>\n\n <type>Main</type>\n\n <value>//form[@id='frmAddEmp']/fieldset/ol/li/ol/li/input</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__firstName.rs", "rank": 23, "score": 30951.208158728106 }, { "content": " <type>Main</type>\n\n <value>id(&quot;lastName&quot;)</value>\n\n </webElementProperties>\n\n <webElementXpaths>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:attributes</name>\n\n <type>Main</type>\n\n <value>//input[@id='lastName']</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:idRelative</name>\n\n <type>Main</type>\n\n <value>//form[@id='frmAddEmp']/fieldset/ol/li/ol/li[3]/input</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n", "file_path": "Katalon_OrangeHRMS/Object 
Repository/Add_Employee/input__lastName.rs", "rank": 24, "score": 30951.208158728106 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input__leave_balanceemployeeempName</name>\n\n <tag></tag>\n\n <elementGuidId>67e36d1e-972b-48d6-a681-0bed1144a3b7</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id='leave_balance_employee_empName']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Leave Entitlements and Usage Report/input__leave_balanceemployeeempName.rs", "rank": 25, "score": 28849.427698062504 }, { "content": " <value>leave_balance_employee_empName</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>class</name>\n\n <type>Main</type>\n\n <value>ac_input</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>autocomplete</name>\n\n <type>Main</type>\n\n <value>off</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath</name>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Leave Entitlements and Usage Report/input__leave_balanceemployeeempName.rs", "rank": 26, "score": 28844.878737032952 }, { "content": " <name>xpath:neighbor</name>\n\n <type>Main</type>\n\n <value>(.//*[normalize-space(text()) and normalize-space(.)='*'])[2]/following::input[1]</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n 
<isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:neighbor</name>\n\n <type>Main</type>\n\n <value>(.//*[normalize-space(text()) and normalize-space(.)='Leave Type'])[2]/preceding::input[2]</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:neighbor</name>\n\n <type>Main</type>\n\n <value>(.//*[normalize-space(text()) and normalize-space(.)='From'])[1]/preceding::input[2]</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:position</name>\n\n <type>Main</type>\n\n <value>//li[2]/input</value>\n\n </webElementXpaths>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Leave Entitlements and Usage Report/input__leave_balanceemployeeempName.rs", "rank": 27, "score": 28844.740449683246 }, { "content": " </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>type</name>\n\n <type>Main</type>\n\n <value>text</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>name</name>\n\n <type>Main</type>\n\n <value>leave_balance[employee][empName]</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>id</name>\n\n <type>Main</type>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Leave Entitlements and Usage Report/input__leave_balanceemployeeempName.rs", "rank": 28, "score": 28844.71394474996 }, { "content": " <type>Main</type>\n\n <value>id(&quot;leave_balance_employee_empName&quot;)</value>\n\n </webElementProperties>\n\n <webElementXpaths>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n 
<name>xpath:attributes</name>\n\n <type>Main</type>\n\n <value>//input[@id='leave_balance_employee_empName']</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath:idRelative</name>\n\n <type>Main</type>\n\n <value>//form[@id='frmLeaveBalanceReport']/fieldset/ol/li[2]/input</value>\n\n </webElementXpaths>\n\n <webElementXpaths>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Leave Entitlements and Usage Report/input__leave_balanceemployeeempName.rs", "rank": 29, "score": 28844.602521677076 }, { "content": "\t\t def intdate = findTestData(‘Give your Data File Path’).getValue(‘Day’, 1)\n\n\t\t */\n\n\n", "file_path": "Katalon_OrangeHRMS/Keywords/util/utility.groovy", "rank": 30, "score": 26880.37789442255 }, { "content": "\t\t def intmonth = findTestData(‘Give your Data File Path’).getValue(‘Month’, 1)\n", "file_path": "Katalon_OrangeHRMS/Keywords/util/utility.groovy", "rank": 31, "score": 26880.37789442255 }, { "content": "\tdef List<WebElement> getHtmlTableRows(TestObject table, String outerTagName) {\n\n\t\tWebElement mailList = WebUiBuiltInKeywords.findWebElement(table)\n\n\t\tList<WebElement> selectedRows = mailList.findElements(By.xpath(\"./\" + outerTagName + \"/tr\"))\n\n\t\treturn selectedRows\n\n\t}\n\n\n\n\n\n\n\n\t@Keyword\n", "file_path": "Katalon_OrangeHRMS/Keywords/util/utility.groovy", "rank": 32, "score": 24887.00736687802 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>Assign_btn</name>\n\n <tag></tag>\n\n <elementGuidId>5d1b3101-b795-4c07-895d-eaab59d88988</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;assignBtn&quot;]</value>\n\n </entry>\n\n <entry>\n\n <key>CSS</key>\n\n 
<value>#assignBtn</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>CSS</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Assigne_leave/Assign_btn.rs", "rank": 33, "score": 10.342630196901423 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>welcom_link</name>\n\n <tag></tag>\n\n <elementGuidId>5f5e3a77-4684-4e49-bc88-c73d4816ed53</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id=&quot;welcome&quot;]</value>\n\n </entry>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/welcom_link.rs", "rank": 34, "score": 10.329354411708504 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>comment</name>\n\n <tag></tag>\n\n <elementGuidId>d1fae1ce-8278-4138-b5c8-180c10c02fbd</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//textarea[@id=&quot;assignleave_txtComment&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Assigne_leave/comment.rs", "rank": 35, "score": 10.284099947524586 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>entitlements_no</name>\n\n <tag></tag>\n\n <elementGuidId>e8679a28-6141-4dc3-b446-e94b777f0790</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n 
<value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;entitlements_entitlement&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Leave/entitlements_no.rs", "rank": 36, "score": 10.284099947524586 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>save_btn</name>\n\n <tag></tag>\n\n <elementGuidId>45d1ecfd-9afe-452a-9255-62be5f78bdbe</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;btnCancel&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Leave/save_btn.rs", "rank": 37, "score": 10.284099947524586 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>from_date</name>\n\n <tag></tag>\n\n <elementGuidId>2085023e-c89b-49f2-a98d-b7f7b43f6204</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;assignleave_txtFromDate&quot;]</value>\n\n </entry>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Assigne_leave/from_date.rs", "rank": 38, "score": 10.239293778090039 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>Add_Entitlement</name>\n\n <tag></tag>\n\n 
<elementGuidId>c59bfbbc-195c-4357-9018-99fddea8e6ea</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id=&quot;menu_leave_addLeaveEntitlement&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/Leave/Add_Entitlement/Add_Entitlement.rs", "rank": 39, "score": 10.239293778090039 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>Add_type</name>\n\n <tag></tag>\n\n <elementGuidId>c05a3460-f95f-4484-ba41-cb8d111ac32a</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;btnAdd&quot;]</value>\n\n </entry>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/leave_type/Add_type.rs", "rank": 40, "score": 10.239293778090039 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>Leave_period</name>\n\n <tag></tag>\n\n <elementGuidId>4c023f9a-b8e0-4d48-9b0c-b6cebf11b0f7</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//select[@id=&quot;period&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Leave/Leave_period.rs", "rank": 41, "score": 10.239293778090039 }, { "content": "<?xml version=\"1.0\" 
encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>Save_button</name>\n\n <tag></tag>\n\n <elementGuidId>2cba999d-5670-4cc6-affa-e949fa969e9d</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;saveButton&quot;]</value>\n\n </entry>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/leave_type/Save_button.rs", "rank": 42, "score": 10.239293778090039 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>multiperiod_balance</name>\n\n <tag></tag>\n\n <elementGuidId>d300e9db-99fa-4d4c-b13b-ebcdaa85f74b</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@href=&quot;#multiperiod_balance&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Assigne_leave/multiperiod_balance.rs", "rank": 43, "score": 10.239293778090039 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>popup_cancel_btn</name>\n\n <tag></tag>\n\n <elementGuidId>1facbe86-b767-4098-859e-c18d329064ce</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;confirmCancelButton&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": 
"Katalon_OrangeHRMS/Object Repository/Assigne_leave/popup_cancel_btn.rs", "rank": 44, "score": 10.194928446395663 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>leave_type_dropdown</name>\n\n <tag></tag>\n\n <elementGuidId>b53d1caa-238b-4ad7-8899-a745e93d0a1b</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//select[@id=&quot;entitlements_leave_type&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Leave/leave_type_dropdown.rs", "rank": 45, "score": 10.194928446395663 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>cancel_btn</name>\n\n <tag></tag>\n\n <elementGuidId>ecd3a02b-17c1-4aad-a89f-5d24bcf8480b</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;btnSave&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Leave/cancel_btn.rs", "rank": 46, "score": 10.194928446395663 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>datepicker-month</name>\n\n <tag></tag>\n\n <elementGuidId>b2b32614-ff9c-4e8c-81c2-01ad18e0db1d</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//select[@class=&quot;ui-datepicker-month&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n 
<selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Assigne_leave/datepicker-month.rs", "rank": 47, "score": 10.194928446395663 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>leave_type</name>\n\n <tag></tag>\n\n <elementGuidId>a4b3604b-9aab-4829-918f-36c18887348b</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id=&quot;menu_leave_leaveTypeList&quot;]</value>\n\n </entry>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/Leave/leave_type.rs", "rank": 48, "score": 10.194928446395663 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>popup_ok_btn</name>\n\n <tag></tag>\n\n <elementGuidId>b43e4567-494a-4a2a-8981-8d13a67ae1e7</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;confirmOkButton&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Assigne_leave/popup_ok_btn.rs", "rank": 49, "score": 10.19492844639566 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>datepicker-year</name>\n\n <tag></tag>\n\n <elementGuidId>930c77c8-8b23-46f5-9da0-e42f236ea3c6</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n 
<key>XPATH</key>\n\n <value>//select[@class=&quot;ui-datepicker-year&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Assigne_leave/datepicker-year.rs", "rank": 50, "score": 10.194928446395663 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>Type_dropdown</name>\n\n <tag></tag>\n\n <elementGuidId>649f90e8-1165-44fd-8bed-c2fcb0d7b944</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//select[@id=&quot;assignleave_txtLeaveType&quot;]</value>\n\n </entry>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Assigne_leave/Type_dropdown.rs", "rank": 51, "score": 10.150996675335056 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>assignleave_partialDays_dropdown</name>\n\n <tag></tag>\n\n <elementGuidId>039b6a45-55b2-4218-a422-21b6330a1ed4</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//select[@id=&quot;assignleave_partialDays&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Assigne_leave/assignleave_partialDays_dropdown.rs", "rank": 52, "score": 10.107491361983477 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>Logout_btn</name>\n\n 
<tag></tag>\n\n <elementGuidId>449dc0db-1daa-40c6-8d3b-df8404373959</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@href=&quot;/symfony/web/index.php/auth/logout&quot;]</value>\n\n </entry>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Logout_btn.rs", "rank": 53, "score": 10.064405572101592 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>leaveType_excludeIfNoEntitlement_CHECKBOX</name>\n\n <tag></tag>\n\n <elementGuidId>c90e4eef-99b8-4cd7-9cde-5b02c99f8c80</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;leaveType_excludeIfNoEntitlement&quot;]</value>\n\n </entry>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/leave_type/leaveType_excludeIfNoEntitlement_CHECKBOX.rs", "rank": 54, "score": 9.979465637730199 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description>Employee does not have sufficient leave balance for leave request.</description>\n\n <name>alert_message</name>\n\n <tag></tag>\n\n <elementGuidId>156d5fa7-a7b5-40a7-ad7b-966ee93cd198</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n <value></value>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//*[@id=&quot;leaveBalanceConfirm&quot;]/div[2]/p[1]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", 
"file_path": "Katalon_OrangeHRMS/Object Repository/Assigne_leave/alert_message.rs", "rank": 55, "score": 9.855037935676435 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>to_date</name>\n\n <tag></tag>\n\n <elementGuidId>786af4dc-053c-41f2-8cd4-c3283ed723ba</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;assignleave_txtToDate&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Assigne_leave/to_date.rs", "rank": 56, "score": 9.812872362935511 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Configuration</name>\n\n <tag></tag>\n\n <elementGuidId>0892967c-910c-4a72-bda2-3b036fd83934</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_pim_Configuration']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/PIM/a_Configuration.rs", "rank": 57, "score": 9.694497121533216 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Qualifications</name>\n\n <tag></tag>\n\n <elementGuidId>e2189428-3873-4080-99ef-221cf6965477</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_admin_Qualifications']</value>\n\n </entry>\n\n </selectorCollection>\n\n 
<selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Admin_menu/a_Qualifications.rs", "rank": 58, "score": 9.694497121533214 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Performance</name>\n\n <tag></tag>\n\n <elementGuidId>585547ad-6314-42af-a4c8-0e49b7fc220b</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu__Performance']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/Performance/a_Performance.rs", "rank": 59, "score": 9.694497121533214 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Configure</name>\n\n <tag></tag>\n\n <elementGuidId>5b2df28c-673a-49bb-88b7-bf9725a75f95</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_performance_Configure']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/Performance/a_Configure.rs", "rank": 60, "score": 9.650014035705993 }, { "content": "<?xml 
version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Configuration</name>\n\n <tag></tag>\n\n <elementGuidId>343f1f34-1e48-497d-92ef-2b28056e0291</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_admin_Configuration']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Admin_menu/a_Configuration.rs", "rank": 61, "score": 9.650014035705993 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Organization</name>\n\n <tag></tag>\n\n <elementGuidId>dcad033a-168c-4aed-94bb-a878733ca229</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_admin_Organization']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Admin_menu/a_Organization.rs", "rank": 62, "score": 9.650014035705993 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Reports</name>\n\n <tag></tag>\n\n <elementGuidId>46516ed1-f234-4695-b2b1-b36ee4f29f39</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_leave_Reports']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n 
<useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/Leave/a_Reports.rs", "rank": 63, "score": 9.650014035705993 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Dashboard</name>\n\n <tag></tag>\n\n <elementGuidId>4f9db3ca-a74c-4780-a91a-3afae9df15e8</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_dashboard_index']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/a_Dashboard.rs", "rank": 64, "score": 9.650014035705993 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Job</name>\n\n <tag></tag>\n\n <elementGuidId>e5a26f6e-19ee-4e87-ae7b-df09c2a3eaf0</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_admin_Job']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Admin_menu/a_Job.rs", "rank": 65, "score": 9.60597931781062 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n 
<name>a_Assign Leave</name>\n\n <tag></tag>\n\n <elementGuidId>63e41533-713a-497f-9ce3-1537e7e78bf4</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_leave_assignLeave']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/Leave/a_Assign Leave.rs", "rank": 66, "score": 9.60597931781062 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Nationalities</name>\n\n <tag></tag>\n\n <elementGuidId>d03abc06-c09d-4f03-8fae-a4e3fba45e4a</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_admin_nationality']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Admin_menu/a_Nationalities.rs", "rank": 67, "score": 9.60597931781062 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Configure</name>\n\n <tag></tag>\n\n <elementGuidId>57c40b7e-ca26-41e3-ace9-a419dbd79414</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_leave_Configure']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n 
<matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/Leave/a_Configure.rs", "rank": 68, "score": 9.60597931781062 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Time</name>\n\n <tag></tag>\n\n <elementGuidId>9833603c-c3bc-4190-9d25-7fd8ee8f44e5</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_time_viewTimeModule']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/a_Time.rs", "rank": 69, "score": 9.60597931781062 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Recruitment</name>\n\n <tag></tag>\n\n <elementGuidId>b50cafe6-94b9-4959-926e-38c2fba692a3</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_recruitment_viewRecruitmentModule']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/a_Recruitment.rs", "rank": 70, "score": 9.60597931781062 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>select_EnabledDisabled</name>\n\n <tag></tag>\n\n 
<elementGuidId>bfc4a4d0-e93a-4284-a588-8d7cb33f85a1</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//select[@id='status']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>select</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/select_EnabledDisabled.rs", "rank": 72, "score": 9.5623856234158 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Entitlements</name>\n\n <tag></tag>\n\n <elementGuidId>16aa5e6c-f6af-4216-a134-edbec2edc36c</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id=&quot;menu_leave_Entitlements&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/Leave/Add_Entitlement/a_Entitlements.rs", "rank": 73, "score": 9.5623856234158 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_PIM</name>\n\n <tag></tag>\n\n <elementGuidId>c9707721-c768-4177-a5d6-63883a4e6bd3</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_pim_viewPimModule']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n 
<matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/PIM/a_PIM.rs", "rank": 74, "score": 9.5623856234158 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Maintenance</name>\n\n <tag></tag>\n\n <elementGuidId>2fec605e-e5f8-48a6-a69f-c89630e39abf</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_maintenance_purgeEmployee']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/a_Maintenance.rs", "rank": 75, "score": 9.5623856234158 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Directory</name>\n\n <tag></tag>\n\n <elementGuidId>68fe4e16-35c8-47c8-89d6-621f1db8a9e1</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_directory_viewDirectory']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/a_Directory.rs", "rank": 76, "score": 9.5623856234158 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Add Employee</name>\n\n <tag></tag>\n\n 
<elementGuidId>dc2d3809-8d6e-4b6b-8150-4e659b8fba37</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_pim_addEmployee']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/PIM/a_Add Employee.rs", "rank": 77, "score": 9.5623856234158 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description>//li[@class=&quot;ac_even ac_over&quot;]</description>\n\n <name>auto_complete</name>\n\n <tag></tag>\n\n <elementGuidId>e9f35120-da38-4645-92a9-b236b4202baf</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>BASIC</key>\n\n </entry>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//li[@class=&quot;ac_even ac_over&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n</WebElementEntity>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/auto_complete.rs", "rank": 78, "score": 9.536529019278984 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input__view</name>\n\n <tag></tag>\n\n <elementGuidId>cc310d2d-c144-4cfd-ad65-3912501020cc</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id='viewBtn']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": 
"Katalon_OrangeHRMS/Object Repository/Leave Entitlements and Usage Report/input__view.rs", "rank": 79, "score": 9.519225778466854 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Leave</name>\n\n <tag></tag>\n\n <elementGuidId>14d95b4c-e381-48fe-b710-b629ff7941c8</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_leave_viewLeaveModule']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/Leave/a_Leave.rs", "rank": 80, "score": 9.519225778466854 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Reports</name>\n\n <tag></tag>\n\n <elementGuidId>c93988cb-439c-4e25-ac9b-23f7eb6b7c9c</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_core_viewDefinedPredefinedReports']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/PIM/a_Reports.rs", "rank": 81, "score": 9.519225778466854 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_User Management</name>\n\n <tag></tag>\n\n <elementGuidId>a92ba944-c447-46ef-ac8d-6a96825656fb</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n 
<value>//a[@id='menu_admin_UserManagement']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Admin_menu/a_User Management.rs", "rank": 82, "score": 9.519225778466854 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Employee List</name>\n\n <tag></tag>\n\n <elementGuidId>7e429bde-b543-4129-a19f-de8c882f2aa4</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_pim_viewEmployeeList']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/PIM/a_Employee List.rs", "rank": 83, "score": 9.519225778466854 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input__re_password</name>\n\n <tag></tag>\n\n <elementGuidId>20e37104-be34-4343-8b4f-d9bbba23525c</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id='re_password']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": "Katalon_OrangeHRMS/Object 
Repository/Add_Employee/input__re_password.rs", "rank": 84, "score": 9.519225778466854 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input__user_password</name>\n\n <tag></tag>\n\n <elementGuidId>20dada8a-a2cd-4e2a-9917-f084e6e20f29</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id='user_password']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__user_password.rs", "rank": 85, "score": 9.519225778466854 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Leave List</name>\n\n <tag></tag>\n\n <elementGuidId>388095e5-f0dd-444e-b4a3-0b72505f3cbf</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_leave_viewLeaveList']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/Leave/a_Leave List.rs", "rank": 86, "score": 9.519225778466854 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>b_Admin</name>\n\n <tag></tag>\n\n <elementGuidId>bda7aaae-f7e9-42c9-82f7-ca0eb14607bd</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n 
<value>//a[@id='menu_admin_viewAdminModule']/b</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>b</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Admin_menu/b_Admin.rs", "rank": 87, "score": 9.519225778466854 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input_Password_Submit</name>\n\n <tag></tag>\n\n <elementGuidId>3a6675a6-1ff9-415d-bbfa-f5c599c697fc</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id='btnLogin']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/input_Password_Submit.rs", "rank": 88, "score": 9.519225778466854 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input_Username_txtPassword</name>\n\n <tag></tag>\n\n <elementGuidId>69bb1ecc-b2e4-482d-a920-fab87aea156d</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id='txtPassword']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": "Katalon_OrangeHRMS/Object 
Repository/Header_Menu/input_Username_txtPassword.rs", "rank": 89, "score": 9.476492774136418 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input__btnSave</name>\n\n <tag></tag>\n\n <elementGuidId>97d1065f-1aad-47e2-89f4-101934d0190f</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id='btnSave']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input__btnSave.rs", "rank": 90, "score": 9.476492774136418 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input_Create Login Details_chkLogin</name>\n\n <tag></tag>\n\n <elementGuidId>6b212eec-795d-421c-9c46-f17abd269468</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id='chkLogin']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input_Create Login Details_chkLogin.rs", "rank": 91, "score": 9.43417976186689 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Leave Entitlements and Usage Report</name>\n\n <tag></tag>\n\n <elementGuidId>104cc178-8675-4123-9c47-b546abb5a69d</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n 
<value>//a[@id='menu_leave_viewLeaveBalanceReport']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/Leave/a_Leave Entitlements and Usage Report.rs", "rank": 92, "score": 9.43417976186689 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input_LOGIN Panel_txtUsername</name>\n\n <tag></tag>\n\n <elementGuidId>2bcd05ec-0d51-489a-8a9a-571307d1f146</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id='txtUsername']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/input_LOGIN Panel_txtUsername.rs", "rank": 93, "score": 9.43417976186689 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>a_Employee Trackers</name>\n\n <tag></tag>\n\n <elementGuidId>68f70c7c-f6ee-40bc-aba4-08e662deb91f</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//a[@id='menu_performance_viewEmployeePerformanceTrackerList']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n 
<value>a</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Header_Menu/Performance/a_Employee Trackers.rs", "rank": 94, "score": 9.392280048596268 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>select_-- Select --Leave TypeEmployee</name>\n\n <tag></tag>\n\n <elementGuidId>acd494ad-974f-4d71-8241-54d5f97519ad</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//select[@id='leave_balance_report_type']</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>select</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Leave Entitlements and Usage Report/select_-- Select --Leave TypeEmployee.rs", "rank": 95, "score": 9.392280048596268 }, { "content": "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n\n<WebElementEntity>\n\n <description></description>\n\n <name>input_Employee Id_employeeId</name>\n\n <tag></tag>\n\n <elementGuidId>e0dcbe28-e574-418c-a3a5-f4e64d591002</elementGuidId>\n\n <selectorCollection>\n\n <entry>\n\n <key>XPATH</key>\n\n <value>//input[@id=&quot;employeeId&quot;]</value>\n\n </entry>\n\n </selectorCollection>\n\n <selectorMethod>XPATH</selectorMethod>\n\n <useRalativeImagePath>false</useRalativeImagePath>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>tag</name>\n\n <type>Main</type>\n\n <value>input</value>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input_Employee Id_employeeId.rs", "rank": 96, "score": 9.350787092159226 }, { "content": " <value>text</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n 
<matchCondition>equals</matchCondition>\n\n <name>name</name>\n\n <type>Main</type>\n\n <value>employeeId</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>value</name>\n\n <type>Main</type>\n\n <value>0034</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>id</name>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Add_Employee/input_Employee Id_employeeId.rs", "rank": 97, "score": 5.123023708870547 }, { "content": " <value>viewBtn</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>value</name>\n\n <type>Main</type>\n\n <value>View</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>class</name>\n\n <type>Main</type>\n\n <value>hover</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>false</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>xpath</name>\n", "file_path": "Katalon_OrangeHRMS/Object Repository/Leave Entitlements and Usage Report/input__view.rs", "rank": 98, "score": 5.060873584742146 }, { "content": " <value>off</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>type</name>\n\n <type>Main</type>\n\n <value>password</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>name</name>\n\n <type>Main</type>\n\n <value>user_password</value>\n\n </webElementProperties>\n\n <webElementProperties>\n\n <isSelected>true</isSelected>\n\n <matchCondition>equals</matchCondition>\n\n <name>id</name>\n", "file_path": 
"Katalon_OrangeHRMS/Object Repository/Add_Employee/input__user_password.rs", "rank": 99, "score": 5.058918886957337 } ]
Rust
unsafe_collection/src/bytes/uninit.rs
HFQR/xitca-web
ee2d4fa9e88b2be149c9ca3454bc14d90f8f9ca2
use std::{io::IoSlice, mem::MaybeUninit}; use bytes_crate::{buf::Chain, Buf, Bytes}; use crate::uninit; use super::buf_list::{BufList, EitherBuf}; mod sealed { pub trait Sealed {} } pub trait ChunkVectoredUninit: sealed::Sealed { unsafe fn chunks_vectored_uninit<'a>(&'a self, dst: &mut [MaybeUninit<IoSlice<'a>>]) -> usize; } impl<T, U> sealed::Sealed for Chain<T, U> where T: sealed::Sealed, U: sealed::Sealed, { } impl<T, U> ChunkVectoredUninit for Chain<T, U> where T: ChunkVectoredUninit, U: ChunkVectoredUninit, { unsafe fn chunks_vectored_uninit<'a>(&'a self, dst: &mut [MaybeUninit<IoSlice<'a>>]) -> usize { let mut n = self.first_ref().chunks_vectored_uninit(dst); n += self.last_ref().chunks_vectored_uninit(&mut dst[n..]); n } } impl<B, const LEN: usize> sealed::Sealed for BufList<B, LEN> where B: ChunkVectoredUninit {} impl<B: ChunkVectoredUninit, const LEN: usize> ChunkVectoredUninit for BufList<B, LEN> { #[inline] unsafe fn chunks_vectored_uninit<'a>(&'a self, dst: &mut [MaybeUninit<IoSlice<'a>>]) -> usize { assert!(!dst.is_empty()); let mut vecs = 0; for buf in self.bufs.iter() { vecs += buf.chunks_vectored_uninit(&mut dst[vecs..]); if vecs == dst.len() { break; } } vecs } } impl<B: ChunkVectoredUninit, const LEN: usize> BufList<B, LEN> { pub fn chunks_vectored_uninit_into_init<'a, 's>( &'a self, dst: &'s mut [MaybeUninit<IoSlice<'a>>], ) -> &'s mut [IoSlice<'a>] { unsafe { let len = self.chunks_vectored_uninit(dst); uninit::slice_assume_init_mut(&mut dst[..len]) } } } impl<L, R> sealed::Sealed for EitherBuf<L, R> where L: ChunkVectoredUninit, R: ChunkVectoredUninit, { } impl<L, R> ChunkVectoredUninit for EitherBuf<L, R> where L: ChunkVectoredUninit, R: ChunkVectoredUninit, { unsafe fn chunks_vectored_uninit<'a>(&'a self, dst: &mut [MaybeUninit<IoSlice<'a>>]) -> usize { match *self { Self::Left(ref buf) => buf.chunks_vectored_uninit(dst), Self::Right(ref buf) => buf.chunks_vectored_uninit(dst), } } } impl sealed::Sealed for Bytes {} impl ChunkVectoredUninit 
for Bytes { unsafe fn chunks_vectored_uninit<'a>(&'a self, dst: &mut [MaybeUninit<IoSlice<'a>>]) -> usize { if dst.is_empty() { return 0; } if self.has_remaining() { dst[0].write(IoSlice::new(self.chunk())); 1 } else { 0 } } } impl sealed::Sealed for &'_ [u8] {} impl ChunkVectoredUninit for &'_ [u8] { unsafe fn chunks_vectored_uninit<'a>(&'a self, dst: &mut [MaybeUninit<IoSlice<'a>>]) -> usize { if dst.is_empty() { return 0; } if self.has_remaining() { dst[0].write(IoSlice::new(self)); 1 } else { 0 } } } #[cfg(test)] mod test { use super::*; use crate::uninit::uninit_array; #[test] fn either_buf() { let mut lst = BufList::<_, 2>::new(); let mut buf = uninit_array::<_, 4>(); lst.push(EitherBuf::Left(&b"left"[..])); lst.push(EitherBuf::Right(&b"right"[..])); let slice = lst.chunks_vectored_uninit_into_init(&mut buf); assert_eq!(slice.len(), 2); assert_eq!(slice[0].as_ref(), b"left"); assert_eq!(slice[1].as_ref(), b"right"); } #[test] fn either_chain() { let mut lst = BufList::<_, 3>::new(); let mut buf = uninit_array::<_, 5>(); lst.push(EitherBuf::Left((&b"1"[..]).chain(&b"2"[..]))); lst.push(EitherBuf::Right(&b"3"[..])); let slice = lst.chunks_vectored_uninit_into_init(&mut buf); assert_eq!(slice.len(), 3); assert_eq!(slice[0].as_ref(), b"1"); assert_eq!(slice[1].as_ref(), b"2"); assert_eq!(slice[2].as_ref(), b"3"); } }
use std::{io::IoSlice, mem::MaybeUninit}; use bytes_crate::{buf::Chain, Buf, Bytes}; use crate::uninit; use super::buf_list::{BufList, EitherBuf}; mod sealed { pub trait Sealed {} } pub trait ChunkVectoredUninit: sealed::Sealed { unsafe fn chunks_vectored_uninit<'a>(&'a self, dst: &mut [MaybeUninit<IoSlice<'a>>]) -> usize; } impl<T, U> sealed::Sealed for Chain<T, U> where T: sealed::Sealed, U: sealed::Sealed, { } impl<T, U> ChunkVectoredUninit for Chain<T, U> where T: ChunkVectoredUninit, U: ChunkVectoredUninit, { unsafe fn chunks_vectored_uninit<'a>(&'a self, dst: &mut [MaybeUninit<IoSlice<'a>>]) -> usize { let mut n = self.first_ref().chunks_vectored_uninit(dst); n += self.last_ref().chunks_vectored_uninit(&mut dst[n..]); n } } impl<B, const LEN: usize> sealed::Sealed for BufList<B, LEN> where B: ChunkVectoredUninit {} impl<B: ChunkVectoredUninit, const LEN: usize> ChunkVectoredUninit for BufList<B, LEN> { #[inline] unsafe fn chunks_vectored_uninit<'a>(&'a self, dst: &mut [MaybeUninit<IoSlice<'a>>]) -> usize { assert!(!dst.is_empty()); let mut vecs = 0; for buf in self.bufs.iter() { vecs += buf.chunks_vectored_uninit(&mut dst[vecs..]); if vecs == dst.len() { break; } } vecs } } impl<B: ChunkVectoredUninit, const LEN: usize> BufList<B, LEN> { pub fn chunks_vectored_uninit_into_init<'a, 's>( &'a sel
} impl<L, R> sealed::Sealed for EitherBuf<L, R> where L: ChunkVectoredUninit, R: ChunkVectoredUninit, { } impl<L, R> ChunkVectoredUninit for EitherBuf<L, R> where L: ChunkVectoredUninit, R: ChunkVectoredUninit, { unsafe fn chunks_vectored_uninit<'a>(&'a self, dst: &mut [MaybeUninit<IoSlice<'a>>]) -> usize { match *self { Self::Left(ref buf) => buf.chunks_vectored_uninit(dst), Self::Right(ref buf) => buf.chunks_vectored_uninit(dst), } } } impl sealed::Sealed for Bytes {} impl ChunkVectoredUninit for Bytes { unsafe fn chunks_vectored_uninit<'a>(&'a self, dst: &mut [MaybeUninit<IoSlice<'a>>]) -> usize { if dst.is_empty() { return 0; } if self.has_remaining() { dst[0].write(IoSlice::new(self.chunk())); 1 } else { 0 } } } impl sealed::Sealed for &'_ [u8] {} impl ChunkVectoredUninit for &'_ [u8] { unsafe fn chunks_vectored_uninit<'a>(&'a self, dst: &mut [MaybeUninit<IoSlice<'a>>]) -> usize { if dst.is_empty() { return 0; } if self.has_remaining() { dst[0].write(IoSlice::new(self)); 1 } else { 0 } } } #[cfg(test)] mod test { use super::*; use crate::uninit::uninit_array; #[test] fn either_buf() { let mut lst = BufList::<_, 2>::new(); let mut buf = uninit_array::<_, 4>(); lst.push(EitherBuf::Left(&b"left"[..])); lst.push(EitherBuf::Right(&b"right"[..])); let slice = lst.chunks_vectored_uninit_into_init(&mut buf); assert_eq!(slice.len(), 2); assert_eq!(slice[0].as_ref(), b"left"); assert_eq!(slice[1].as_ref(), b"right"); } #[test] fn either_chain() { let mut lst = BufList::<_, 3>::new(); let mut buf = uninit_array::<_, 5>(); lst.push(EitherBuf::Left((&b"1"[..]).chain(&b"2"[..]))); lst.push(EitherBuf::Right(&b"3"[..])); let slice = lst.chunks_vectored_uninit_into_init(&mut buf); assert_eq!(slice.len(), 3); assert_eq!(slice[0].as_ref(), b"1"); assert_eq!(slice[1].as_ref(), b"2"); assert_eq!(slice[2].as_ref(), b"3"); } }
f, dst: &'s mut [MaybeUninit<IoSlice<'a>>], ) -> &'s mut [IoSlice<'a>] { unsafe { let len = self.chunks_vectored_uninit(dst); uninit::slice_assume_init_mut(&mut dst[..len]) } }
function_block-function_prefixed
[ { "content": " pub trait Sealed {}\n\n}\n\n\n\nimpl<T> sealed::Sealed for &mut [MaybeUninit<T>] {}\n\n\n", "file_path": "unsafe_collection/src/uninit.rs", "rank": 2, "score": 266279.3679360628 }, { "content": "/// Trait for safely initialize an unit slice.\n\npub trait PartialInit: sealed::Sealed + Sized {\n\n /// Uninitialized slice is coming from input slice.\n\n fn init_from<I>(self, slice: I) -> PartialInitWith<Self, I>\n\n where\n\n I: Iterator,\n\n {\n\n PartialInitWith {\n\n uninit: self,\n\n init: slice,\n\n }\n\n }\n\n}\n\n\n\n/// T must be `Copy` so the initializer don't worry about dropping the value.\n\nimpl<T: Copy> PartialInit for &mut [MaybeUninit<T>] {}\n\n\n\npub struct PartialInitWith<A, B> {\n\n uninit: A,\n\n init: B,\n\n}\n", "file_path": "unsafe_collection/src/uninit.rs", "rank": 3, "score": 256513.2497888153 }, { "content": " pub trait Sealed {}\n\n\n", "file_path": "postgres/src/row.rs", "rank": 4, "score": 229688.53588422967 }, { "content": "pub fn encode_bind<P, I>(stmt: &Statement, params: I, portal: &str, buf: &mut BytesMut) -> Result<(), Error>\n\nwhere\n\n P: BorrowToSql,\n\n I: IntoIterator<Item = P>,\n\n I::IntoIter: ExactSizeIterator,\n\n{\n\n let params = params.into_iter();\n\n\n\n assert_eq!(\n\n stmt.params().len(),\n\n params.len(),\n\n \"expected {} parameters but got {}\",\n\n stmt.params().len(),\n\n params.len()\n\n );\n\n\n\n let mut error_idx = 0;\n\n let r = frontend::bind(\n\n portal,\n\n stmt.name(),\n", "file_path": "postgres/src/query.rs", "rank": 5, "score": 223469.74846176177 }, { "content": "#[inline]\n\npub fn apply_mask(buf: &mut [u8], mask: [u8; 4]) {\n\n apply_mask_fast32(buf, mask)\n\n}\n\n\n\n/// A safe unoptimized mask application.\n", "file_path": "http-ws/src/mask.rs", "rank": 6, "score": 219888.0107261563 }, { "content": "#[inline]\n\npub fn apply_mask_fast32(buf: &mut [u8], mask: [u8; 4]) {\n\n let mask_u32 = u32::from_ne_bytes(mask);\n\n\n\n // SAFETY:\n\n //\n\n // 
https://github.com/snapview/tungstenite-rs/pull/126\n\n let (prefix, words, suffix) = unsafe { buf.align_to_mut::<u32>() };\n\n apply_mask_fallback(prefix, mask);\n\n let head = prefix.len() & 3;\n\n let mask_u32 = if head > 0 {\n\n if cfg!(target_endian = \"big\") {\n\n mask_u32.rotate_left(8 * head as u32)\n\n } else {\n\n mask_u32.rotate_right(8 * head as u32)\n\n }\n\n } else {\n\n mask_u32\n\n };\n\n for word in words.iter_mut() {\n\n *word ^= mask_u32;\n", "file_path": "http-ws/src/mask.rs", "rank": 7, "score": 216433.8772944194 }, { "content": "/// A trait implemented by types that can index into columns of a row.\n\n///\n\n/// This cannot be implemented outside of this crate.\n\npub trait RowIndex: Sealed {\n\n #[doc(hidden)]\n\n fn __idx<T>(&self, columns: &[T]) -> Option<usize>\n\n where\n\n T: AsName;\n\n}\n\n\n\nimpl Sealed for usize {}\n\n\n\nimpl RowIndex for usize {\n\n #[inline]\n\n fn __idx<T>(&self, columns: &[T]) -> Option<usize>\n\n where\n\n T: AsName,\n\n {\n\n if *self >= columns.len() {\n\n None\n\n } else {\n\n Some(*self)\n\n }\n", "file_path": "postgres/src/row.rs", "rank": 8, "score": 215604.5412594346 }, { "content": "pub trait Queueable {\n\n type Item;\n\n\n\n // capacity of Self for check bound\n\n fn capacity(&self) -> usize;\n\n\n\n /// # Safety\n\n /// caller must make sure given index is not out of bound and properly initialized.\n\n unsafe fn _get_unchecked(&self, idx: usize) -> &Self::Item;\n\n\n\n /// # Safety\n\n /// caller must make sure given index is not out of bound and properly initialized.\n\n unsafe fn _get_mut_unchecked(&mut self, idx: usize) -> &mut Self::Item;\n\n\n\n /// # Safety\n\n /// caller must make sure given index is not out of bound and properly initialized.\n\n unsafe fn _read_unchecked(&mut self, idx: usize) -> Self::Item;\n\n\n\n /// # Safety\n\n /// caller must make sure given index is not out of bound and properly initialized.\n\n unsafe fn _write_unchecked(&mut self, idx: usize, item: 
Self::Item);\n\n}\n\n\n", "file_path": "unsafe_collection/src/bound_queue/mod.rs", "rank": 9, "score": 214493.5243701787 }, { "content": "/// A shortcut for generating a set of response types with given [Request] and `<Body>` type.\n\n///\n\n/// `<Body>` must be a type impl [futures_core::Stream] trait with `Result<T: AsRef<[u8]>, E>`\n\n/// as `Stream::Item` associated type.\n\n///\n\n/// # Examples:\n\n/// ```rust\n\n/// # use std::pin::Pin;\n\n/// # use std::task::{Context, Poll};\n\n/// # use http::{header, Request};\n\n/// # use futures_core::Stream;\n\n/// # #[derive(Default)]\n\n/// # struct DummyRequestBody;\n\n/// #\n\n/// # impl Stream for DummyRequestBody {\n\n/// # type Item = Result<Vec<u8>, ()>;\n\n/// # fn poll_next(self:Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n/// # Poll::Ready(Some(Ok(vec![1, 2, 3])))\n\n/// # }\n\n/// # }\n\n/// // an incoming http request.\n\n/// let mut req = Request::get(\"/\")\n\n/// .header(header::UPGRADE, header::HeaderValue::from_static(\"websocket\"))\n\n/// .header(header::CONNECTION, header::HeaderValue::from_static(\"upgrade\"))\n\n/// .header(header::SEC_WEBSOCKET_VERSION, header::HeaderValue::from_static(\"13\"))\n\n/// .header(header::SEC_WEBSOCKET_KEY, header::HeaderValue::from_static(\"some_key\"))\n\n/// .body(())\n\n/// .unwrap();\n\n///\n\n/// let (decoded_stream, http_response, encode_stream_sink) = http_ws::ws(&mut req, DummyRequestBody).unwrap();\n\n/// ```\n\npub fn ws<Req, B, T, E>(mut req: Req, body: B) -> Result<WsOutput<B>, HandshakeError>\n\nwhere\n\n Req: std::borrow::BorrowMut<Request<()>>,\n\n B: futures_core::Stream<Item = Result<T, E>>,\n\n T: AsRef<[u8]>,\n\n{\n\n let req = req.borrow_mut();\n\n\n\n let builder = handshake(req.method(), req.headers())?;\n\n\n\n let decode = DecodeStream::new(body);\n\n let (tx, encode) = decode.encode_stream();\n\n\n\n let res = builder\n\n .body(encode)\n\n .expect(\"handshake function failed to generate correct Response 
Builder\");\n\n\n\n Ok((decode, res, tx))\n\n}\n\n\n", "file_path": "http-ws/src/lib.rs", "rank": 10, "score": 204609.47869651136 }, { "content": " /// Helper trait for convert a [Request] to [Response].\n\n /// This is for re-use request's heap allocation and pass down the context data inside [Extensions]\n\n pub trait IntoResponse<B, ResB> {\n\n fn into_response(self, body: B) -> Response<ResB>;\n\n\n\n fn as_response(&mut self, body: B) -> Response<ResB>\n\n where\n\n Self: Default,\n\n {\n\n std::mem::take(self).into_response(body)\n\n }\n\n }\n\n\n\n impl<ReqB, B, ResB> IntoResponse<B, ResB> for super::request::Request<ReqB>\n\n where\n\n B: Into<ResB>,\n\n {\n\n fn into_response(self, body: B) -> Response<ResB> {\n\n let (\n\n request::Parts {\n\n mut headers,\n\n extensions,\n", "file_path": "http/src/lib.rs", "rank": 11, "score": 204034.1399814531 }, { "content": "/// An async array that act in mpsc manner. There can be multiple `Sender`s and one `Receiver`.\n\npub fn async_vec<T>(cap: usize) -> (Sender<T>, Receiver<T>) {\n\n assert!(cap > 0, \"async_vec must have a capacity larger than 0.\");\n\n\n\n let array = Rc::new(RefCell::new(AsyncVec::new(cap)));\n\n\n\n (Sender { inner: array.clone() }, Receiver { inner: array })\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Sender<T> {\n\n inner: Rc<RefCell<AsyncVec<T>>>,\n\n}\n\n\n\nimpl<T> Drop for Sender<T> {\n\n fn drop(&mut self) {\n\n // Last copy of Sender. 
wake up receiver.\n\n if Rc::strong_count(&self.inner) == 2 {\n\n let mut inner = self.inner.borrow_mut();\n\n inner.set_close();\n\n inner.wake_receiver();\n", "file_path": "unsafe_collection/src/channel/mpsc.rs", "rank": 12, "score": 201872.51028821064 }, { "content": "#[inline]\n\nfn encode_version_status_reason(buf: &mut BytesMut, version: Version, status: StatusCode) {\n\n // encode version, status code and reason\n\n match (version, status) {\n\n // happy path shortcut.\n\n (Version::HTTP_11, StatusCode::OK) => {\n\n buf.extend_from_slice(b\"HTTP/1.1 200 OK\\r\\n\");\n\n return;\n\n }\n\n (Version::HTTP_11, _) => {\n\n buf.extend_from_slice(b\"HTTP/1.1 \");\n\n }\n\n (Version::HTTP_10, _) => {\n\n buf.extend_from_slice(b\"HTTP/1.0 \");\n\n }\n\n _ => {\n\n debug!(target: \"h1_encode\", \"response with unexpected response version\");\n\n buf.extend_from_slice(b\"HTTP/1.1 \");\n\n }\n\n }\n\n\n", "file_path": "http/src/h1/proto/encode.rs", "rank": 13, "score": 195230.49353707366 }, { "content": "/// Trait for bound check different types of read/write buffer strategy.\n\npub trait BufBound {\n\n fn backpressure(&self) -> bool;\n\n fn is_empty(&self) -> bool;\n\n}\n\n\n", "file_path": "http/src/h1/proto/buf.rs", "rank": 14, "score": 186678.09749895745 }, { "content": "/// Trait to generic over different types of write buffer strategy.\n\npub trait BufWrite: BufBound {\n\n fn buf_head<F, T, E>(&mut self, func: F) -> Result<T, E>\n\n where\n\n F: FnOnce(&mut BytesMut) -> Result<T, E>;\n\n\n\n fn buf_static(&mut self, bytes: &'static [u8]);\n\n\n\n fn buf_bytes(&mut self, bytes: Bytes);\n\n\n\n fn buf_chunked(&mut self, bytes: Bytes);\n\n\n\n fn try_write<Io: AsyncIo>(&mut self, io: &mut Io) -> io::Result<()>;\n\n}\n\n\n\npub struct FlatBuf<const BUF_LIMIT: usize>(BytesMut);\n\n\n\nimpl<const BUF_LIMIT: usize> FlatBuf<BUF_LIMIT> {\n\n #[inline]\n\n pub fn new() -> Self {\n\n Self(BytesMut::new())\n", "file_path": "http/src/h1/proto/buf.rs", "rank": 15, "score": 
182135.02655167092 }, { "content": "/// Prepare a request with given Uri.\n\n/// After process the request would be ready to be sent to server for websocket connection.\n\npub fn client_request_from_uri<U, E>(uri: U) -> Result<Request<()>, E>\n\nwhere\n\n Uri: TryFrom<U, Error = E>,\n\n{\n\n let uri = uri.try_into()?;\n\n let mut req = Request::new(());\n\n *req.uri_mut() = uri;\n\n\n\n req.headers_mut()\n\n .insert(header::UPGRADE, HeaderValue::from_static(\"websocket\"));\n\n req.headers_mut()\n\n .insert(header::CONNECTION, HeaderValue::from_static(\"upgrade\"));\n\n req.headers_mut()\n\n .insert(header::SEC_WEBSOCKET_VERSION, HeaderValue::from_static(\"13\"));\n\n\n\n let sec_key = rand::random::<[u8; 16]>();\n\n let key = base64::encode(&sec_key);\n\n\n\n req.headers_mut()\n\n .insert(header::SEC_WEBSOCKET_KEY, HeaderValue::try_from(key.as_str()).unwrap());\n\n\n\n Ok(req)\n\n}\n\n\n", "file_path": "http-ws/src/lib.rs", "rank": 16, "score": 174941.4345401544 }, { "content": "/// Trait for simulate `Fn<(&Self, Arg)> -> impl Future<Output = Result<T, E>> + '_`.\n\n/// The function call come from stateful type that can be referenced within returned opaque future.\n\npub trait Service<Req> {\n\n /// The Ok part of output future.\n\n type Response;\n\n\n\n /// The Err part of output future.\n\n type Error;\n\n\n\n /// The output future that can reference Self with GAT lifetime.\n\n type Future<'f>: Future<Output = Result<Self::Response, Self::Error>>\n\n where\n\n Self: 'f;\n\n\n\n fn call(&self, req: Req) -> Self::Future<'_>;\n\n}\n\n\n\nmacro_rules! 
impl_alloc {\n\n ($alloc: ident) => {\n\n impl<S, Req> Service<Req> for $alloc<S>\n\n where\n\n S: Service<Req> + ?Sized,\n", "file_path": "service/src/service/mod.rs", "rank": 17, "score": 174200.00361827045 }, { "content": "pub trait Select: Sized {\n\n fn select<Fut>(self, other: Fut) -> SelectFuture<Self, Fut>;\n\n}\n\n\n\nimpl<F> Select for F\n\nwhere\n\n F: Future,\n\n{\n\n #[inline]\n\n fn select<Fut>(self, other: Fut) -> SelectFuture<Self, Fut> {\n\n SelectFuture {\n\n fut1: self,\n\n fut2: other,\n\n }\n\n }\n\n}\n\n\n\npub struct SelectFuture<Fut1, Fut2> {\n\n fut1: Fut1,\n\n fut2: Fut2,\n", "file_path": "unsafe_collection/src/futures.rs", "rank": 18, "score": 173960.05224442325 }, { "content": "/// Creates a bounded spsc channel with the given capacity.\n\n///\n\n/// Returns the sender and receiver.\n\n///\n\n/// # Panics\n\n///\n\n/// Panics if the capacity is zero.\n\n///\n\npub fn channel<T>(cap: usize) -> (Sender<T>, Receiver<T>) {\n\n assert!(cap > 0, \"capacity must be non-zero\");\n\n\n\n let inner = Arc::new(Inner {\n\n head: CachePadded::new(AtomicUsize::new(0)),\n\n tail: CachePadded::new(AtomicUsize::new(0)),\n\n buffer: ManuallyDrop::new(Vec::with_capacity(cap)).as_mut_ptr(),\n\n cap,\n\n waker: AtomicWaker::default(),\n\n _marker: PhantomData,\n\n });\n\n\n\n let tx = Sender {\n\n inner: inner.clone(),\n\n head: 0,\n\n tail: 0,\n\n };\n\n\n\n let rx = Receiver {\n\n inner,\n", "file_path": "unsafe_collection/src/channel/spsc.rs", "rank": 19, "score": 171495.53473838905 }, { "content": "/// trait for Borrow &mut T from &mut Self.\n\n/// used for foreign types that can be impl with [BorrowMut] trait.\n\npub trait BorrowReqMut<T> {\n\n fn borrow_mut(&mut self) -> &mut T;\n\n}\n\n\n\nimpl<B> BorrowReq<http::Uri> for http::Request<B> {\n\n fn borrow(&self) -> &http::Uri {\n\n self.uri()\n\n }\n\n}\n\n\n\nimpl<B> BorrowReq<http::Method> for http::Request<B> {\n\n fn borrow(&self) -> &http::Method {\n\n self.method()\n\n }\n\n}\n\n\n\nimpl<B> 
BorrowReqMut<http::Extensions> for http::Request<B> {\n\n fn borrow_mut(&mut self) -> &mut http::Extensions {\n\n self.extensions_mut()\n\n }\n", "file_path": "http/src/request.rs", "rank": 20, "score": 171207.145788097 }, { "content": "#[inline]\n\nfn apply_mask_fallback(buf: &mut [u8], mask: [u8; 4]) {\n\n for (i, byte) in buf.iter_mut().enumerate() {\n\n *byte ^= mask[i & 3];\n\n }\n\n}\n\n\n\n/// Faster version of `apply_mask()` which operates on 4-byte blocks.\n", "file_path": "http-ws/src/mask.rs", "rank": 21, "score": 169032.25214311146 }, { "content": "/// Same as `std::ops::Fn` trait but for async output.\n\n///\n\n/// It is necessary in the the HRTB bounds for async fn's with reference paramters because it\n\n/// allows the output future to be bound to the paramter lifetime.\n\n/// `F: for<'a> AsyncFn<(&'a u8,) Output=u8>`\n\npub trait AsyncFn<Arg> {\n\n type Output;\n\n type Future: Future<Output = Self::Output>;\n\n\n\n fn call(&self, arg: Arg) -> Self::Future;\n\n}\n\n\n\nmacro_rules! 
async_fn_impl {\n\n ($($arg: ident),*) => {\n\n impl<Func, Fut, $($arg,)*> AsyncFn<($($arg,)*)> for Func\n\n where\n\n Func: Fn($($arg),*) -> Fut,\n\n Fut: Future,\n\n {\n\n type Output = Fut::Output;\n\n type Future = Fut;\n\n\n\n #[inline]\n\n fn call(&self, ($($arg,)*): ($($arg,)*)) -> Self::Future {\n\n self($($arg,)*)\n", "file_path": "http/src/util/service/handler.rs", "rank": 22, "score": 167845.03236081486 }, { "content": "/// Extend trait for [Service].\n\n///\n\n/// Can be used to cehck the ready state of a service before calling it.\n\n///\n\n/// # Examples:\n\n/// ```rust\n\n/// #![feature(generic_associated_types, type_alias_impl_trait)]\n\n/// # use std::{cell::Cell, rc::Rc, future::Future};\n\n/// # use xitca_service::{Service, ready::ReadyService};\n\n///\n\n/// // a service with conditional availability based on state of Permit.\n\n/// struct Foo(Permit);\n\n///\n\n/// // a permit reset the inner boolean to true on drop.\n\n/// #[derive(Clone)]\n\n/// struct Permit(Rc<Cell<bool>>);\n\n///\n\n/// impl Drop for Permit {\n\n/// fn drop(&mut self) {\n\n/// self.0.set(true);\n\n/// }\n\n/// }\n\n///\n\n/// impl Service<()> for Foo {\n\n/// type Response = ();\n\n/// type Error = ();\n\n/// type Future<'f> = impl Future<Output = Result<Self::Response, Self::Error>>;\n\n///\n\n/// fn call(&self, _req: ()) -> Self::Future<'_> {\n\n/// async { Ok(()) }\n\n/// }\n\n/// }\n\n///\n\n/// impl ReadyService<()> for Foo {\n\n/// type Ready = Result<Permit, Self::Error>;\n\n/// type ReadyFuture<'f> = impl Future<Output = Self::Ready>;\n\n///\n\n/// fn ready(&self) -> Self::ReadyFuture<'_> {\n\n/// async move {\n\n/// if self.0.0.get() {\n\n/// // set permit to false and return with Ok<Permit>\n\n/// self.0.0.set(false);\n\n/// Ok(self.0.clone())\n\n/// } else {\n\n/// // return error is to simply the example.\n\n/// // In real world this branch should be an async waiting for Permit reset to true.\n\n/// Err(())\n\n/// } \n\n/// }\n\n/// }\n\n/// }\n\n///\n\n/// 
async fn workflow(service: &Foo) {\n\n/// let permit = service.ready().await.unwrap(); // check service ready state.\n\n///\n\n/// service.call(()).await.unwrap(); // run Service::call when permit is held in scope.\n\n///\n\n/// drop(permit); // drop permit after Service::call is finished.\n\n/// }\n\n///\n\n/// async fn throttle(service: &Foo) {\n\n/// let permit = service.ready().await.unwrap();\n\n/// assert!(service.ready().await.is_err()); // service is throttled because permit is still held in scope.\n\n/// }\n\n/// ```\n\npub trait ReadyService<Req>: Service<Req> {\n\n type Ready;\n\n\n\n type ReadyFuture<'f>: Future<Output = Self::Ready>\n\n where\n\n Self: 'f;\n\n\n\n fn ready(&self) -> Self::ReadyFuture<'_>;\n\n}\n\n\n\nmacro_rules! impl_alloc {\n\n ($alloc: ident) => {\n\n impl<S, Req> ReadyService<Req> for $alloc<S>\n\n where\n\n S: ReadyService<Req> + ?Sized,\n\n {\n\n type Ready = S::Ready;\n\n type ReadyFuture<'f> = S::ReadyFuture<'f> where S: 'f;\n\n\n\n #[inline]\n", "file_path": "service/src/ready/mod.rs", "rank": 23, "score": 157770.0825979722 }, { "content": "struct BufferedIo<'a, St, W, const READ_BUF_LIMIT: usize, const WRITE_BUF_LIMIT: usize> {\n\n io: &'a mut St,\n\n read_buf: FlatBuf<READ_BUF_LIMIT>,\n\n write_buf: W,\n\n}\n\n\n\nimpl<'a, St, W, const READ_BUF_LIMIT: usize, const WRITE_BUF_LIMIT: usize>\n\n BufferedIo<'a, St, W, READ_BUF_LIMIT, WRITE_BUF_LIMIT>\n\nwhere\n\n St: AsyncIo,\n\n W: BufWrite,\n\n{\n\n fn new(io: &'a mut St, write_buf: W) -> Self {\n\n Self {\n\n io,\n\n read_buf: FlatBuf::new(),\n\n write_buf,\n\n }\n\n }\n\n\n", "file_path": "http/src/h1/proto/dispatcher.rs", "rank": 24, "score": 155102.58470834547 }, { "content": "type EncodedBuf<B, B2> = EitherBuf<B, EitherBuf<B2, &'static [u8]>>;\n\n\n\n// buf list is forced to go in backpressure when it reaches this length.\n\n// 32 is chosen for max of 16 pipelined http requests with a single body item.\n\nconst BUF_LIST_CNT: usize = 32;\n\n\n\nimpl<const BUF_LIMIT: usize> 
BufBound for ListBuf<EncodedBuf<Bytes, Eof>, BUF_LIMIT> {\n\n #[inline]\n\n fn backpressure(&self) -> bool {\n\n self.list.remaining() >= BUF_LIMIT || self.list.is_full()\n\n }\n\n\n\n #[inline]\n\n fn is_empty(&self) -> bool {\n\n self.list.remaining() == 0\n\n }\n\n}\n\n\n\nimpl<const BUF_LIMIT: usize> BufWrite for ListBuf<EncodedBuf<Bytes, Eof>, BUF_LIMIT> {\n\n fn buf_head<F, T, E>(&mut self, func: F) -> Result<T, E>\n", "file_path": "http/src/h1/proto/buf.rs", "rank": 25, "score": 153427.4356909254 }, { "content": "/// A specialized http/1 server on top of [test_server]\n\npub fn test_h1_server<F, I, B, E>(factory: F) -> Result<TestServerHandle, Error>\n\nwhere\n\n F: Fn() -> I + Send + Sync + 'static,\n\n I: BuildService + 'static,\n\n I::Service: ReadyService<Request<h1::RequestBody>, Response = HResponse<B>> + 'static,\n\n <I::Service as Service<Request<h1::RequestBody>>>::Error: fmt::Debug,\n\n I::Error: error::Error + 'static,\n\n B: Stream<Item = Result<Bytes, E>> + 'static,\n\n E: fmt::Debug + 'static,\n\n{\n\n test_server::<_, _, TcpStream>(move || {\n\n let f = factory();\n\n HttpServiceBuilder::h1(f)\n\n })\n\n}\n\n\n", "file_path": "test/src/lib.rs", "rank": 26, "score": 144405.3151469435 }, { "content": "/// A specialized http/3 server\n\npub fn test_h3_server<F, I, B, E>(factory: F) -> Result<TestServerHandle, Error>\n\nwhere\n\n F: Fn() -> I + Send + Sync + 'static,\n\n I: BuildService + 'static,\n\n I::Service: ReadyService<Request<h3::RequestBody>, Response = HResponse<B>> + 'static,\n\n <I::Service as Service<Request<h3::RequestBody>>>::Error: fmt::Debug,\n\n I::Error: error::Error + 'static,\n\n B: Stream<Item = Result<Bytes, E>> + 'static,\n\n E: fmt::Debug + 'static,\n\n{\n\n let addr = std::net::UdpSocket::bind(\"127.0.0.1:0\")?.local_addr()?;\n\n\n\n let key = fs::read(\"../examples/cert/key.pem\")?;\n\n let cert = fs::read(\"../examples/cert/cert.pem\")?;\n\n\n\n let key = rustls_pemfile::pkcs8_private_keys(&mut &*key)?.remove(0);\n\n 
let key = rustls::PrivateKey(key);\n\n\n\n let cert = rustls_pemfile::certs(&mut &*cert)?\n\n .into_iter()\n", "file_path": "test/src/lib.rs", "rank": 27, "score": 144405.3151469435 }, { "content": "/// A specialized http/2 server on top of [test_server]\n\npub fn test_h2_server<F, I, B, E>(factory: F) -> Result<TestServerHandle, Error>\n\nwhere\n\n F: Fn() -> I + Send + Sync + 'static,\n\n I: BuildService + 'static,\n\n I::Service: ReadyService<Request<h2::RequestBody>, Response = HResponse<B>> + 'static,\n\n <I::Service as Service<Request<h2::RequestBody>>>::Error: fmt::Debug,\n\n I::Error: error::Error + 'static,\n\n B: Stream<Item = Result<Bytes, E>> + 'static,\n\n E: fmt::Debug + 'static,\n\n{\n\n test_server::<_, _, TcpStream>(move || {\n\n let f = factory();\n\n let config = HttpServiceConfig::new()\n\n .first_request_timeout(Duration::from_millis(500))\n\n .tls_accept_timeout(Duration::from_millis(500))\n\n .keep_alive_timeout(Duration::from_millis(500));\n\n HttpServiceBuilder::h2(f).config(config)\n\n })\n\n}\n\n\n", "file_path": "test/src/lib.rs", "rank": 28, "score": 144405.3151469435 }, { "content": "/// A helper trait for get a protocol from certain types.\n\npub trait AsVersion {\n\n fn as_version(&self) -> Version;\n\n\n\n fn from_alpn<B: AsRef<[u8]>>(proto: B) -> Version {\n\n if proto.as_ref().windows(2).any(|window| window == b\"h2\") {\n\n Version::HTTP_2\n\n } else {\n\n Version::HTTP_11\n\n }\n\n }\n\n}\n\n\n\nimpl AsVersion for xitca_io::net::Stream {\n\n #[inline]\n\n fn as_version(&self) -> Version {\n\n match *self {\n\n Self::Tcp(ref tcp) => tcp.as_version(),\n\n #[cfg(unix)]\n\n Self::Unix(..) 
=> Version::HTTP_11,\n\n #[cfg(feature = \"http3\")]\n", "file_path": "http/src/version.rs", "rank": 29, "score": 142712.10618497373 }, { "content": "/// Trait for multiplex connection.\n\n/// HTTP2 and HTTP3 connections are supposed to be multiplexed on single TCP connection.\n\npub trait Multiplex {\n\n /// Get a ownership from mut reference.\n\n ///\n\n /// # Panics:\n\n /// When called on connection type that are not multiplexable.\n\n fn multiplex(&mut self) -> Self;\n\n\n\n /// Return true for connection that can be multiplexed.\n\n fn is_multiplexable(&self) -> bool;\n\n}\n\n\n\nimpl Multiplex for Connection {\n\n fn multiplex(&mut self) -> Self {\n\n match *self {\n\n #[cfg(feature = \"http2\")]\n\n Self::H2(ref conn) => Self::H2(conn.clone()),\n\n _ => unreachable!(\"Connection is not multiplexable\"),\n\n }\n\n }\n\n\n\n fn is_multiplexable(&self) -> bool {\n\n match *self {\n\n #[cfg(feature = \"http2\")]\n\n Self::H2(_) => true,\n\n _ => false,\n\n }\n\n }\n\n}\n", "file_path": "client/src/connection.rs", "rank": 30, "score": 142711.8949627222 }, { "content": "pub trait Address {\n\n /// Get hostname part.\n\n fn hostname(&self) -> &str;\n\n\n\n /// Get optional port part.\n\n fn port(&self) -> Option<u16> {\n\n None\n\n }\n\n}\n\n\n\nimpl Address for Uri<'_> {\n\n fn hostname(&self) -> &str {\n\n self.host().unwrap_or(\"\")\n\n }\n\n\n\n fn port(&self) -> Option<u16> {\n\n match self.port_u16() {\n\n Some(port) => Some(port),\n\n None => scheme_to_port(self.scheme_str()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "client/src/connect.rs", "rank": 31, "score": 142706.97313241812 }, { "content": " pub trait AsName {\n\n fn as_name(&self) -> &str;\n\n }\n\n}\n\n\n\nimpl AsName for Column {\n\n fn as_name(&self) -> &str {\n\n self.name()\n\n }\n\n}\n\n\n\nimpl AsName for String {\n\n fn as_name(&self) -> &str {\n\n self\n\n }\n\n}\n\n\n", "file_path": "postgres/src/row.rs", "rank": 32, "score": 142706.97313241812 }, { "content": "/// Trait for getting current 
date/time.\n\n///\n\n/// This is usally used by a low resolution of timer to reduce frequent syscall to OS.\n\npub trait DateTime {\n\n /// The size hint of slice by Self::date method.\n\n const DATE_VALUE_LENGTH: usize;\n\n\n\n /// closure would receive byte slice representation of [HttpDate].\n\n fn with_date<F, O>(&self, f: F) -> O\n\n where\n\n F: FnOnce(&[u8]) -> O;\n\n\n\n fn now(&self) -> Instant;\n\n}\n\n\n\n/// Struct with Date update periodically at 500 milli seconds interval.\n\npub(crate) struct DateTimeService {\n\n state: Rc<RefCell<DateTimeState>>,\n\n handle: JoinHandle<()>,\n\n}\n\n\n\nimpl Drop for DateTimeService {\n\n fn drop(&mut self) {\n", "file_path": "http/src/date.rs", "rank": 33, "score": 140208.0925257476 }, { "content": " pub trait TaskFut\n\n where\n\n Self: Future,\n\n Self::Output: Send,\n\n {\n\n }\n\n\n\n #[cfg(not(feature = \"single-thread\"))]\n\n impl<F> TaskFut for F\n\n where\n\n F: Future + Send + 'static,\n\n F::Output: Send,\n\n {\n\n }\n\n\n\n #[cfg(feature = \"single-thread\")]\n\n impl<F> TaskFut for F\n\n where\n\n F: Future + 'static,\n\n F::Output: Send,\n\n {\n\n }\n\n}\n\n\n", "file_path": "postgres/src/lib.rs", "rank": 34, "score": 140198.6848679531 }, { "content": "/// An object constructor represents a one of possibly many ways to create a trait object from `I`.\n\n///\n\n/// A [Service] type, for example, may be type-erased into `Box<dyn Service<&'static str>>`,\n\n/// `Box<dyn for<'a> Service<&'a str>>`, `Box<dyn Service<&'static str> + Service<u8>>`, etc.\n\n/// Each would be a separate impl for [ObjectConstructor].\n\npub trait ObjectConstructor<I> {\n\n /// The type-erased form of `I`.\n\n type Object;\n\n\n\n /// Constructs `Self::Object` from `I`.\n\n fn into_object(inner: I) -> Self::Object;\n\n}\n\n\n\n/// The most trivial [ObjectConstructor] for [ServiceFactory] types.\n\n///\n\n/// Its main limitation is that the trait object is not polymorphic over `Req`.\n\n/// So if the original service type is 
`impl for<'r> Service<&'r str>`,\n\n/// the resulting object type would only be `impl Service<&'r str>`\n\n/// for some specific lifetime `'r`.\n\npub struct DefaultObjectConstructor<Req, Arg>(PhantomData<(Req, Arg)>);\n\n\n\n/// [ServiceFactory] object created by the [DefaultObjectConstructor]\n\npub type DefaultFactoryObject<Req, Arg, BErr, Res, Err> =\n\n impl BuildService<Arg, Service = DefaultServiceObject<Req, Res, Err>, Error = BErr>;\n\n\n", "file_path": "service/src/object.rs", "rank": 35, "score": 135884.6527485097 }, { "content": "/// trait for Borrow &T from &Self.\n\n/// used for foreign types that can be impl with [Borrow] trait.\n\npub trait BorrowReq<T> {\n\n fn borrow(&self) -> &T;\n\n}\n\n\n", "file_path": "http/src/request.rs", "rank": 36, "score": 133528.66538932832 }, { "content": " /// Object-safe counterpart of [Service].\n\n pub trait ServiceObject<Req> {\n\n type Response;\n\n type Error;\n\n\n\n fn call<'s, 'f>(&'s self, req: Req) -> BoxFuture<'f, Self::Response, Self::Error>\n\n where\n\n Req: 'f,\n\n 's: 'f;\n\n }\n\n\n\n /// Converts between object-safe non-object-safe Service and ServiceFactory. 
See impls.\n\n pub struct Wrapper<I>(pub I);\n\n\n\n impl<Inner, Req> Service<Req> for Wrapper<Box<Inner>>\n\n where\n\n Inner: ServiceObject<Req> + ?Sized,\n\n {\n\n type Response = Inner::Response;\n\n type Error = Inner::Error;\n\n type Future<'f> = impl Future<Output = Result<Self::Response, Self::Error>> where Self: 'f;\n", "file_path": "service/src/object.rs", "rank": 37, "score": 133513.48075642932 }, { "content": "/// Make Response with reference of Req.\n\n/// The Output type is what returns from [handler_service] function.\n\npub trait Responder<Req> {\n\n type Output;\n\n type Future: Future<Output = Self::Output>;\n\n\n\n fn respond_to(self, req: Req) -> Self::Future;\n\n}\n\n\n\nimpl<R, T, E> Responder<R> for Result<T, E>\n\nwhere\n\n T: Responder<R>,\n\n{\n\n type Output = Result<T::Output, E>;\n\n type Future = impl Future<Output = Self::Output>;\n\n\n\n #[inline]\n\n fn respond_to(self, req: R) -> Self::Future {\n\n async { Ok(self?.respond_to(req).await) }\n\n }\n\n}\n\n\n", "file_path": "http/src/util/service/handler.rs", "rank": 38, "score": 131275.94187634304 }, { "content": "pub trait AsyncClosure<Args> {\n\n type Output;\n\n type Future: Future<Output = Self::Output>;\n\n\n\n fn call(&self, arg: Args) -> Self::Future;\n\n}\n\n\n\nimpl<F, Arg1, Arg2, Fut> AsyncClosure<(Arg1, Arg2)> for F\n\nwhere\n\n F: Fn(Arg1, Arg2) -> Fut,\n\n Fut: Future,\n\n{\n\n type Output = Fut::Output;\n\n type Future = impl Future<Output = Self::Output>;\n\n\n\n fn call(&self, (arg1, arg2): (Arg1, Arg2)) -> Self::Future {\n\n (self)(arg1, arg2)\n\n }\n\n}\n", "file_path": "service/src/async_closure.rs", "rank": 39, "score": 131275.94187634304 }, { "content": "/// Trait for custom resolver.\n\n///\n\n/// # Examples\n\n/// ```rust\n\n/// use std::net::SocketAddr;\n\n///\n\n/// use xitca_client::{error::Error, ClientBuilder, Resolve};\n\n///\n\n/// struct MyResolver;\n\n///\n\n/// #[async_trait::async_trait]\n\n/// impl Resolve for MyResolver {\n\n/// async fn 
resolve(&self, hostname: &str, port: u16) -> Result<Vec<SocketAddr>, Error> {\n\n/// // Your DNS resolve logic goes here.\n\n/// todo!()\n\n/// }\n\n/// }\n\n///\n\n/// # fn resolve() {\n\n/// let client = ClientBuilder::new().resolver(MyResolver).finish();\n\n/// # }\n\n/// ```\n\npub trait Resolve: Send + Sync {\n\n /// *. hostname does not include port number.\n\n fn resolve<'s, 'h, 'f>(&'s self, hostname: &'h str, port: u16) -> BoxFuture<'f, Result<Vec<SocketAddr>, Error>>\n\n where\n\n 's: 'f,\n\n 'h: 'f;\n\n}\n", "file_path": "client/src/resolver.rs", "rank": 40, "score": 129838.18564635096 }, { "content": "mod buf_list;\n\nmod uninit;\n\n\n\npub use buf_list::{BufList, EitherBuf};\n", "file_path": "unsafe_collection/src/bytes/mod.rs", "rank": 41, "score": 129362.70503411713 }, { "content": "/// Helper trait to cast a type that impl [`BuildService`](xitca_service::BuildService)\n\n/// to a trait object that is `Send` and `Sync`.\n\npub trait BuildServiceSync<Req>\n\nwhere\n\n Req: From<Stream>,\n\n Self: Send + Sync + 'static,\n\n{\n\n type BuildService: BuildService<Service = Self::Service>;\n\n type Service: ReadyService<Req>;\n\n\n\n fn build(&self) -> Self::BuildService;\n\n}\n\n\n\nimpl<F, T, Req> BuildServiceSync<Req> for F\n\nwhere\n\n F: Fn() -> T + Send + Sync + 'static,\n\n T: BuildService,\n\n T::Service: ReadyService<Req>,\n\n Req: From<Stream>,\n\n{\n\n type BuildService = T;\n\n type Service = T::Service;\n\n\n\n fn build(&self) -> T {\n\n self()\n\n }\n\n}\n", "file_path": "server/src/server/service.rs", "rank": 42, "score": 129163.30704850683 }, { "content": "use std::io::IoSlice;\n\n\n\nuse bytes_crate::{Buf, BufMut, Bytes, BytesMut};\n\n\n\nuse crate::bound_queue::stack::StackQueue;\n\n\n\n/// A bounded stack buffer array that can hold up to LEN size items.\n\n/// BufList implement [Buf] trait when it's item is a type implement the same trait.\n\npub struct BufList<B, const LEN: usize = 8> {\n\n pub(super) bufs: StackQueue<B, LEN>,\n\n 
remaining: usize,\n\n}\n\n\n\nimpl<B: Buf> Default for BufList<B> {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl<B: Buf, const LEN: usize> BufList<B, LEN> {\n", "file_path": "unsafe_collection/src/bytes/buf_list.rs", "rank": 43, "score": 125908.47001376808 }, { "content": " #[inline]\n\n fn chunks_vectored<'a>(&'a self, dst: &mut [IoSlice<'a>]) -> usize {\n\n assert!(!dst.is_empty());\n\n let mut vecs = 0;\n\n for buf in self.bufs.iter() {\n\n vecs += buf.chunks_vectored(&mut dst[vecs..]);\n\n if vecs == dst.len() {\n\n break;\n\n }\n\n }\n\n vecs\n\n }\n\n\n\n #[inline]\n\n fn advance(&mut self, mut cnt: usize) {\n\n assert!(self.remaining >= cnt);\n\n\n\n self.remaining -= cnt;\n\n\n\n // SAFETY:\n", "file_path": "unsafe_collection/src/bytes/buf_list.rs", "rank": 44, "score": 125900.0459311481 }, { "content": " match *self {\n\n Self::Left(ref buf) => buf.chunks_vectored(dst),\n\n Self::Right(ref buf) => buf.chunks_vectored(dst),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn advance(&mut self, cnt: usize) {\n\n match *self {\n\n Self::Left(ref mut buf) => buf.advance(cnt),\n\n Self::Right(ref mut buf) => buf.advance(cnt),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::ptr;\n\n\n\n use super::*;\n", "file_path": "unsafe_collection/src/bytes/buf_list.rs", "rank": 45, "score": 125896.53841417946 }, { "content": " self.bufs.is_full()\n\n }\n\n\n\n #[inline]\n\n pub const fn is_empty(&self) -> bool {\n\n self.bufs.is_empty()\n\n }\n\n}\n\n\n\nimpl<B: Buf, const LEN: usize> Buf for BufList<B, LEN> {\n\n #[inline]\n\n fn remaining(&self) -> usize {\n\n self.remaining\n\n }\n\n\n\n #[inline]\n\n fn chunk(&self) -> &[u8] {\n\n self.bufs.front().map(Buf::chunk).unwrap_or_default()\n\n }\n\n\n", "file_path": "unsafe_collection/src/bytes/buf_list.rs", "rank": 46, "score": 125890.5487885095 }, { "content": " R: Buf,\n\n{\n\n #[inline]\n\n fn remaining(&self) -> usize {\n\n match *self {\n\n Self::Left(ref buf) => buf.remaining(),\n\n 
Self::Right(ref buf) => buf.remaining(),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn chunk(&self) -> &[u8] {\n\n match *self {\n\n Self::Left(ref buf) => buf.chunk(),\n\n Self::Right(ref buf) => buf.chunk(),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn chunks_vectored<'a>(&'a self, dst: &mut [IoSlice<'a>]) -> usize {\n", "file_path": "unsafe_collection/src/bytes/buf_list.rs", "rank": 47, "score": 125888.8702271448 }, { "content": " #[inline]\n\n pub const fn new() -> Self {\n\n Self {\n\n bufs: StackQueue::new(),\n\n remaining: 0,\n\n }\n\n }\n\n\n\n /// # Panic:\n\n ///\n\n /// push new item when the list is already full.\n\n #[inline]\n\n pub fn push(&mut self, buf: B) {\n\n debug_assert!(buf.has_remaining());\n\n self.remaining += buf.remaining();\n\n self.bufs.push_back(buf).expect(\"BufList overflown\");\n\n }\n\n\n\n #[inline]\n\n pub fn is_full(&self) -> bool {\n", "file_path": "unsafe_collection/src/bytes/buf_list.rs", "rank": 48, "score": 125888.16029555784 }, { "content": " let mut bm = BytesMut::with_capacity(len);\n\n bm.put(self.take(len));\n\n bm.freeze()\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// An enum implement [Buf] trait when both arms are types implement the same trait.\n\n///\n\n/// *. 
Enum would implement [super::uninit::ChunkVectoredUninit] trait when both arms are types\n\n/// implement the same trait.\n\npub enum EitherBuf<L, R> {\n\n Left(L),\n\n Right(R),\n\n}\n\n\n\nimpl<L, R> Buf for EitherBuf<L, R>\n\nwhere\n\n L: Buf,\n", "file_path": "unsafe_collection/src/bytes/buf_list.rs", "rank": 49, "score": 125887.23857645442 }, { "content": " }\n\n }\n\n\n\n #[inline]\n\n fn copy_to_bytes(&mut self, len: usize) -> Bytes {\n\n // Our inner buffer may have an optimized version of copy_to_bytes, and if the whole\n\n // request can be fulfilled by the front buffer, we can take advantage.\n\n match self.bufs.front_mut() {\n\n Some(front) if front.remaining() == len => {\n\n let b = front.copy_to_bytes(len);\n\n self.remaining -= len;\n\n self.bufs.pop_front();\n\n b\n\n }\n\n Some(front) if front.remaining() > len => {\n\n self.remaining -= len;\n\n front.copy_to_bytes(len)\n\n }\n\n _ => {\n\n assert!(len <= self.remaining(), \"`len` greater than remaining\");\n", "file_path": "unsafe_collection/src/bytes/buf_list.rs", "rank": 50, "score": 125887.15330871263 }, { "content": " }\n\n\n\n #[test]\n\n fn one_long_buf_to_bytes() {\n\n let mut buf = BufList::default();\n\n buf.push(b\"Hello World\" as &[_]);\n\n assert_eq!(buf.copy_to_bytes(5), \"Hello\");\n\n assert_eq!(buf.chunk(), b\" World\");\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = \"`len` greater than remaining\")]\n\n fn buf_to_bytes_too_many() {\n\n hello_world_buf().copy_to_bytes(42);\n\n }\n\n}\n", "file_path": "unsafe_collection/src/bytes/buf_list.rs", "rank": 51, "score": 125878.66701366116 }, { "content": "\n\n fn hello_world_buf() -> BufList<Bytes> {\n\n let mut lst = BufList::default();\n\n\n\n lst.push(Bytes::from(\"Hello\"));\n\n lst.push(Bytes::from(\" \"));\n\n lst.push(Bytes::from(\"World\"));\n\n\n\n lst\n\n }\n\n\n\n #[test]\n\n fn to_bytes_shorter() {\n\n let mut bufs = hello_world_buf();\n\n let old_ptr = bufs.chunk().as_ptr();\n\n let start = bufs.copy_to_bytes(4);\n\n 
assert_eq!(start, \"Hell\");\n\n assert!(ptr::eq(old_ptr, start.as_ptr()));\n\n assert_eq!(bufs.chunk(), b\"o\");\n\n assert!(ptr::eq(old_ptr.wrapping_add(4), bufs.chunk().as_ptr()));\n", "file_path": "unsafe_collection/src/bytes/buf_list.rs", "rank": 52, "score": 125875.1417133717 }, { "content": " assert_eq!(bufs.remaining(), 7);\n\n }\n\n\n\n #[test]\n\n fn to_bytes_eq() {\n\n let mut bufs = hello_world_buf();\n\n let old_ptr = bufs.chunk().as_ptr();\n\n let start = bufs.copy_to_bytes(5);\n\n assert_eq!(start, \"Hello\");\n\n assert!(ptr::eq(old_ptr, start.as_ptr()));\n\n assert_eq!(bufs.chunk(), b\" \");\n\n assert_eq!(bufs.remaining(), 6);\n\n }\n\n\n\n #[test]\n\n fn to_bytes_longer() {\n\n let mut bufs = hello_world_buf();\n\n let start = bufs.copy_to_bytes(7);\n\n assert_eq!(start, \"Hello W\");\n\n assert_eq!(bufs.remaining(), 4);\n", "file_path": "unsafe_collection/src/bytes/buf_list.rs", "rank": 53, "score": 125874.99542624986 }, { "content": " //\n\n // cnt is always in range of self.remaining so there is at least one\n\n // item in the buf list.\n\n unsafe {\n\n while cnt > 0 {\n\n {\n\n let front = self.bufs.front_mut().unwrap_unchecked();\n\n\n\n let rem = front.remaining();\n\n if rem > cnt {\n\n front.advance(cnt);\n\n return;\n\n } else {\n\n front.advance(rem);\n\n cnt -= rem;\n\n }\n\n }\n\n\n\n self.bufs.pop_front().unwrap_unchecked();\n\n }\n", "file_path": "unsafe_collection/src/bytes/buf_list.rs", "rank": 54, "score": 125872.40996040015 }, { "content": "/// Trait for custom tls connector.\n\n///\n\n/// # Examples\n\n/// ```rust\n\n/// use xitca_client::{error::Error, http::Version, ClientBuilder, Io, TlsConnect};\n\n///\n\n/// struct MyConnector;\n\n///\n\n/// #[async_trait::async_trait]\n\n/// impl TlsConnect for MyConnector {\n\n/// async fn connect(&self, io: Box<dyn Io>) -> Result<(Box<dyn Io>, Version), Error> {\n\n/// // tls handshake logic\n\n/// todo!()\n\n/// }\n\n/// }\n\n///\n\n/// # fn resolve() {\n\n/// let client = 
ClientBuilder::new().tls_connector(MyConnector).finish();\n\n/// # }\n\n/// ```\n\npub trait TlsConnect: Send + Sync {\n\n /// Box<dyn Io> is an async read/write type.\n\n ///\n\n /// See [Io] trait for detail.\n\n #[allow(clippy::type_complexity)]\n\n fn connect<'s, 'f>(&'s self, io: Box<dyn Io>) -> BoxFuture<'f, Result<(Box<dyn Io>, Version), Error>>\n\n where\n\n 's: 'f;\n\n}\n", "file_path": "client/src/tls/connector.rs", "rank": 55, "score": 125477.40894355299 }, { "content": "/// An async coding trait that consume self with every method call that can be used for either\n\n/// decode or encode.\n\n///\n\n/// This is useful when cross thread de/encode is desirable in the form of moving objects between\n\n/// threads.\n\npub trait AsyncCode<Item>: Sized {\n\n type Item;\n\n\n\n type Future: Future<Output = io::Result<(Self, Option<Self::Item>)>>;\n\n\n\n fn code(self, item: Item) -> Self::Future;\n\n\n\n fn code_eof(self) -> io::Result<Option<Self::Item>>;\n\n}\n\n\n\n/// Identity coder serve as a pass through coder that just forward items.\n\npub struct IdentityCoder;\n\n\n\nimpl<Item> AsyncCode<Item> for IdentityCoder\n\nwhere\n\n Bytes: From<Item>,\n\n{\n\n type Item = Bytes;\n\n type Future = impl Future<Output = io::Result<(Self, Option<Self::Item>)>>;\n\n\n", "file_path": "http-encoding/src/coder.rs", "rank": 56, "score": 125470.93401307586 }, { "content": "/// Extract type from Req and receive them with function passed to [handler_service].\n\n///\n\n/// `'a` is the lifetime of the extracted type.\n\n///\n\n/// When `Req` is also a borrowed type, the lifetimes of `Req` type and of the extracted type\n\n/// should be kept separate. 
See the example below.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// #![feature(generic_associated_types, type_alias_impl_trait)]\n\n/// # use std::future::Future;\n\n/// # use xitca_http::util::service::handler::FromRequest;\n\n/// struct MyExtractor<'a>(&'a str);\n\n///\n\n/// impl<'a, 'r> FromRequest<'a, &'r String> for MyExtractor<'a> {\n\n/// type Type<'b> = MyExtractor<'b>;\n\n/// type Error = ();\n\n/// type Future = impl Future<Output = Result<Self, Self::Error>> where 'r: 'a;\n\n/// fn from_request(req: &'a &'r String) -> Self::Future {\n\n/// async { Ok(MyExtractor(req)) }\n\n/// }\n\n/// }\n\n/// ```\n\npub trait FromRequest<'a, Req>: Sized {\n\n // Used to construct the type for any lifetime 'b.\n\n type Type<'b>: FromRequest<'b, Req, Error = Self::Error>;\n\n\n\n type Error;\n\n type Future: Future<Output = Result<Self, Self::Error>>\n\n where\n\n Req: 'a;\n\n\n\n fn from_request(req: &'a Req) -> Self::Future;\n\n}\n\n\n\nmacro_rules! from_req_impl {\n\n ($fut: ident; $($req: ident),*) => {\n\n impl<'a, Req, Err, $($req,)*> FromRequest<'a, Req> for ($($req,)*)\n\n where\n\n $(\n\n $req: FromRequest<'a, Req, Error = Err>,\n\n )*\n\n {\n", "file_path": "http/src/util/service/handler.rs", "rank": 57, "score": 122265.58094095386 }, { "content": "fn rustls_config(alpn_protocols: Vec<Vec<u8>>) -> io::Result<Arc<rustls::ServerConfig>> {\n\n let cert = fs::read(\"./cert/cert.pem\")?;\n\n let key = fs::read(\"./cert/key.pem\")?;\n\n\n\n let key = rustls_pemfile::pkcs8_private_keys(&mut &*key).unwrap().remove(0);\n\n let key = PrivateKey(key);\n\n\n\n let cert = rustls_pemfile::certs(&mut &*cert)\n\n .unwrap()\n\n .into_iter()\n\n .map(Certificate)\n\n .collect();\n\n\n\n let mut acceptor = rustls::ServerConfig::builder()\n\n .with_safe_defaults()\n\n .with_no_client_auth()\n\n .with_single_cert(cert, key)\n\n .unwrap();\n\n\n\n acceptor.alpn_protocols = alpn_protocols;\n\n\n\n Ok(Arc::new(acceptor))\n\n}\n", "file_path": "examples/hello-world.rs", "rank": 58, 
"score": 120707.84731876593 }, { "content": " /// A wrapper trait for an AsyncRead/AsyncWrite tokio type with additional methods.\n\n pub trait AsyncIo: AsyncRead + AsyncWrite + Unpin {\n\n type ReadyFuture<'f>: Future<Output = io::Result<Ready>>\n\n where\n\n Self: 'f;\n\n\n\n /// asynchronously wait for the IO type and\n\n fn ready(&self, interest: Interest) -> Self::ReadyFuture<'_>;\n\n\n\n fn try_read_buf<B: BufMut>(&mut self, buf: &mut B) -> io::Result<usize>;\n\n\n\n fn try_write(&mut self, buf: &[u8]) -> io::Result<usize>;\n\n\n\n fn try_write_vectored(&mut self, bufs: &[io::IoSlice<'_>]) -> io::Result<usize>;\n\n }\n\n\n\n macro_rules! basic_impl {\n\n ($ty: ty) => {\n\n impl AsyncIo for $ty {\n\n type ReadyFuture<'f> = impl Future<Output = io::Result<Ready>>;\n\n\n", "file_path": "io/src/lib.rs", "rank": 59, "score": 118339.91950597797 }, { "content": "#[proc_macro_derive(State, attributes(borrow))]\n\npub fn state_impl(item: TokenStream) -> TokenStream {\n\n let input = syn::parse_macro_input!(item as syn::DeriveInput);\n\n\n\n let ty_ident = &input.ident;\n\n let _generics = &input.generics;\n\n let ty = match input.data {\n\n Data::Struct(ref ty) => ty,\n\n _ => todo!(),\n\n };\n\n\n\n let fields = ty\n\n .fields\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, field)| {\n\n field.attrs.iter().any(|attr| {\n\n attr.path\n\n .segments\n\n .first()\n\n .filter(|seg| seg.ident.to_string().as_str() == \"borrow\")\n", "file_path": "codegen/src/lib.rs", "rank": 60, "score": 117971.17892945174 }, { "content": "/// Hashes the `Sec-WebSocket-Key` header according to the WebSocket spec.\n\n///\n\n/// Result is a Base64 encoded byte array. 
`base64(sha1(input))` is always 28 bytes.\n\npub fn hash_key(key: &[u8]) -> [u8; 28] {\n\n let hash = {\n\n use sha1::Digest as _;\n\n\n\n let mut hasher = sha1::Sha1::new();\n\n\n\n hasher.update(key);\n\n hasher.update(WS_GUID);\n\n\n\n hasher.finalize()\n\n };\n\n\n\n let mut hash_b64 = [0; 28];\n\n let n = base64::encode_config_slice(&hash, base64::STANDARD, &mut hash_b64);\n\n assert_eq!(n, 28);\n\n\n\n hash_b64\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "http-ws/src/proto.rs", "rank": 61, "score": 117095.9866460168 }, { "content": "/// A trait impl for all types that impl [AsyncRead], [AsyncWrite], [Send] and [Unpin].\n\n/// Enabling `Box<dyn Io>` trait object usage.\n\npub trait Io: AsyncRead + AsyncWrite + Send + Unpin {}\n\n\n\nimpl<S> Io for S where S: AsyncRead + AsyncWrite + Send + Unpin {}\n\n\n\n#[allow(unused_variables)]\n\nimpl<S> AsyncRead for TlsStream<S>\n\nwhere\n\n S: AsyncRead + AsyncWrite + Unpin,\n\n{\n\n fn poll_read(self: Pin<&mut Self>, cx: &mut Context<'_>, buf: &mut ReadBuf<'_>) -> Poll<io::Result<()>> {\n\n match self.get_mut() {\n\n Self::NoOp(io) => Pin::new(io).poll_read(cx, buf),\n\n Self::Boxed(io) => Pin::new(io.as_mut()).poll_read(cx, buf),\n\n #[cfg(feature = \"openssl\")]\n\n Self::Openssl(s) => Pin::new(s).poll_read(cx, buf),\n\n #[cfg(feature = \"rustls\")]\n\n Self::Rustls(s) => Pin::new(s).poll_read(cx, buf),\n\n }\n\n }\n\n}\n", "file_path": "client/src/tls/stream.rs", "rank": 62, "score": 113722.20891333683 }, { "content": "fn worker_name() -> String {\n\n thread::current()\n\n .name()\n\n .map(ToString::to_string)\n\n .unwrap_or_else(|| String::from(\"xitca-server-worker\"))\n\n}\n\n\n", "file_path": "server/src/worker/mod.rs", "rank": 63, "score": 109510.5488755276 }, { "content": "#[proc_macro_attribute]\n\npub fn middleware_impl(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n let input = syn::parse_macro_input!(item as syn::ItemImpl);\n\n\n\n // Collect type path from impl.\n\n let service_ty = match 
input.self_ty.as_ref() {\n\n Type::Path(path) => path,\n\n _ => panic!(\"impl macro must be used on a TypePath\"),\n\n };\n\n\n\n // collect generics.\n\n let generic_ty = &input.generics.params;\n\n let where_clause = &input.generics.where_clause;\n\n\n\n // find methods from impl.\n\n let new_service_impl =\n\n find_async_method(&input.items, \"new_service\").expect(\"new_service method can not be located\");\n\n\n\n // collect ServiceFactory type\n\n let mut inputs = new_service_impl.sig.inputs.iter();\n\n\n", "file_path": "codegen/src/lib.rs", "rank": 64, "score": 107548.20046162995 }, { "content": "#[proc_macro_attribute]\n\npub fn service_impl(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n middleware_impl(_attr, item)\n\n}\n\n\n", "file_path": "codegen/src/lib.rs", "rank": 65, "score": 107548.20046162995 }, { "content": "#[cfg(any(feature = \"br\", feature = \"gz\", feature = \"de\"))]\n\nfn update_header(headers: &mut header::HeaderMap, value: &'static str) {\n\n headers.insert(header::CONTENT_ENCODING, header::HeaderValue::from_static(value));\n\n headers.remove(header::CONTENT_LENGTH);\n\n headers.insert(header::TRANSFER_ENCODING, header::HeaderValue::from_static(\"chunked\"));\n\n}\n\n\n\npub struct ContentEncoder {\n\n encoder: _ContentEncoder,\n\n}\n\n\n", "file_path": "http-encoding/src/encoder.rs", "rank": 66, "score": 106687.22636482742 }, { "content": "/// Verify WebSocket handshake request and create handshake response.\n\npub fn handshake(method: &Method, headers: &HeaderMap) -> Result<Builder, HandshakeError> {\n\n let key = verify_handshake(method, headers)?;\n\n let builder = handshake_response(key);\n\n Ok(builder)\n\n}\n\n\n", "file_path": "http-ws/src/lib.rs", "rank": 67, "score": 101841.1439531529 }, { "content": "/// This function defines errors that are per-connection. 
Which basically\n\n/// means that if we get this error from `accept()` system call it means\n\n/// next connection might be ready to be accepted.\n\n///\n\n/// All other errors will incur a timeout before next `accept()` is performed.\n\n/// The timeout is useful to handle resource exhaustion errors like ENFILE\n\n/// and EMFILE. Otherwise, could enter into tight loop.\n\nfn connection_error(e: &io::Error) -> bool {\n\n e.kind() == io::ErrorKind::ConnectionRefused\n\n || e.kind() == io::ErrorKind::ConnectionAborted\n\n || e.kind() == io::ErrorKind::ConnectionReset\n\n}\n\n\n", "file_path": "server/src/worker/mod.rs", "rank": 68, "score": 99569.1277120146 }, { "content": "fn fatal_error(e: &io::Error) -> bool {\n\n e.kind() == io::ErrorKind::BrokenPipe || e.kind() == io::ErrorKind::Other\n\n}\n", "file_path": "server/src/worker/mod.rs", "rank": 69, "score": 99565.26740074695 }, { "content": "/// A general test server for any given service type that accept the connection from\n\n/// xitca-server\n\npub fn test_server<F, T, Req>(factory: F) -> Result<TestServerHandle, Error>\n\nwhere\n\n F: Fn() -> T + Send + Sync + 'static,\n\n T: BuildService,\n\n T::Service: ReadyService<Req>,\n\n Req: From<NetStream> + Send + 'static,\n\n{\n\n let lst = TcpListener::bind(\"127.0.0.1:0\")?;\n\n\n\n let addr = lst.local_addr()?;\n\n\n\n let handle = Builder::new()\n\n .worker_threads(1)\n\n .server_threads(1)\n\n .disable_signal()\n\n .listen::<_, _, Req>(\"test_server\", lst, factory)?\n\n .build();\n\n\n\n Ok(TestServerHandle { addr, handle })\n\n}\n\n\n", "file_path": "test/src/lib.rs", "rank": 70, "score": 98584.82449830735 }, { "content": "pub mod mpsc;\n\npub mod spsc;\n", "file_path": "unsafe_collection/src/channel/mod.rs", "rank": 80, "score": 85969.13824718256 }, { "content": "fn encode<P, I>(client: &Client, stmt: &Statement, params: I) -> Result<Bytes, Error>\n\nwhere\n\n P: BorrowToSql,\n\n I: IntoIterator<Item = P>,\n\n I::IntoIter: ExactSizeIterator,\n\n{\n\n 
client.with_buf(|buf| {\n\n encode_bind(stmt, params, \"\", buf)?;\n\n frontend::execute(\"\", 0, buf).map_err(|_| Error::ToDo)?;\n\n frontend::sync(buf);\n\n Ok(buf.split().freeze())\n\n })\n\n}\n\n\n", "file_path": "postgres/src/query.rs", "rank": 81, "score": 85236.88481630295 }, { "content": "trait NextTrait {\n\n fn next(&mut self) -> Next<'_>;\n\n}\n\n\n\nimpl NextTrait for Incoming {\n\n #[inline(always)]\n\n fn next(&mut self) -> Next<'_> {\n\n Next { stream: self }\n\n }\n\n}\n\n\n", "file_path": "io/src/h3.rs", "rank": 82, "score": 84852.9786082103 }, { "content": "/// A service factory shortcut offering given async function ability to use [FromRequest] to destruct and transform `Service<Req>`'s\n\n/// `Req` type and receive them as function argument.\n\n///\n\n/// Given async function's return type must impl [Responder] trait for transforming arbitrary return type to `Service::Future`'s\n\n/// output type.\n\npub fn handler_service<F, T, O, Res, Err>(func: F) -> HandlerService<F, T, O, Res, Err> {\n\n HandlerService::new(func)\n\n}\n\n\n\npub struct HandlerService<F, T, O, Res, Err> {\n\n func: F,\n\n _p: PhantomData<(T, O, Res, Err)>,\n\n}\n\n\n\nimpl<F, T, O, Res, Err> HandlerService<F, T, O, Res, Err> {\n\n pub fn new(func: F) -> Self {\n\n Self { func, _p: PhantomData }\n\n }\n\n}\n\n\n\nimpl<F, T, O, Res, Err> Clone for HandlerService<F, T, O, Res, Err>\n\nwhere\n\n F: Clone,\n\n{\n\n fn clone(&self) -> Self {\n", "file_path": "http/src/util/service/handler.rs", "rank": 83, "score": 84170.22660749764 }, { "content": "// as special type for eof chunk when using transfer-encoding: chunked\n\ntype Eof = Chain<Chain<Bytes, Bytes>, &'static [u8]>;\n\n\n", "file_path": "http/src/h1/proto/buf.rs", "rank": 84, "score": 83627.66552153141 }, { "content": " };\n\n\n\n self.len -= 1;\n\n\n\n unsafe { Some(self.queue._get_unchecked(idx)) }\n\n }\n\n\n\n #[inline]\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n (self.len, Some(self.len))\n\n }\n\n}\n", 
"file_path": "unsafe_collection/src/bound_queue/mod.rs", "rank": 85, "score": 83489.93240778217 }, { "content": " tail: usize,\n\n len: usize,\n\n}\n\n\n\nimpl<'a, Q> Iterator for Iter<'a, Q>\n\nwhere\n\n Q: Queueable,\n\n{\n\n type Item = &'a Q::Item;\n\n\n\n #[inline]\n\n fn next(&mut self) -> Option<&'a Q::Item> {\n\n if self.len == 0 {\n\n return None;\n\n }\n\n\n\n let idx = if self.tail >= self.len {\n\n self.tail - self.len\n\n } else {\n\n self.queue.capacity() + self.tail - self.len\n", "file_path": "unsafe_collection/src/bound_queue/mod.rs", "rank": 86, "score": 83488.09856708311 }, { "content": "\n\n const fn len(&self) -> usize {\n\n self.len\n\n }\n\n\n\n fn incr_tail_len(&mut self) {\n\n self.next += 1;\n\n self.len += 1;\n\n\n\n if self.next == self.queue.capacity() {\n\n self.next = 0;\n\n }\n\n }\n\n\n\n fn front(&self) -> Option<&Q::Item> {\n\n if self.is_empty() {\n\n None\n\n } else {\n\n Some(unsafe { self.front_unchecked() })\n\n }\n", "file_path": "unsafe_collection/src/bound_queue/mod.rs", "rank": 87, "score": 83487.90999075364 }, { "content": "//! 
Simple bounded ring buffers with FIFO queue.\n\n\n\npub mod heap;\n\npub mod stack;\n\n\n\nuse core::fmt;\n\n\n", "file_path": "unsafe_collection/src/bound_queue/mod.rs", "rank": 88, "score": 83484.25444319291 }, { "content": " unsafe fn pop_front_unchecked(&mut self) -> Q::Item {\n\n let idx = self.front_idx();\n\n self.len -= 1;\n\n self.queue._read_unchecked(idx)\n\n }\n\n\n\n fn push_back(&mut self, item: Q::Item) -> Result<(), PushError<Q::Item>> {\n\n if self.is_full() {\n\n Err(PushError(item))\n\n } else {\n\n unsafe {\n\n self.push_back_unchecked(item);\n\n }\n\n Ok(())\n\n }\n\n }\n\n\n\n // SAFETY:\n\n // caller must make sure self is not full.\n\n unsafe fn push_back_unchecked(&mut self, item: Q::Item) {\n", "file_path": "unsafe_collection/src/bound_queue/mod.rs", "rank": 89, "score": 83482.76923614315 }, { "content": " let idx = self.front_idx();\n\n self.queue._get_mut_unchecked(idx)\n\n }\n\n\n\n fn clear(&mut self) {\n\n while self.pop_front().is_some() {}\n\n self.next = 0;\n\n self.len = 0;\n\n }\n\n\n\n fn pop_front(&mut self) -> Option<Q::Item> {\n\n if self.is_empty() {\n\n None\n\n } else {\n\n unsafe { Some(self.pop_front_unchecked()) }\n\n }\n\n }\n\n\n\n // SAFETY:\n\n // caller must make sure self is not empty\n", "file_path": "unsafe_collection/src/bound_queue/mod.rs", "rank": 90, "score": 83482.36652843452 }, { "content": "\n\nimpl<T> PushError<T> {\n\n pub fn into_inner(self) -> T {\n\n self.0\n\n }\n\n}\n\n\n\nimpl<T> fmt::Debug for PushError<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"PushError(..)\")\n\n }\n\n}\n\n\n\n#[must_use = \"iterator adaptors are lazy and do nothing unless consumed\"]\n\n#[derive(Clone)]\n\npub struct Iter<'a, Q>\n\nwhere\n\n Q: Queueable,\n\n{\n\n queue: &'a Q,\n", "file_path": "unsafe_collection/src/bound_queue/mod.rs", "rank": 91, "score": 83480.80648735471 }, { "content": " }\n\n\n\n // SAFETY:\n\n // caller must make sure self is not empty\n\n unsafe fn 
front_unchecked(&self) -> &Q::Item {\n\n let idx = self.front_idx();\n\n self.queue._get_unchecked(idx)\n\n }\n\n\n\n fn front_mut(&mut self) -> Option<&mut Q::Item> {\n\n if self.is_empty() {\n\n None\n\n } else {\n\n Some(unsafe { self.front_mut_unchecked() })\n\n }\n\n }\n\n\n\n // SAFETY:\n\n // caller must make sure self is not empty\n\n unsafe fn front_mut_unchecked(&mut self) -> &mut Q::Item {\n", "file_path": "unsafe_collection/src/bound_queue/mod.rs", "rank": 92, "score": 83479.97095986753 }, { "content": " self.queue._write_unchecked(self.next, item);\n\n self.incr_tail_len();\n\n }\n\n\n\n const fn iter(&self) -> Iter<'_, Q> {\n\n Iter {\n\n queue: &self.queue,\n\n tail: self.next,\n\n len: self.len(),\n\n }\n\n }\n\n\n\n fn front_idx(&self) -> usize {\n\n if self.next >= self.len {\n\n self.next - self.len\n\n } else {\n\n self.queue.capacity() + self.next - self.len\n\n }\n\n }\n\n}\n", "file_path": "unsafe_collection/src/bound_queue/mod.rs", "rank": 93, "score": 83479.23789865302 }, { "content": "\n\nimpl<Q> fmt::Debug for BoundedQuery<Q>\n\nwhere\n\n Q: Queueable,\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"ArrayQueue\")\n\n }\n\n}\n\n\n\nimpl<Q> Drop for BoundedQuery<Q>\n\nwhere\n\n Q: Queueable,\n\n{\n\n fn drop(&mut self) {\n\n self.clear();\n\n }\n\n}\n\n\n\npub struct PushError<T>(T);\n", "file_path": "unsafe_collection/src/bound_queue/mod.rs", "rank": 94, "score": 83477.31030462435 }, { "content": "type HResponse<B> = Response<ResponseBody<B>>;\n\n\n", "file_path": "test/src/lib.rs", "rank": 95, "score": 81711.74715332549 }, { "content": "type Ctx<'c, 'd, const HEADER_LIMIT: usize> = context::Context<'c, DateTimeHandle<'d>, HEADER_LIMIT>;\n\n\n\n// xitca_http::h1::proto::context::Context always want a `&'a T: DateTime` as state.\n\n//\n\n// xitca_http::date::DateTime is a foreign trait so it can not be implemented to a lifetimed foreign type inside xitca-client.\n\n// (RwLock<DateTimeState>) in this 
case.\n\n//\n\n// See xitca_client::date module for details.\n\n//\n\n// The double lifetime is to go around this limiation.\n\npub(crate) struct Context<'c, 'd, const HEADER_LIMIT: usize>(Ctx<'c, 'd, HEADER_LIMIT>);\n\n\n\nimpl<'c, 'd, const HEADER_LIMIT: usize> Deref for Context<'c, 'd, HEADER_LIMIT> {\n\n type Target = Ctx<'c, 'd, HEADER_LIMIT>;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n", "file_path": "client/src/h1/proto/context.rs", "rank": 96, "score": 80233.21835380817 }, { "content": "struct AsyncVec<T> {\n\n // TODO: use a more efficient list.\n\n sender_waker: LinkedList<Waker>,\n\n receiver_waker: Option<Waker>,\n\n queue: HeapQueue<T>,\n\n closed: bool,\n\n}\n\n\n\nimpl<T> AsyncVec<T> {\n\n fn new(cap: usize) -> Self {\n\n Self {\n\n sender_waker: LinkedList::new(),\n\n receiver_waker: None,\n\n queue: HeapQueue::with_capacity(cap),\n\n closed: false,\n\n }\n\n }\n\n\n\n fn is_empty(&self) -> bool {\n\n self.queue.is_empty()\n", "file_path": "unsafe_collection/src/channel/mpsc.rs", "rank": 97, "score": 79268.15653655019 }, { "content": "struct BoundedQuery<Q>\n\nwhere\n\n Q: Queueable,\n\n{\n\n queue: Q,\n\n next: usize,\n\n len: usize,\n\n}\n\n\n\nimpl<Q> BoundedQuery<Q>\n\nwhere\n\n Q: Queueable,\n\n{\n\n const fn is_empty(&self) -> bool {\n\n self.len == 0\n\n }\n\n\n\n fn is_full(&self) -> bool {\n\n self.len == self.queue.capacity()\n\n }\n", "file_path": "unsafe_collection/src/bound_queue/mod.rs", "rank": 98, "score": 76797.50319565051 }, { "content": "fn main() {\n\n prost_build::compile_protos(&[\"protobuf/helloworld.proto\"], &[\"protobuf/\"]).unwrap();\n\n}\n", "file_path": "examples/build.rs", "rank": 99, "score": 76566.08161567063 } ]
Rust
mayastor/tests/reconfigure.rs
gahag/MayaStor
0a2f01b04d75203e5ec19b3037703ee8967ec9e7
#![feature(async_await)] #![allow(clippy::cognitive_complexity)] use mayastor::{ bdev::{ nexus::nexus_bdev::{nexus_create, nexus_lookup}, Bdev, }, descriptor::Descriptor, mayastor_start, spdk_stop, }; use std::process::Command; static DISKNAME1: &str = "/tmp/disk1.img"; static BDEVNAME1: &str = "aio:///tmp/disk1.img?blk_size=512"; static DISKNAME2: &str = "/tmp/disk2.img"; static BDEVNAME2: &str = "aio:///tmp/disk2.img?blk_size=512"; #[test] fn reconfigure() { let log = mayastor::spdklog::SpdkLog::new(); let _ = log.init(); mayastor::CPS_INIT!(); let args = vec!["-c", "../etc/test.conf"]; let output = Command::new("truncate") .args(&["-s", "64m", DISKNAME1]) .output() .expect("failed exec truncate"); assert_eq!(output.status.success(), true); let output = Command::new("truncate") .args(&["-s", "64m", DISKNAME2]) .output() .expect("failed exec truncate"); assert_eq!(output.status.success(), true); let rc = mayastor_start("test", args, || { mayastor::executor::spawn(works()); }); assert_eq!(rc, 0); let output = Command::new("rm") .args(&["-rf", DISKNAME1, DISKNAME2]) .output() .expect("failed delete test file"); assert_eq!(output.status.success(), true); } fn buf_compare(first: &[u8], second: &[u8]) { for i in 0 .. 
first.len() { assert_eq!(first[i], second[i]); } } async fn stats_compare(first: &Bdev, second: &Bdev) { let stats1 = first.stats().await.unwrap(); let stats2 = second.stats().await.unwrap(); assert_eq!(stats1.num_write_ops, stats2.num_write_ops); } async fn works() { let child1 = BDEVNAME1.to_string(); let child2 = BDEVNAME2.to_string(); let children = vec![child1.clone(), child2.clone()]; nexus_create("hello", 512, 131_072, None, &children) .await .unwrap(); let nexus = nexus_lookup("hello").unwrap(); let nd = Descriptor::open("hello", true).expect("failed open bdev"); let cd1 = Descriptor::open(&child1, false).expect("failed open bdev"); let cd2 = Descriptor::open(&child2, false).expect("failed open bdev"); let bdev1 = cd1.get_bdev(); let bdev2 = cd2.get_bdev(); let mut buf = nd.dma_zmalloc(4096).expect("failed to allocate buffer"); buf.fill(0xff); let mut buf1 = cd1.dma_zmalloc(4096).unwrap(); let mut buf2 = cd2.dma_zmalloc(4096).unwrap(); for i in 0 .. 10 { nd.write_at(i * 4096, &buf).await.unwrap(); } stats_compare(&bdev1, &bdev2).await; for i in 0 .. 10 { cd1.read_at((i * 4096) + (10240 * 512), &mut buf1) .await .unwrap(); cd2.read_at((i * 4096) + (10240 * 512), &mut buf2) .await .unwrap(); buf_compare(buf1.as_slice(), buf2.as_slice()); } buf.fill(0xF0); nexus.offline_child(&child2).await.unwrap(); for i in 0 .. 10 { nd.write_at(i * 4096, &buf).await.unwrap(); } for i in 0 .. 10 { buf1.fill(0x0); buf2.fill(0x0); cd1.read_at((i * 4096) + (10240 * 512), &mut buf1) .await .unwrap(); cd2.read_at((i * 4096) + (10240 * 512), &mut buf2) .await .unwrap(); buf1.as_slice() .iter() .map(|b| assert_eq!(*b, 0xf0)) .for_each(drop); buf2.as_slice() .iter() .map(|b| assert_eq!(*b, 0xff)) .for_each(drop); } nexus.online_child(&child2).await.unwrap(); buf.fill(0xAA); for i in 0 .. 10 { nd.write_at(i * 4096, &buf).await.unwrap(); } for i in 0 .. 
10 { buf1.fill(0x0); buf2.fill(0x0); cd1.read_at((i * 4096) + (10240 * 512), &mut buf1) .await .unwrap(); cd2.read_at((i * 4096) + (10240 * 512), &mut buf2) .await .unwrap(); buf1.as_slice() .iter() .map(|b| assert_eq!(*b, 0xAA)) .for_each(drop); buf2.as_slice() .iter() .map(|b| assert_eq!(*b, 0xAA)) .for_each(drop); } cd1.close(); cd2.close(); nd.close(); spdk_stop(0); }
#![feature(async_await)] #![allow(clippy::cognitive_complexity)] use mayastor::{ bdev::{ nexus::nexus_bdev::{nexus_create, nexus_lookup}, Bdev, }, descriptor::Descriptor, mayastor_start, spdk_stop, }; use std::process::Command; static DISKNAME1: &str = "/tmp/disk1.img"; static BDEVNAME1: &str = "aio:///tmp/disk1.img?blk_size=512"; static DISKNAME2: &str = "/tmp/disk2.img"; static BDEVNAME2: &str = "aio:///tmp/disk2.img?blk_size=512"; #[test] fn reconfigure() { let log = mayastor::spdklog::SpdkLog::new(); let _ = log.init(); mayastor::CPS_INIT!(); let args = vec!["-c", "../etc/test.conf"]; let output = Command::new("truncate") .args(&["-s", "64m", DISKNAME1]) .output() .expect("failed exec truncate"); assert_eq!(output.status.success(), true); let output = Command::new("truncate") .args(&["-s", "64m", DISKNAME2]) .output() .expect("failed exec truncate"); assert_eq!(output.status.success(), true); let rc = mayastor_start("test", args, || { mayastor::executor::spawn(works()); }); ass
let stats1 = first.stats().await.unwrap(); let stats2 = second.stats().await.unwrap(); assert_eq!(stats1.num_write_ops, stats2.num_write_ops); } async fn works() { let child1 = BDEVNAME1.to_string(); let child2 = BDEVNAME2.to_string(); let children = vec![child1.clone(), child2.clone()]; nexus_create("hello", 512, 131_072, None, &children) .await .unwrap(); let nexus = nexus_lookup("hello").unwrap(); let nd = Descriptor::open("hello", true).expect("failed open bdev"); let cd1 = Descriptor::open(&child1, false).expect("failed open bdev"); let cd2 = Descriptor::open(&child2, false).expect("failed open bdev"); let bdev1 = cd1.get_bdev(); let bdev2 = cd2.get_bdev(); let mut buf = nd.dma_zmalloc(4096).expect("failed to allocate buffer"); buf.fill(0xff); let mut buf1 = cd1.dma_zmalloc(4096).unwrap(); let mut buf2 = cd2.dma_zmalloc(4096).unwrap(); for i in 0 .. 10 { nd.write_at(i * 4096, &buf).await.unwrap(); } stats_compare(&bdev1, &bdev2).await; for i in 0 .. 10 { cd1.read_at((i * 4096) + (10240 * 512), &mut buf1) .await .unwrap(); cd2.read_at((i * 4096) + (10240 * 512), &mut buf2) .await .unwrap(); buf_compare(buf1.as_slice(), buf2.as_slice()); } buf.fill(0xF0); nexus.offline_child(&child2).await.unwrap(); for i in 0 .. 10 { nd.write_at(i * 4096, &buf).await.unwrap(); } for i in 0 .. 10 { buf1.fill(0x0); buf2.fill(0x0); cd1.read_at((i * 4096) + (10240 * 512), &mut buf1) .await .unwrap(); cd2.read_at((i * 4096) + (10240 * 512), &mut buf2) .await .unwrap(); buf1.as_slice() .iter() .map(|b| assert_eq!(*b, 0xf0)) .for_each(drop); buf2.as_slice() .iter() .map(|b| assert_eq!(*b, 0xff)) .for_each(drop); } nexus.online_child(&child2).await.unwrap(); buf.fill(0xAA); for i in 0 .. 10 { nd.write_at(i * 4096, &buf).await.unwrap(); } for i in 0 .. 
10 { buf1.fill(0x0); buf2.fill(0x0); cd1.read_at((i * 4096) + (10240 * 512), &mut buf1) .await .unwrap(); cd2.read_at((i * 4096) + (10240 * 512), &mut buf2) .await .unwrap(); buf1.as_slice() .iter() .map(|b| assert_eq!(*b, 0xAA)) .for_each(drop); buf2.as_slice() .iter() .map(|b| assert_eq!(*b, 0xAA)) .for_each(drop); } cd1.close(); cd2.close(); nd.close(); spdk_stop(0); }
ert_eq!(rc, 0); let output = Command::new("rm") .args(&["-rf", DISKNAME1, DISKNAME2]) .output() .expect("failed delete test file"); assert_eq!(output.status.success(), true); } fn buf_compare(first: &[u8], second: &[u8]) { for i in 0 .. first.len() { assert_eq!(first[i], second[i]); } } async fn stats_compare(first: &Bdev, second: &Bdev) {
random
[ { "content": "/// lookup a bdev by its name or one of its alias\n\npub fn bdev_lookup_by_name(name: &str) -> Option<Bdev> {\n\n let name = std::ffi::CString::new(name.to_string()).unwrap();\n\n unsafe {\n\n let b = spdk_sys::spdk_bdev_get_by_name(name.as_ptr());\n\n if b.is_null() {\n\n None\n\n } else {\n\n Some(Bdev::from(b))\n\n }\n\n }\n\n}\n", "file_path": "mayastor/src/bdev/mod.rs", "rank": 1, "score": 201405.44042553546 }, { "content": "/// Convert nexus name to uuid.\n\n///\n\n/// This function never fails which means that if there is a nexus with\n\n/// unconventional name which likely means it was not created using nexus\n\n/// jsonrpc api, we return the whole name without modifications as it is.\n\nfn name_to_uuid(name: &str) -> &str {\n\n if name.starts_with(\"nexus-\") {\n\n &name[6 ..]\n\n } else {\n\n name\n\n }\n\n}\n\n\n\npub(crate) fn register_rpc_methods() {\n\n // JSON rpc method to list the nexus and their states\n\n jsonrpc_register::<(), _, _>(\"list_nexus\", |_| {\n\n future::ok(ListNexusReply {\n\n nexus_list: instances()\n\n .iter()\n\n .map(|nexus| RpcNexus {\n\n uuid: name_to_uuid(nexus.name()).to_string(),\n\n size: nexus.size(),\n\n state: nexus.state.to_string(),\n\n children: nexus\n\n .children\n", "file_path": "mayastor/src/bdev/nexus/nexus_rpc.rs", "rank": 2, "score": 199980.6987388135 }, { "content": "/// Export given bdev over iscsi. 
That involves creating iscsi target and\n\n/// adding the bdev as LUN to it.\n\npub fn share(uuid: &str, bdev: &Bdev) -> Result<(), String> {\n\n let iqn = target_name(uuid);\n\n let c_iqn = CString::new(iqn.clone()).unwrap();\n\n let mut group_idx: c_int = 0;\n\n let mut lun_id: c_int = 0;\n\n let idx = ISCSI_IDX.with(move |iscsi_idx| {\n\n let idx = *iscsi_idx.borrow();\n\n *iscsi_idx.borrow_mut() = idx + 1;\n\n idx\n\n });\n\n let tgt = unsafe {\n\n spdk_iscsi_tgt_node_construct(\n\n idx,\n\n c_iqn.as_ptr(),\n\n ptr::null(),\n\n &mut group_idx as *mut _,\n\n &mut group_idx as *mut _,\n\n 1, // portal and initiator group list length\n\n &mut spdk_bdev_get_name(bdev.as_ptr()),\n\n &mut lun_id as *mut _,\n", "file_path": "mayastor/src/iscsi_target.rs", "rank": 3, "score": 197132.2848757391 }, { "content": "/// The main test work horse. It runs a setup before the unit test and tear-down\n\n/// after the unit test. The setup involves starting a unix domain socket\n\n/// server. It is customizable by providing two closures:\n\n///\n\n/// 1) handler for constructing reply from the server and\n\n/// 2) test callback evaluating a return value from the json-rpc client call\n\n///\n\n/// Beware that rust executes the tests in parallel so whatever is done in this\n\n/// function must preserve independence of the tests on each other.\n\nfn run_test<A, R, H, T>(method: &str, arg: A, handler: H, test: T)\n\nwhere\n\n A: serde::ser::Serialize,\n\n R: 'static + serde::de::DeserializeOwned + panic::UnwindSafe + Send,\n\n H: FnOnce(Request) -> Vec<u8> + 'static + Send,\n\n T: FnOnce(Result<R, Error>) -> () + panic::UnwindSafe,\n\n{\n\n let sock = format!(\"{}.{:?}\", SOCK_PATH, std::thread::current().id());\n\n let sock_path = Path::new(&sock);\n\n // Cleanup should be called at all places where we exit from this function\n\n let cleanup = || {\n\n let _ = fs::remove_file(&sock_path);\n\n };\n\n let mut server = match UnixListener::bind(&sock_path) {\n\n Ok(server) => server,\n\n 
Err(_) => {\n\n // most likely the socket file exists, remove it and retry\n\n cleanup();\n\n UnixListener::bind(&sock_path).unwrap()\n\n }\n", "file_path": "jsonrpc/src/test.rs", "rank": 4, "score": 187352.38574873074 }, { "content": "/// Lookup a nexus by its name (currently used only by test functions).\n\npub fn nexus_lookup(name: &str) -> Option<&mut Nexus> {\n\n if let Some(nexus) = instances().iter_mut().find(|n| n.name() == name) {\n\n Some(nexus)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\nimpl Display for Nexus {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), std::fmt::Error> {\n\n let _ = writeln!(\n\n f,\n\n \"{}: state: {:?} blk_cnt: {}, blk_size: {}\",\n\n self.name,\n\n self.state,\n\n self.bdev.num_blocks(),\n\n self.bdev.block_size(),\n\n );\n\n\n\n self.children\n\n .iter()\n\n .map(|c| write!(f, \"\\t{}\", c))\n\n .for_each(drop);\n\n Ok(())\n\n }\n\n}\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 5, "score": 185877.14436488473 }, { "content": "/// get a static ref to the fn table of the nexus module\n\npub fn fn_table() -> Option<&'static spdk_sys::spdk_bdev_fn_table> {\n\n Some(NexusFnTable::table())\n\n}\n\n\n", "file_path": "mayastor/src/bdev/nexus/mod.rs", "rank": 6, "score": 185193.78453938756 }, { "content": "/// Cleanly exit from program.\n\n/// NOTE: cannot be called from a future -> double borrow of executor.\n\npub fn spdk_stop(rc: i32) {\n\n if let Err(msg) = iscsi_target::fini_iscsi() {\n\n error!(\"Failed to finalize iscsi: {}\", msg);\n\n }\n\n let fut = async move {\n\n if let Err(msg) = nvmf_target::fini_nvmf().await {\n\n error!(\"Failed to finalize nvmf target: {}\", msg);\n\n }\n\n };\n\n executor::stop(fut, Box::new(move || unsafe { spdk_app_stop(rc) }));\n\n}\n\n\n\n/// A callback called by spdk when it is shutting down.\n\nextern \"C\" fn mayastor_shutdown_cb() {\n\n spdk_stop(0);\n\n}\n", "file_path": "mayastor/src/lib.rs", "rank": 7, "score": 173279.79066680858 }, { "content": "/// Lookup a nexus by 
its uuid. Return error if uuid is invalid or nexus\n\n/// not found.\n\nfn nexus_lookup(uuid: &str) -> Result<&mut Nexus, JsonRpcError> {\n\n let name = uuid_to_name(uuid)?;\n\n\n\n if let Some(nexus) = instances().iter_mut().find(|n| n.name() == name) {\n\n Ok(nexus)\n\n } else {\n\n Err(JsonRpcError::new(\n\n Code::NotFound,\n\n format!(\"Nexus {} not found\", uuid),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "mayastor/src/bdev/nexus/nexus_rpc.rs", "rank": 8, "score": 169182.06742393665 }, { "content": "/// Wrapper for create aio bdev C function\n\nfn create_base_bdev(file: &str, block_size: u32) -> Result<()> {\n\n debug!(\"Creating aio bdev {} ...\", file);\n\n let cstr_file = CString::new(file).unwrap();\n\n let rc = unsafe {\n\n create_aio_bdev(cstr_file.as_ptr(), cstr_file.as_ptr(), block_size)\n\n };\n\n if rc != 0 {\n\n Err(JsonRpcError::new(\n\n Code::InvalidParams,\n\n \"AIO bdev already exists or parameters are invalid\",\n\n ))\n\n } else {\n\n info!(\"aio bdev {} was created\", file);\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Callback called from SPDK for pool create and import methods.\n\nextern \"C\" fn pool_done_cb(\n\n sender_ptr: *mut c_void,\n", "file_path": "mayastor/src/pool.rs", "rank": 9, "score": 168051.54236607513 }, { "content": "/// get a reference to the global nexuses\n\npub fn instances() -> &'static mut Vec<Box<Nexus>> {\n\n nexus_module::NexusModule::get_instances()\n\n}\n\n\n", "file_path": "mayastor/src/bdev/nexus/mod.rs", "rank": 10, "score": 165501.03212800983 }, { "content": "/// returns the first bdev in the list\n\npub fn bdev_first() -> Option<Bdev> {\n\n let bdev = unsafe { spdk_bdev_first() };\n\n\n\n if bdev.is_null() {\n\n None\n\n } else {\n\n Some(Bdev::from(bdev))\n\n }\n\n}\n\n\n", "file_path": "mayastor/src/bdev/mod.rs", "rank": 11, "score": 163891.3864658715 }, { "content": "/// Rust friendly wrapper around SPDK app start function.\n\n/// The application code is a closure passed as argument and called\n\n/// when spdk 
initialization is done.\n\n///\n\n/// TODO: When needed add possibility to specify additional program\n\n/// arguments.\n\npub fn mayastor_start<T, F>(name: &str, mut args: Vec<T>, start_cb: F) -> i32\n\nwhere\n\n T: Into<Vec<u8>>,\n\n F: FnOnce(),\n\n{\n\n // hand over command line args to spdk arg parser\n\n let args = args\n\n .drain(..)\n\n .map(|arg| CString::new(arg).unwrap())\n\n .collect::<Vec<CString>>();\n\n let mut c_args = args\n\n .iter()\n\n .map(|arg| arg.as_ptr())\n\n .collect::<Vec<*const c_char>>();\n\n c_args.push(std::ptr::null());\n\n\n\n let mut opts: spdk_app_opts = Default::default();\n\n\n\n unsafe {\n\n spdk_app_opts_init(&mut opts as *mut spdk_app_opts);\n", "file_path": "mayastor/src/lib.rs", "rank": 13, "score": 159982.99656859034 }, { "content": "fn is_submodule(parent: &str, possible_child: &str) -> bool {\n\n // Treat as bytes, because we'll be doing slicing, and we only care about\n\n // ':' chars\n\n let parent = parent.as_bytes();\n\n let possible_child = possible_child.as_bytes();\n\n\n\n // a longer module path cannot be a parent of a shorter module path\n\n if parent.len() > possible_child.len() {\n\n return false;\n\n }\n\n\n\n // If the path up to the parent isn't the same as the child,\n\n if parent != &possible_child[.. parent.len()] {\n\n return false;\n\n }\n\n\n\n // Either the path is exactly the same, or the sub module should have a \"::\"\n\n // after the length of the parent path. 
This prevents things like\n\n // 'a::bad' being considered a submodule of 'a::b'\n\n parent.len() == possible_child.len()\n", "file_path": "mayastor/src/spdklog.rs", "rank": 14, "score": 159667.28226921902 }, { "content": "#[test]\n\nfn io_test() {\n\n let _log = mayastor::spdklog::SpdkLog::new();\n\n let _l = _log.init();\n\n mayastor::CPS_INIT!();\n\n let output = Command::new(\"truncate\")\n\n .args(&[\"-s\", \"64m\", DISKNAME])\n\n .output()\n\n .expect(\"failed exec truncate\");\n\n\n\n assert_eq!(output.status.success(), true);\n\n\n\n mayastor_start(\"io-testing\", vec![\"\"], || {\n\n mayastor::executor::spawn(start());\n\n });\n\n\n\n let output = Command::new(\"rm\")\n\n .args(&[\"-rf\", DISKNAME])\n\n .output()\n\n .expect(\"failed delete test file\");\n\n\n", "file_path": "mayastor/tests/io.rs", "rank": 15, "score": 159367.29652561195 }, { "content": "/// Convert the UUID to a nexus name in the form of \"nexus-{uuid}\".\n\n/// Return error if the UUID is not valid.\n\nfn uuid_to_name(uuid: &str) -> Result<String, JsonRpcError> {\n\n match Uuid::parse_str(uuid) {\n\n Ok(uuid) => Ok(format!(\"nexus-{}\", uuid.to_hyphenated().to_string())),\n\n Err(_) => Err(JsonRpcError::new(\n\n Code::InvalidParams,\n\n \"Invalid UUID\".to_owned(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "mayastor/src/bdev/nexus/nexus_rpc.rs", "rank": 16, "score": 159071.3034827693 }, { "content": "/// Parse the given URI into a ChildBdev\n\npub fn nexus_parse_uri(uri: &str) -> Result<BdevType, UriError> {\n\n if let Ok(uri) = Url::parse(uri) {\n\n let bdev_type = match uri.scheme() {\n\n \"aio\" => BdevType::Aio(AioBdev::try_from(&uri)?),\n\n \"iscsi\" => BdevType::Iscsi(IscsiBdev::try_from(&uri)?),\n\n \"nvmf\" => BdevType::Nvmf(NvmfBdev::try_from(&uri)?),\n\n // strip the first slash in uri path\n\n \"bdev\" => BdevType::Bdev(uri.path()[1 ..].to_string()),\n\n _ => {\n\n warn!(\"Unknown URL scheme {}\", uri.to_string());\n\n return Err(UriError::Unsupported);\n\n }\n\n };\n\n\n\n 
Ok(bdev_type)\n\n } else {\n\n Err(UriError::InvalidScheme)\n\n }\n\n}\n", "file_path": "mayastor/src/nexus_uri.rs", "rank": 17, "score": 156292.98953429682 }, { "content": "const exec = require('child_process').exec;\n", "file_path": "mayastor-test/test_cli.js", "rank": 18, "score": 155415.73008633498 }, { "content": "fn test_known_label() {\n\n let mut file = std::fs::File::open(\"./gpt_test_data.bin\").unwrap();\n\n\n\n file.seek(SeekFrom::Start(512)).unwrap();\n\n let mut hdr_buf: [u8; 512] = [0; 512];\n\n file.read_exact(&mut hdr_buf).unwrap();\n\n\n\n let mut hdr: GPTHeader = GPTHeader::from_slice(&hdr_buf).unwrap();\n\n assert_eq!(hdr.self_checksum, CRC32);\n\n assert_eq!(hdr.guid.to_string(), HDR_GUID,);\n\n\n\n let mut part_buf: [u8; 128 * 128] = [0; 128 * 128];\n\n file.seek(SeekFrom::Start(1024)).unwrap();\n\n file.read_exact(&mut part_buf).unwrap();\n\n\n\n let partitions = GptEntry::from_slice(&part_buf, hdr.num_entries).unwrap();\n\n\n\n assert_eq!(partitions[0].ent_guid.to_string(), PART0_GUID);\n\n assert_eq!(partitions[1].ent_guid.to_string(), PART1_GUID);\n\n assert_eq!(partitions[0].ent_name.as_str(), \"nexus_meta\");\n", "file_path": "mayastor/tests/nexus_label.rs", "rank": 19, "score": 153381.214097645 }, { "content": "/// Given a bdev uuid return a NQN used to connect to the bdev from outside.\n\nfn gen_nqn(id: &str) -> String {\n\n format!(\"nqn.2019-05.io.openebs:{}\", id)\n\n}\n\n\n\n/// Wrapper around spdk nvme subsystem providing rust friendly api.\n\npub(crate) struct Subsystem {\n\n inner: *mut spdk_nvmf_subsystem,\n\n nqn: String,\n\n}\n\n\n\nimpl Subsystem {\n\n /// Create a nvme subsystem identified by the id string (used for nqn\n\n /// creation).\n\n pub unsafe fn create(\n\n inner: *mut spdk_nvmf_subsystem,\n\n trid: *mut spdk_nvme_transport_id,\n\n nqn: String,\n\n ) -> Result<Self, String> {\n\n let sn = CString::new(\"MayaData Inc.\").unwrap();\n\n if spdk_nvmf_subsystem_set_sn(inner, sn.as_ptr()) != 0 {\n", "file_path": 
"mayastor/src/nvmf_target.rs", "rank": 20, "score": 149307.08445859287 }, { "content": "/// Generate iqn based on provided uuid\n\nfn target_name(uuid: &str) -> String {\n\n format!(\"iqn.2019-09.org.openebs.mayastor:{}\", uuid)\n\n}\n\n\n", "file_path": "mayastor/src/iscsi_target.rs", "rank": 21, "score": 149297.47814579538 }, { "content": "#[test]\n\nfn read_label() {\n\n let _log = mayastor::spdklog::SpdkLog::new();\n\n let _l = _log.init();\n\n\n\n let output = Command::new(\"truncate\")\n\n .args(&[\"-s\", \"64m\", DISKNAME1])\n\n .output()\n\n .expect(\"failed exec truncate\");\n\n assert_eq!(output.status.success(), true);\n\n\n\n let output = Command::new(\"truncate\")\n\n .args(&[\"-s\", \"64m\", DISKNAME2])\n\n .output()\n\n .expect(\"failed exec truncate\");\n\n\n\n assert_eq!(output.status.success(), true);\n\n\n\n mayastor::CPS_INIT!();\n\n let rc = mayastor_start(\"io-testing\", vec![\"-L\", \"all\"], || {\n\n mayastor::executor::spawn(start());\n", "file_path": "mayastor/tests/nexus_label.rs", "rank": 22, "score": 146394.71280393284 }, { "content": "/// Return target iqn for a replica with uuid.\n\npub fn get_iqn(uuid: &str) -> Option<String> {\n\n let iqn = target_name(uuid);\n\n let c_iqn = CString::new(target_name(uuid)).unwrap();\n\n let tgt = unsafe { spdk_iscsi_find_tgt_node(c_iqn.as_ptr()) };\n\n\n\n if tgt.is_null() {\n\n None\n\n } else {\n\n Some(iqn)\n\n }\n\n}\n", "file_path": "mayastor/src/iscsi_target.rs", "rank": 23, "score": 136593.08323548554 }, { "content": "/// Detect share protocol (if any) for replica with given uuid and share ID\n\n/// string.\n\nfn detect_share(uuid: &str) -> Option<(ShareType, String)> {\n\n // first try nvmf and then try iscsi\n\n match nvmf_target::get_nqn(uuid) {\n\n Some(id) => Some((ShareType::Nvmf, id)),\n\n None => match iscsi_target::get_iqn(uuid) {\n\n Some(id) => Some((ShareType::Iscsi, id)),\n\n None => None,\n\n },\n\n }\n\n}\n\n\n\nimpl Replica {\n\n /// Create replica on storage pool.\n\n pub 
async fn create(\n\n uuid: &str,\n\n pool: &str,\n\n size: u64,\n\n thin: bool,\n\n ) -> Result<Self> {\n\n let lvs = match Pool::lookup(pool) {\n", "file_path": "mayastor/src/replica.rs", "rank": 24, "score": 136593.0832354855 }, { "content": "pub fn get_nqn(uuid: &str) -> Option<String> {\n\n NVMF_TGT.with(move |maybe_tgt| {\n\n let mut maybe_tgt = maybe_tgt.borrow_mut();\n\n let tgt = maybe_tgt.as_mut().unwrap();\n\n match tgt.lookup_subsystem(uuid) {\n\n Some(mut ss) => Some(ss.get_nqn()),\n\n None => None,\n\n }\n\n })\n\n}\n", "file_path": "mayastor/src/nvmf_target.rs", "rank": 25, "score": 136593.0832354855 }, { "content": "/// public function which simply calls register module\n\npub fn register_module() {\n\n register_rpc_methods();\n\n nexus_module::register_module()\n\n}\n\n\n", "file_path": "mayastor/src/bdev/nexus/mod.rs", "rank": 26, "score": 136120.11421213913 }, { "content": "/// function used to create a new nexus when parsing a config file\n\npub fn nexus_instance_new(\n\n name: String,\n\n size: u64,\n\n blksize: u32,\n\n children: Vec<String>,\n\n) {\n\n let list = instances();\n\n if let Ok(nexus) = Nexus::new(&name, blksize, size, None, Some(&children)) {\n\n list.push(nexus);\n\n }\n\n}\n", "file_path": "mayastor/src/bdev/nexus/mod.rs", "rank": 27, "score": 133299.54310991903 }, { "content": "pub fn register_module() {\n\n unsafe {\n\n spdk_bdev_module_list_add((&NEXUS_MODULE.module) as *const _ as *mut _);\n\n }\n\n}\n", "file_path": "mayastor/src/bdev/nexus/nexus_module.rs", "rank": 28, "score": 133295.04504859183 }, { "content": "var mayastorOutput = [];\n", "file_path": "mayastor-test/test_common.js", "rank": 37, "score": 125288.77913393031 }, { "content": "/// get a reference to the module\n\npub fn module() -> Option<NexusModule> {\n\n nexus_module::NexusModule::current()\n\n}\n\n\n", "file_path": "mayastor/src/bdev/nexus/mod.rs", "rank": 38, "score": 124851.90568045224 }, { "content": "/// Register new json-rpc method with given name 
and handler having form of\n\n/// a closure returning a future.\n\n///\n\n/// We use serde library with serialize/deserialize macro on structs which\n\n/// represent arguments and return values to allow us to define new\n\n/// json-rpc methods in rust with minimum code.\n\n///\n\n/// # Example:\n\n/// ```ignore\n\n/// use futures::future::Future;\n\n/// use serde::{Deserialize, Serialize};\n\n/// use mayastor::jsonrpc::{jsonrpc_register, Result};\n\n/// use std::pin::Pin;\n\n/// use futures::{future, FutureExt};\n\n/// use futures_util::future::FutureExt;\n\n/// #[derive(Deserialize)]\n\n/// struct Args {\n\n/// name: String,\n\n/// }\n\n///\n\n/// #[derive(Serialize)]\n\n/// struct Reply {\n\n/// result: String,\n\n/// }\n\n///\n\n/// pub fn init() {\n\n/// jsonrpc_register(\n\n/// \"hello\",\n\n/// |args: Args| -> Pin<Box<dyn Future<Output = Result<Reply>>>> {\n\n/// future::ok(Reply {\n\n/// result: format!(\"Hello {}!\", args.name),\n\n/// })\n\n/// .boxed_local()\n\n/// },\n\n/// );\n\n/// }\n\n/// ```\n\npub fn jsonrpc_register<P, H, R>(name: &str, handler: H)\n\nwhere\n\n H: 'static + Fn(P) -> Pin<Box<dyn Future<Output = Result<R>>>>,\n\n P: 'static + for<'de> Deserialize<'de>,\n\n R: Serialize,\n\n{\n\n let name = CString::new(name).unwrap();\n\n let handler_ptr = Box::into_raw(Box::new(handler)) as *mut c_void;\n\n\n\n unsafe {\n\n spdk_rpc_register_method(\n\n name.as_ptr(),\n\n Some(jsonrpc_handler::<H, P, R>),\n\n handler_ptr,\n\n SPDK_RPC_RUNTIME,\n\n );\n\n }\n\n}\n", "file_path": "mayastor/src/jsonrpc.rs", "rank": 39, "score": 124653.63205304375 }, { "content": "var mayastorGrpcOutput = [];\n", "file_path": "mayastor-test/test_common.js", "rank": 40, "score": 122974.41872516356 }, { "content": "/// Return first unused nbd device in /dev.\n\n///\n\n/// NOTE: We do a couple of syscalls in this function which by normal\n\n/// circumstances do not block. 
So it is reasonably safe to call this function\n\n/// from executor/reactor.\n\npub fn find_unused() -> Result<String, Error> {\n\n let nbd_max =\n\n parse_value(Path::new(\"/sys/class/modules/nbd/parameters\"), \"nbds_max\")\n\n .unwrap_or(16);\n\n\n\n for i in 0 .. nbd_max {\n\n let name = format!(\"nbd{}\", i);\n\n match parse_value::<u32>(\n\n Path::new(&format!(\"/sys/class/block/{}\", name)),\n\n \"pid\",\n\n ) {\n\n // if we find a pid file the device is in use\n\n Ok(_) => continue,\n\n Err(e) => match e.kind() {\n\n std::io::ErrorKind::NotFound => {\n\n // No PID file is found, which implies it is free to used.\n\n // The kernel needs time to construct the device\n\n // so we need to make sure we are not using it internally\n\n // already.\n\n let nbd_device = CString::new(format!(\"/dev/{}\", name))?;\n", "file_path": "mayastor/src/bdev/nexus/nexus_nbd.rs", "rank": 41, "score": 117849.19242797187 }, { "content": "use crate::bdev::nexus::{\n\n instances,\n\n nexus_bdev::Nexus,\n\n nexus_channel::NexusChannel,\n\n nexus_io::{Nio, NioType},\n\n};\n\nuse spdk_sys::{\n\n spdk_bdev_fn_table,\n\n spdk_bdev_io,\n\n spdk_bdev_io_type,\n\n spdk_get_io_channel,\n\n spdk_io_channel,\n\n};\n\nuse std::ffi::c_void;\n\n\n\n// TODO: put all the statics into a single nexus_module static and add these as\n\n// inners\n\nlazy_static! 
{\n\n /// global static fn table shared between all Nexus bdev modules\n\n pub(crate) static ref NEXUS_FN_TBL: NexusFnTable = NexusFnTable::new();\n", "file_path": "mayastor/src/bdev/nexus/nexus_fn_table.rs", "rank": 42, "score": 113096.1421284195 }, { "content": " }\n\n\n\n /// get a reference to this static function table to pass on to every\n\n /// instance\n\n pub fn table() -> &'static spdk_bdev_fn_table {\n\n &NEXUS_FN_TBL.f_tbl\n\n }\n\n\n\n /// check all the children for the specified IO type and return if it\n\n /// supported\n\n extern \"C\" fn io_supported(\n\n ctx: *mut c_void,\n\n io_type: spdk_bdev_io_type,\n\n ) -> bool {\n\n let nexus = unsafe { Nexus::from_raw(ctx) };\n\n match NioType::from(io_type) {\n\n // we always assume the device supports read/write commands\n\n NioType::Read | NioType::Write => true,\n\n NioType::Flush | NioType::Reset | NioType::Unmap => {\n\n let supported = nexus.io_is_supported(io_type);\n", "file_path": "mayastor/src/bdev/nexus/nexus_fn_table.rs", "rank": 43, "score": 113091.14706612492 }, { "content": "}\n\n\n\npub struct NexusFnTable {\n\n pub(crate) f_tbl: spdk_bdev_fn_table,\n\n}\n\n\n\n/// The FN table are function pointers called by SPDK when work is send\n\n/// our way. 
The functions are static, and shared between all instances.\n\n\n\nimpl NexusFnTable {\n\n fn new() -> Self {\n\n let mut f_tbl = spdk_bdev_fn_table::default();\n\n f_tbl.io_type_supported = Some(Self::io_supported);\n\n f_tbl.submit_request = Some(Self::io_submit);\n\n f_tbl.get_io_channel = Some(Self::io_channel);\n\n f_tbl.destruct = Some(Self::destruct);\n\n\n\n NexusFnTable {\n\n f_tbl,\n\n }\n", "file_path": "mayastor/src/bdev/nexus/nexus_fn_table.rs", "rank": 44, "score": 113088.82730323171 }, { "content": " // Main entry point to submit IO to the underlying children this uses\n\n // callbacks rather then futures and closures.\n\n\n\n extern \"C\" fn io_submit(\n\n channel: *mut spdk_io_channel,\n\n io: *mut spdk_bdev_io,\n\n ) {\n\n if let Some(io_type) = Nio::io_type(io) {\n\n let nio = Nio::from(io);\n\n\n\n let mut ch = NexusChannel::inner_from_channel(channel);\n\n let nexus = nio.nexus_as_ref();\n\n\n\n if nexus.dr_complete_notify.is_some() {\n\n // we are reconfiguring queue the IO\n\n trace!(\"What happens to this IO?\");\n\n }\n\n\n\n match io_type {\n\n NioType::Read => {\n", "file_path": "mayastor/src/bdev/nexus/nexus_fn_table.rs", "rank": 45, "score": 113085.50311743568 }, { "content": " if !supported {\n\n trace!(\n\n \"IO type {:?} not supported for {}\",\n\n NioType::from(io_type),\n\n nexus.bdev.name()\n\n );\n\n }\n\n supported\n\n }\n\n _ => {\n\n trace!(\n\n \"IO type {:?} not supported for {}\",\n\n NioType::from(io_type),\n\n nexus.bdev.name()\n\n );\n\n false\n\n }\n\n }\n\n }\n\n\n", "file_path": "mayastor/src/bdev/nexus/nexus_fn_table.rs", "rank": 46, "score": 113080.77427181567 }, { "content": " extern \"C\" fn io_channel(ctx: *mut c_void) -> *mut spdk_io_channel {\n\n let n = unsafe { Nexus::from_raw(ctx) };\n\n trace!(\"{}: Get IO channel\", n.bdev.name());\n\n unsafe { spdk_get_io_channel(ctx) }\n\n }\n\n\n\n /// called when the a nexus instance is unregister\n\n extern \"C\" fn destruct(ctx: *mut c_void) -> i32 {\n\n let nexus = 
unsafe { Nexus::from_raw(ctx) };\n\n nexus.close().unwrap();\n\n let instances = instances();\n\n // removing the nexus from the list should cause a drop\n\n instances.retain(|x| x.name() != nexus.name());\n\n\n\n 0\n\n }\n\n}\n", "file_path": "mayastor/src/bdev/nexus/nexus_fn_table.rs", "rank": 47, "score": 113079.12946784425 }, { "content": " //trace!(\"{}: Dispatching READ {:p}\", nexus.name(), io);\n\n nexus.readv(io, &mut ch)\n\n }\n\n NioType::Write => {\n\n //trace!(\"{}: Dispatching WRITE {:p}\", nexus.name(), io);\n\n nexus.writev(io, &ch)\n\n }\n\n NioType::Unmap => {\n\n trace!(\"{} Dispatching UNMAP {:p}\", nexus.name(), io);\n\n nexus.unmap(io, &ch)\n\n }\n\n _ => panic!(\"{} Received unsupported IO!\", nexus.name()),\n\n };\n\n } else {\n\n // something is every very wrong ...\n\n error!(\"Received unknown IO type {}\", unsafe { (*io).type_ });\n\n }\n\n }\n\n\n\n /// called per core to create IO channels per Nexus instance\n", "file_path": "mayastor/src/bdev/nexus/nexus_fn_table.rs", "rank": 48, "score": 113075.93901942637 }, { "content": "#[test]\n\nfn missing_version() {\n\n run_test(\n\n \"method\",\n\n EmptyArgs {},\n\n |req| {\n\n let resp = Response {\n\n error: None,\n\n id: req.id,\n\n jsonrpc: None,\n\n result: Some(json!(\"hello this is result\")),\n\n };\n\n\n\n serde_json::to_vec_pretty(&resp).unwrap()\n\n },\n\n |res: Result<String, Error>| match res {\n\n Ok(_) => (),\n\n Err(err) => panic!(format!(\"{}\", err)),\n\n },\n\n );\n\n}\n\n\n", "file_path": "jsonrpc/src/test.rs", "rank": 49, "score": 112462.70281667903 }, { "content": "#[test]\n\nfn connect_error() {\n\n // create tokio futures runtime\n\n let mut rt = Runtime::new().unwrap();\n\n // try to connect to server which does not exist\n\n let call_res: Result<(), Error> =\n\n rt.block_on(call(\"/crazy/path/look\", \"method\", Some(())));\n\n match call_res {\n\n Ok(_) => panic!(\"Expected error and got ok\"),\n\n Err(Error::IoError(err)) => match err.kind() {\n\n 
ErrorKind::NotFound => {}\n\n _ => {\n\n panic!(\"unexpected error\");\n\n }\n\n },\n\n _ => panic!(\"unexpected error\"),\n\n }\n\n rt.run().unwrap();\n\n}\n\n\n", "file_path": "jsonrpc/src/test.rs", "rank": 50, "score": 112462.70281667903 }, { "content": "#[test]\n\nfn invalid_version() {\n\n run_test(\n\n \"method\",\n\n EmptyArgs {},\n\n |req| {\n\n let resp = Response {\n\n error: None,\n\n id: req.id,\n\n jsonrpc: Some(\"1.0\".to_owned()),\n\n result: None,\n\n };\n\n\n\n serde_json::to_vec_pretty(&resp).unwrap()\n\n },\n\n |res: Result<(), Error>| match res {\n\n Ok(_) => panic!(\"Expected error and got ok\"),\n\n Err(Error::InvalidVersion) => (),\n\n Err(err) => panic!(format!(\"Wrong error type: {}\", err)),\n\n },\n\n );\n\n}\n\n\n", "file_path": "jsonrpc/src/test.rs", "rank": 51, "score": 112462.70281667903 }, { "content": "#[test]\n\nfn invalid_json() {\n\n run_test(\n\n \"method\",\n\n EmptyArgs {},\n\n |_req| {\n\n // missing quotes on result key below\n\n r#\"{\n\n \"id\": 0,\n\n \"jsonrpc\": \"2.0\",\n\n result: {},\n\n \"#\n\n .to_string()\n\n .into_bytes()\n\n },\n\n |res: Result<(), Error>| match res {\n\n Ok(_) => panic!(\"Expected error and got ok\"),\n\n Err(Error::ParseError(_)) => (),\n\n Err(err) => panic!(format!(\"Wrong error type: {}\", err)),\n\n },\n\n );\n\n}\n\n\n", "file_path": "jsonrpc/src/test.rs", "rank": 52, "score": 112462.70281667903 }, { "content": "#[test]\n\nfn rpc_error() {\n\n run_test(\n\n \"method\",\n\n EmptyArgs {},\n\n |req| {\n\n let resp = Response {\n\n error: Some(RpcError {\n\n code: -(Errno::ENOENT as i32),\n\n message: \"Not found\".to_owned(),\n\n data: None,\n\n }),\n\n id: req.id,\n\n jsonrpc: Some(\"2.0\".to_owned()),\n\n result: None,\n\n };\n\n\n\n serde_json::to_vec_pretty(&resp).unwrap()\n\n },\n\n |res: Result<(), Error>| match res {\n\n Ok(_) => panic!(\"Expected error and got ok\"),\n", "file_path": "jsonrpc/src/test.rs", "rank": 53, "score": 112462.70281667903 }, { "content": "/// Construct 
callback argument for spdk async function.\n\n/// The argument is a oneshot sender channel for result of the operation.\n\npub fn cb_arg<T>(sender: oneshot::Sender<T>) -> *mut c_void {\n\n Box::into_raw(Box::new(sender)) as *const _ as *mut c_void\n\n}\n\n\n\n/// Generic callback for spdk async functions expecting to be called with\n\n/// single argument which is a sender channel to notify the other end about\n\n/// the result.\n\npub extern \"C\" fn done_cb<T>(sender_ptr: *mut c_void, val: T)\n\nwhere\n\n T: fmt::Debug,\n\n{\n\n let sender =\n\n unsafe { Box::from_raw(sender_ptr as *mut oneshot::Sender<T>) };\n\n\n\n // the receiver side might be gone, if this happens it either means that the\n\n // function has gone out of scope or that the future was cancelled. We can\n\n // not cancel futures as they are driven by reactor. We currently fail\n\n // hard if the receiver is gone but in reality the determination of it\n\n // being fatal depends largely on what the future was supposed to do.\n\n sender\n\n .send(val)\n\n .expect(\"done callback receiver side disappeared\");\n\n}\n", "file_path": "mayastor/src/executor.rs", "rank": 54, "score": 111442.1121468632 }, { "content": "#[test]\n\nfn wrong_reply_id() {\n\n run_test(\n\n \"method\",\n\n EmptyArgs {},\n\n |_req| {\n\n let resp = Response {\n\n error: None,\n\n id: json!(\"12\"),\n\n jsonrpc: Some(\"2.0\".to_owned()),\n\n result: Some(json!(\"hello this is result\")),\n\n };\n\n\n\n serde_json::to_vec_pretty(&resp).unwrap()\n\n },\n\n |res: Result<String, Error>| match res {\n\n Ok(_) => panic!(\"Expected error and got ok\"),\n\n Err(Error::InvalidReplyId) => (),\n\n Err(err) => panic!(format!(\"Wrong error type: {}\", err)),\n\n },\n\n );\n\n}\n\n\n", "file_path": "jsonrpc/src/test.rs", "rank": 55, "score": 110179.66939085953 }, { "content": "#[test]\n\nfn empty_result_expected() {\n\n run_test(\n\n \"method\",\n\n EmptyArgs {},\n\n |req| {\n\n let resp = Response {\n\n error: None,\n\n id: req.id,\n\n 
jsonrpc: Some(\"2.0\".to_owned()),\n\n result: None,\n\n };\n\n\n\n serde_json::to_vec_pretty(&resp).unwrap()\n\n },\n\n |res: Result<(), Error>| match res {\n\n Ok(_) => (),\n\n Err(err) => panic!(format!(\"Unexpected error {}\", err)),\n\n },\n\n );\n\n}\n\n\n", "file_path": "jsonrpc/src/test.rs", "rank": 56, "score": 110179.66939085953 }, { "content": "#[test]\n\nfn empty_result_unexpected() {\n\n run_test(\n\n \"method\",\n\n EmptyArgs {},\n\n |req| {\n\n let resp = Response {\n\n error: None,\n\n id: req.id,\n\n jsonrpc: Some(\"2.0\".to_owned()),\n\n result: Some(json!(\"unexpected value\")),\n\n };\n\n\n\n serde_json::to_vec_pretty(&resp).unwrap()\n\n },\n\n |res: Result<(), Error>| match res {\n\n Ok(_) => panic!(\"Expected error and got ok\"),\n\n Err(Error::ParseError(_)) => (),\n\n Err(err) => panic!(format!(\"Wrong error type: {}\", err)),\n\n },\n\n );\n\n}\n\n\n", "file_path": "jsonrpc/src/test.rs", "rank": 57, "score": 110179.66939085953 }, { "content": "#[test]\n\nfn normal_request_reply() {\n\n #[derive(Debug, Serialize, Deserialize)]\n\n struct Args {\n\n msg: String,\n\n code: i32,\n\n flag: bool,\n\n }\n\n\n\n let args = Args {\n\n msg: \"some message\".to_owned(),\n\n code: -123,\n\n flag: true,\n\n };\n\n\n\n run_test(\n\n \"invert_method\",\n\n args,\n\n // we invert int and bool values in the request and send it back\n\n |req| {\n\n assert_eq!(req.method, \"invert_method\");\n", "file_path": "jsonrpc/src/test.rs", "rank": 58, "score": 110179.66939085953 }, { "content": "fn main() {\n\n let log = mayastor::spdklog::SpdkLog::new();\n\n let _ = log.init();\n\n mayastor::CPS_INIT!();\n\n let args = vec![\"-c\", \"../etc/test.conf\"];\n\n mayastor_start(\"test\", args, || {\n\n mayastor::executor::spawn(works());\n\n });\n\n}\n\nasync fn works() {\n\n let children = vec![\n\n \"aio:////disk1.img?blk_size=512\".to_string(),\n\n \"aio:////disk2.img?blk_size=512\".into(),\n\n ];\n\n let name = nexus_create(\"hello\", 512, 131_072, None, 
&children).await;\n\n\n\n if let Err(name) = name {\n\n error!(\"{:?}\", name);\n\n }\n\n spdk_stop(0);\n\n}\n", "file_path": "mayastor/examples/nvmf_uri.rs", "rank": 59, "score": 109177.77464541198 }, { "content": "/// Extract JSON object from text, trim any pending characters which follow\n\n/// the closing bracket of the object.\n\nfn extract_json_object(\n\n params: &spdk_json_val,\n\n) -> std::result::Result<String, String> {\n\n if params.type_ != SPDK_JSON_VAL_OBJECT_BEGIN {\n\n return Err(\"JSON parameters must be an object\".to_owned());\n\n }\n\n let text = unsafe {\n\n CStr::from_ptr(params.start as *const c_char)\n\n .to_str()\n\n .unwrap()\n\n };\n\n // find corresponding '}' for the object\n\n let mut level = 0;\n\n for (i, c) in text.chars().enumerate() {\n\n if c == '{' {\n\n level += 1;\n\n } else if c == '}' {\n\n level -= 1;\n\n if level == 0 {\n\n return Ok(text[0 ..= i].to_string());\n", "file_path": "mayastor/src/jsonrpc.rs", "rank": 60, "score": 106985.92446140024 }, { "content": "/// Start future executor and register its poll method with spdk so that the\n\n/// tasks can make steady progress.\n\npub fn start() {\n\n EXECUTOR_CTX.with(|ctx_cell| {\n\n let mut ctx_maybe = ctx_cell.try_borrow_mut().expect(\n\n \"start executor must be called before any other executor method\",\n\n );\n\n\n\n if ctx_maybe.is_some() {\n\n panic!(\n\n \"Executor was already started on thread {:?}\",\n\n thread::current().id()\n\n );\n\n }\n\n\n\n let pool = LocalPool::new();\n\n let spawner = pool.spawner();\n\n let poller =\n\n unsafe { spdk_poller_register(Some(tick), ptr::null_mut(), 1000) };\n\n\n\n *ctx_maybe = Some(ExecutorCtx {\n\n pool: RefCell::new(pool),\n", "file_path": "mayastor/src/executor.rs", "rank": 61, "score": 104866.87652723466 }, { "content": "/// Converts an array of Strings into the appropriate args type\n\n/// to construct the children from which we create the nexus.\n\npub fn nexus_uri_parse_vec(uris: &[String]) -> Result<Vec<BdevType>, 
UriError> {\n\n let mut results = Vec::new();\n\n for target in uris {\n\n results.push(nexus_parse_uri(target)?);\n\n }\n\n\n\n Ok(results)\n\n}\n", "file_path": "mayastor/src/nexus_uri.rs", "rank": 62, "score": 103052.65582704134 }, { "content": "// After some research, it turns out that default mount options\n\n// are depending on various CONFIG_XXXX options during kernel\n\n// config. Depending on FS (sigh) the defaults can be determined\n\n// by an util like tune2fs. However, this requires you to have a\n\n// filesystem of that type to begin with... (how useful)\n\n//\n\n// I have found no way to determine these options by digging through\n\n// sysfs so here is a hack. I feel bad about this. I hate to do this\n\n// but I've given up. Linux won, there you have it.\n\nfn probe_defaults(fsname: &str) -> Result<Vec<String>, String> {\n\n let output = Command::new(format!(\"mkfs.{}\", fsname))\n\n .arg(\"/tmp/fs.img\")\n\n .output()\n\n .expect(\"Failed to execute mkfs command\");\n\n if !output.status.success() {\n\n return Err(format!(\n\n \"Failed to mkfs {} fs: {}\",\n\n fsname,\n\n String::from_utf8(output.stderr).unwrap()\n\n ));\n\n }\n\n trace!(\n\n \"Output of mkfs.{} command: {}\",\n\n fsname,\n\n String::from_utf8(output.stdout).unwrap()\n\n );\n\n\n\n let output = Command::new(\"mount\")\n\n .arg(\"/tmp/fs.img\")\n", "file_path": "csi/src/mount.rs", "rank": 63, "score": 102018.25149388194 }, { "content": "/// Register replica json-rpc methods.\n\npub fn register_replica_methods() {\n\n jsonrpc_register(\"create_replica\", |args: jsondata::CreateReplicaArgs| {\n\n let fut = async move {\n\n let replica = Replica::create(\n\n &args.uuid,\n\n &args.pool,\n\n args.size,\n\n args.thin_provision,\n\n )\n\n .await?;\n\n\n\n match args.share {\n\n jsondata::ShareProtocol::Nvmf => {\n\n replica.share(ShareType::Nvmf).await\n\n }\n\n jsondata::ShareProtocol::Iscsi => {\n\n replica.share(ShareType::Iscsi).await\n\n }\n\n jsondata::ShareProtocol::None => 
Ok(()),\n\n }\n", "file_path": "mayastor/src/replica.rs", "rank": 64, "score": 100601.74489738581 }, { "content": "/// Register storage pool json-rpc methods.\n\npub fn register_pool_methods() {\n\n // Joining create and import together is questionable and we might split\n\n // the two operations in future. However not until cache config file\n\n // feature is implemented and requirements become clear.\n\n jsonrpc_register(\n\n \"create_or_import_pool\",\n\n |args: jsondata::CreateOrImportPoolArgs| {\n\n let fut = async move {\n\n // TODO: support RAID-0 devices\n\n if args.disks.len() != 1 {\n\n return Err(JsonRpcError::new(\n\n Code::InvalidParams,\n\n \"Invalid number of disks specified\",\n\n ));\n\n }\n\n\n\n if Pool::lookup(&args.name).is_some() {\n\n return Err(JsonRpcError::new(\n\n Code::AlreadyExists,\n\n format!(\"The pool {} already exists\", args.name),\n", "file_path": "mayastor/src/pool.rs", "rank": 65, "score": 100601.74489738581 }, { "content": "/// Unmount a filesystem. 
We use different unmount flags for bind and non-bind\n\n/// mounts (corresponds to stage and publish type of mounts).\n\npub fn unmount_fs(from: &str, bound: bool) -> Result<(), String> {\n\n let mut flags = UnmountFlags::empty();\n\n\n\n if bound {\n\n flags.insert(UnmountFlags::FORCE);\n\n } else {\n\n flags.insert(UnmountFlags::DETACH);\n\n }\n\n\n\n debug!(\"Unmounting {} ...\", from);\n\n\n\n match unmount(&from, UnmountFlags::DETACH) {\n\n Ok(_) => {\n\n info!(\"Filesystem at {} has been unmounted\", from);\n\n Ok(())\n\n }\n\n Err(err) => Err(format!(\"Failed to unmount fs at {}: {}\", from, err)),\n\n }\n\n}\n", "file_path": "csi/src/mount.rs", "rank": 66, "score": 99663.01660623177 }, { "content": "/// Read and parse value from a file\n\npub fn parse_value<T>(dir: &Path, file: &str) -> Result<T>\n\nwhere\n\n T: FromStr,\n\n{\n\n let path = dir.join(file);\n\n let s = fs::read_to_string(&path)?;\n\n let s = s.trim();\n\n match s.parse() {\n\n Ok(v) => Ok(v),\n\n Err(_) => Err(Error::new(\n\n ErrorKind::InvalidData,\n\n format!(\n\n \"Failed to parse {}: {}\",\n\n path.as_path().to_str().unwrap(),\n\n s\n\n ),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "sysfs/src/lib.rs", "rank": 67, "score": 94319.96764242857 }, { "content": "fn list_pools() -> Vec<jsondata::Pool> {\n\n let mut pools = Vec::new();\n\n\n\n for pool in PoolsIter::new() {\n\n pools.push(jsondata::Pool {\n\n name: pool.get_name().to_owned(),\n\n disks: vec![pool.get_base_bdev().name()],\n\n // TODO: figure out how to detect state of pool\n\n state: \"online\".to_owned(),\n\n capacity: pool.get_capacity(),\n\n used: pool.get_capacity() - pool.get_free(),\n\n });\n\n }\n\n pools\n\n}\n\n\n", "file_path": "mayastor/src/pool.rs", "rank": 68, "score": 92878.2412535194 }, { "content": "/// Spawn a future on the executor running on the same thread.\n\npub fn spawn<F>(f: F)\n\nwhere\n\n F: Future<Output = ()> + 'static,\n\n{\n\n EXECUTOR_CTX.with(|ctx_cell| {\n\n let ctx_maybe = ctx_cell.borrow();\n\n\n\n 
match ctx_maybe.as_ref() {\n\n // The only place we grab ref to spawner is here and since only\n\n // a single thread can access it, it is safe.\n\n Some(ctx) => ctx.spawner.borrow_mut().spawn_local(f).unwrap(),\n\n None => panic!(\n\n \"Executor was not started on thread {:?}\",\n\n thread::current().id()\n\n ),\n\n }\n\n })\n\n}\n\n\n", "file_path": "mayastor/src/executor.rs", "rank": 69, "score": 92823.39494571544 }, { "content": "/// Write string to a file\n\npub fn write_value<T>(dir: &Path, file: &str, content: T) -> Result<()>\n\nwhere\n\n T: string::ToString,\n\n{\n\n let path = dir.join(file);\n\n fs::write(path, content.to_string())\n\n}\n\n\n", "file_path": "sysfs/src/lib.rs", "rank": 70, "score": 91022.374732281 }, { "content": "/// Create iscsi portal and initiator group which will be used later when\n\n/// creating iscsi targets.\n\npub fn init_iscsi() -> Result<(), String> {\n\n let portal_host = CString::new(\"0.0.0.0\").unwrap();\n\n let portal_port = CString::new(\"3260\").unwrap();\n\n let initiator_host = CString::new(\"ANY\").unwrap();\n\n let initiator_netmask = CString::new(\"ANY\").unwrap();\n\n\n\n let pg = unsafe { spdk_iscsi_portal_grp_create(0) };\n\n if pg.is_null() {\n\n return Err(\"Failed to create default portal group\".to_owned());\n\n }\n\n unsafe {\n\n let p = spdk_iscsi_portal_create(\n\n portal_host.as_ptr(),\n\n portal_port.as_ptr(),\n\n ptr::null_mut(),\n\n );\n\n if p.is_null() {\n\n spdk_iscsi_portal_grp_release(pg);\n\n return Err(\"Failed to create default iscsi portal\".to_owned());\n\n }\n", "file_path": "mayastor/src/iscsi_target.rs", "rank": 71, "score": 91019.3501759689 }, { "content": "/// Destroy iscsi default portal and initiator group.\n\npub fn fini_iscsi() -> Result<(), String> {\n\n unsafe {\n\n let ig = spdk_iscsi_init_grp_unregister(0);\n\n if !ig.is_null() {\n\n spdk_iscsi_init_grp_destroy(ig);\n\n }\n\n let pg = spdk_iscsi_portal_grp_unregister(0);\n\n if !pg.is_null() {\n\n 
spdk_iscsi_portal_grp_release(pg);\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "mayastor/src/iscsi_target.rs", "rank": 72, "score": 91014.955842155 }, { "content": "/// Read dictionary format from a file. Example:\n\n/// KEY1=val1\n\n/// KEY2=val2\n\n/// ...\n\npub fn parse_dict(dir: &Path, file: &str) -> Result<HashMap<String, String>> {\n\n let path = dir.join(file);\n\n let mut dict = HashMap::new();\n\n let f = fs::File::open(&path)?;\n\n let file = BufReader::new(&f);\n\n\n\n for line in file.lines() {\n\n let line = line.unwrap();\n\n let parts: Vec<&str> = line.split('=').collect();\n\n if parts.len() == 2 {\n\n dict.insert(parts[0].to_string(), parts[1].to_string());\n\n }\n\n }\n\n Ok(dict)\n\n}\n", "file_path": "sysfs/src/lib.rs", "rank": 73, "score": 89545.06284442848 }, { "content": "var mayastorProc;\n", "file_path": "mayastor-test/test_common.js", "rank": 74, "score": 88476.57459330306 }, { "content": "var mayastorGrpcProc;\n", "file_path": "mayastor-test/test_common.js", "rank": 75, "score": 87060.690680123 }, { "content": "var mayastorMockServer;\n", "file_path": "mayastor-test/test_cli.js", "rank": 76, "score": 87060.690680123 }, { "content": " fmt::{Display, Formatter},\n\n io::{Cursor, Seek, SeekFrom},\n\n ops::Neg,\n\n os::raw::c_void,\n\n str::FromStr,\n\n};\n\n\n\nuse bincode::serialize_into;\n\nuse futures::channel::oneshot;\n\nuse serde::Serialize;\n\n\n\nuse spdk_sys::{\n\n spdk_bdev,\n\n spdk_bdev_desc,\n\n spdk_bdev_io,\n\n spdk_bdev_io_get_buf,\n\n spdk_bdev_readv_blocks,\n\n spdk_bdev_register,\n\n spdk_bdev_unmap_blocks,\n\n spdk_bdev_unregister,\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 77, "score": 85745.21805283245 }, { "content": " descriptor::DmaBuf,\n\n};\n\n\n\npub(crate) static NEXUS_PRODUCT_ID: &str = \"Nexus CAS Driver v0.0.1\";\n\n\n\n/// The main nexus structure\n\n#[derive(Debug)]\n\npub struct Nexus {\n\n /// Name of the Nexus instance\n\n pub(crate) name: String,\n\n /// number of children 
part of this nexus\n\n pub(crate) child_count: u32,\n\n /// vector of children\n\n pub(crate) children: Vec<NexusChild>,\n\n /// inner bdev\n\n pub(crate) bdev: Bdev,\n\n /// raw pointer to bdev (to destruct it later using Box::from_raw())\n\n bdev_raw: *mut spdk_bdev,\n\n /// represents the current state of the Nexus\n\n pub(crate) state: NexusState,\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 78, "score": 85744.47162687911 }, { "content": "//!\n\n//! The nexus is one of core components, next to the target services. With\n\n//! the nexus a developer is able to construct a per application volume\n\n//! optimized for the perceived intent. For example, depending on\n\n//! application needs synchronous mirroring may be required.\n\n//!\n\n//! In order to create a nexus, it requires storage target URI's.\n\n//!\n\n//! Creating a 3 way replica nexus example:\n\n//!\n\n//! # example\n\n//! ```ignore\n\n//! use mayastor::descriptor::{Descriptor, DmaBuf};\n\n//! use mayastor::bdev::nexus::nexus_bdev::nexus_create;\n\n//! let children = vec![\n\n//! \"aio:////disk1.img?blk_size=512\".to_string(),\n\n//! \"iscsi://foobar/iqn.2019-05.io.openebs:disk0\".into(),\n\n//! \"nvmf://fooo/nqn.2019-05.io-openebs:disk0\".into(),\n\n//! 
];\n\n//!\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 79, "score": 85742.27846665202 }, { "content": " unsafe {\n\n let b: Box<spdk_bdev> = Box::from_raw(self.bdev_raw);\n\n let _ = std::ffi::CString::from_raw(b.name);\n\n let _ = std::ffi::CString::from_raw(b.product_name);\n\n }\n\n }\n\n}\n\n\n\nimpl Nexus {\n\n /// create a new nexus instance with optionally directly attaching\n\n /// children to it.\n\n pub fn new(\n\n name: &str,\n\n block_len: u32,\n\n block_cnt: u64,\n\n uuid: Option<&str>,\n\n child_bdevs: Option<&[String]>,\n\n ) -> Result<Box<Self>, nexus::Error> {\n\n let mut b = Box::new(spdk_bdev::default());\n\n b.name = c_str!(name);\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 80, "score": 85739.91699301812 }, { "content": " debug!(\"{}: IO device registered at {:p}\", self.name, self.as_ptr());\n\n\n\n let rc = unsafe { spdk_bdev_register(self.bdev.inner) };\n\n\n\n if rc != 0 {\n\n error!(\"{}: Failed to register\", self.bdev.name());\n\n\n\n unsafe { spdk_io_device_unregister(self.as_ptr(), None) }\n\n self.children.iter_mut().map(|c| c.close()).for_each(drop);\n\n self.set_state(NexusState::Faulted);\n\n return Err(match rc.neg() {\n\n libc::EINVAL => Error::Invalid,\n\n libc::EEXIST => Error::Exists,\n\n libc::ENOMEM => Error::OutOfMemory,\n\n _ => Error::Internal(\"Failed to register bdev\".to_owned()),\n\n });\n\n }\n\n\n\n self.set_state(NexusState::Online);\n\n info!(\"{}\", self);\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 81, "score": 85739.91598728407 }, { "content": " }\n\n\n\n /// returns the size of the nexus instance\n\n pub fn size(&self) -> u64 {\n\n u64::from(self.bdev.block_size()) * self.bdev.num_blocks()\n\n }\n\n\n\n /// reconfigure the child event handler\n\n pub(crate) async fn reconfigure(&mut self, event: DREvent) {\n\n let (s, r) = oneshot::channel::<i32>();\n\n assert!(self.dr_complete_notify.is_none());\n\n self.dr_complete_notify = Some(s);\n\n\n\n 
info!(\n\n \"{}: Dynamic reconfiguration event: {:?} started\",\n\n self.name(),\n\n event\n\n );\n\n\n\n NexusChannel::reconfigure(self.as_ptr(), &event);\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 82, "score": 85739.8969464624 }, { "content": " }\n\n\n\n let mut ni = Nexus::new(name, block_len, block_cnt, uuid, None)\n\n .expect(\"Failed to allocate Nexus instance\");\n\n\n\n for child in children {\n\n if let Err(result) = ni.create_and_add_child(child).await {\n\n error!(\"{}: Failed to create child bdev {}\", ni.name, child);\n\n ni.destroy_children().await;\n\n return Err(result);\n\n }\n\n }\n\n\n\n let opened = ni.open().await;\n\n\n\n if opened.is_ok() {\n\n nexus_list.push(ni);\n\n Ok(())\n\n } else {\n\n ni.destroy_children().await;\n\n Err(Error::Internal(\"Failed to open the nexus\".to_owned()))\n\n }\n\n}\n\n\n\n/// Lookup a nexus by its name (currently used only by test functions).\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 83, "score": 85739.31372723736 }, { "content": " /// Dynamic Reconfigure event\n\n pub dr_complete_notify: Option<oneshot::Sender<i32>>,\n\n /// the offset in num blocks where the data partition starts\n\n pub data_ent_offset: u64,\n\n /// nbd device which the nexus is exposed through\n\n pub(crate) nbd_disk: Option<nbd::Disk>,\n\n /// the handle to be used when sharing the nexus, this allows for the bdev\n\n /// to be shared with vbdevs on top\n\n pub(crate) share_handle: Option<String>,\n\n}\n\n\n\nunsafe impl core::marker::Sync for Nexus {}\n\n\n\n#[derive(Debug, Serialize, PartialEq)]\n\npub enum NexusState {\n\n /// nexus created but no children attached\n\n Init,\n\n /// Online\n\n Online,\n\n /// The nexus can not do any IO\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 84, "score": 85739.20416126477 }, { "content": " b.product_name = c_str!(NEXUS_PRODUCT_ID);\n\n b.fn_table = nexus::fn_table().unwrap();\n\n b.module = nexus::module().unwrap().as_ptr();\n\n 
b.blocklen = block_len;\n\n b.blockcnt = block_cnt;\n\n b.required_alignment = 9;\n\n\n\n let mut n = Box::new(Nexus {\n\n name: name.to_string(),\n\n child_count: 0,\n\n children: Vec::new(),\n\n bdev: Bdev::from(&*b as *const _ as *mut spdk_bdev),\n\n state: NexusState::Init,\n\n bdev_raw: Box::into_raw(b),\n\n dr_complete_notify: None,\n\n data_ent_offset: 0,\n\n nbd_disk: None,\n\n share_handle: None,\n\n });\n\n\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 85, "score": 85738.41666682316 }, { "content": " }\n\n }\n\n\n\n info!(\"Destroying nexus {}\", self.name);\n\n\n\n unsafe {\n\n // This will trigger spdk callback to close() which removes\n\n // the device from global list of nexus's\n\n spdk_bdev_unregister(self.bdev_raw, None, std::ptr::null_mut());\n\n }\n\n }\n\n\n\n /// register the bdev with SPDK and set the callbacks for io channel\n\n /// creation. Once this function is called, the device is visible and can\n\n /// be used for IO.\n\n ///\n\n /// The registering is implement such that any core can call get_io_channel\n\n /// from the function table. 
The io_channels, are constructed on demand and\n\n /// that's basically what this function does.\n\n ///\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 86, "score": 85738.10436641052 }, { "content": " spdk_bdev_writev_blocks,\n\n spdk_io_channel,\n\n spdk_io_device_register,\n\n spdk_io_device_unregister,\n\n};\n\n\n\nuse crate::{\n\n bdev::{\n\n nexus::{\n\n self,\n\n instances,\n\n nexus_channel::{DREvent, NexusChannel, NexusChannelInner},\n\n nexus_child::{ChildState, NexusChild},\n\n nexus_io::{IoStatus, Nio},\n\n nexus_label::{GPTHeader, GptEntry, GptGuid, GptName, NexusLabel},\n\n nexus_nbd as nbd,\n\n Error,\n\n },\n\n Bdev,\n\n },\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 87, "score": 85738.10436641052 }, { "content": " let mut label = self.generate_label();\n\n self.data_ent_offset = label.offset();\n\n self.bdev.set_num_blocks(label.num_blocks());\n\n\n\n let blk_size = self.bdev.block_size();\n\n let mut buf = DmaBuf::new(\n\n (blk_size * (((1 << 14) / blk_size) + 1)) as usize,\n\n self.bdev.alignment(),\n\n )?;\n\n\n\n self.write_label(&mut buf, &mut label, true).await?;\n\n self.write_label(&mut buf, &mut label, false).await?;\n\n info!(\"{}: {} \", self.name, label);\n\n }\n\n\n\n self.register()\n\n }\n\n\n\n /// close the nexus and any children that are open\n\n pub fn close(&mut self) -> Result<(), ()> {\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 88, "score": 85738.08483473495 }, { "content": "//! // create the nexus using the vector of child devices\n\n//! let nexus = nexus_create(\n\n//! \"nexus-b6565df-af19-4645-9f98-e6a8b8c13b58\",\n\n//! 4096,\n\n//! 131_027,\n\n//! Some(\"b6565df-af19-4645-9f98-e6a8b8c13b58\"),\n\n//! &children,\n\n//! ).await.unwrap();\n\n//!\n\n//! // open a block descriptor\n\n//! let bd = Descriptor::open(&nexus, true).unwrap();\n\n//!\n\n//! // only use DMA buffers to issue IO, as its a member of the opened device\n\n//! 
// alignment is handled implicitly\n\n//! let mut buf = bd.dma_zmalloc(4096).unwrap();\n\n//!\n\n//! // fill the buffer with a know value\n\n//! buf.fill(0xff);\n\n//!\n\n//! // write out the buffer to the nexus, all child devices will receive the\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 89, "score": 85737.62744748792 }, { "content": " /// Each io device is registered using a io_device as a key, and/or name. In\n\n /// our case, we dont actually create a channel ourselves but we reference\n\n /// channels of the underlying bdevs.\n\n\n\n pub fn register(&mut self) -> Result<(), nexus::Error> {\n\n if self.state != NexusState::Init {\n\n error!(\"{}: Can only call register once\", self.name);\n\n return Err(Error::AlreadyClaimed);\n\n }\n\n\n\n unsafe {\n\n spdk_io_device_register(\n\n self.as_ptr(),\n\n Some(NexusChannel::create),\n\n Some(NexusChannel::destroy),\n\n std::mem::size_of::<NexusChannel>() as u32,\n\n (*self.bdev.inner).name,\n\n );\n\n }\n\n\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 90, "score": 85737.6081202552 }, { "content": " Ok(())\n\n }\n\n\n\n /// generate a new nexus label based on the nexus configuration. 
The meta\n\n /// partition is fixed in size and aligned to a 1MB boundary\n\n pub(crate) fn generate_label(&mut self) -> NexusLabel {\n\n let mut hdr = GPTHeader::new(\n\n self.bdev.block_size(),\n\n self.min_num_blocks(),\n\n self.bdev.uuid().into(),\n\n );\n\n\n\n let mut entries = vec![GptEntry::default(); hdr.num_entries as usize];\n\n\n\n entries[0] = GptEntry {\n\n ent_type: GptGuid::from_str(\"27663382-e5e6-11e9-81b4-ca5ca5ca5ca5\")\n\n .unwrap(),\n\n ent_guid: GptGuid::new_random(),\n\n // 1MB aligned\n\n ent_start: hdr.lba_start,\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 91, "score": 85737.23070945457 }, { "content": " // 4MB\n\n ent_end: hdr.lba_start\n\n + u64::from((4 << 20) / self.bdev.block_size())\n\n - 1,\n\n ent_attr: 0,\n\n ent_name: GptName {\n\n name: \"MayaMeta\".into(),\n\n },\n\n };\n\n\n\n entries[1] = GptEntry {\n\n ent_type: GptGuid::from_str(\"27663382-e5e6-11e9-81b4-ca5ca5ca5ca5\")\n\n .unwrap(),\n\n ent_guid: GptGuid::new_random(),\n\n ent_start: entries[0].ent_end + 1,\n\n ent_end: hdr.lba_end,\n\n ent_attr: 0,\n\n ent_name: GptName {\n\n name: \"MayaData\".into(),\n\n },\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 92, "score": 85736.67268977646 }, { "content": " // we use RR to read from the children and also, set that we only need\n\n // to read from one child before we complete the IO to the callee.\n\n io.set_outstanding(1);\n\n let child = channels.child_select();\n\n\n\n // if there is no buffer space for us allocated within the request\n\n // allocate it now, taking care of proper alignment\n\n if io.need_buf() {\n\n unsafe {\n\n spdk_bdev_io_get_buf(\n\n pio,\n\n Some(Self::nexus_get_buf_cb),\n\n io.num_blocks() * io.block_len(),\n\n )\n\n }\n\n return;\n\n }\n\n\n\n let (desc, ch) = channels.ch[child];\n\n\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 93, "score": 85736.55909417087 }, { "content": " n.bdev.set_uuid(match uuid {\n\n Some(uuid) => 
Some(uuid.to_string()),\n\n None => None,\n\n });\n\n\n\n if let Some(child_bdevs) = child_bdevs {\n\n n.add_children(child_bdevs);\n\n }\n\n\n\n // store a reference to the Self in the bdev structure.\n\n unsafe {\n\n (*n.bdev.inner).ctxt = n.as_ref() as *const _ as *mut c_void;\n\n }\n\n Ok(n)\n\n }\n\n\n\n /// get a mutable reference to a child at index\n\n pub fn get_child_as_mut_ref(\n\n &mut self,\n\n index: usize,\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 94, "score": 85735.59830232253 }, { "content": "\n\n let result = r.await.expect(\"Reconfigure notify failure\");\n\n\n\n info!(\n\n \"{}: Dynamic reconfiguration event: {:?} completed {}\",\n\n self.name(),\n\n event,\n\n result\n\n );\n\n }\n\n\n\n /// Opens the Nexus instance for IO\n\n pub async fn open(&mut self) -> Result<(), nexus::Error> {\n\n debug!(\"Opening nexus {}\", self.name);\n\n\n\n self.try_open_children()?;\n\n\n\n // during open all label information needs to be consistent among the\n\n // children\n\n if let Ok(label) = self.update_child_labels().await {\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 95, "score": 85734.81513155893 }, { "content": " let nexus = io.nexus_as_ref();\n\n unsafe {\n\n spdk_bdev_readv_blocks(\n\n desc,\n\n ch,\n\n io.iovs(),\n\n io.iov_count(),\n\n io.offset() + nexus.data_ent_offset,\n\n io.num_blocks(),\n\n Some(Self::io_completion),\n\n pio as *mut _,\n\n )\n\n }\n\n }\n\n\n\n /// write vectored IO to the underlying children.\n\n pub(crate) fn writev(\n\n &self,\n\n pio: *mut spdk_bdev_io,\n\n channels: &NexusChannelInner,\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 96, "score": 85734.52550058549 }, { "content": " let ret = Self::readv_impl(pio, desc, ch);\n\n\n\n if ret != 0 {\n\n error!(\n\n \"{}: Failed to submit dispatched IO {:p}\",\n\n io.nexus_as_ref().name(),\n\n pio\n\n );\n\n io.set_outstanding(0);\n\n io.nio_set_status(IoStatus::Failed);\n\n }\n\n }\n\n\n\n /// do the actual 
read\n\n fn readv_impl(\n\n pio: *mut spdk_bdev_io,\n\n desc: *mut spdk_bdev_desc,\n\n ch: *mut spdk_io_channel,\n\n ) -> i32 {\n\n let io = Nio::from(pio);\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 97, "score": 85734.45847876032 }, { "content": " // now register the bdev but update its size first to ensure we\n\n // adhere to the partitions\n\n\n\n // When the GUID does not match the given UUID it means that the PVC\n\n // has been recreated, is such as case we should\n\n // consider updating the labels\n\n\n\n info!(\"{}: {} \", self.name, label);\n\n self.data_ent_offset = label.offset();\n\n self.bdev.set_num_blocks(label.num_blocks());\n\n } else {\n\n // one or more children do not have, or have an invalid gpt label.\n\n // Recalculate that the header should have been and\n\n // write them out\n\n\n\n info!(\n\n \"{}: Child label(s) mismatch or absent, applying new label(s)\",\n\n self.name\n\n );\n\n\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 98, "score": 85734.42573031913 }, { "content": " /// the close method is called from SPDK close callback any time after\n\n /// the bdev unregister is called so keep this call at the end of this\n\n /// method!\n\n pub async fn destroy(&mut self) {\n\n let _ = self.unshare().await;\n\n\n\n assert_eq!(self.share_handle, None);\n\n\n\n // doing this in the context of nexus_close() would be better\n\n // however we can not change the function in async there so we\n\n // do it here.\n\n for child in self.children.iter_mut() {\n\n if child.state == ChildState::Open {\n\n let _ = child.close();\n\n }\n\n info!(\"Destroying child bdev {}\", child.name);\n\n\n\n let r = child.destroy().await;\n\n if r.is_err() {\n\n warn!(\"Failed to destroy child {}\", child.name);\n", "file_path": "mayastor/src/bdev/nexus/nexus_bdev.rs", "rank": 99, "score": 85734.29955777779 } ]
Rust
src/main.rs
falzberger/ddlog_bench
261fc5598353c648708088eb3b36d1e2082fd869
use clap::{App, Arg, ArgMatches}; use csv::{ReaderBuilder, StringRecord}; use ordered_float::OrderedFloat; use std::fs::File; use std::io::prelude::*; use query_ddlog::api::HDDlog; use query_ddlog::relid2name; use query_ddlog::typedefs::*; use query_ddlog::Relations; use differential_datalog::ddval::{DDValConvert, DDValue}; use differential_datalog::program::RelId; use differential_datalog::program::Update; use differential_datalog::DeltaMap; use differential_datalog::{DDlog, DDlogDynamic}; fn relation_str_to_enum(relation: &str) -> Relations { match relation { "edge" | "Edge" => Relations::Edge, _ => panic!("Unknown input relation: {}", relation), } } fn parse_tuple_for_relation(tuple: StringRecord, offset: usize, relation: Relations) -> DDValue { match relation { Relations::Edge => Edge { parent: tuple[offset + 0].to_string(), child: tuple[offset + 1].to_string(), weight: OrderedFloat::from(tuple[offset + 2].parse::<f32>().unwrap()), } .into_ddvalue(), _ => panic!("Unsupported input relation: {:?}", relation), } } fn get_cli_arg_matches<'a>() -> ArgMatches<'a> { App::new("DDlog Benchmark CLI") .arg( Arg::with_name("input") .short("i") .help("Specifies a CSV input file for a relation. We expect CSVs to have headers.") .takes_value(true) .number_of_values(2) .value_names(&["relation", "csv"]) .multiple(true) .required(true), ) .arg( Arg::with_name("updates") .short("u") .help("Specifies a CSV file for updates to the computation. 
We expect CSVs to have headers.\ The first column of the CSV should be either -1 or 1, indicating whether to add or remove a tuple.\ The second column must be the name of the relation, followed by the relation attributes.") .takes_value(true) .value_names(&["csv"]) .multiple(true) .required(false), ) .get_matches() } fn main() -> Result<(), String> { let matches = get_cli_arg_matches(); println!("Instantiating DDlog program..."); let start = std::time::Instant::now(); let (hddlog, init_state) = HDDlog::run(1, false)?; println!( "Instantiating program took {} µs", start.elapsed().as_micros() ); dump_delta(&init_state); let initial_start = std::time::Instant::now(); println!("Adding inputs to the dataflow with multiple transactions"); let inputs: Vec<_> = matches.values_of("input").unwrap().collect(); for i in (0..inputs.len()).step_by(2) { let (input_relation, csv_file) = (relation_str_to_enum(inputs[i]), inputs[i + 1]); let file = File::open(csv_file).expect(&*format!("Could not open file {}", csv_file)); let mut rdr = ReaderBuilder::new().has_headers(true).from_reader(file); let max_batch_size = 1000; let mut batch = Vec::with_capacity(max_batch_size); start_transaction(&hddlog); for tuple in rdr.records().flatten() { batch.push(Update::Insert { relid: input_relation as RelId, v: parse_tuple_for_relation(tuple, 0, input_relation), }); if batch.len() >= max_batch_size { hddlog.apply_updates(&mut batch.drain(..).into_iter())?; } } hddlog.apply_updates(&mut batch.into_iter())?; commit_transaction(&hddlog); } println!( "Initial computation took {} µs to complete", initial_start.elapsed().as_micros() ); let updates: Vec<_> = matches .values_of("updates") .map_or_else(Vec::new, |values| values.collect()); for file_name in updates { let start = std::time::Instant::now(); println!("Adding updates from file {} to the dataflow", file_name); let file = File::open(file_name).expect(&*format!("Could not open file {}", file_name)); let mut rdr = 
ReaderBuilder::new().has_headers(true).from_reader(file); start_transaction(&hddlog); let mut updates = vec![]; for tuple in rdr.records().flatten() { let add_or_remove = tuple.get(0).expect("Empty rows are not valid"); let relation = relation_str_to_enum(tuple.get(1).expect("Each row must contain a relation")); if add_or_remove == "1" { updates.push(Update::Insert { relid: relation as RelId, v: parse_tuple_for_relation(tuple, 2, relation), }); } else if add_or_remove == "-1" { updates.push(Update::DeleteValue { relid: relation as RelId, v: parse_tuple_for_relation(tuple, 2, relation), }); } else { panic!( "First column must either be 1 or -1 but was: {:?}", add_or_remove ) } } hddlog.apply_updates(&mut updates.into_iter())?; println!( "Finished adding updates after {} µs.", start.elapsed().as_micros() ); commit_transaction(&hddlog); } hddlog.stop().unwrap(); Ok(()) } fn start_transaction(hddlog: &HDDlog) { println!("Starting transaction..."); hddlog.transaction_start().unwrap(); } fn commit_transaction(hddlog: &HDDlog) { let start = std::time::Instant::now(); let delta = hddlog.transaction_commit_dump_changes().unwrap(); println!( "Committing transaction took {} µs", start.elapsed().as_micros() ); dump_delta(&delta); } fn dump_delta(delta: &DeltaMap<DDValue>) { for (rel, changes) in delta.iter() { let mut file = std::fs::OpenOptions::new() .append(true) .create(true) .open(format!("{}.out", relid2name(*rel).unwrap())) .expect(&*format!( "Could not open file for writing exported relation: {:?}.out", relid2name(*rel) )); for (val, weight) in changes.iter() { let _ = writeln!(file, "{:+}, ({})", weight, val); } } }
use clap::{App, Arg, ArgMatches}; use csv::{ReaderBuilder, StringRecord}; use ordered_float::OrderedFloat; use std::fs::File; use std::io::prelude::*; use query_ddlog::api::HDDlog; use query_ddlog::relid2name; use query_ddlog::typedefs::*; use query_ddlog::Relations; use differential_datalog::ddval::{DDValConvert, DDValue}; use differential_datalog::program::RelId; use differential_datalog::program::Update; use differential_datalog::DeltaMap; use differential_datalog::{DDlog, DDlogDynamic}; fn relation_str_to_enum(relation: &str) -> Relations { match relation { "edge" | "Edge" => Relations::Edge, _ => panic!("Unknown input relation: {}", relation), } } fn parse_tuple_for_relation(tuple: StringRecord, offset: usize, relation: Relations) -> DDValue { match relation { Relations::Edge => Edge { parent: tuple[offset + 0].to_string(), child: tuple[offset + 1].to_string(), weight: OrderedFloat::from(tuple[offset + 2].parse::<f32>().unwrap()), } .into_ddvalue(), _ => panic!("Unsupported input relation: {:?}", relation), } }
fn main() -> Result<(), String> { let matches = get_cli_arg_matches(); println!("Instantiating DDlog program..."); let start = std::time::Instant::now(); let (hddlog, init_state) = HDDlog::run(1, false)?; println!( "Instantiating program took {} µs", start.elapsed().as_micros() ); dump_delta(&init_state); let initial_start = std::time::Instant::now(); println!("Adding inputs to the dataflow with multiple transactions"); let inputs: Vec<_> = matches.values_of("input").unwrap().collect(); for i in (0..inputs.len()).step_by(2) { let (input_relation, csv_file) = (relation_str_to_enum(inputs[i]), inputs[i + 1]); let file = File::open(csv_file).expect(&*format!("Could not open file {}", csv_file)); let mut rdr = ReaderBuilder::new().has_headers(true).from_reader(file); let max_batch_size = 1000; let mut batch = Vec::with_capacity(max_batch_size); start_transaction(&hddlog); for tuple in rdr.records().flatten() { batch.push(Update::Insert { relid: input_relation as RelId, v: parse_tuple_for_relation(tuple, 0, input_relation), }); if batch.len() >= max_batch_size { hddlog.apply_updates(&mut batch.drain(..).into_iter())?; } } hddlog.apply_updates(&mut batch.into_iter())?; commit_transaction(&hddlog); } println!( "Initial computation took {} µs to complete", initial_start.elapsed().as_micros() ); let updates: Vec<_> = matches .values_of("updates") .map_or_else(Vec::new, |values| values.collect()); for file_name in updates { let start = std::time::Instant::now(); println!("Adding updates from file {} to the dataflow", file_name); let file = File::open(file_name).expect(&*format!("Could not open file {}", file_name)); let mut rdr = ReaderBuilder::new().has_headers(true).from_reader(file); start_transaction(&hddlog); let mut updates = vec![]; for tuple in rdr.records().flatten() { let add_or_remove = tuple.get(0).expect("Empty rows are not valid"); let relation = relation_str_to_enum(tuple.get(1).expect("Each row must contain a relation")); if add_or_remove == "1" { 
updates.push(Update::Insert { relid: relation as RelId, v: parse_tuple_for_relation(tuple, 2, relation), }); } else if add_or_remove == "-1" { updates.push(Update::DeleteValue { relid: relation as RelId, v: parse_tuple_for_relation(tuple, 2, relation), }); } else { panic!( "First column must either be 1 or -1 but was: {:?}", add_or_remove ) } } hddlog.apply_updates(&mut updates.into_iter())?; println!( "Finished adding updates after {} µs.", start.elapsed().as_micros() ); commit_transaction(&hddlog); } hddlog.stop().unwrap(); Ok(()) } fn start_transaction(hddlog: &HDDlog) { println!("Starting transaction..."); hddlog.transaction_start().unwrap(); } fn commit_transaction(hddlog: &HDDlog) { let start = std::time::Instant::now(); let delta = hddlog.transaction_commit_dump_changes().unwrap(); println!( "Committing transaction took {} µs", start.elapsed().as_micros() ); dump_delta(&delta); } fn dump_delta(delta: &DeltaMap<DDValue>) { for (rel, changes) in delta.iter() { let mut file = std::fs::OpenOptions::new() .append(true) .create(true) .open(format!("{}.out", relid2name(*rel).unwrap())) .expect(&*format!( "Could not open file for writing exported relation: {:?}.out", relid2name(*rel) )); for (val, weight) in changes.iter() { let _ = writeln!(file, "{:+}, ({})", weight, val); } } }
fn get_cli_arg_matches<'a>() -> ArgMatches<'a> { App::new("DDlog Benchmark CLI") .arg( Arg::with_name("input") .short("i") .help("Specifies a CSV input file for a relation. We expect CSVs to have headers.") .takes_value(true) .number_of_values(2) .value_names(&["relation", "csv"]) .multiple(true) .required(true), ) .arg( Arg::with_name("updates") .short("u") .help("Specifies a CSV file for updates to the computation. We expect CSVs to have headers.\ The first column of the CSV should be either -1 or 1, indicating whether to add or remove a tuple.\ The second column must be the name of the relation, followed by the relation attributes.") .takes_value(true) .value_names(&["csv"]) .multiple(true) .required(false), ) .get_matches() }
function_block-full_function
[ { "content": "def to_entity_str(entity_id: int):\n", "file_path": "generator.py", "rank": 7, "score": 12004.358802751458 }, { "content": "# Differential Datalog Benchmarks\n\n\n\nThis repository contains some simple benchmarks based on artificially generated datasets for\n\nthe [Differential Datalog engine](https://github.com/vmware/differential-datalog).\n\n\n\n## Prerequisites\n\n\n\n* Rust >= 1.50\n\n* Python >= 3.8\n\n\n\n## How-To\n\n\n\n1) Follow Differential\n\n Datalog's [installation instructions](https://github.com/vmware/differential-datalog#installing-ddlog-from-a-binary-release)\n\n\n\n2) Have a look at [query.dl](query.dl) and choose which query you would like to run.\n\n\n\n3) Use the provided [Makefile](Makefile) to compile the query, generate the datasets, and run the query against on of\n\n the datasets. Each dataset also has a trace of updates associated with it (see [updates/](updates)), which will be\n\n fed stepwise to the query computations, and thereby showcase the incremental computation aspects.\n\n\n\n4) You can inspect the output of the query in the respective `.out` files that will be generated. The benchmark results\n\n are visible in the console.\n\n\n\nAs a shorthand, you can use `make all` to execute the default query (computing the roots) over all datasets after you\n\nhave installed Differential Datalog.\n", "file_path": "README.md", "rank": 11, "score": 0.7435324046103635 } ]
Rust
src/main.rs
caperaven/svg-to-js
07a2f5962d878d183c4aac2fd5e75f91e78982cc
mod path_to_path; extern crate lyon; extern crate glob; use std::env; use glob::glob; use std::fs; use lyon::tessellation::{FillOptions, FillTessellator, StrokeTessellator, StrokeOptions}; use lyon::path::math::{Point}; use lyon::path::Path; use lyon::tessellation::geometry_builder::{VertexBuffers, simple_builder}; type PolyBuffer = VertexBuffers<Point, u16>; fn ensure_result_folder(folder: &str) { fs::create_dir_all(folder).ok(); } fn get_svg_in_folder() -> (glob::Paths, String) { let executable = env::current_exe().unwrap(); let path = match executable.parent() { Some(name) => name, _ => panic!() }; let folder = path.display(); let folder_str = format!("{}", folder); let target_folder = format!("{}/result", folder); ensure_result_folder(&target_folder); let glob_query = format!("{}/*.svg", folder_str); let glob_query_str = glob_query.as_str(); return (glob(glob_query_str).expect("Failed to read glob pattern"), target_folder); } fn process_file(filename: &str) -> (PolyBuffer, PolyBuffer) { let opt = usvg::Options::default(); let file_data = std::fs::read(filename).unwrap(); let rtree = usvg::Tree::from_data(&file_data, &opt.to_ref()).unwrap(); let mut fill_builder = lyon::path::Path::builder(); let mut stroke_builder = lyon::path::Path::builder(); for node in rtree.root().descendants() { if let usvg::NodeKind::Path(ref p) = *node.borrow() { if let Some(_) = p.fill { path_to_path::to_path(&p.data, &mut fill_builder); } if let Some(_) = p.stroke { path_to_path::to_path(&p.data, &mut stroke_builder); } } } let fill_buffer = create_fill(&fill_builder.build()); let stroke_buffer = create_stroke(&stroke_builder.build(), 5.0); return (fill_buffer, stroke_buffer); } pub fn create_fill(path: &Path) -> PolyBuffer { let mut buffer: PolyBuffer = VertexBuffers::new(); { let mut vertex_builder = simple_builder(&mut buffer); let mut tessellator = FillTessellator::new(); tessellator .tessellate_path(path, &FillOptions::default(), &mut vertex_builder) .ok(); } return buffer; } pub 
fn create_stroke(path: &Path, line_width: f32) -> PolyBuffer { let mut buffer: PolyBuffer = VertexBuffers::new(); { let mut vertex_builder = simple_builder(&mut buffer); let mut tessellator = StrokeTessellator::new(); let options = StrokeOptions::default().with_tolerance(0.01).with_line_width(line_width); tessellator.tessellate_path ( path, &options, &mut vertex_builder ).ok(); } return buffer; } pub fn save_buffer(path: &str, buffer: PolyBuffer, target_folder: &String) { let mut vertices = Vec::new(); let mut indices = Vec::new(); for point in buffer.vertices.iter() { vertices.push(point.x.to_string()); vertices.push(point.y.to_string()); vertices.push(String::from("0.0")); } for i in buffer.indices.iter() { indices.push(i.to_string()); } let v_code = vertices.join(","); let i_code = indices.join(","); let file_name = path.split("/").last().unwrap().replace(".svg", "").replace(" ", ""); let target_file = format!("{}/{}.js", target_folder, file_name); let className = format!("{}Data", file_name); let code = vec![ String::from("export const "), className, String::from(" = {vertices: ["), v_code, String::from("], indices: ["), i_code, String::from("]};") ].join(""); std::fs::write(target_file, code).ok(); println!("saving: {}", file_name); } fn main() { let (files, target_folder) = get_svg_in_folder(); for entry in files { match entry { Ok(path) => { let file_name = format!("{}", path.display()); let (fill_buffer, _stroke_buffer) = process_file(&file_name); save_buffer(&file_name, fill_buffer, &target_folder); }, Err(e) => println!("{:?}", e) } } }
mod path_to_path; extern crate lyon; extern crate glob; use std::env; use glob::glob; use std::fs; use lyon::tessellation::{FillOptions, FillTessellator, StrokeTessellator, StrokeOptions}; use lyon::path::math::{Point}; use lyon::path::Path; use lyon::tessellation::geometry_builder::{VertexBuffers, simple_builder}; type PolyBuffer = VertexBuffers<Point, u16>; fn ensure_result_folder(folder: &str) { fs::create_dir_all(folder).ok(); } fn get_svg_in_folder() -> (glob::Paths, String) { let executable = env::current_exe().unwrap(); let path = match executable.parent() { Some(name) => name, _ => panic!() }; let folder = path.display(); let folder_str = format!("{}", folder); let target_folder = format!("{}/result", folder); ensure_result_folder(&target_folder); let glob_query = format!("{}/*.svg", folder_str); let glob_query_str = glob_query.as_str(); return (glob(glob_query_str).expect("Failed to read glob pattern"), target_folder); } fn process_file(filename: &str) -> (PolyBuffer, PolyBuffer) { let opt = usvg::Options::default(); let file_data = std::fs::read(filename).unwrap(); let rtree = usvg::Tree::from_data(&file_data, &opt.to_ref()).unwrap(); let mut fill_builder = lyon::path::Path::builder(); let mut stroke_builder = lyon::path::Path::builder(); for node in rtree.root().descendants() { if let usvg::NodeKind::Path(ref p) = *node.borrow() { if let Some(_) = p.fill { path_to_path::to_path(&p.data, &mut fill_builder); } if let Some(_) = p.stroke { path_to_path::to_path(&p.data, &mut stroke_builder); } } } let fill_buffer = create_fill(&fill_builder.build()); let stroke_buffer = create_stroke(&stroke_builder.build(), 5.0); return (fill_buffer, stroke_buffer); } pub fn create_fill(path: &Path) -> PolyBuffer { let mut buffer: PolyBuffer = VertexBuffers::new(); { let mut vertex_builder = simple_builder(&mut buffer); let mut tessellator = FillTessellator::new(); tessellator .tessellate_path(path, &FillOptions::default(), &mut vertex_builder) .ok(); } return buffer; } pub 
fn create_stroke(path: &Path, line_width: f32) -> PolyBuffer { let mut buffer: PolyBuffer = VertexBuffers::new(); { let mut vertex_builder = simple_builder(&mut buffer); let mut tessellator = StrokeTessellator::new(); let options = StrokeOptions::default().with_tolerance(0.01).with_line_width(line_width); tessellator.tessellate_path ( path, &options, &mut vertex_builder ).ok(); } return buffer; } pub fn save_buffer(path: &str, buffer: PolyBuffer, target_folder: &String) { let mut vertices = Vec::new(); let mut indices = Vec::new(); for point in buffer.vertices.iter() { vertices.push(point.x.to_string()); vertices.push(point.y.to_string()); vertices.push(String::from("0.0")); } for i in buffer.indices.iter() { indices.push(i.to_string()); } let v_code = vertices.join(","); let i_code = indices.join(","); let file_name = path.split("/").last().unwrap().replace(".svg", "").replace(" ", ""); let target_file = format!("{}/{}.js", target_folder, file_name); let className = format!("{}Data", file_name); let code = vec![ String::from("export const "), className, String::from(" = {vertices: ["), v_code, String::from("], indices: ["), i_code, String::from("]};") ].join(""); std::fs::write(target_file, code).ok(); println!("saving: {}", file_name); }
fn main() { let (files, target_folder) = get_svg_in_folder(); for entry in files { match entry { Ok(path) => { let file_name = format!("{}", path.display()); let (fill_buffer, _stroke_buffer) = process_file(&file_name); save_buffer(&file_name, fill_buffer, &target_folder); }, Err(e) => println!("{:?}", e) } } }
function_block-full_function
[]
Rust
utils/test-env/src/utils.rs
hboshnak/casper-nft-cep47
09b40b0caf4cfc6f73d1e5f7d5b9c868228f7621
use std::path::PathBuf; use casper_engine_test_support::{ DeployItemBuilder, ExecuteRequestBuilder, InMemoryWasmTestBuilder, ARG_AMOUNT, DEFAULT_ACCOUNT_ADDR, DEFAULT_PAYMENT, }; use casper_execution_engine::core::engine_state::ExecuteRequest; use casper_types::{ account::AccountHash, bytesrepr::FromBytes, runtime_args, system::mint, CLTyped, ContractHash, Key, RuntimeArgs, StoredValue, U512, }; pub fn query<T: FromBytes + CLTyped>( builder: &InMemoryWasmTestBuilder, base: Key, path: &[String], ) -> T { builder .query(None, base, path) .expect("should be stored value.") .as_cl_value() .expect("should be cl value.") .clone() .into_t() .expect("Wrong type in query result.") } pub fn fund_account(account: &AccountHash) -> ExecuteRequest { let deploy_item = DeployItemBuilder::new() .with_address(*DEFAULT_ACCOUNT_ADDR) .with_authorization_keys(&[*DEFAULT_ACCOUNT_ADDR]) .with_empty_payment_bytes(runtime_args! {ARG_AMOUNT => *DEFAULT_PAYMENT}) .with_transfer_args(runtime_args! { mint::ARG_AMOUNT => U512::from(30_000_000_000_000_u64), mint::ARG_TARGET => *account, mint::ARG_ID => <Option::<u64>>::None }) .with_deploy_hash([1; 32]) .build(); ExecuteRequestBuilder::from_deploy_item(deploy_item).build() } pub enum DeploySource { Code(PathBuf), ByHash { hash: ContractHash, method: String }, } pub fn deploy( builder: &mut InMemoryWasmTestBuilder, deployer: &AccountHash, source: &DeploySource, args: RuntimeArgs, success: bool, block_time: Option<u64>, ) { let mut deploy_builder = DeployItemBuilder::new() .with_empty_payment_bytes(runtime_args! 
{ARG_AMOUNT => *DEFAULT_PAYMENT}) .with_address(*deployer) .with_authorization_keys(&[*deployer]); deploy_builder = match source { DeploySource::Code(path) => deploy_builder.with_session_code(path, args), DeploySource::ByHash { hash, method } => { deploy_builder.with_stored_session_hash(*hash, method, args) } }; let mut execute_request_builder = ExecuteRequestBuilder::from_deploy_item(deploy_builder.build()); if let Some(ustamp) = block_time { execute_request_builder = execute_request_builder.with_block_time(ustamp) } let exec = builder.exec(execute_request_builder.build()); if success { exec.expect_success() } else { exec.expect_failure() } .commit(); } pub fn query_dictionary_item( builder: &InMemoryWasmTestBuilder, key: Key, dictionary_name: Option<String>, dictionary_item_key: String, ) -> Result<StoredValue, String> { let empty_path = vec![]; let dictionary_key_bytes = dictionary_item_key.as_bytes(); let address = match key { Key::Account(_) | Key::Hash(_) => { if let Some(name) = dictionary_name { let stored_value = builder.query(None, key, &[])?; let named_keys = match &stored_value { StoredValue::Account(account) => account.named_keys(), StoredValue::Contract(contract) => contract.named_keys(), _ => { return Err( "Provided base key is nether an account or a contract".to_string() ) } }; let dictionary_uref = named_keys .get(&name) .and_then(Key::as_uref) .ok_or_else(|| "No dictionary uref was found in named keys".to_string())?; Key::dictionary(*dictionary_uref, dictionary_key_bytes) } else { return Err("No dictionary name was provided".to_string()); } } Key::URef(uref) => Key::dictionary(uref, dictionary_key_bytes), Key::Dictionary(address) => Key::Dictionary(address), _ => return Err("Unsupported key type for a query to a dictionary item".to_string()), }; builder.query(None, address, &empty_path) }
use std::path::PathBuf; use casper_engine_test_support::{ DeployItemBuilder, ExecuteRequestBuilder, InMemoryWasmTestBuilder, ARG_AMOUNT, DEFAULT_ACCOUNT_ADDR, DEFAULT_PAYMENT, }; use casper_execution_engine::core::engine_state::ExecuteRequest; use casper_types::{ account::AccountHash, bytesrepr::FromBytes, runtime_args, system::mint, CLTyped, ContractHash, Key, RuntimeArgs, StoredValue, U512, }; pub fn query<T: FromBytes + CLTyped>( builder: &InMemoryWasmTestBuilder, base: Key, path: &[String], ) -> T { builder .query(None, base, path) .expect("should be stored value.") .as_cl_value() .expect("should be cl value.") .clone() .into_t() .expect("Wrong type in query result.") } pub fn fund_account(account: &AccountHash) -> ExecuteRequest { let deploy_item = DeployItemBuilder::new() .with_address(*DEFAULT_ACCOUNT_ADDR) .with_authorization_keys(&[*DEFAULT_ACCOUNT_ADDR]) .with_empty_payment_bytes(runtime_args! {ARG_AMOUNT => *DEFAULT_PAYMENT}) .with_transfer_args(runtime_args! { mint::ARG_AMOUNT => U512::from(30_000_000_000_000_u64), mint::ARG_TARGET => *account, mint::ARG_ID => <Option::<u64>>::None }) .with_deploy_hash([1; 32]) .build(); ExecuteRequestBuilder::from_deploy_item(deploy_item).build() } pub enum DeploySource { Code(PathBuf), ByHash { hash: ContractHash, method: String }, } pub fn deploy( builder: &mut InMemoryWasmTestBuilder, deployer: &AccountHash, source: &DeploySource, args: RuntimeArgs, success: bool, block_time: Option<u64>, ) { let mut deploy_builder = DeployItemBuilder::new() .with_empty_payment_bytes(runtime_args! 
{ARG_AMOUNT => *DEFAULT_PAYMENT}) .with_address(*deployer) .with_authorization_keys(&[*deployer]); deploy_builder = match source { DeploySource::Code(path) => deploy_builder.with_session_code(path, args), DeploySource::ByHash { hash, method } => { deploy_builder.with_stored_session_hash(*hash, method, args) } }; let mut execute_request_builder = ExecuteRequestBuilder::from_deploy_item(deploy_builder.build()); if let Some(ustamp) = block_time { execute_request_builder = execute_request_builder.with_block_time(ustamp) } let exec = builder.exec(execute_request_builder.build()); if success { exec.expect_success() } else { exec.expect_failure() } .commit(); } pub fn query_dictionary_item( builder: &InMemoryWasmTestBuilder, key: Key, dictionary_name: Option<String>, dictionary_item_key: String, ) -> Result<StoredValue, String> { let empty_path = vec![]; let dictionary_key_bytes = dictionary_item_key.as_bytes(); let address = match key { Key::Account(_) | Key::Hash(_) => { if let Some(name) = dictionary_name { let stored_value = builder.query(None, key, &[])?;
let named_keys = match &stored_value { StoredValue::Account(account) => account.named_keys(), StoredValue::Contract(contract) => contract.named_keys(), _ => { return Err( "Provided base key is nether an account or a contract".to_string() ) } }; let dictionary_uref = named_keys .get(&name) .and_then(Key::as_uref) .ok_or_else(|| "No dictionary uref was found in named keys".to_string())?; Key::dictionary(*dictionary_uref, dictionary_key_bytes) } else { return Err("No dictionary name was provided".to_string()); } } Key::URef(uref) => Key::dictionary(uref, dictionary_key_bytes), Key::Dictionary(address) => Key::Dictionary(address), _ => return Err("Unsupported key type for a query to a dictionary item".to_string()), }; builder.query(None, address, &empty_path) }
function_block-function_prefix_line
[ { "content": "pub fn key_and_value_to_str<T: CLTyped + ToBytes>(key: &Key, value: &T) -> String {\n\n let mut bytes_a = key.to_bytes().unwrap_or_revert();\n\n let mut bytes_b = value.to_bytes().unwrap_or_revert();\n\n\n\n bytes_a.append(&mut bytes_b);\n\n\n\n let bytes = runtime::blake2b(bytes_a);\n\n hex::encode(bytes)\n\n}\n\n\n", "file_path": "utils/contract-utils/src/data.rs", "rank": 0, "score": 189845.53484329194 }, { "content": "pub fn key_and_value_to_str<T: CLTyped + ToBytes>(key: &Key, value: &T) -> String {\n\n let mut hasher = VarBlake2b::new(32).unwrap();\n\n hasher.update(key.to_bytes().unwrap());\n\n hasher.update(value.to_bytes().unwrap());\n\n let mut ret = [0u8; 32];\n\n hasher.finalize_variable(|hash| ret.clone_from_slice(hash));\n\n hex::encode(ret)\n\n}\n", "file_path": "cep47-tests/src/cep47_instance.rs", "rank": 1, "score": 189845.53484329194 }, { "content": "pub fn key_to_str(key: &Key) -> String {\n\n match key {\n\n Key::Account(account) => account.to_string(),\n\n Key::Hash(package) => hex::encode(package),\n\n _ => runtime::revert(ApiError::UnexpectedKeyVariant),\n\n }\n\n}\n\n\n", "file_path": "utils/contract-utils/src/data.rs", "rank": 3, "score": 157556.8656053706 }, { "content": "pub fn key_to_str(key: &Key) -> String {\n\n match key {\n\n Key::Account(account) => account.to_string(),\n\n Key::Hash(package) => hex::encode(package),\n\n _ => panic!(\"Unexpected key type\"),\n\n }\n\n}\n\n\n", "file_path": "cep47-tests/src/cep47_instance.rs", "rank": 4, "score": 157556.86560537058 }, { "content": "pub fn keys_to_str(key_a: &Key, key_b: &Key) -> String {\n\n let mut bytes_a = key_a.to_bytes().unwrap_or_revert();\n\n let mut bytes_b = key_b.to_bytes().unwrap_or_revert();\n\n\n\n bytes_a.append(&mut bytes_b);\n\n\n\n let bytes = runtime::blake2b(bytes_a);\n\n hex::encode(bytes)\n\n}\n\n\n", "file_path": "utils/contract-utils/src/data.rs", "rank": 5, "score": 150692.9584510703 }, { "content": "fn deploy() -> (TestEnv, CEP47Instance, 
AccountHash) {\n\n let env = TestEnv::new();\n\n let owner = env.next_user();\n\n let token = CEP47Instance::new(&env, NAME, owner, NAME, SYMBOL, meta::contract_meta());\n\n (env, token, owner)\n\n}\n\n\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 6, "score": 129050.960026642 }, { "content": "pub fn set_key<T: ToBytes + CLTyped>(name: &str, value: T) {\n\n match runtime::get_key(name) {\n\n Some(key) => {\n\n let key_ref = key.try_into().unwrap_or_revert();\n\n storage::write(key_ref, value);\n\n }\n\n None => {\n\n let key = storage::new_uref(value).into();\n\n runtime::put_key(name, key);\n\n }\n\n }\n\n}\n", "file_path": "utils/contract-utils/src/data.rs", "rank": 8, "score": 127661.54142669812 }, { "content": "pub fn name() -> String {\n\n get_key(NAME).unwrap_or_revert()\n\n}\n\n\n", "file_path": "cep47/src/data.rs", "rank": 10, "score": 118824.3335922788 }, { "content": "pub fn symbol() -> String {\n\n get_key(SYMBOL).unwrap_or_revert()\n\n}\n\n\n", "file_path": "cep47/src/data.rs", "rank": 11, "score": 118824.3335922788 }, { "content": "pub fn set_name(name: String) {\n\n set_key(NAME, name);\n\n}\n\n\n", "file_path": "cep47/src/data.rs", "rank": 13, "score": 110445.22977411136 }, { "content": "pub fn set_symbol(symbol: String) {\n\n set_key(SYMBOL, symbol);\n\n}\n\n\n", "file_path": "cep47/src/data.rs", "rank": 14, "score": 110445.22977411136 }, { "content": "pub fn get_key<T: FromBytes + CLTyped>(name: &str) -> Option<T> {\n\n match runtime::get_key(name) {\n\n None => None,\n\n Some(value) => {\n\n let key = value.try_into().unwrap_or_revert();\n\n let value = storage::read(key).unwrap_or_revert().unwrap_or_revert();\n\n Some(value)\n\n }\n\n }\n\n}\n\n\n", "file_path": "utils/contract-utils/src/data.rs", "rank": 15, "score": 108954.8951755093 }, { "content": "pub fn contract_package_hash() -> ContractPackageHash {\n\n let call_stacks = get_call_stack();\n\n let last_entry = call_stacks.last().unwrap_or_revert();\n\n let package_hash: 
Option<ContractPackageHash> = match last_entry {\n\n CallStackElement::StoredContract {\n\n contract_package_hash,\n\n contract_hash: _,\n\n } => Some(*contract_package_hash),\n\n _ => None,\n\n };\n\n package_hash.unwrap_or_revert()\n\n}\n\n\n", "file_path": "cep47/src/data.rs", "rank": 16, "score": 98830.49237488872 }, { "content": "fn element_to_key(element: &CallStackElement) -> Key {\n\n match element {\n\n CallStackElement::Session { account_hash } => (*account_hash).into(),\n\n CallStackElement::StoredSession {\n\n account_hash,\n\n contract_package_hash: _,\n\n contract_hash: _,\n\n } => (*account_hash).into(),\n\n CallStackElement::StoredContract {\n\n contract_package_hash,\n\n contract_hash: _,\n\n } => (*contract_package_hash).into(),\n\n }\n\n}\n", "file_path": "utils/contract-utils/src/contract_context.rs", "rank": 17, "score": 74900.02661733553 }, { "content": "pub fn meta() -> Meta {\n\n get_key(META).unwrap_or_revert()\n\n}\n\n\n", "file_path": "cep47/src/data.rs", "rank": 18, "score": 74703.0866267062 }, { "content": "pub fn total_supply() -> U256 {\n\n get_key(TOTAL_SUPPLY).unwrap_or_default()\n\n}\n\n\n", "file_path": "cep47/src/data.rs", "rank": 19, "score": 72995.38552293048 }, { "content": "pub fn set_meta(meta: Meta) {\n\n set_key(META, meta);\n\n}\n\n\n", "file_path": "cep47/src/data.rs", "rank": 20, "score": 69444.69208991379 }, { "content": "pub fn emit(event: &CEP47Event) {\n\n let mut events = Vec::new();\n\n let package = contract_package_hash();\n\n match event {\n\n CEP47Event::Mint {\n\n recipient,\n\n token_ids,\n\n } => {\n\n for token_id in token_ids {\n\n let mut param = BTreeMap::new();\n\n param.insert(CONTRACT_PACKAGE_HASH, package.to_string());\n\n param.insert(\"event_type\", \"cep47_mint_one\".to_string());\n\n param.insert(\"recipient\", recipient.to_string());\n\n param.insert(\"token_id\", token_id.to_string());\n\n events.push(param);\n\n }\n\n }\n\n CEP47Event::Burn { owner, token_ids } => {\n\n for token_id in 
token_ids {\n\n let mut param = BTreeMap::new();\n", "file_path": "cep47/src/data.rs", "rank": 21, "score": 69444.69208991379 }, { "content": "pub fn set_total_supply(total_supply: U256) {\n\n set_key(TOTAL_SUPPLY, total_supply);\n\n}\n\n\n", "file_path": "cep47/src/data.rs", "rank": 22, "score": 66570.28430268994 }, { "content": "#[test]\n\nfn test_deploy() {\n\n let (_, token, _) = deploy();\n\n assert_eq!(token.name(), NAME);\n\n assert_eq!(token.symbol(), SYMBOL);\n\n assert_eq!(token.meta(), meta::contract_meta());\n\n assert_eq!(token.total_supply(), U256::zero());\n\n}\n\n\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 23, "score": 60472.205359645064 }, { "content": "pub trait ContractStorage {\n\n fn call_stack(&self) -> &[CallStackElement];\n\n}\n\n\n\n#[derive(Default)]\n\npub struct OnChainContractStorage {\n\n call_stack: OnceCell<Vec<CallStackElement>>,\n\n}\n\n\n\nimpl ContractStorage for OnChainContractStorage {\n\n fn call_stack(&self) -> &[CallStackElement] {\n\n let call_stack = self.call_stack.get_or_init(runtime::get_call_stack);\n\n call_stack.as_slice()\n\n }\n\n}\n", "file_path": "utils/contract-utils/src/contract_storage.rs", "rank": 24, "score": 39069.56722745998 }, { "content": "#[test]\n\nfn test_approve() {\n\n let (env, token, owner) = deploy();\n\n let user = env.next_user();\n\n let token_metas = vec![\n\n meta::red_dragon(),\n\n meta::blue_dragon(),\n\n meta::black_dragon(),\n\n meta::gold_dragon(),\n\n ];\n\n let token_ids = vec![\n\n TokenId::zero(),\n\n TokenId::one(),\n\n TokenId::from(1),\n\n TokenId::from(2),\n\n ];\n\n\n\n token.mint_many(owner, user, token_ids.clone(), token_metas);\n\n\n\n token.approve(user, owner, vec![token_ids[0], token_ids[3]]);\n\n assert_eq!(\n\n token.get_approved(user, token_ids[0]).unwrap(),\n\n Key::Account(owner)\n\n );\n\n assert_eq!(\n\n token.get_approved(user, token_ids[3]).unwrap(),\n\n Key::Account(owner)\n\n );\n\n}\n\n\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 
25, "score": 35781.03958643999 }, { "content": "#[test]\n\nfn test_token_meta() {\n\n let (env, token, owner) = deploy();\n\n let user = env.next_user();\n\n let token_id = TokenId::zero();\n\n let token_meta = meta::red_dragon();\n\n\n\n token.mint_one(owner, user, token_id, token_meta.clone());\n\n\n\n let user_token_meta = token.token_meta(token_id);\n\n assert_eq!(user_token_meta.unwrap(), token_meta);\n\n\n\n let first_user_token = token.get_token_by_index(Key::Account(user), U256::zero());\n\n assert_eq!(first_user_token, Some(token_id));\n\n}\n\n\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 26, "score": 34998.660107228214 }, { "content": "#[test]\n\nfn test_mint_copies() {\n\n let (env, token, owner) = deploy();\n\n let user = env.next_user();\n\n let token_meta = meta::red_dragon();\n\n let token_ids = vec![TokenId::zero(), TokenId::one(), TokenId::from(2)];\n\n token.mint_copies(owner, user, token_ids.clone(), token_meta, 3);\n\n let first_user_token = token.get_token_by_index(Key::Account(user), U256::from(0));\n\n let second_user_token = token.get_token_by_index(Key::Account(user), U256::from(1));\n\n let third_user_token = token.get_token_by_index(Key::Account(user), U256::from(2));\n\n let fourth_user_token = token.get_token_by_index(Key::Account(user), U256::from(3));\n\n assert_eq!(token.total_supply(), U256::from(3));\n\n assert_eq!(token.balance_of(Key::Account(user)), U256::from(3));\n\n assert_eq!(\n\n token.owner_of(first_user_token.unwrap()).unwrap(),\n\n Key::Account(user)\n\n );\n\n assert_eq!(\n\n token.owner_of(second_user_token.unwrap()).unwrap(),\n\n Key::Account(user)\n\n );\n\n assert_eq!(\n\n token.owner_of(third_user_token.unwrap()).unwrap(),\n\n Key::Account(user)\n\n );\n\n assert_eq!(first_user_token, Some(token_ids[0]));\n\n assert_eq!(second_user_token, Some(token_ids[1]));\n\n assert_eq!(third_user_token, Some(token_ids[2]));\n\n assert_eq!(fourth_user_token, None);\n\n}\n\n\n", "file_path": 
"cep47-tests/src/cep47_tests.rs", "rank": 27, "score": 34998.660107228214 }, { "content": "#[test]\n\nfn test_mint_one() {\n\n let (env, token, owner) = deploy();\n\n let user = env.next_user();\n\n let token_id = TokenId::zero();\n\n let token_meta = meta::red_dragon();\n\n\n\n token.mint_one(owner, user, token_id, token_meta);\n\n let first_user_token = token.get_token_by_index(Key::Account(user), U256::from(0));\n\n let second_user_token = token.get_token_by_index(Key::Account(user), U256::from(1));\n\n assert_eq!(first_user_token, Some(token_id));\n\n assert_eq!(token.total_supply(), U256::one());\n\n assert_eq!(token.balance_of(Key::Account(user)), U256::one());\n\n assert_eq!(second_user_token, None);\n\n assert_eq!(token.owner_of(token_id).unwrap(), Key::Account(user));\n\n}\n\n\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 28, "score": 34998.660107228214 }, { "content": "#[test]\n\nfn test_mint_many() {\n\n let (env, token, owner) = deploy();\n\n let user = env.next_user();\n\n let token_metas = vec![meta::red_dragon(), meta::gold_dragon()];\n\n let token_ids = vec![TokenId::zero(), TokenId::one()];\n\n token.mint_many(owner, user, token_ids.clone(), token_metas);\n\n let first_user_token = token.get_token_by_index(Key::Account(user), U256::from(0));\n\n let second_user_token = token.get_token_by_index(Key::Account(user), U256::from(1));\n\n let third_user_token = token.get_token_by_index(Key::Account(user), U256::from(2));\n\n assert_eq!(token.total_supply(), U256::from(2));\n\n assert_eq!(token.balance_of(Key::Account(user)), U256::from(2));\n\n assert_eq!(\n\n token.owner_of(first_user_token.unwrap()).unwrap(),\n\n Key::Account(user)\n\n );\n\n assert_eq!(\n\n token.owner_of(second_user_token.unwrap()).unwrap(),\n\n Key::Account(user)\n\n );\n\n assert_eq!(first_user_token, Some(token_ids[0]));\n\n assert_eq!(second_user_token, Some(token_ids[1]));\n\n assert_eq!(third_user_token, None);\n\n}\n\n\n", "file_path": 
"cep47-tests/src/cep47_tests.rs", "rank": 29, "score": 34998.660107228214 }, { "content": "#[test]\n\nfn test_burn_many() {\n\n let (env, token, owner) = deploy();\n\n let user = env.next_user();\n\n let token_metas = vec![\n\n meta::red_dragon(),\n\n meta::blue_dragon(),\n\n meta::black_dragon(),\n\n meta::gold_dragon(),\n\n ];\n\n let token_ids = vec![\n\n TokenId::zero(),\n\n TokenId::one(),\n\n TokenId::from(2),\n\n TokenId::from(3),\n\n ];\n\n\n\n token.mint_many(owner, user, token_ids.clone(), token_metas);\n\n\n\n token.burn_many(user, user, vec![token_ids[0], token_ids[3]]);\n\n assert_eq!(token.total_supply(), U256::from(2));\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 30, "score": 34998.660107228214 }, { "content": "#[test]\n\nfn test_burn_one() {\n\n let (env, token, owner) = deploy();\n\n let user = env.next_user();\n\n let token_metas = vec![meta::red_dragon(), meta::gold_dragon()];\n\n let token_ids = vec![TokenId::zero(), TokenId::one()];\n\n token.mint_many(owner, user, token_ids.clone(), token_metas);\n\n\n\n token.burn_one(user, user, token_ids[0]);\n\n assert_eq!(token.total_supply(), U256::from(1));\n\n assert_eq!(token.balance_of(Key::Account(user)), U256::from(1));\n\n\n\n let new_first_user_token = token.get_token_by_index(Key::Account(user), U256::from(0));\n\n let new_second_user_token = token.get_token_by_index(Key::Account(user), U256::from(1));\n\n assert_eq!(new_first_user_token, Some(token_ids[1]));\n\n assert_eq!(new_second_user_token, None);\n\n}\n\n\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 31, "score": 34998.660107228214 }, { "content": "#[test]\n\nfn test_transfer_token() {\n\n let (env, token, owner) = deploy();\n\n let ali = env.next_user();\n\n let bob = env.next_user();\n\n let token_metas = vec![meta::red_dragon(), meta::gold_dragon()];\n\n let token_ids = vec![TokenId::zero(), TokenId::one()];\n\n\n\n token.mint_many(owner, ali, token_ids.clone(), token_metas);\n\n\n\n 
assert_eq!(token.total_supply(), U256::from(2));\n\n assert_eq!(token.balance_of(Key::Account(ali)), U256::from(2));\n\n assert_eq!(token.owner_of(token_ids[0]).unwrap(), Key::Account(ali));\n\n assert_eq!(token.owner_of(token_ids[1]).unwrap(), Key::Account(ali));\n\n\n\n token.transfer(ali, bob, vec![token_ids[0]]);\n\n let new_first_ali_token = token.get_token_by_index(Key::Account(ali), U256::from(0));\n\n let new_second_ali_token = token.get_token_by_index(Key::Account(ali), U256::from(1));\n\n let new_first_bob_token = token.get_token_by_index(Key::Account(bob), U256::from(0));\n\n let new_second_bob_token = token.get_token_by_index(Key::Account(bob), U256::from(1));\n\n println!(\"{:?}\", new_first_ali_token);\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 32, "score": 34998.660107228214 }, { "content": "pub trait ContractContext<Storage: ContractStorage> {\n\n fn storage(&self) -> &Storage;\n\n\n\n fn get_caller(&self) -> Key {\n\n let call_stack = self.storage().call_stack();\n\n let caller = call_stack.get(call_stack.len() - 2);\n\n element_to_key(caller.unwrap_or_revert())\n\n }\n\n\n\n fn self_addr(&self) -> Key {\n\n let call_stack = self.storage().call_stack();\n\n element_to_key(call_stack.last().unwrap_or_revert())\n\n }\n\n}\n\n\n", "file_path": "utils/contract-utils/src/contract_context.rs", "rank": 33, "score": 34566.34057925176 }, { "content": "#[test]\n\nfn test_transfer_from_tokens_with_approve() {\n\n let (env, token, owner) = deploy();\n\n let ali = env.next_user();\n\n let bob = env.next_user();\n\n let token_metas = vec![meta::red_dragon(), meta::gold_dragon()];\n\n let token_ids = vec![TokenId::zero(), TokenId::one()];\n\n\n\n token.mint_many(owner, ali, token_ids.clone(), token_metas);\n\n assert_eq!(token.total_supply(), U256::from(2));\n\n assert_eq!(token.balance_of(Key::Account(ali)), U256::from(2));\n\n assert_eq!(token.owner_of(token_ids[0]).unwrap(), Key::Account(ali));\n\n 
assert_eq!(token.owner_of(token_ids[1]).unwrap(), Key::Account(ali));\n\n token.approve(ali, owner, vec![TokenId::one()]);\n\n token.transfer_from(owner, ali, bob, vec![TokenId::one()]);\n\n let new_first_ali_token = token.get_token_by_index(Key::Account(ali), U256::from(0));\n\n let new_second_ali_token = token.get_token_by_index(Key::Account(ali), U256::from(1));\n\n let new_first_bob_token = token.get_token_by_index(Key::Account(bob), U256::from(0));\n\n let new_second_bob_token = token.get_token_by_index(Key::Account(bob), U256::from(1));\n\n assert_eq!(token.total_supply(), U256::from(2));\n\n assert_eq!(token.balance_of(Key::Account(ali)), U256::from(1));\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 34, "score": 34268.2098132714 }, { "content": "#[test]\n\nfn test_token_metadata_update() {\n\n let (env, token, owner) = deploy();\n\n let user = env.next_user();\n\n let token_id = TokenId::zero();\n\n\n\n token.mint_one(owner, user, token_id, meta::red_dragon());\n\n\n\n token.update_token_meta(owner, token_id, meta::gold_dragon());\n\n assert_eq!(token.token_meta(token_id).unwrap(), meta::gold_dragon());\n\n}\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 35, "score": 34268.2098132714 }, { "content": "#[test]\n\nfn test_burn_many_from_allowance_with_approve() {\n\n let (env, token, owner) = deploy();\n\n let user = env.next_user();\n\n let token_metas = vec![\n\n meta::red_dragon(),\n\n meta::blue_dragon(),\n\n meta::black_dragon(),\n\n meta::gold_dragon(),\n\n ];\n\n let token_ids = vec![\n\n TokenId::zero(),\n\n TokenId::one(),\n\n TokenId::from(2),\n\n TokenId::from(3),\n\n ];\n\n\n\n token.mint_many(owner, user, token_ids.clone(), token_metas);\n\n\n\n token.approve(user, owner, vec![token_ids[0], token_ids[2]]);\n\n token.burn_many(owner, user, vec![token_ids[0], token_ids[2]]);\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 36, "score": 33584.68469629786 }, { "content": "#[test]\n\n#[should_panic]\n\nfn 
test_transfer_from_tokens_without_approve() {\n\n let (env, token, owner) = deploy();\n\n let ali = env.next_user();\n\n let bob = env.next_user();\n\n let token_metas = vec![meta::red_dragon(), meta::gold_dragon()];\n\n let token_ids = vec![TokenId::zero(), TokenId::one()];\n\n\n\n token.mint_many(owner, ali, token_ids.clone(), token_metas);\n\n\n\n assert_eq!(token.total_supply(), U256::from(2));\n\n assert_eq!(token.balance_of(Key::Account(ali)), U256::from(2));\n\n assert_eq!(token.owner_of(token_ids[0]).unwrap(), Key::Account(ali));\n\n assert_eq!(token.owner_of(token_ids[1]).unwrap(), Key::Account(ali));\n\n token.transfer_from(owner, ali, bob, vec![token_ids[0]]);\n\n}\n\n\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 37, "score": 33584.68469629786 }, { "content": "pub trait CEP47<Storage: ContractStorage>: ContractContext<Storage> {\n\n fn init(&mut self, name: String, symbol: String, meta: Meta) {\n\n data::set_name(name);\n\n data::set_symbol(symbol);\n\n data::set_meta(meta);\n\n data::set_total_supply(U256::zero());\n\n Owners::init();\n\n OwnedTokens::init();\n\n Metadata::init();\n\n Allowances::init();\n\n }\n\n\n\n fn name(&self) -> String {\n\n data::name()\n\n }\n\n\n\n fn symbol(&self) -> String {\n\n data::symbol()\n\n }\n\n\n", "file_path": "cep47/src/cep47.rs", "rank": 38, "score": 33319.64221489294 }, { "content": "#[test]\n\n#[should_panic]\n\nfn test_burn_many_from_allowance_without_approve() {\n\n let (env, token, owner) = deploy();\n\n let user = env.next_user();\n\n let token_metas = vec![\n\n meta::red_dragon(),\n\n meta::blue_dragon(),\n\n meta::black_dragon(),\n\n meta::gold_dragon(),\n\n ];\n\n let token_ids = vec![\n\n TokenId::zero(),\n\n TokenId::one(),\n\n TokenId::from(2),\n\n TokenId::from(3),\n\n ];\n\n\n\n token.mint_many(owner, user, token_ids.clone(), token_metas);\n\n\n\n token.burn_many(owner, user, vec![token_ids[0], token_ids[1]]);\n\n}\n\n\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 39, 
"score": 32943.703668745264 }, { "content": "pub trait AdminControl<Storage: ContractStorage>: ContractContext<Storage> {\n\n fn init(&mut self) {\n\n Admins::init();\n\n }\n\n\n\n fn add_admin(&mut self, address: Key) {\n\n self.assert_caller_is_admin();\n\n self.add_admin_without_checked(address);\n\n }\n\n\n\n fn disable_admin(&mut self, address: Key) {\n\n self.assert_caller_is_admin();\n\n Admins::instance().disable_admin(&address);\n\n }\n\n\n\n fn add_admin_without_checked(&mut self, address: Key) {\n\n Admins::instance().add_admin(&address);\n\n }\n\n\n\n fn assert_caller_is_admin(&self) {\n", "file_path": "utils/contract-utils/src/admin_control.rs", "rank": 40, "score": 31037.930089411206 }, { "content": " .expect(\"Wrong type in query result.\"),\n\n Err(e) => {\n\n println!(\"{}\", e);\n\n None\n\n }\n\n }\n\n }\n\n\n\n pub fn query_account_named_key<T: CLTyped + FromBytes>(\n\n &self,\n\n account: AccountHash,\n\n path: &[String],\n\n ) -> T {\n\n query(&self.builder, Key::Account(account), path)\n\n }\n\n}\n", "file_path": "utils/test-env/src/test_env.rs", "rank": 41, "score": 25.04460610385926 }, { "content": " )\n\n }\n\n\n\n pub fn query_dictionary<T: CLTyped + FromBytes>(\n\n &self,\n\n contract_hash: [u8; 32],\n\n dict_name: String,\n\n dictionary_item_key: String,\n\n ) -> Option<T> {\n\n match query_dictionary_item(\n\n &self.builder,\n\n Key::Hash(contract_hash),\n\n Some(dict_name),\n\n dictionary_item_key,\n\n ) {\n\n Ok(value) => value\n\n .as_cl_value()\n\n .expect(\"should be cl value.\")\n\n .clone()\n\n .into_t()\n", "file_path": "utils/test-env/src/test_env.rs", "rank": 43, "score": 23.784559053023322 }, { "content": "use std::path::PathBuf;\n\n\n\nuse casper_types::{\n\n account::AccountHash, bytesrepr::FromBytes, CLTyped, ContractHash, RuntimeArgs,\n\n};\n\n\n\nuse crate::{utils::DeploySource, TestEnv};\n\n\n\npub struct TestContract {\n\n env: TestEnv,\n\n name: String,\n\n contract_owner: AccountHash,\n\n}\n\n\n\nimpl TestContract 
{\n\n pub fn new(\n\n env: &TestEnv,\n\n wasm: &str,\n\n name: &str,\n\n sender: AccountHash,\n", "file_path": "utils/test-env/src/test_contract.rs", "rank": 44, "score": 21.816343152147375 }, { "content": " }\n\n\n\n pub fn query_named_key<T: CLTyped + FromBytes>(&self, key: String) -> T {\n\n let contract_name = format!(\"{}_contract_hash\", self.name);\n\n self.env\n\n .query_account_named_key(self.contract_owner, &[contract_name, key])\n\n }\n\n\n\n pub fn contract_hash(&self) -> [u8; 32] {\n\n let key = format!(\"{}_contract_hash_wrapped\", self.name);\n\n self.env\n\n .query_account_named_key(self.contract_owner, &[key])\n\n }\n\n\n\n pub fn call_contract(&self, sender: AccountHash, entry_point: &str, session_args: RuntimeArgs) {\n\n let session_code = DeploySource::ByHash {\n\n hash: ContractHash::new(self.contract_hash()),\n\n method: entry_point.to_string(),\n\n };\n\n self.env.run(sender, session_code, session_args);\n\n }\n\n}\n", "file_path": "utils/test-env/src/test_contract.rs", "rank": 45, "score": 21.760629419105612 }, { "content": "\n\n pub fn run(&self, sender: AccountHash, session_code: DeploySource, session_args: RuntimeArgs) {\n\n deploy(\n\n &mut self.state.lock().unwrap().builder,\n\n &sender,\n\n &session_code,\n\n session_args,\n\n true,\n\n None,\n\n )\n\n }\n\n\n\n pub fn next_user(&self) -> AccountHash {\n\n self.state.lock().unwrap().next_user()\n\n }\n\n\n\n pub fn query_dictionary<T: CLTyped + FromBytes>(\n\n &self,\n\n contract_hash: [u8; 32],\n\n dict_name: &str,\n", "file_path": "utils/test-env/src/test_env.rs", "rank": 46, "score": 21.286451812561047 }, { "content": "use std::sync::{Arc, Mutex};\n\n\n\nuse casper_engine_test_support::{InMemoryWasmTestBuilder, DEFAULT_RUN_GENESIS_REQUEST};\n\nuse casper_types::{\n\n account::AccountHash, bytesrepr::FromBytes, CLTyped, Key, PublicKey, RuntimeArgs, SecretKey,\n\n};\n\n\n\nuse crate::utils::{deploy, fund_account, query, query_dictionary_item, 
DeploySource};\n\n\n\n#[derive(Clone)]\n\npub struct TestEnv {\n\n state: Arc<Mutex<TestEnvState>>,\n\n}\n\n\n\nimpl TestEnv {\n\n pub fn new() -> TestEnv {\n\n TestEnv {\n\n state: Arc::new(Mutex::new(TestEnvState::new())),\n\n }\n\n }\n", "file_path": "utils/test-env/src/test_env.rs", "rank": 47, "score": 21.149005326107176 }, { "content": " mut args: RuntimeArgs,\n\n ) -> TestContract {\n\n let session_code = PathBuf::from(wasm);\n\n args.insert(\"contract_name\", name).unwrap();\n\n env.run(sender, DeploySource::Code(session_code), args);\n\n\n\n TestContract {\n\n env: env.clone(),\n\n name: String::from(name),\n\n contract_owner: sender,\n\n }\n\n }\n\n\n\n pub fn query_dictionary<T: CLTyped + FromBytes>(\n\n &self,\n\n dict_name: &str,\n\n key: String,\n\n ) -> Option<T> {\n\n self.env\n\n .query_dictionary(self.contract_hash(), dict_name, key)\n", "file_path": "utils/test-env/src/test_contract.rs", "rank": 48, "score": 20.94694240015008 }, { "content": " TestEnvState { builder, accounts }\n\n }\n\n\n\n pub fn next_user(&mut self) -> AccountHash {\n\n self.accounts.pop().unwrap()\n\n }\n\n\n\n pub fn _run(\n\n &mut self,\n\n sender: AccountHash,\n\n session_code: DeploySource,\n\n session_args: RuntimeArgs,\n\n ) {\n\n deploy(\n\n &mut self.builder,\n\n &sender,\n\n &session_code,\n\n session_args,\n\n true,\n\n None,\n", "file_path": "utils/test-env/src/test_env.rs", "rank": 49, "score": 20.12800555155079 }, { "content": "\n\n TestEnvState { builder, accounts }\n\n }\n\n\n\n pub fn _new_with_users(user_secrets: &[[u8; 32]]) -> TestEnvState {\n\n let mut builder = InMemoryWasmTestBuilder::default();\n\n builder.run_genesis(&DEFAULT_RUN_GENESIS_REQUEST).commit();\n\n\n\n let mut accounts = Vec::new();\n\n for user_secret in user_secrets {\n\n let secret_key: SecretKey = SecretKey::ed25519_from_bytes(user_secret).unwrap();\n\n let public_key: PublicKey = (&secret_key).into();\n\n let account_hash = AccountHash::from(&public_key);\n\n 
accounts.push(account_hash);\n\n builder\n\n .exec(fund_account(&account_hash))\n\n .expect_success()\n\n .commit();\n\n }\n\n\n", "file_path": "utils/test-env/src/test_env.rs", "rank": 50, "score": 20.10762525287007 }, { "content": " key: String,\n\n ) -> Option<T> {\n\n self.state\n\n .lock()\n\n .unwrap()\n\n .query_dictionary(contract_hash, dict_name.to_string(), key)\n\n }\n\n\n\n pub fn query_account_named_key<T: CLTyped + FromBytes>(\n\n &self,\n\n account: AccountHash,\n\n path: &[String],\n\n ) -> T {\n\n self.state\n\n .lock()\n\n .unwrap()\n\n .query_account_named_key(account, path)\n\n }\n\n}\n\n\n\nimpl Default for TestEnv {\n\n fn default() -> Self {\n\n TestEnv::new()\n\n }\n\n}\n\n\n", "file_path": "utils/test-env/src/test_env.rs", "rank": 51, "score": 19.774370105853457 }, { "content": "use std::collections::BTreeMap;\n\n\n\nuse blake2::{\n\n digest::{Update, VariableOutput},\n\n VarBlake2b,\n\n};\n\nuse casper_types::{\n\n account::AccountHash, bytesrepr::ToBytes, runtime_args, CLTyped, Key, RuntimeArgs, U256,\n\n};\n\nuse test_env::{TestContract, TestEnv};\n\n\n\npub type TokenId = U256;\n\npub type Meta = BTreeMap<String, String>;\n\n\n\npub struct CEP47Instance(TestContract);\n\n\n\nimpl CEP47Instance {\n\n pub fn new(\n\n env: &TestEnv,\n\n contract_name: &str,\n", "file_path": "cep47-tests/src/cep47_instance.rs", "rank": 53, "score": 16.66040525501727 }, { "content": "\n\n pub fn approve<T: Into<Key>>(&self, sender: AccountHash, spender: T, token_ids: Vec<TokenId>) {\n\n self.0.call_contract(\n\n sender,\n\n \"approve\",\n\n runtime_args! 
{\"spender\" => spender.into(), \"token_ids\" => token_ids},\n\n )\n\n }\n\n\n\n pub fn get_approved<T: Into<Key>>(&self, owner: T, token_id: TokenId) -> Option<Key> {\n\n self.0.query_dictionary(\n\n \"allowances\",\n\n key_and_value_to_str::<String>(&owner.into(), &token_id.to_string()),\n\n )\n\n }\n\n\n\n pub fn update_token_meta(&self, sender: AccountHash, token_id: TokenId, token_meta: Meta) {\n\n self.0.call_contract(\n\n sender,\n\n \"update_token_meta\",\n", "file_path": "cep47-tests/src/cep47_instance.rs", "rank": 54, "score": 14.855566247063171 }, { "content": "\n\n pub fn get_by_keys<T: CLTyped + FromBytes>(&self, keys: (&Key, &Key)) -> Option<T> {\n\n self.get(&keys_to_str(keys.0, keys.1))\n\n }\n\n\n\n pub fn set<T: CLTyped + ToBytes>(&self, key: &str, value: T) {\n\n storage::dictionary_put(self.uref, key, Some(value));\n\n }\n\n\n\n pub fn set_by_key<T: CLTyped + ToBytes>(&self, key: &Key, value: T) {\n\n self.set(&key_to_str(key), value);\n\n }\n\n\n\n pub fn set_by_keys<T: CLTyped + ToBytes>(&self, keys: (&Key, &Key), value: T) {\n\n self.set(&keys_to_str(keys.0, keys.1), value)\n\n }\n\n\n\n pub fn remove<T: CLTyped + ToBytes>(&self, key: &str) {\n\n storage::dictionary_put(self.uref, key, Option::<T>::None);\n\n }\n", "file_path": "utils/contract-utils/src/data.rs", "rank": 55, "score": 13.833279542420133 }, { "content": "use std::collections::BTreeMap;\n\n\n\nuse casper_types::{account::AccountHash, Key, U256};\n\nuse test_env::TestEnv;\n\n\n\nuse crate::cep47_instance::{CEP47Instance, Meta, TokenId};\n\n\n\nconst NAME: &str = \"DragonsNFT\";\n\nconst SYMBOL: &str = \"DGNFT\";\n\n\n\nmod meta {\n\n use super::{BTreeMap, Meta};\n\n pub fn contract_meta() -> Meta {\n\n let mut meta = BTreeMap::new();\n\n meta.insert(\"origin\".to_string(), \"fire\".to_string());\n\n meta\n\n }\n\n\n\n pub fn red_dragon() -> Meta {\n\n let mut meta = BTreeMap::new();\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 56, "score": 13.661269938042441 }, { 
"content": "use alloc::string::{String, ToString};\n\nuse core::convert::TryInto;\n\n\n\nuse casper_contract::{\n\n contract_api::{runtime, storage},\n\n unwrap_or_revert::UnwrapOrRevert,\n\n};\n\nuse casper_types::{\n\n bytesrepr::{FromBytes, ToBytes},\n\n ApiError, CLTyped, Key, URef,\n\n};\n\n\n\npub struct Dict {\n\n uref: URef,\n\n}\n\n\n\nimpl Dict {\n\n pub fn instance(name: &str) -> Dict {\n\n let key = runtime::get_key(name).unwrap_or_revert();\n\n let uref = *key.as_uref().unwrap_or_revert();\n", "file_path": "utils/contract-utils/src/data.rs", "rank": 58, "score": 12.965730600274448 }, { "content": " runtime_args! {\n\n \"token_id\" => token_id,\n\n \"token_meta\" => token_meta\n\n },\n\n )\n\n }\n\n\n\n pub fn get_token_by_index<T: Into<Key>>(&self, account: T, index: U256) -> Option<TokenId> {\n\n self.0.query_dictionary(\n\n \"owned_tokens_by_index\",\n\n key_and_value_to_str(&account.into(), &index),\n\n )\n\n }\n\n\n\n pub fn balance_of<T: Into<Key>>(&self, account: T) -> U256 {\n\n self.0\n\n .query_dictionary(\"balances\", key_to_str(&account.into()))\n\n .unwrap_or_default()\n\n }\n\n\n", "file_path": "cep47-tests/src/cep47_instance.rs", "rank": 59, "score": 12.716405676986426 }, { "content": "\n\n pub fn remove_by_key<T: CLTyped + ToBytes>(&self, key: &Key) {\n\n self.remove::<T>(&key_to_str(key));\n\n }\n\n\n\n pub fn remove_by_vec_of_keys<T: CLTyped + ToBytes>(&self, keys: (&Key, &Key)) {\n\n self.remove::<T>(&keys_to_str(keys.0, keys.1))\n\n }\n\n}\n\n\n", "file_path": "utils/contract-utils/src/data.rs", "rank": 60, "score": 12.582780270685545 }, { "content": "use alloc::{\n\n collections::BTreeMap,\n\n string::{String, ToString},\n\n vec::Vec,\n\n};\n\nuse casper_contract::{\n\n contract_api::{runtime::get_call_stack, storage},\n\n unwrap_or_revert::UnwrapOrRevert,\n\n};\n\nuse casper_types::{system::CallStackElement, ContractPackageHash, Key, URef, U256};\n\nuse contract_utils::{get_key, key_and_value_to_str, key_to_str, set_key, 
Dict};\n\n\n\nuse crate::{event::CEP47Event, Meta, TokenId};\n\n\n\nconst BALANCES_DICT: &str = \"balances\";\n\npub const ALLOWANCES_DICT: &str = \"allowances\";\n\nconst METADATA_DICT: &str = \"metadata\";\n\nconst OWNERS_DICT: &str = \"owners\";\n\nconst OWNED_TOKENS_BY_INDEX_DICT: &str = \"owned_tokens_by_index\";\n\nconst OWNED_INDEXES_BY_TOKEN_DICT: &str = \"owned_indexes_by_token\";\n", "file_path": "cep47/src/data.rs", "rank": 61, "score": 12.413264972416101 }, { "content": " \"token_metas\" => token_metas\n\n },\n\n )\n\n }\n\n\n\n pub fn burn_one<T: Into<Key>>(&self, sender: AccountHash, owner: T, token_id: TokenId) {\n\n self.0.call_contract(\n\n sender,\n\n \"burn\",\n\n runtime_args! {\n\n \"owner\" => owner.into(),\n\n \"token_ids\" => vec![token_id]\n\n },\n\n )\n\n }\n\n\n\n pub fn burn_many<T: Into<Key>>(&self, sender: AccountHash, owner: T, token_ids: Vec<TokenId>) {\n\n self.0.call_contract(\n\n sender,\n\n \"burn\",\n", "file_path": "cep47-tests/src/cep47_instance.rs", "rank": 62, "score": 11.849439502396521 }, { "content": " pub fn owner_of(&self, token_id: TokenId) -> Option<Key> {\n\n self.0.query_dictionary(\"owners\", token_id.to_string())\n\n }\n\n\n\n pub fn token_meta(&self, token_id: TokenId) -> Option<Meta> {\n\n self.0.query_dictionary(\"metadata\", token_id.to_string())\n\n }\n\n\n\n pub fn name(&self) -> String {\n\n self.0.query_named_key(String::from(\"name\"))\n\n }\n\n\n\n pub fn symbol(&self) -> String {\n\n self.0.query_named_key(String::from(\"symbol\"))\n\n }\n\n\n\n pub fn total_supply(&self) -> U256 {\n\n self.0.query_named_key(String::from(\"total_supply\"))\n\n }\n\n\n\n pub fn meta(&self) -> Meta {\n\n self.0.query_named_key(String::from(\"meta\"))\n\n }\n\n}\n\n\n", "file_path": "cep47-tests/src/cep47_instance.rs", "rank": 63, "score": 11.609222950271116 }, { "content": " ))\n\n }\n\n\n\n pub fn set(&self, owner: &Key, token_id: &TokenId, value: Key) {\n\n self.dict.set(\n\n &key_and_value_to_str::<String>(owner, 
&token_id.to_string()),\n\n value,\n\n );\n\n }\n\n\n\n pub fn remove(&self, owner: &Key, token_id: &TokenId) {\n\n self.dict.remove::<Key>(&key_and_value_to_str::<String>(\n\n owner,\n\n &token_id.to_string(),\n\n ));\n\n }\n\n}\n\n\n", "file_path": "cep47/src/data.rs", "rank": 64, "score": 11.076914643003148 }, { "content": "use alloc::vec::Vec;\n\nuse casper_types::Key;\n\n\n\nuse crate::TokenId;\n\n\n\npub enum CEP47Event {\n\n Mint {\n\n recipient: Key,\n\n token_ids: Vec<TokenId>,\n\n },\n\n Burn {\n\n owner: Key,\n\n token_ids: Vec<TokenId>,\n\n },\n\n Approve {\n\n owner: Key,\n\n spender: Key,\n\n token_ids: Vec<TokenId>,\n\n },\n\n Transfer {\n\n sender: Key,\n\n recipient: Key,\n\n token_ids: Vec<TokenId>,\n\n },\n\n MetadataUpdate {\n\n token_id: TokenId,\n\n },\n\n}\n", "file_path": "cep47/src/event.rs", "rank": 65, "score": 11.022154524011285 }, { "content": " }\n\n\n\n pub fn get(&self, key: &TokenId) -> Option<Key> {\n\n self.dict.get(&key.to_string())\n\n }\n\n\n\n pub fn set(&self, key: &TokenId, value: Key) {\n\n self.dict.set(&key.to_string(), value);\n\n }\n\n\n\n pub fn remove(&self, key: &TokenId) {\n\n self.dict.remove::<Key>(&key.to_string());\n\n }\n\n}\n\n\n\npub struct Metadata {\n\n dict: Dict,\n\n}\n\n\n\nimpl Metadata {\n", "file_path": "cep47/src/data.rs", "rank": 66, "score": 11.014446221901427 }, { "content": " runtime_args! {\n\n \"owner\" => owner.into(),\n\n \"token_ids\" => token_ids\n\n },\n\n )\n\n }\n\n\n\n pub fn transfer<T: Into<Key>>(\n\n &self,\n\n sender: AccountHash,\n\n recipient: T,\n\n token_ids: Vec<TokenId>,\n\n ) {\n\n self.0.call_contract(\n\n sender,\n\n \"transfer\",\n\n runtime_args! 
{\n\n \"recipient\" => recipient.into(),\n\n \"token_ids\" => token_ids\n\n },\n", "file_path": "cep47-tests/src/cep47_instance.rs", "rank": 67, "score": 11.010497701030058 }, { "content": " )\n\n }\n\n\n\n pub fn transfer_from<T: Into<Key>>(\n\n &self,\n\n sender: AccountHash,\n\n owner: T,\n\n recipient: T,\n\n token_ids: Vec<TokenId>,\n\n ) {\n\n self.0.call_contract(\n\n sender,\n\n \"transfer_from\",\n\n runtime_args! {\n\n \"sender\" => owner.into(),\n\n \"recipient\" => recipient.into(),\n\n \"token_ids\" => token_ids\n\n },\n\n )\n\n }\n", "file_path": "cep47-tests/src/cep47_instance.rs", "rank": 68, "score": 10.849628859729307 }, { "content": "#![no_std]\n\n#[macro_use]\n\nextern crate alloc;\n\n\n\nmod cep47;\n\npub mod data;\n\npub mod event;\n\n\n\npub use cep47::{Error, CEP47};\n\npub use contract_utils;\n\n\n\nuse alloc::{collections::BTreeMap, string::String};\n\nuse casper_types::U256;\n\npub type TokenId = U256;\n\npub type Meta = BTreeMap<String, String>;\n", "file_path": "cep47/src/lib.rs", "rank": 69, "score": 10.56656818904412 }, { "content": " \"recipient\" => recipient.into(),\n\n \"token_ids\" => vec![token_id],\n\n \"token_metas\" => vec![token_meta]\n\n },\n\n )\n\n }\n\n\n\n pub fn mint_copies<T: Into<Key>>(\n\n &self,\n\n sender: AccountHash,\n\n recipient: T,\n\n token_ids: Vec<TokenId>,\n\n token_meta: Meta,\n\n count: u32,\n\n ) {\n\n self.0.call_contract(\n\n sender,\n\n \"mint_copies\",\n\n runtime_args! 
{\n\n \"recipient\" => recipient.into(),\n", "file_path": "cep47-tests/src/cep47_instance.rs", "rank": 70, "score": 10.548907905320197 }, { "content": " pub fn instance() -> Metadata {\n\n Metadata {\n\n dict: Dict::instance(METADATA_DICT),\n\n }\n\n }\n\n\n\n pub fn init() {\n\n Dict::init(METADATA_DICT)\n\n }\n\n\n\n pub fn get(&self, key: &TokenId) -> Option<Meta> {\n\n self.dict.get(&key.to_string())\n\n }\n\n\n\n pub fn set(&self, key: &TokenId, value: Meta) {\n\n self.dict.set(&key.to_string(), value);\n\n }\n\n\n\n pub fn remove(&self, key: &TokenId) {\n\n self.dict.remove::<Meta>(&key.to_string());\n", "file_path": "cep47/src/data.rs", "rank": 71, "score": 10.416822951201972 }, { "content": "use crate::{\n\n data::{self, Allowances, Metadata, OwnedTokens, Owners},\n\n event::CEP47Event,\n\n Meta, TokenId,\n\n};\n\nuse alloc::{string::String, vec::Vec};\n\nuse casper_types::{ApiError, Key, U256};\n\nuse contract_utils::{ContractContext, ContractStorage};\n\nuse core::convert::TryInto;\n\n\n\n#[repr(u16)]\n\npub enum Error {\n\n PermissionDenied = 1,\n\n WrongArguments = 2,\n\n TokenIdAlreadyExists = 3,\n\n TokenIdDoesntExist = 4,\n\n}\n\n\n\nimpl From<Error> for ApiError {\n\n fn from(error: Error) -> ApiError {\n\n ApiError::User(error as u16)\n\n }\n\n}\n\n\n", "file_path": "cep47/src/cep47.rs", "rank": 72, "score": 10.18330197144397 }, { "content": " \"token_ids\" => token_ids,\n\n \"token_meta\" => token_meta,\n\n \"count\" => count\n\n },\n\n )\n\n }\n\n\n\n pub fn mint_many<T: Into<Key>>(\n\n &self,\n\n sender: AccountHash,\n\n recipient: T,\n\n token_ids: Vec<TokenId>,\n\n token_metas: Vec<Meta>,\n\n ) {\n\n self.0.call_contract(\n\n sender,\n\n \"mint\",\n\n runtime_args! 
{\n\n \"recipient\" => recipient.into(),\n\n \"token_ids\" => token_ids,\n", "file_path": "cep47-tests/src/cep47_instance.rs", "rank": 73, "score": 10.14567054767118 }, { "content": " Dict { uref }\n\n }\n\n\n\n pub fn init(name: &str) {\n\n storage::new_dictionary(name).unwrap_or_revert();\n\n }\n\n\n\n pub fn at(uref: URef) -> Dict {\n\n Dict { uref }\n\n }\n\n\n\n pub fn get<T: CLTyped + FromBytes>(&self, key: &str) -> Option<T> {\n\n storage::dictionary_get(self.uref, key)\n\n .unwrap_or_revert()\n\n .unwrap_or_default()\n\n }\n\n\n\n pub fn get_by_key<T: CLTyped + FromBytes>(&self, key: &Key) -> Option<T> {\n\n self.get(&key_to_str(key))\n\n }\n", "file_path": "utils/contract-utils/src/data.rs", "rank": 74, "score": 9.623549481321964 }, { "content": "#![no_std]\n\n#![feature(once_cell)]\n\n\n\nextern crate alloc;\n\n\n\nmod admin_control;\n\nmod contract_context;\n\nmod contract_storage;\n\nmod data;\n\n\n\npub use admin_control::AdminControl;\n\npub use contract_context::ContractContext;\n\npub use contract_storage::{ContractStorage, OnChainContractStorage};\n\npub use data::{get_key, key_and_value_to_str, key_to_str, set_key, Dict};\n", "file_path": "utils/contract-utils/src/lib.rs", "rank": 75, "score": 9.416958717369731 }, { "content": " sender,\n\n \"constructor\",\n\n runtime_args! {\n\n \"name\" => name,\n\n \"symbol\" => symbol,\n\n \"meta\" => meta},\n\n );\n\n }\n\n\n\n pub fn mint_one<T: Into<Key>>(\n\n &self,\n\n sender: AccountHash,\n\n recipient: T,\n\n token_id: TokenId,\n\n token_meta: Meta,\n\n ) {\n\n self.0.call_contract(\n\n sender,\n\n \"mint\",\n\n runtime_args! 
{\n", "file_path": "cep47-tests/src/cep47_instance.rs", "rank": 76, "score": 9.319030834577974 }, { "content": "\n\npub struct Allowances {\n\n dict: Dict,\n\n}\n\n\n\nimpl Allowances {\n\n pub fn instance() -> Allowances {\n\n Allowances {\n\n dict: Dict::instance(ALLOWANCES_DICT),\n\n }\n\n }\n\n\n\n pub fn init() {\n\n Dict::init(ALLOWANCES_DICT)\n\n }\n\n\n\n pub fn get(&self, owner: &Key, token_id: &TokenId) -> Option<Key> {\n\n self.dict.get(&key_and_value_to_str::<String>(\n\n owner,\n\n &token_id.to_string(),\n", "file_path": "cep47/src/data.rs", "rank": 77, "score": 9.075743417839854 }, { "content": " });\n\n Ok(token_ids)\n\n }\n\n\n\n fn mint_copies(\n\n &mut self,\n\n recipient: Key,\n\n token_ids: Vec<TokenId>,\n\n token_meta: Meta,\n\n count: u32,\n\n ) -> Result<Vec<TokenId>, Error> {\n\n let token_metas = vec![token_meta; count.try_into().unwrap()];\n\n self.mint(recipient, token_ids, token_metas)\n\n }\n\n\n\n fn burn(&mut self, owner: Key, token_ids: Vec<TokenId>) -> Result<(), Error> {\n\n let spender = self.get_caller();\n\n if spender != owner {\n\n for token_id in &token_ids {\n\n if !self.is_approved(owner, *token_id, spender) {\n", "file_path": "cep47/src/cep47.rs", "rank": 78, "score": 8.570642879359957 }, { "content": " fn get_approved(&self, owner: Key, token_id: TokenId) -> Option<Key> {\n\n Allowances::instance().get(&owner, &token_id)\n\n }\n\n\n\n fn transfer(&mut self, recipient: Key, token_ids: Vec<TokenId>) -> Result<(), Error> {\n\n self.transfer_from(self.get_caller(), recipient, token_ids)\n\n }\n\n\n\n fn transfer_from(\n\n &mut self,\n\n owner: Key,\n\n recipient: Key,\n\n token_ids: Vec<TokenId>,\n\n ) -> Result<(), Error> {\n\n let spender = self.get_caller();\n\n\n\n if owner != spender {\n\n let allowances_dict = Allowances::instance();\n\n for token_id in &token_ids {\n\n if !self.is_approved(owner, *token_id, spender) {\n", "file_path": "cep47/src/cep47.rs", "rank": 79, "score": 8.465492357363376 }, { "content": "use 
alloc::vec::Vec;\n\nuse core::lazy::OnceCell;\n\n\n\nuse casper_contract::contract_api::runtime;\n\nuse casper_types::system::CallStackElement;\n\n\n", "file_path": "utils/contract-utils/src/contract_storage.rs", "rank": 80, "score": 8.437914671887837 }, { "content": " }\n\n\n\n pub fn set_token(&self, owner: &Key, value: &TokenId) {\n\n let length = self.get_balances(owner);\n\n self.indexes_dict\n\n .set(&key_and_value_to_str(owner, value), length);\n\n self.tokens_dict\n\n .set(&key_and_value_to_str(owner, &length), *value);\n\n self.set_balances(owner, length + 1);\n\n }\n\n\n\n pub fn remove_token(&self, owner: &Key, value: &TokenId) {\n\n let length = self.get_balances(owner);\n\n let index = self.get_index_by_token(owner, value).unwrap_or_revert();\n\n match length.cmp(&(index + 1)) {\n\n core::cmp::Ordering::Equal => {\n\n self.tokens_dict\n\n .remove::<TokenId>(&key_and_value_to_str(owner, &(length - 1)));\n\n self.set_balances(owner, length - 1);\n\n }\n", "file_path": "cep47/src/data.rs", "rank": 81, "score": 8.404085593236593 }, { "content": " return Err(Error::PermissionDenied);\n\n }\n\n allowances_dict.remove(&owner, token_id);\n\n }\n\n }\n\n self.transfer_from_internal(owner, recipient, token_ids)\n\n }\n\n\n\n fn transfer_from_internal(\n\n &mut self,\n\n owner: Key,\n\n recipient: Key,\n\n token_ids: Vec<TokenId>,\n\n ) -> Result<(), Error> {\n\n let owners_dict = Owners::instance();\n\n let owned_tokens_dict = OwnedTokens::instance();\n\n\n\n for token_id in &token_ids {\n\n match owners_dict.get(token_id) {\n\n Some(owner_of_key) => {\n", "file_path": "cep47/src/cep47.rs", "rank": 82, "score": 8.139698680712804 }, { "content": " meta.insert(\"color\".to_string(), \"red\".to_string());\n\n meta\n\n }\n\n\n\n pub fn blue_dragon() -> Meta {\n\n let mut meta = BTreeMap::new();\n\n meta.insert(\"color\".to_string(), \"blue\".to_string());\n\n meta\n\n }\n\n\n\n pub fn black_dragon() -> Meta {\n\n let mut meta = BTreeMap::new();\n\n 
meta.insert(\"color\".to_string(), \"black\".to_string());\n\n meta\n\n }\n\n\n\n pub fn gold_dragon() -> Meta {\n\n let mut meta = BTreeMap::new();\n\n meta.insert(\"color\".to_string(), \"gold\".to_string());\n\n meta\n\n }\n\n}\n\n\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 83, "score": 8.047551995234578 }, { "content": " }\n\n }\n\n CEP47Event::Transfer {\n\n sender,\n\n recipient,\n\n token_ids,\n\n } => {\n\n for token_id in token_ids {\n\n let mut param = BTreeMap::new();\n\n param.insert(CONTRACT_PACKAGE_HASH, package.to_string());\n\n param.insert(\"event_type\", \"cep47_transfer_token\".to_string());\n\n param.insert(\"sender\", sender.to_string());\n\n param.insert(\"recipient\", recipient.to_string());\n\n param.insert(\"token_id\", token_id.to_string());\n\n events.push(param);\n\n }\n\n }\n\n CEP47Event::MetadataUpdate { token_id } => {\n\n let mut param = BTreeMap::new();\n\n param.insert(CONTRACT_PACKAGE_HASH, package.to_string());\n", "file_path": "cep47/src/data.rs", "rank": 84, "score": 7.969418211785491 }, { "content": " Dict::init(OWNED_INDEXES_BY_TOKEN_DICT);\n\n Dict::init(BALANCES_DICT);\n\n }\n\n\n\n pub fn get_token_by_index(&self, owner: &Key, index: &U256) -> Option<TokenId> {\n\n self.tokens_dict.get(&key_and_value_to_str(owner, index))\n\n }\n\n\n\n pub fn get_index_by_token(&self, owner: &Key, value: &TokenId) -> Option<U256> {\n\n self.indexes_dict.get(&key_and_value_to_str(owner, value))\n\n }\n\n\n\n pub fn get_balances(&self, owner: &Key) -> U256 {\n\n self.balances_dict\n\n .get(&key_to_str(owner))\n\n .unwrap_or_default()\n\n }\n\n\n\n pub fn set_balances(&self, owner: &Key, value: U256) {\n\n self.balances_dict.set(&key_to_str(owner), value);\n", "file_path": "cep47/src/data.rs", "rank": 85, "score": 7.93166768677359 }, { "content": " param.insert(CONTRACT_PACKAGE_HASH, package.to_string());\n\n param.insert(\"event_type\", \"cep47_burn_one\".to_string());\n\n param.insert(\"owner\", owner.to_string());\n\n 
param.insert(\"token_id\", token_id.to_string());\n\n events.push(param);\n\n }\n\n }\n\n CEP47Event::Approve {\n\n owner,\n\n spender,\n\n token_ids,\n\n } => {\n\n for token_id in token_ids {\n\n let mut param = BTreeMap::new();\n\n param.insert(CONTRACT_PACKAGE_HASH, package.to_string());\n\n param.insert(\"event_type\", \"cep47_approve_token\".to_string());\n\n param.insert(\"owner\", owner.to_string());\n\n param.insert(\"spender\", spender.to_string());\n\n param.insert(\"token_id\", token_id.to_string());\n\n events.push(param);\n", "file_path": "cep47/src/data.rs", "rank": 86, "score": 7.791701608836077 }, { "content": " return Err(Error::PermissionDenied);\n\n }\n\n }\n\n }\n\n self.burn_internal(owner, token_ids)\n\n }\n\n\n\n fn burn_internal(&mut self, owner: Key, token_ids: Vec<TokenId>) -> Result<(), Error> {\n\n let owners_dict = Owners::instance();\n\n let owned_tokens_dict = OwnedTokens::instance();\n\n let metadata_dict = Metadata::instance();\n\n let allowances_dict = Allowances::instance();\n\n\n\n for token_id in &token_ids {\n\n match owners_dict.get(token_id) {\n\n Some(owner_of_key) => {\n\n if owner_of_key != owner {\n\n return Err(Error::PermissionDenied);\n\n }\n\n }\n", "file_path": "cep47/src/cep47.rs", "rank": 87, "score": 7.706222410597229 }, { "content": " sender: AccountHash,\n\n name: &str,\n\n symbol: &str,\n\n meta: Meta,\n\n ) -> CEP47Instance {\n\n CEP47Instance(TestContract::new(\n\n env,\n\n \"cep47-token.wasm\",\n\n contract_name,\n\n sender,\n\n runtime_args! 
{\n\n \"name\" => name,\n\n \"symbol\" => symbol,\n\n \"meta\" => meta\n\n },\n\n ))\n\n }\n\n\n\n pub fn constructor(&self, sender: AccountHash, name: &str, symbol: &str, meta: Meta) {\n\n self.0.call_contract(\n", "file_path": "cep47-tests/src/cep47_instance.rs", "rank": 88, "score": 7.573229241939817 }, { "content": " }\n\n }\n\n true\n\n }\n\n\n\n fn mint(\n\n &mut self,\n\n recipient: Key,\n\n token_ids: Vec<TokenId>,\n\n token_metas: Vec<Meta>,\n\n ) -> Result<Vec<TokenId>, Error> {\n\n if token_ids.len() != token_metas.len() {\n\n return Err(Error::WrongArguments);\n\n };\n\n\n\n for token_id in &token_ids {\n\n if self.owner_of(*token_id).is_some() {\n\n return Err(Error::TokenIdAlreadyExists);\n\n }\n\n }\n", "file_path": "cep47/src/cep47.rs", "rank": 89, "score": 7.462458121845092 }, { "content": " let caller = self.get_caller();\n\n if !self.is_admin(caller) {\n\n runtime::revert(ApiError::User(20));\n\n }\n\n }\n\n\n\n fn is_admin(&self, address: Key) -> bool {\n\n Admins::instance().is_admin(&address)\n\n }\n\n}\n\n\n", "file_path": "utils/contract-utils/src/admin_control.rs", "rank": 90, "score": 7.418267752078462 }, { "content": "use casper_contract::unwrap_or_revert::UnwrapOrRevert;\n\nuse casper_types::{system::CallStackElement, Key};\n\n\n\nuse crate::ContractStorage;\n\n\n", "file_path": "utils/contract-utils/src/contract_context.rs", "rank": 91, "score": 7.367915988249523 }, { "content": " Ok(())\n\n }\n\n\n\n fn approve(&mut self, spender: Key, token_ids: Vec<TokenId>) -> Result<(), Error> {\n\n let caller = self.get_caller();\n\n for token_id in &token_ids {\n\n match self.owner_of(*token_id) {\n\n None => return Err(Error::WrongArguments),\n\n Some(owner) if owner != caller => return Err(Error::PermissionDenied),\n\n Some(_) => Allowances::instance().set(&caller, token_id, spender),\n\n }\n\n }\n\n self.emit(CEP47Event::Approve {\n\n owner: caller,\n\n spender,\n\n token_ids,\n\n });\n\n Ok(())\n\n }\n\n\n", "file_path": "cep47/src/cep47.rs", 
"rank": 92, "score": 7.196194639341913 }, { "content": "use casper_contract::contract_api::runtime;\n\nuse casper_types::{ApiError, Key};\n\n\n\nuse crate::{ContractContext, ContractStorage, Dict};\n\n\n\nconst ADMINS_DICT: &str = \"admins\";\n\n\n", "file_path": "utils/contract-utils/src/admin_control.rs", "rank": 93, "score": 7.020357453683332 }, { "content": " });\n\n Ok(())\n\n }\n\n\n\n fn is_approved(&self, owner: Key, token_id: TokenId, spender: Key) -> bool {\n\n let allowances_dict = Allowances::instance();\n\n if let Some(spender_of) = allowances_dict.get(&owner, &token_id) {\n\n if spender_of == spender {\n\n return true;\n\n }\n\n }\n\n false\n\n }\n\n\n\n fn emit(&mut self, event: CEP47Event) {\n\n data::emit(&event);\n\n }\n\n}\n", "file_path": "cep47/src/cep47.rs", "rank": 94, "score": 5.8270419751528895 }, { "content": " fn set_token_meta(&mut self, token_id: TokenId, meta: Meta) -> Result<(), Error> {\n\n if self.owner_of(token_id).is_none() {\n\n return Err(Error::TokenIdDoesntExist);\n\n };\n\n\n\n let metadata_dict = Metadata::instance();\n\n metadata_dict.set(&token_id, meta);\n\n\n\n self.emit(CEP47Event::MetadataUpdate { token_id });\n\n Ok(())\n\n }\n\n\n\n fn get_token_by_index(&self, owner: Key, index: U256) -> Option<TokenId> {\n\n OwnedTokens::instance().get_token_by_index(&owner, &index)\n\n }\n\n\n\n fn validate_token_ids(&self, token_ids: Vec<TokenId>) -> bool {\n\n for token_id in &token_ids {\n\n if self.owner_of(*token_id).is_some() {\n\n return false;\n", "file_path": "cep47/src/cep47.rs", "rank": 95, "score": 5.798227875250252 }, { "content": "mod test_contract;\n\nmod test_env;\n\nmod utils;\n\nuse crate::test_env as other_test_env;\n\n\n\npub use other_test_env::TestEnv;\n\npub use test_contract::TestContract;\n", "file_path": "utils/test-env/src/lib.rs", "rank": 96, "score": 5.577104412895471 }, { "content": " core::cmp::Ordering::Greater => {\n\n let last = self.get_token_by_index(owner, &(length - 1));\n\n 
self.indexes_dict.set(\n\n &key_and_value_to_str(owner, &last.unwrap_or_revert()),\n\n index,\n\n );\n\n self.tokens_dict.set(\n\n &key_and_value_to_str(owner, &index),\n\n last.unwrap_or_revert(),\n\n );\n\n self.tokens_dict\n\n .remove::<TokenId>(&key_and_value_to_str(owner, &(length - 1)));\n\n self.set_balances(owner, length - 1);\n\n }\n\n core::cmp::Ordering::Less => {}\n\n }\n\n self.indexes_dict\n\n .remove::<U256>(&key_and_value_to_str(owner, value));\n\n }\n\n}\n", "file_path": "cep47/src/data.rs", "rank": 97, "score": 5.33123485352249 }, { "content": "const CONTRACT_PACKAGE_HASH: &str = \"contract_package_hash\";\n\n\n\npub const NAME: &str = \"name\";\n\npub const META: &str = \"meta\";\n\npub const SYMBOL: &str = \"symbol\";\n\npub const TOTAL_SUPPLY: &str = \"total_supply\";\n\n\n\npub struct Owners {\n\n dict: Dict,\n\n}\n\n\n\nimpl Owners {\n\n pub fn instance() -> Owners {\n\n Owners {\n\n dict: Dict::instance(OWNERS_DICT),\n\n }\n\n }\n\n\n\n pub fn init() {\n\n Dict::init(OWNERS_DICT)\n", "file_path": "cep47/src/data.rs", "rank": 98, "score": 5.270102987620259 }, { "content": " assert_eq!(token.balance_of(Key::Account(user)), U256::from(2));\n\n\n\n let new_first_user_token = token.get_token_by_index(Key::Account(user), U256::from(0));\n\n let new_second_user_token = token.get_token_by_index(Key::Account(user), U256::from(1));\n\n let new_third_user_token = token.get_token_by_index(Key::Account(user), U256::from(2));\n\n let new_fourth_user_token = token.get_token_by_index(Key::Account(user), U256::from(3));\n\n assert_eq!(new_first_user_token, Some(token_ids[2]));\n\n assert_eq!(new_second_user_token, Some(token_ids[1]));\n\n assert_eq!(new_third_user_token, None);\n\n assert_eq!(new_fourth_user_token, None);\n\n}\n\n\n", "file_path": "cep47-tests/src/cep47_tests.rs", "rank": 99, "score": 4.837174721235229 } ]
Rust
src/main.rs
rcarmo/rrss2imap
1fff615262bcf5f98ae74bfacec6f902f732b9de
extern crate structopt; #[macro_use] extern crate log; extern crate serde; #[macro_use] extern crate serde_derive; extern crate serde_json; extern crate flexi_logger; extern crate treexml; extern crate chrono; extern crate rfc822_sanitizer; extern crate unidecode; extern crate tera; #[macro_use] extern crate lazy_static; #[macro_use] extern crate human_panic; extern crate kuchiki; extern crate imap; extern crate native_tls; extern crate base64; extern crate atom_syndication; extern crate reqwest; extern crate rss; extern crate xhtmlchardet; extern crate url; extern crate tree_magic; extern crate emailmessage; extern crate openssl_probe; extern crate regex; extern crate custom_error; extern crate async_std; extern crate tokio; extern crate futures; use flexi_logger::Logger; use std::path::PathBuf; use structopt::StructOpt; use std::error::Error; mod config; mod export; mod feed_errors; mod feed_reader; mod feed_utils; mod feed; mod image_to_data; mod import; mod message; mod settings; mod store; mod syndication; #[derive(Debug, StructOpt)] #[structopt(author=env!("CARGO_PKG_AUTHORS"))] struct RRSS2IMAP { #[structopt(short, long, parse(from_occurrences))] verbose: u8, #[structopt(subcommand)] cmd: Command } #[derive(Debug, StructOpt)] enum Command { #[structopt(name = "new")] New { email: String, }, #[structopt(name = "email")] Email { email: String }, #[structopt(name = "run")] Run, #[structopt(name = "add")] Add { #[structopt(short = "u", long = "url")] url:Option<String>, #[structopt(short = "e", long = "email")] email:Option<String>, #[structopt(short = "d", long = "destination")] destination:Option<String>, #[structopt(short = "i", long = "inline-mages")] inline_images:bool, #[structopt(short = "x", long = "do-not-inline-mages")] do_not_inline_images:bool, parameters: Vec<String>, }, #[structopt(name = "list")] List, #[structopt(name = "reset")] Reset, #[structopt(name = "delete")] Delete { feed: u32, }, #[structopt(name = "export")] Export { 
#[structopt(parse(from_os_str))] output: Option<PathBuf>, }, #[structopt(name = "import")] Import { #[structopt(parse(from_os_str))] input: Option<PathBuf>, }, } #[tokio::main] async fn main() -> Result<(), Box<dyn Error>> { if !cfg!(debug_assertions) { setup_panic!(); } let opt = RRSS2IMAP::from_args(); Logger::try_with_env_or_str( match opt.verbose { 0 => "warn", 1 => "warn, rrss2imap = info", 2 => "warn, rrss2imap = info", _ => "trace", }) .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)) .format(match opt.verbose { 0 => flexi_logger::colored_default_format, 1 => flexi_logger::colored_default_format, 2 => flexi_logger::colored_detailed_format, _ => flexi_logger::colored_with_thread, }) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); openssl_probe::init_ssl_cert_env_vars(); let store_path = store::find_store(); let store_result = store::Store::load(&store_path); match store_result { Ok(mut store) => { match opt.cmd { Command::New { email } => store.init_config(email), Command::Email { email } => store.set_email(email), Command::List => store.list(), Command::Add { url, email, destination, inline_images, do_not_inline_images, parameters } => store.add(url, email, destination, store.settings.config.inline(inline_images, do_not_inline_images), parameters), Command::Delete { feed } => store.delete(feed), Command::Reset => store.reset(), Command::Run => { let handle = tokio::spawn(async move { store.run().await }); let _res = handle.await; } Command::Export { output } => store.export(output), Command::Import { input } => store.import(input), } }, Err(e) => { error!("Impossible to open store {}\n{}", store_path.to_string_lossy(), e); } } Ok(()) }
extern crate structopt; #[macro_use] extern crate log; extern crate serde; #[macro_use] extern crate serde_derive; extern crate serde_json; extern crate flexi_logger; extern crate treexml; extern crate chrono; extern crate rfc822_sanitizer; extern crate unidecode; extern crate tera; #[macro_use] extern crate lazy_static; #[macro_use] extern crate human_panic; extern crate kuchiki; extern crate imap; extern crate native_tls; extern crate base64; extern crate atom_syndication; extern crate reqwest; extern crate rss; extern crate xhtmlchardet; extern crate url; extern crate tree_magic; extern crate emailmessage; extern crate openssl_probe; extern crate regex; extern crate custom_error; extern crate async_std; extern crate tokio; extern crate futures; use flexi_logger::Logger; use std::path::PathBuf; use structopt::StructOpt; use std::error::Error; mod config; mod export; mod feed_errors; mod feed_reader; mod feed_utils; mod feed; mod image_to_data; mod import; mod message; mod settings; mod store; mod syndication; #[derive(Debug, StructOpt)] #[structopt(author=env!("CARGO_PKG_AUTHORS"))] struct RRSS2IMAP { #[structopt(short, long, parse(from_occurrences))] verbose: u8, #[structopt(subcommand)] cmd: Command } #[derive(Debug, StructOpt)] enum Command { #[structopt(name = "new")] New { email: String, }, #[structopt(name = "email")] Email { email: String }, #[structopt(name = "run")] Run, #[structopt(name = "add")] Add { #[structopt(short = "u", long = "url")] url:Option<String>, #[structopt(short = "e", long = "email")] email:Option<String>, #[structopt(short = "d", long = "destination")] destination:Option<String>, #[structopt(short = "i", long = "inline-mages")] inline_images:bool, #[structopt(short = "x", long = "do-not-inline-mages")] do_not_inline_images:bool, parameters: Vec<String>, }, #[structopt(name = "list")] List, #[structopt(name = "reset")] Reset, #[structopt(name = "delete")] Delete { feed: u32, }, #[structopt(name = "export")] Export { 
#[structopt(parse(from_os_str))] output: Option<PathBuf>, }, #[structopt(name = "import")] Import { #[structopt(parse(from_os_str))] input: Option<PathBuf>, }, } #[tokio::main] async fn main() -> Result<(), Box<dyn Error>> { if !cfg!(debug_assertions) { setup_panic!(); } let opt = RRSS2IMAP::from_args(); Logger::try_with_env_or_str( match opt.verbose { 0 => "warn", 1 => "warn, rrss2imap = info", 2 => "warn, rrss2imap = info", _ => "trace", }) .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)) .format(match opt.verbose { 0 => flexi_logger::colored_default_format, 1 => flexi_logger::colored_default_format, 2 => flexi_logger::colored_detailed_format, _ =>
flexi_logger::colored_with_thread, }) .start() .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); openssl_probe::init_ssl_cert_env_vars(); let store_path = store::find_store(); let store_result = store::Store::load(&store_path); match store_result { Ok(mut store) => { match opt.cmd { Command::New { email } => store.init_config(email), Command::Email { email } => store.set_email(email), Command::List => store.list(), Command::Add { url, email, destination, inline_images, do_not_inline_images, parameters } => store.add(url, email, destination, store.settings.config.inline(inline_images, do_not_inline_images), parameters), Command::Delete { feed } => store.delete(feed), Command::Reset => store.reset(), Command::Run => { let handle = tokio::spawn(async move { store.run().await }); let _res = handle.await; } Command::Export { output } => store.export(output), Command::Import { input } => store.import(input), } }, Err(e) => { error!("Impossible to open store {}\n{}", store_path.to_string_lossy(), e); } } Ok(()) }
function_block-function_prefix_line
[ { "content": "fn group_feeds(to_store: &Store) -> HashMap<String, Vec<Feed>> {\n\n to_store.feeds.iter().fold(HashMap::new(), |mut map, feed| {\n\n let feed = feed.clone();\n\n let folder = feed.config.get_folder(&to_store.settings.config);\n\n if !map.contains_key(&folder) {\n\n map.insert(folder.clone(), vec![]);\n\n }\n\n let mut updated = vec![feed];\n\n updated.append(map.get_mut(&folder).unwrap());\n\n map.insert(folder, updated);\n\n // Return value of closure (which is *not* a return statement ;-)\n\n map\n\n })\n\n}\n\n\n", "file_path": "src/export.rs", "rank": 0, "score": 138331.82388514566 }, { "content": "fn sanitize_email(email:&String, domain:&String)->String {\n\n lazy_static! {\n\n static ref EMAIL_AND_NAME_DETECTOR:Regex = \n\n Regex::new(\"([[:alpha:]_%\\\\+\\\\-\\\\.]+@[[:alpha:]_%\\\\+\\\\-]+\\\\.[[:alpha:]_%\\\\+\\\\-]+{1,}) \\\\(([^\\\\)]*)\\\\)\").unwrap();\n\n }\n\n lazy_static! {\n\n static ref BAD_CHARACTER_REMOVER:Regex = \n\n Regex::new(\"[^[:alnum:].]\").unwrap();\n\n }\n\n if EMAIL_AND_NAME_DETECTOR.is_match(email) {\n\n let captures = EMAIL_AND_NAME_DETECTOR.captures(email).unwrap();\n\n return format!(\"{} <{}>\", captures.get(2).unwrap().as_str(), captures.get(1).unwrap().as_str());\n\n } else {\n\n // When no email is provided, use domain name\n\n let email = if email.is_empty() {\n\n domain\n\n } else {\n\n email\n\n };\n\n // Remove bad characters\n", "file_path": "src/feed_utils.rs", "rank": 1, "score": 130723.85619718357 }, { "content": "fn write(to_file: &PathBuf, to_store: HashMap<String, Vec<Feed>>) {\n\n // warn!(\"exporting feeds {:?}\", to_store);\n\n // Prepare the document by setting all boilerplate elements (root, head, body, ...)\n\n let mut root = Element::new(\"opml\");\n\n root.attributes\n\n .insert(\"version\".to_owned(), \"1.0\".to_owned());\n\n let mut header = Element::new(\"head\");\n\n let mut title = Element::new(\"title\");\n\n title.text = Some(\"rrss2imap OPML Export\".to_owned());\n\n 
header.children.push(title);\n\n root.children.push(header);\n\n let mut body = Element::new(\"body\");\n\n // Now fill body with outline elements generated from feeds\n\n for (folder, elements) in to_store {\n\n let mut folder_element = Element::new(\"outline\");\n\n folder_element\n\n .attributes\n\n .insert(\"text\".to_owned(), folder.clone());\n\n folder_element\n\n .attributes\n", "file_path": "src/export.rs", "rank": 4, "score": 108525.35242615441 }, { "content": "pub fn export(to_file: &PathBuf, to_store: &Store) {\n\n // First group feeds per storage folder\n\n let grouped = group_feeds(to_store);\n\n // Then write this map of lists\n\n write(to_file, grouped);\n\n}\n\n\n", "file_path": "src/export.rs", "rank": 5, "score": 102466.90021796721 }, { "content": "pub fn sanitize_message_authors(message_authors:Vec<String>, domain:String)->Vec<String> {\n\n let fixed = message_authors\n\n .iter()\n\n .map(|author| {\n\n sanitize_email(author, &domain)\n\n })\n\n .collect();\n\n fixed\n\n}\n\n\n", "file_path": "src/feed_utils.rs", "rank": 6, "score": 102183.3544985556 }, { "content": "pub fn import(from_file: &PathBuf, to_store: &mut Store) {\n\n let mut file =\n\n File::open(from_file).unwrap_or_else(|_| panic!(\"Unable to open file {:?}\", from_file));\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)\n\n .unwrap_or_else(|_| panic!(\"Unable to read file {:?}\", from_file));\n\n\n\n let doc = Document::parse(contents.as_bytes()).unwrap();\n\n let root = doc.root.unwrap();\n\n\n\n // old style parsing is good, because it is old :-)\n\n for element in root.children {\n\n match element.name.as_ref() {\n\n \"head\" => debug!(\"Reading {}\", element),\n\n \"body\" => import_body(element, to_store, &\"\".to_owned()),\n\n _ => error!(\"element {:?} was unexpected, please fill a bug !\", element),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/import.rs", "rank": 7, "score": 97860.81064043686 }, { "content": "fn import_body(body: Element, to_store: 
&mut Store, folder: &str) {\n\n for element in body.children {\n\n match element.name.as_ref() {\n\n \"outline\" => import_outline(element, to_store, folder),\n\n _ => error!(\"element {:?} was unexpected, please fill a bug!\", element),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/import.rs", "rank": 8, "score": 84116.19723038435 }, { "content": "fn import_outline(outline: Element, to_store: &mut Store, folder: &str) {\n\n if outline.children.is_empty() {\n\n // An outline without children is considered an OPML entry. Does it have the right set of attributes ?\n\n if outline.attributes.contains_key(\"type\")\n\n && outline.attributes.contains_key(\"text\")\n\n && outline.attributes.contains_key(\"xmlUrl\")\n\n {\n\n let url = outline.attributes.get(\"xmlUrl\");\n\n let feed = Feed {\n\n url: url.unwrap().to_string(),\n\n config: Config {\n\n email: None,\n\n folder: Some(folder.to_string()),\n\n from: None,\n\n inline_image_as_data: false,\n\n },\n\n last_updated: Feed::at_epoch(),\n\n last_message: None,\n\n };\n\n to_store.add_feed(feed);\n", "file_path": "src/import.rs", "rank": 9, "score": 84116.19723038435 }, { "content": "fn trim_to_chars(text:&str, characters:Vec<&str>)->String {\n\n let mut remaining = text;\n\n for cutter in characters {\n\n let elements:Vec<&str> = remaining.split(cutter).collect();\n\n remaining = elements[0].trim();\n\n }\n\n remaining.to_string()\n\n}\n\n\n", "file_path": "src/feed_utils.rs", "rank": 10, "score": 70768.43306028629 }, { "content": "/// Calculate the location of the `config.json` store file.\n\n/// If `config.json` is found in the current directory, use it for backward\n\n/// compatibility. 
Otherwise, return a path inside the project directory\n\n/// (~/.config/rrss2imap/ on Linux, system-specific on macOS and Windows).\n\npub fn find_store() -> PathBuf {\n\n let mut path = PathBuf::from(STORE);\n\n if !path.exists() {\n\n // The current directory takes precedence over project directory\n\n // for existing configurations for backward compatibility.\n\n if let Some(proj_dirs) = ProjectDirs::from(\"org\", \"Rrss2imap\", \"rrss2imap\") {\n\n path = proj_dirs.config_dir().to_path_buf();\n\n path.push(STORE);\n\n }\n\n }\n\n path\n\n}\n\n\n\nimpl Store {\n\n /// Initialize a Store object from a config file at the given path. If the\n\n /// config file does not exist, return a Store object with default values.\n\n pub fn load(path: &PathBuf) -> Result<Store,UnusableStore> {\n\n if path.exists() {\n\n info!(\"Reading config file {}\", path.to_string_lossy());\n\n // First read the file\n", "file_path": "src/store.rs", "rank": 11, "score": 60455.2170047266 }, { "content": "pub fn transform(document: NodeRef, _settings: &Settings) -> NodeRef {\n\n for node_ref in document.select(\"img\").unwrap() {\n\n // note we unwrapped the inner node to have its attributes available\n\n let node = node_ref.as_node().as_element();\n\n if let Some(data) = node {\n\n let attributes = &mut data.attributes.borrow_mut();\n\n if let Some(src) = attributes.get(\"src\") {\n\n // Now download image source and base64 encode it !\n\n debug!(\"reading image from {}\", src);\n\n/* if let Ok(mut response) = reqwest::get(src).await {\n\n let image_bytes = response.bytes().await.unwrap();\n\n let encoded = base64::encode(&image_bytes);\n\n let image_mime_type = tree_magic::from_u8(&image_bytes);\n\n*/ if let Ok(mut response) = reqwest::blocking::get(src) {\n\n let mut image: Vec<u8> = vec![];\n\n response.copy_to(&mut image).unwrap();\n\n let image_bytes = image.as_slice();\n\n let encoded = base64::encode(image_bytes);\n\n let image_mime_type = tree_magic::from_u8(image_bytes);\n\n 
attributes.insert(\n\n \"src\",\n\n format!(\"data:{};base64,{}\", image_mime_type, encoded),\n\n );\n\n }\n\n }\n\n }\n\n }\n\n document\n\n}\n", "file_path": "src/image_to_data.rs", "rank": 12, "score": 49480.661075511045 }, { "content": "use custom_error::custom_error;\n\n\n\ncustom_error!{\n\n pub UnparseableFeed\n\n DateIsNotRFC2822{value:String} = \"Date {value} is not RFC-2822 compliant\",\n\n DateIsNotRFC3339{value:String} = \"Date {value} is not RFC-3339 compliant\",\n\n DateIsNeitherRFC2822NorRFC3339{value:String} = \"Date {value} is neither RFC-2822 nor RFC-3339 compliant\",\n\n ChronoCantParse{source: chrono::ParseError} = \"chrono can't parse date\",\n\n NoDateFound = \"absolutly no date field was found in feed\",\n\n CantExtractImages{source: super::message::UnprocessableMessage} = \"Seems like it was not possible to read message contained images\"\n\n}", "file_path": "src/feed_errors.rs", "rank": 13, "score": 48924.16353964724 }, { "content": "/// This is a shameless copy of https://github.com/tomshen/rust-syndication to have it working with recent versions of\n\n/// both RSS and atom crates\n\nuse std::str::FromStr;\n\n\n\n/// Possible feeds types\n\npub enum Feed {\n\n Atom(atom_syndication::Feed),\n\n RSS(rss::Channel),\n\n}\n\n\n\n/// Parse a value to a feed.\n\nimpl FromStr for Feed {\n\n type Err = &'static str;\n\n\n\n /// Each supported enum value is tested after the other.\n\n /// We first try to load atom, then RSS.\n\n /// If none work, an error is returned\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s.parse::<atom_syndication::Feed>() {\n\n Ok(feed) => Ok(Feed::Atom(feed)),\n", "file_path": "src/syndication.rs", "rank": 14, "score": 26954.229354545943 }, { "content": " _ => match s.parse::<rss::Channel>() {\n\n Ok(channel) => Ok(Feed::RSS(channel)),\n\n _ => Err(\"Could not parse XML as Atom or RSS from input\"),\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl ToString for Feed {\n\n fn to_string(&self) -> String {\n\n match *self 
{\n\n Feed::Atom(ref atom_feed) => atom_feed.to_string(),\n\n Feed::RSS(ref rss_channel) => rss_channel.to_string(),\n\n }\n\n }\n\n}\n", "file_path": "src/syndication.rs", "rank": 15, "score": 26951.653008952122 }, { "content": "use super::settings::*;\n\n\n\n/// This structure defines the feed-level config.\n\n/// All elements here may be configured twice : once at feed level, and once at global level.\n\n/// Obviously, all elements which are not defined at feed level use global configuration\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub struct Config {\n\n /// When set, contains the email address used\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub email: Option<String>,\n\n /// When set, contains the folder in which entries for feed will be written\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub folder: Option<String>,\n\n /// When defined, this from field will be used instead of trying to construct it from feed title\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub from: Option<String>,\n\n /// When set to true, images will be inlined\n\n #[serde(\n\n skip_serializing_if = \"Settings::is_false\",\n\n default = \"Settings::default_false\"\n", "file_path": "src/config.rs", "rank": 16, "score": 26770.022993407118 }, { "content": " )]\n\n pub inline_image_as_data: bool,\n\n}\n\n\n\nimpl Config {\n\n /// Creates a new instance with all fields set to default \"falsy\" values : options are set to none and booleans to false\n\n pub fn new() -> Config {\n\n Config {\n\n email: None,\n\n folder: None,\n\n inline_image_as_data: false,\n\n from: None,\n\n }\n\n }\n\n\n\n /// Creates a string view of config.\n\n /// More precisely, outputs the email address and folder in which entries are to be written\n\n /// A default config is given for options set to None.\n\n pub fn to_string(self, default: &Config) -> String {\n\n return format!(\n", "file_path": "src/config.rs", "rank": 17, "score": 26768.105721475546 }, { 
"content": "use std::path::PathBuf;\n\n\n\nuse std::fs::File;\n\nuse std::io::Read;\n\n\n\nuse super::config::Config;\n\nuse super::feed::Feed;\n\nuse super::store::Store;\n\n\n\nuse treexml::*;\n\n\n", "file_path": "src/import.rs", "rank": 18, "score": 26766.890280173022 }, { "content": "use std::path::PathBuf;\n\n\n\nuse std::fs;\n\n\n\nuse std::collections::HashMap;\n\n\n\nuse super::feed::Feed;\n\nuse super::store::Store;\n\n\n\nuse treexml::*;\n\n\n", "file_path": "src/export.rs", "rank": 19, "score": 26764.21479189312 }, { "content": " \"(to: {}) {}\",\n\n self.email.unwrap_or_else(|| format!(\n\n \"{} (default)\",\n\n default.clone().email.unwrap_or_else(|| \"\".to_owned())\n\n )),\n\n self.folder.unwrap_or_else(|| format!(\n\n \"{} (default)\",\n\n default.clone().folder.unwrap_or_else(|| \"\".to_owned())\n\n ))\n\n );\n\n }\n\n\n\n /// Used by serde to skip serialization of default config for feeds\n\n /// This method check if config is the default one (consisting only into None options)\n\n pub fn is_none(config: &Config) -> bool {\n\n config.email.is_none()\n\n && config.folder.is_none()\n\n && config.from.is_none()\n\n && !config.inline_image_as_data\n\n }\n", "file_path": "src/config.rs", "rank": 20, "score": 26763.801815513118 }, { "content": " } else {\n\n error!(\"outline {:?} has no children, but doesn't has the right set of attributes. Please fill a bug!\", outline.attributes);\n\n }\n\n } else {\n\n // An outline with children is considered an OPML folder. Does it have the right set of attributes ?\n\n if outline.attributes.contains_key(\"text\") && outline.attributes.contains_key(\"title\") {\n\n let folder = &outline.attributes[\"text\"];\n\n import_body(outline.clone(), to_store, &folder.to_string());\n\n } else {\n\n error!(\"outline {:?} has children, but doesn't has the right set of attributes. 
Please fill a bug!\", outline.attributes);\n\n }\n\n }\n\n}\n", "file_path": "src/import.rs", "rank": 21, "score": 26763.402143423515 }, { "content": "\n\n /// Clear all content from this config excepted email address\n\n pub fn clear(&mut self) {\n\n self.folder = None;\n\n }\n\n\n\n /// Get the email value for that feed, be it defined locally or from the default config\n\n pub fn get_email(&self, default: &Config) -> String {\n\n self.clone()\n\n .email\n\n .unwrap_or_else(|| default.clone().email.unwrap_or_else(|| \"\".to_owned()))\n\n }\n\n\n\n /// Get the folder value for that feed, be it defined locally or from the default config\n\n pub fn get_folder(&self, default: &Config) -> String {\n\n self.clone()\n\n .folder\n\n .unwrap_or_else(|| default.clone().folder.unwrap_or_else(|| \"\".to_owned()))\n\n }\n\n\n", "file_path": "src/config.rs", "rank": 22, "score": 26763.05699910164 }, { "content": " .insert(\"title\".to_owned(), folder.clone());\n\n for feed in elements {\n\n let mut outline = Element::new(\"outline\");\n\n outline\n\n .attributes\n\n .insert(\"type\".to_owned(), \"rss\".to_owned());\n\n outline\n\n .attributes\n\n .insert(\"text\".to_owned(), feed.url.clone());\n\n outline\n\n .attributes\n\n .insert(\"xmlUrl\".to_owned(), feed.url.clone());\n\n folder_element.children.push(outline);\n\n }\n\n body.children.push(folder_element);\n\n }\n\n // Don't forget to add body after, otherwise we enter into the dangerous realm of borrowed values\n\n root.children.push(body);\n\n let mut document = Document::new();\n\n document.root = Some(root);\n\n fs::write(to_file, format!(\"{}\", document))\n\n .unwrap_or_else(|_| panic!(\"Unable to write file {:?}\", to_file));\n\n}\n", "file_path": "src/export.rs", "rank": 23, "score": 26762.64519002409 }, { "content": " /// Compute an inline flag by resolving the two flags with this struct inline images status\n\n pub fn inline(&self, inline:bool, do_not_inline:bool)->bool {\n\n if self.inline_image_as_data {\n\n 
!do_not_inline\n\n } else {\n\n inline\n\n }\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 24, "score": 26752.17040778335 }, { "content": "use chrono::NaiveDateTime;\n\n\n\nuse super::feed::Feed;\n\nuse super::image_to_data;\n\nuse super::settings::*;\n\nuse tera::Context;\n\nuse tera::Tera;\n\n\n\nuse kuchiki::traits::*;\n\n\n\nuse emailmessage::{header, Message as Email, SinglePart};\n\nuse emailmessage::header::EmailDate;\n\nuse emailmessage::Mailbox;\n\n\n\nuse custom_error::custom_error;\n\n\n\ncustom_error!{pub UnprocessableMessage\n\n CantPutDateInMessage{ value:String } = \"EmailMessage can't parse date from {value}\",\n\n CantPutFirstAuthorInMessage { value:String } = \"Unable to parse first author {value}.\n\n Please consider adding in feed config the \\\"from\\\": ... field\",\n", "file_path": "src/message.rs", "rank": 25, "score": 26532.087640490532 }, { "content": " Ok(text) => {\n\n debug!(\"===========================\\nWriting message content to IMAP\\n{}\\n===========================\", \n\n text);\n\n match settings.email.append(&folder, &text) {\n\n Ok(_) => debug!(\"Successfully written {}\", self.title),\n\n Err(e) => error!(\n\n \"{}\\nUnable to select mailbox {}. 
Item titled {} won't be written\",\n\n e, &folder, self.title\n\n ),\n\n }\n\n },\n\n Err(error) => {\n\n warn!(\"Couldn(t write message {:?} from feed {} due to {}\", self.links, feed.url, error);\n\n }\n\n }\n\n }\n\n\n\n fn build_message(&self, feed: &Feed, settings: &Settings) -> Result<String, UnprocessableMessage> {\n\n let content = self.extract_content(feed, settings)?;\n\n debug!(\"===========================\\nCreating message content\\n{}\\n===========================\", content);\n", "file_path": "src/message.rs", "rank": 26, "score": 26528.020268602042 }, { "content": "///\n\n/// Structure for storing message data prior to having these messages written to IMAP.\n\n/// This structure serves as a common interface for Item/Entry\n\npub struct Message {\n\n /// List of message authors\n\n pub authors: Vec<String>,\n\n /// Message content. Image extraction should happen BEFORE that storage.\n\n pub content: String,\n\n /// Message id\n\n pub id: String,\n\n pub last_date: NaiveDateTime,\n\n pub links: Vec<String>,\n\n pub title: String,\n\n}\n\n\n\nimpl Message {\n\n pub fn write_to_imap(&self, feed: &Feed, settings: &Settings) {\n\n let folder = feed.config.get_folder(&settings.config);\n\n let content = self.build_message(feed, settings);\n\n match content {\n", "file_path": "src/message.rs", "rank": 27, "score": 26525.407396359886 }, { "content": " /// Makes a valid HTML file out of the given Item.\n\n /// This method provides all the transformation that should happen\n\n fn extract_content(&self, feed: &Feed, settings: &Settings) -> Result<String, UnprocessableMessage> {\n\n Ok(TERA.render(\"message.html\", &self.build_context(feed, settings)?)\n\n .unwrap())\n\n }\n\n\n\n ///\n\n /// Process the feed effective content.\n\n /// This should allow\n\n /// * image transformation into base64 when needed\n\n ///\n\n pub fn get_processed_content(content:&String, feed: &Feed, settings: &Settings) -> Result<String, UnprocessableMessage> {\n\n if 
feed.config.inline_image_as_data || settings.config.inline_image_as_data {\n\n let mut document = kuchiki::parse_html().one(content.clone());\n\n // So, take content, pass it through html5ever (thanks to select, and transform each image !)\n\n document = image_to_data::transform(document, settings);\n\n let mut bytes = vec![];\n\n if document.serialize(&mut bytes).is_err() {\n\n return Err(UnprocessableMessage::CantWriteTransformedMessage);\n", "file_path": "src/message.rs", "rank": 28, "score": 26523.352595951485 }, { "content": " }\n\n Ok(String::from_utf8(bytes).unwrap())\n\n } else {\n\n Ok(content.clone())\n\n }\n\n }\n\n\n\n fn build_context(&self, feed: &Feed, settings: &Settings) -> Result<Context, UnprocessableMessage> {\n\n let mut context = Context::new();\n\n context.insert(\"feed_entry\", &self.content);\n\n context.insert(\"links\", &self.links);\n\n context.insert(\"id\", &self.id);\n\n context.insert(\"title\", &self.title);\n\n context.insert(\"from\", &self.authors);\n\n context.insert(\"to\", &feed.config.get_email(&settings.config));\n\n context.insert(\"date\", &self.date_text());\n\n Ok(context)\n\n }\n\n\n\n fn date_text(&self) -> String {\n\n self.last_date\n\n .format(\"%a, %d %b %Y %H:%M:%S -0000\")\n\n .to_string()\n\n }\n\n}\n", "file_path": "src/message.rs", "rank": 29, "score": 26520.67616199106 }, { "content": " let date:Result<EmailDate, _> = self.date_text().parse();\n\n if date.is_err() {\n\n return Err(UnprocessableMessage::CantPutDateInMessage { value : self.date_text() });\n\n }\n\n let to_addr = settings.config.email.as_ref().unwrap_or(&settings.email.user);\n\n let mut builder = Email::builder()\n\n .subject(&*self.title)\n\n .date(date.unwrap())\n\n .to(to_addr.parse().unwrap())\n\n ;\n\n\n\n match &feed.config.from {\n\n Some(from) => {\n\n builder = builder.from(from.parse().unwrap());\n\n }\n\n None => {\n\n if self.authors.is_empty() {\n\n builder = builder.from(\"[email protected]\".parse().unwrap());\n\n } else {\n\n 
let first_author = &self.authors[0];\n", "file_path": "src/message.rs", "rank": 30, "score": 26519.293735485287 }, { "content": " CantWriteTransformedMessage = \"Can't re-write transformed message after image Base64'ing\"\n\n}\n\n\n\n\n\nlazy_static! {\n\n pub static ref TERA: Tera = {\n\n let message = include_str!(\"../templates/message.html\");\n\n let mut tera = match Tera::new(\"templates/**/*\") {\n\n Ok(t) => t,\n\n Err(e) => {\n\n println!(\"Can't compile tera template: {}\", e);\n\n ::std::process::exit(1);\n\n }\n\n };\n\n tera.add_raw_template(\"message.html\", message).expect(\"There should be a message.html template\");\n\n tera.autoescape_on(vec![]);\n\n tera\n\n };\n\n}\n\n\n", "file_path": "src/message.rs", "rank": 31, "score": 26518.680686779873 }, { "content": " let parsed_first_author:Result<Mailbox, _> = first_author.parse();\n\n if parsed_first_author.is_err() {\n\n return Err(UnprocessableMessage::CantPutFirstAuthorInMessage { value : first_author.clone() });\n\n }\n\n builder = builder.from(parsed_first_author.unwrap());\n\n }\n\n }\n\n }\n\n\n\n let email: Email<SinglePart<String>> = builder.mime_body(\n\n SinglePart::builder()\n\n .header(header::ContentType(\n\n \"text/html; charset=utf8\".parse().unwrap(),\n\n ))\n\n .header(header::ContentTransferEncoding::QuotedPrintable)\n\n .body(content),\n\n );\n\n Ok(email.to_string())\n\n }\n\n\n", "file_path": "src/message.rs", "rank": 32, "score": 26513.082909962985 }, { "content": " /// imap server we want to connect to\n\n server: String,\n\n /// username used to connect to that server\n\n pub user: String,\n\n /// password used to connect to that server.\n\n /// **WARNING** THis password is in **no way** encrypted, which makes rrss2imap a \"not-so-secured\" software\n\n password: String,\n\n /// secured connection state\n\n #[serde(default = \"Email::default_secure\")]\n\n secure: Secure,\n\n #[serde(default = \"Email::default_retry_max_count\")]\n\n retry_max_count: u8,\n\n #[serde(default = 
\"Email::default_retry_delay\")]\n\n retry_delay: u64,\n\n}\n\n\n\n/// Imap effective connection type (ie once connection has been established).\n\n/// This enum presents a simple interface allowing seamless access for (un)secured servers.\n\n#[derive(Debug)]\n\npub enum Imap {\n", "file_path": "src/settings.rs", "rank": 33, "score": 26394.365831946718 }, { "content": " }\n\n }\n\n\n\n /// default secure port, used by serde\n\n pub fn default_secure() -> Secure {\n\n Secure::Yes(993)\n\n }\n\n /// default max retries number, used by serde\n\n pub fn default_retry_max_count() -> u8 {\n\n 3\n\n }\n\n /// default retry delay, used by serde\n\n pub fn default_retry_delay() -> u64 {\n\n 1\n\n }\n\n /// Constructs a default email config, used in Settings by serde\n\n pub fn default() -> Email {\n\n Email {\n\n server: \"Set your email server address here\".to_owned(),\n\n user: \"Set your imap server user name (it may be your email address or not)\".to_owned(),\n", "file_path": "src/settings.rs", "rank": 34, "score": 26393.815742532377 }, { "content": "use imap::error::Result;\n\nuse imap::Session;\n\nuse std::{thread, time};\n\n\n\nuse super::config::Config;\n\n\n\n/// Secured connection or not ?\n\n/// Whichever is chosen, user has to give the port as parameter\n\n#[derive(Debug, Clone, Deserialize, Serialize)]\n\npub enum Secure {\n\n No(u16),\n\n Yes(u16),\n\n}\n\n/// mail config\n\n/// I SHOULD allow a kind of Keepass access.\n\n/// But as code isn't expected to run on any kind of UI-aware machine (but on a headless Raspbian),\n\n/// I can't connect it to Keepass.\n\n/// So I should implement a kind of secure storage\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct Email {\n", "file_path": "src/settings.rs", "rank": 35, "score": 26393.44162870646 }, { "content": " Imap::Secured(imap_session)\n\n }\n\n}\n\n\n\n/// Store-level config\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct Settings {\n\n /// when set to true, no reading statis will be 
persisted.\n\n /// As a consequence, messages may be read more than once\n\n #[serde(\n\n skip_serializing_if = \"Settings::is_false\",\n\n default = \"Settings::default_false\"\n\n )]\n\n pub do_not_save: bool,\n\n /// inline all images as base64 data\n\n /* #[serde(\n\n skip_serializing_if = \"Settings::is_false\",\n\n default = \"Settings::default_false\"\n\n )]\n\n pub inline_image_as_data: bool,\n", "file_path": "src/settings.rs", "rank": 36, "score": 26392.37955437215 }, { "content": " Secured(Session<native_tls::TlsStream<std::net::TcpStream>>),\n\n Insecured(Session<std::net::TcpStream>),\n\n}\n\n\n\nimpl Imap {\n\n /// Appends a new message to the given server.\n\n pub fn append<S: AsRef<str>, B: AsRef<[u8]>>(&mut self, mailbox: S, content: B) -> Result<()> {\n\n match self {\n\n Imap::Secured(ref mut session) => session.append(mailbox, content),\n\n Imap::Insecured(ref mut session) => session.append(mailbox, content),\n\n }\n\n }\n\n}\n\n\n\nimpl Email {\n\n /// Appends a new message to the given server.\n\n /// This method decorates the Imap::append method by adding retry ability.\n\n pub fn append<S: AsRef<str>, B: AsRef<[u8]>>(&self, mailbox: &S, content: &B) -> Result<()> {\n\n let mut count = 0;\n\n loop {\n", "file_path": "src/settings.rs", "rank": 37, "score": 26390.9750152391 }, { "content": " */\n\n #[serde(default = \"Email::default\")]\n\n pub email: Email,\n\n #[serde(default = \"Config::new\")]\n\n pub config: Config,\n\n}\n\n\n\nimpl Settings {\n\n pub fn is_false(value: &bool) -> bool {\n\n !value\n\n }\n\n pub fn default_false() -> bool {\n\n false\n\n }\n\n /*\n\n pub fn is_true(value: &bool) -> bool {\n\n !!value\n\n }\n\n pub fn default_true() -> bool {\n\n true\n", "file_path": "src/settings.rs", "rank": 38, "score": 26389.376707993873 }, { "content": " }\n\n */\n\n pub fn default() -> Settings {\n\n Settings {\n\n do_not_save: false,\n\n email: Email::default(),\n\n config: Config::new(),\n\n }\n\n }\n\n}\n", "file_path": 
"src/settings.rs", "rank": 39, "score": 26388.53750905091 }, { "content": " count += 1;\n\n let mut imap = self.start();\n\n let result = imap.append(mailbox, content);\n\n if result.is_err() {\n\n if count > self.retry_max_count {\n\n return result;\n\n } else {\n\n error!(\n\n \"Previous append attempt failed with {}. Retrying ({}/{})in {} s.!\",\n\n result.unwrap_err(),\n\n count,\n\n self.retry_max_count,\n\n self.retry_delay\n\n );\n\n // TODO maybe remove that once code is parallel\n\n thread::sleep(time::Duration::from_secs(self.retry_delay));\n\n }\n\n } else {\n\n return result;\n\n }\n", "file_path": "src/settings.rs", "rank": 40, "score": 26386.737451384 }, { "content": " // we pass in the domain twice to check that the server's TLS\n\n // certificate is valid for the domain we're connecting to.\n\n let client = imap::connect((self.server.as_str(), port), &self.server, &tls)\n\n .unwrap_or_else(|_| panic!(\"Couldn't connect to {}:{}\", self.server, port));\n\n\n\n // the client we have here is unauthenticated.\n\n // to do anything useful with the e-mails, we need to log in\n\n let imap_session = client\n\n .login(&self.user, &self.password)\n\n .unwrap_or_else(|_| {\n\n panic!(\n\n \"Couldn't securely connect to {}:{} for login {}\",\n\n self.server, port, self.user\n\n )\n\n });\n\n\n\n debug!(\n\n \"Successfully connected to SECURE imap server {}\",\n\n self.server\n\n );\n", "file_path": "src/settings.rs", "rank": 41, "score": 26385.607779965827 }, { "content": " password: \"Set your imap server password (yup, in clear, this is very bad)\".to_owned(),\n\n secure: Email::default_secure(),\n\n retry_max_count: Email::default_retry_max_count(),\n\n retry_delay: Email::default_retry_delay(),\n\n }\n\n }\n\n\n\n /// starts connection to selected imap server, whatever it is\n\n pub fn start(&self) -> Imap {\n\n match self.secure {\n\n Secure::Yes(port) => self.start_secure(port),\n\n Secure::No(_port) => panic!(\"rrss2map no more supports unsecured 
connection to IMAP server due to evolutions of IMAP library (see https://github.com/jonhoo/rust-imap/pull/140)\"),\n\n }\n\n }\n\n\n\n fn start_secure(&self, port: u16) -> Imap {\n\n let tls = native_tls::TlsConnector::builder()\n\n .build()\n\n .expect(\"Couldn't create TLS connector\");\n\n\n", "file_path": "src/settings.rs", "rank": 42, "score": 26385.159256647763 }, { "content": " /// see [import](rrss2imap::import::import) for implementation details\n\n pub fn import(&mut self, file: Option<PathBuf>) {\n\n let path_to_read = file.expect(\"Can't import file if no file is given\");\n\n info!(\"importing content from {:?}\", path_to_read);\n\n let count = self.feeds.len();\n\n import::import(&path_to_read, self);\n\n self.dirty = true;\n\n info!(\n\n \"imported {} feeds from {:?}\",\n\n self.feeds.len() - count,\n\n path_to_read\n\n );\n\n }\n\n\n\n /// Add a feed to the feeds list and immediatly save the store.\n\n pub fn add(&mut self, url:Option<String>, email:Option<String>, destination:Option<String>, inline:bool, parameters: Vec<String>) {\n\n let to_add:Feed = if url.is_some() {\n\n Feed::from_all(url, email, destination, inline)\n\n } else {\n\n Feed::from_vec(parameters)\n", "file_path": "src/store.rs", "rank": 48, "score": 26005.649534207478 }, { "content": " };\n\n info!(\"adding \\\"{:?}\\\"\", to_add);\n\n self.add_feed(to_add);\n\n self.dirty = true;\n\n }\n\n\n\n /// Delete the feed which id is given as parameter.\n\n /// The use of a number is a compatibility requirement\n\n pub fn delete(&mut self, feed: u32) {\n\n let f = self.feeds.remove(feed as usize);\n\n self.dirty = true;\n\n info!(\"Removed {:?}\", f);\n\n }\n\n\n\n /// Reset the config file by removing all feeds and config\n\n pub fn reset(&mut self) {\n\n self.feeds.clear();\n\n self.settings.config.clear();\n\n self.dirty = true;\n\n info!(\"store has been cleared to contain only {:?}\", self);\n", "file_path": "src/store.rs", "rank": 52, "score": 26004.11999766716 }, { "content": " 
}\n\n }\n\n\n\n /// Set a new value for email and save file (prior to obviously exiting)\n\n pub fn set_email(&mut self, email: String) {\n\n self.settings.config.email = Some(email);\n\n self.dirty = true;\n\n self.save();\n\n }\n\n\n\n /// Exports config into an OPML file\n\n /// see [export](rrss2imap::export::export) for implementation details\n\n pub fn export(&self, file: Option<PathBuf>) {\n\n let path_to_write = file.expect(\"Can't export file if no file is given\");\n\n warn!(\"exporting content to {:?}\", path_to_write);\n\n export::export(&path_to_write, self);\n\n info!(\"exported feeds to {:?}\", path_to_write);\n\n }\n\n\n\n /// Import rss feeds provided as an opml file\n", "file_path": "src/store.rs", "rank": 55, "score": 26001.45934829489 }, { "content": "\n\ncustom_error!{pub UnusableStore\n\n IO{source:std::io::Error} = \"input/output error\",\n\n JsonParseError{source:serde_json::Error} = \"Can't parse JSON content of store\"\n\n}\n\n\n\n\n\n/// Main application structure.\n\n/// This structure is read/written from/to a JSON file\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct Store {\n\n /// Contains all application settings\n\n pub settings: Settings,\n\n /// Contains all feeds being read\n\n pub feeds: Vec<Feed>,\n\n #[serde(skip)]\n\n pub dirty:bool,\n\n #[serde(skip)]\n\n pub path: PathBuf\n\n}\n\n\n\n/// Name of the file from which config is read/written. As of today, this name is not expected to change.\n\npub const STORE: &str = \"config.json\";\n\n\n\n/// Calculate the location of the `config.json` store file.\n\n/// If `config.json` is found in the current directory, use it for backward\n\n/// compatibility. 
Otherwise, return a path inside the project directory\n\n/// (~/.config/rrss2imap/ on Linux, system-specific on macOS and Windows).\n", "file_path": "src/store.rs", "rank": 58, "score": 25999.32739831036 }, { "content": " let mut file = File::open(path)?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n // Then deserialize its content\n\n let mut store: Store =\n\n serde_json::from_str(&contents)?;\n\n store.path = path.to_owned();\n\n // And return it\n\n Ok(store)\n\n } else {\n\n info!(\"Using fresh config file {}\", path.to_string_lossy());\n\n Ok(Store {\n\n settings: Settings::default(),\n\n feeds: vec![],\n\n dirty: false,\n\n path: path.to_owned()\n\n })\n\n }\n\n }\n\n\n", "file_path": "src/store.rs", "rank": 61, "score": 25996.911033875007 }, { "content": "extern crate directories;\n\n\n\nuse directories::ProjectDirs;\n\nuse std::path::{PathBuf, Path};\n\n\n\nuse std::fs;\n\nuse std::fs::File;\n\nuse std::io::Read;\n\n\n\n\n\n\n\nuse futures::stream::StreamExt;\n\nuse futures::stream::futures_unordered::FuturesUnordered;\n\n\n\nuse super::export;\n\nuse super::feed::Feed;\n\nuse super::import;\n\nuse super::settings::Settings;\n\n\n\nuse custom_error::custom_error;\n", "file_path": "src/store.rs", "rank": 62, "score": 25996.85745903903 }, { "content": " /// This is done in a way compatible with rss2imap original layout.\n\n /// As a consequence, new elements (like image inlining) are not visible\n\n pub fn list(&self) {\n\n let lines: Vec<String> = self\n\n .feeds\n\n .iter()\n\n .enumerate()\n\n .map(|(i, f)| format!(\"{} : {}\", i, f.to_string(&self.settings.config)))\n\n .collect();\n\n println!(\"{}\", &lines.join(\"\\n\"));\n\n }\n\n\n\n /// If the feed url is not already in the store, adds it\n\n pub fn add_feed(&mut self, to_add: Feed) {\n\n // We never add the same feed twice. 
To ensure that, we check that no feed has the same url\n\n let tested = self.feeds.clone();\n\n let already_existing: Vec<&Feed> = tested.iter().filter(|f| f.url == to_add.url).collect();\n\n if already_existing.is_empty() {\n\n self.feeds.push(to_add);\n\n } else {\n", "file_path": "src/store.rs", "rank": 63, "score": 25996.771940913877 }, { "content": " }\n\n\n\n /// Run all rss to imap transformation\n\n /// Each feed is read and immediatly written in this thread.\n\n /// This should be rewritten to allow optimization/parallelism\n\n pub async fn run(&mut self) {\n\n self.dirty = true;\n\n let client = reqwest::blocking::Client::builder()\n\n .build().unwrap();\n\n let feeds_length = self.feeds.len();\n\n // Initialize mail server before processing feeds\n\n self.feeds = self.feeds\n\n .iter().enumerate()\n\n .map(|element| element.1.read(element.0, &feeds_length, &client, &self.settings))\n\n .collect::<FuturesUnordered<_>>()\n\n .collect::<Vec<Feed>>()\n\n .await;\n\n }\n\n\n\n /// Prints all the feeds to stdout.\n", "file_path": "src/store.rs", "rank": 64, "score": 25995.36441691995 }, { "content": " /// Save all informations in the store file\n\n fn save(&self) {\n\n info!(\"Saving config file {}\", self.path.to_string_lossy());\n\n let serialized = serde_json::to_string_pretty(self).expect(\"Can't serialize Store to JSON\");\n\n let directory = self.path.parent().unwrap_or(Path::new(\".\"));\n\n fs::create_dir_all(directory)\n\n .unwrap_or_else(|_| panic!(\"Unable to create directory for file {}\", self.path.to_string_lossy()));\n\n fs::write(&self.path, serialized)\n\n .unwrap_or_else(|_| panic!(\"Unable to write file {}\", self.path.to_string_lossy()));\n\n }\n\n\n\n /// Create a new configuration file with the given email.\n\n pub fn init_config(&mut self, email: String) {\n\n if self.path.exists() {\n\n warn!(\"Config file {} already exists, leaving it unchanged.\", self.path.to_string_lossy());\n\n } else {\n\n println!(\"Config file {} created, please 
edit it to finish configuration.\", self.path.to_string_lossy());\n\n self.settings.config.email = Some(email);\n\n self.dirty = true;\n\n self.save();\n", "file_path": "src/store.rs", "rank": 66, "score": 25995.279834647572 }, { "content": " error!(\n\n \"We already read this feed with the following configuration {:?}\",\n\n already_existing\n\n );\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for Store {\n\n fn drop(&mut self) {\n\n if self.dirty {\n\n if self.settings.do_not_save {\n\n error!(\"do_not_save flag is set in config.json. NOT SAVING {} !\", self.path.to_string_lossy())\n\n } else {\n\n info!(\"store has been modified. Saving {} !\", self.path.to_string_lossy());\n\n self.save();\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/store.rs", "rank": 67, "score": 25994.367460766192 }, { "content": "use chrono::{NaiveDateTime};\n\n\n\nuse super::config::*;\n\n\n\nuse super::feed_reader::*;\n\nuse super::settings::*;\n\nuse super::syndication;\n\nuse reqwest::blocking::Client;\n\n\n\n#[derive(Clone, Debug, Deserialize, Serialize)]\n\npub struct Feed {\n\n /// Contains url of feed\n\n pub url: String,\n\n /// Contains specific configuration for field\n\n #[serde(skip_serializing_if = \"Config::is_none\", default = \"Config::new\")]\n\n pub config: Config,\n\n /// Last time the feed was read\n\n #[serde(default = \"Feed::at_epoch\")]\n\n pub last_updated: NaiveDateTime,\n\n /// Last message stored in IMAP, allows to correctly process feeds even when no date is provided\n", "file_path": "src/feed.rs", "rank": 68, "score": 24869.143980550507 }, { "content": " },\n\n last_updated: Feed::at_epoch(),\n\n last_message: None\n\n }\n\n }\n\n\n\n pub fn to_string(&self, config: &Config) -> String {\n\n return format!(\"{} {}\", self.url, self.config.clone().to_string(config));\n\n }\n\n\n\n pub async fn read(&self, index:usize, count:&usize, client:&Client, settings: &Settings) -> Feed {\n\n info!(\"Reading feed {}/{} from {}\", index+1, count, self.url);\n\n match 
client.get(&self.url).send() {\n\n Ok(response) => match response.text() {\n\n Ok(text) => match text.parse::<syndication::Feed>() {\n\n Ok(parsed) => {\n\n return match parsed {\n\n syndication::Feed::Atom(atom_feed) => {\n\n AtomReader {}.read(self, &atom_feed, settings)\n\n }\n", "file_path": "src/feed.rs", "rank": 69, "score": 24865.610817900622 }, { "content": " /// which, mind you, is totally possible according to RSS specification\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub last_message: Option<String>\n\n}\n\n\n\nimpl Feed {\n\n /// Creates a new naivedatetime with a default value (which is, to my mind) a sensible default for computers\n\n pub fn at_epoch() -> NaiveDateTime {\n\n NaiveDateTime::from_timestamp(0, 0)\n\n }\n\n\n\n // Convert the parameters vec into a valid feed (if possible)\n\n pub fn from_vec(parameters: Vec<String>) -> Feed {\n\n let mut consumed = parameters;\n\n let url: String = consumed\n\n .pop()\n\n .expect(\"You must at least define an url to add.\");\n\n let mut email: Option<String> = None;\n\n let mut folder: Option<String> = None;\n\n // If there is a second parameter, it can be either email or folder\n", "file_path": "src/feed.rs", "rank": 70, "score": 24861.67577030938 }, { "content": " url,\n\n config: Config {\n\n email,\n\n folder,\n\n from: None,\n\n inline_image_as_data: false,\n\n },\n\n last_updated: Feed::at_epoch(),\n\n last_message: None\n\n }\n\n }\n\n\n\n pub fn from_all(url:Option<String>, email:Option<String>, destination:Option<String>, inline:bool) -> Feed {\n\n Feed {\n\n url: url.unwrap(),\n\n config: Config {\n\n email,\n\n folder: destination,\n\n from: None,\n\n inline_image_as_data: inline,\n", "file_path": "src/feed.rs", "rank": 71, "score": 24860.634193418355 }, { "content": " syndication::Feed::RSS(rss_feed) => {\n\n RssReader {}.read(self, &rss_feed, settings)\n\n }\n\n }\n\n }\n\n Err(e) => error!(\"Content ar {} is neither Atom, nor RSS {}.\\nTODO check real content type to help 
user.\", &self.url, e),\n\n },\n\n Err(e) => error!(\"There is no text at {} due to error {}\", &self.url, e),\n\n },\n\n Err(e) => error!(\"Unable to get {} due to {}.\\nTODO Add better http response analysis !\", &self.url, e),\n\n }\n\n self.clone()\n\n }\n\n\n\n}\n", "file_path": "src/feed.rs", "rank": 72, "score": 24859.40912234412 }, { "content": " if !consumed.is_empty() {\n\n let second = consumed.pop().unwrap();\n\n // If second parameters contains an @, I suppose it is an email address\n\n if second.contains('@') {\n\n debug!(\n\n \"Second add parameter {} is considered an email address\",\n\n second\n\n );\n\n email = Some(second)\n\n } else {\n\n warn!(\"Second add parameter {} is NOT considered an email address, but a folder. NO MORE ARGUMENTS WILL BE PROCESSED\", second);\n\n folder = Some(second)\n\n }\n\n }\n\n // If there is a third parameter, it is the folder.\n\n // But if folder was already defined, there is an error !\n\n if !consumed.is_empty() && folder == None {\n\n folder = Some(consumed.pop().unwrap());\n\n }\n\n Feed {\n", "file_path": "src/feed.rs", "rank": 73, "score": 24855.266079876907 }, { "content": "/// The reader trait allow reading data from a web source.\n\n/// It is supposed to be derived for Rss and Atom, but it's only a try currently ...\n\npub trait Reader<EntryType, FeedType> {\n\n fn process_message(&self, feed:&Feed, settings:&Settings, message:&Message)->Message {\n\n Message {\n\n authors: message.authors.clone(),\n\n content: Message::get_processed_content(&message.content, feed, settings).unwrap(),\n\n id: message.id.clone(),\n\n last_date: message.last_date,\n\n links: message.links.clone(),\n\n title: message.title.clone(),\n\n }\n\n }\n\n\n\n /// Find in the given input feed the new messages\n\n /// A message is considered new if it has a date which is nearer than feed last processed date\n\n /// or (because RSS and Atom feeds may not have dates) if its id is not yet the id of the last\n\n /// processed feed\n\n fn 
find_new_messages(&self, feed:&Feed, sorted_messages:&[&Message])->(usize, usize, bool) {\n\n let head:usize = 0;\n\n let mut tail:usize = 0;\n\n let mut found = false;\n", "file_path": "src/feed_reader.rs", "rank": 74, "score": 24787.0098831472 }, { "content": "use chrono::{DateTime, Utc, FixedOffset, NaiveDateTime};\n\n\n\nuse super::feed_errors::*;\n\nuse super::message::*;\n\nuse super::settings::*;\n\nuse atom_syndication::Entry as AtomEntry;\n\nuse atom_syndication::Feed as AtomFeed;\n\nuse rss::Channel as RssChannel;\n\nuse rss::Item as RssItem;\n\nuse url::Url;\n\n\n\nuse super::feed::*;\n\nuse super::feed_utils::*;\n\n\n\n/// The reader trait allow reading data from a web source.\n\n/// It is supposed to be derived for Rss and Atom, but it's only a try currently ...\n", "file_path": "src/feed_reader.rs", "rank": 75, "score": 23456.56124002471 }, { "content": " }\n\n\n\n fn extract_messages(&self, source:&AtomFeed)->Vec<Result<Message, UnparseableFeed>> {\n\n source.entries()\n\n .iter()\n\n .map(|e| self.extract(e, source))\n\n .collect()\n\n }\n\n}\n\n\n\npub struct RssReader {}\n\n\n\nimpl RssReader {\n\n fn extract_authors_from_rss(entry: &RssItem, feed: &RssChannel) -> Vec<String> {\n\n let domain = RssReader::find_rss_domain(feed);\n\n // This is where we also transform author names into urls in order\n\n // to have valid email addresses everywhere\n\n let message_authors: Vec<String>;\n\n match entry.author() {\n\n Some(l) => message_authors = vec![l.to_owned()],\n", "file_path": "src/feed_reader.rs", "rank": 76, "score": 23455.375551695986 }, { "content": " feed_date, feed.last_updated\n\n );\n\n let extracted:Vec<Result<Message, UnparseableFeed>> = self.extract_messages(source);\n\n\n\n let date_errors = extracted.iter()\n\n .filter(|e| e.is_err())\n\n .fold(0, |acc, _| acc + 1);\n\n if date_errors==0 {\n\n self.write_new_messages(feed, settings, extracted)\n\n } else {\n\n warn!(\"There were problems getting content from feed {}. 
It may not be complete ...\n\n I strongly suggest you enter an issue on GitHub by following this link\n\n https://github.com/Riduidel/rrss2imap/issues/new?title=Incorrect%20feed&body=Feed%20at%20url%20{}%20doesn't%20seems%20to%20be%20parseable\", \n\n feed.url, feed.url);\n\n feed.clone()\n\n }\n\n }\n\n}\n\n\n\npub struct AtomReader {}\n", "file_path": "src/feed_reader.rs", "rank": 77, "score": 23454.994275968158 }, { "content": " _ => message_authors = vec![feed.title().to_owned()],\n\n }\n\n sanitize_message_authors(message_authors, domain)\n\n }\n\n fn find_rss_domain(feed: &RssChannel) -> String {\n\n return Some(feed.link())\n\n .map(|href| Url::parse(href).unwrap())\n\n // then get host\n\n .map(|url| url.host_str().unwrap().to_string())\n\n // and return value\n\n .unwrap_or(\"todo.find.domain.atom\".to_string());\n\n }\n\n\n\n fn try_hard_to_parse(date:String) -> Result<DateTime<FixedOffset>, UnparseableFeed> {\n\n let parsed = rfc822_sanitizer::parse_from_rfc2822_with_fallback(&date);\n\n if parsed.is_ok() {\n\n Ok(parsed?)\n\n } else {\n\n let retry = DateTime::parse_from_rfc3339(&date);\n\n if retry.is_ok() {\n", "file_path": "src/feed_reader.rs", "rank": 78, "score": 23452.296625864994 }, { "content": " }\n\n (head, tail, found)\n\n }\n\n\n\n fn write_new_messages(&self, feed:&Feed, settings:&Settings, extracted:Vec<Result<Message, UnparseableFeed>>)->Feed {\n\n let sorted_messages:Vec<&Message> = extracted.iter()\n\n .filter(|e| e.is_ok())\n\n .map(|e| e.as_ref().unwrap())\n\n .collect::<Vec<&Message>>();\n\n let (head, tail, found) = self.find_new_messages(feed, &sorted_messages);\n\n let filtered_messages:&[&Message] = if found {\n\n &sorted_messages[head..tail]\n\n } else {\n\n sorted_messages.as_slice()\n\n };\n\n\n\n // And write the messages into IMAP and the feed into JSON\n\n let written_messages:Vec<Message> = filtered_messages.iter()\n\n .map(|message| self.process_message(feed, settings, message))\n\n .inspect(|e| if !settings.do_not_save { 
e.write_to_imap(feed, settings) } )\n", "file_path": "src/feed_reader.rs", "rank": 79, "score": 23450.869566287565 }, { "content": " .iter()\n\n .filter(|link| link.rel() == \"self\" || link.rel() == \"alternate\").find(|link| !link.href().is_empty())\n\n // Get the link\n\n .map(|link| link.href())\n\n // Transform it into an url\n\n .map(|href| Url::parse(href).unwrap())\n\n // then get host\n\n .map(|url| url.host_str().unwrap().to_string())\n\n // and return value\n\n .unwrap_or(\"todo.find.domain.rss\".to_string());\n\n }\n\n}\n\n\n\nimpl Reader<AtomEntry, AtomFeed> for AtomReader {\n\n fn extract(&self, entry: &AtomEntry, source: &AtomFeed) -> Result<Message, UnparseableFeed> {\n\n let authors = AtomReader::extract_authors_from_atom(entry, source);\n\n let last_date = entry\n\n .updated()\n\n .naive_utc();\n\n let content = match entry.content() {\n", "file_path": "src/feed_reader.rs", "rank": 80, "score": 23450.241401253483 }, { "content": "\n\nimpl AtomReader {\n\n fn extract_authors_from_atom(entry: &AtomEntry, feed: &AtomFeed) -> Vec<String> {\n\n let domain = AtomReader::find_atom_domain(feed);\n\n // This is where we also transform author names into urls in order\n\n // to have valid email addresses everywhere\n\n let mut message_authors: Vec<String> = entry\n\n .authors()\n\n .iter()\n\n .map(|a| a.name().to_owned())\n\n .collect();\n\n if message_authors.is_empty() {\n\n message_authors = vec![feed.title().to_owned().to_string()]\n\n }\n\n sanitize_message_authors(message_authors, domain)\n\n }\n\n \n\n fn find_atom_domain(feed: &AtomFeed) -> String {\n\n return feed\n\n .links()\n", "file_path": "src/feed_reader.rs", "rank": 81, "score": 23449.79885461029 }, { "content": " Some(message) => {\n\n returned.last_updated = message.last_date;\n\n returned.last_message = Some(message.id.clone());\n\n },\n\n _ => {}\n\n }\n\n }\n\n returned\n\n }\n\n\n\n fn extract(&self, entry:&EntryType, source:&FeedType) -> Result<Message, UnparseableFeed>;\n\n fn 
read_feed_date(&self, source:&FeedType)->NaiveDateTime;\n\n\n\n fn extract_messages(&self, source:&FeedType)->Vec<Result<Message,UnparseableFeed>>;\n\n \n\n fn read(&self, feed:&Feed, source:&FeedType, settings:&Settings)->Feed {\n\n debug!(\"reading feed {}\", &feed.url);\n\n let feed_date = self.read_feed_date(source);\n\n info!(\n\n \"Feed date is {} while previous read date is {}\",\n", "file_path": "src/feed_reader.rs", "rank": 82, "score": 23449.595723321756 }, { "content": " .collect();\n\n let mut last_message:Option<&Message> = written_messages.iter()\n\n // ok, there is a small problem here: if at least two elements have the same value - which is the case when feed\n\n // elements have no dates - the LAST one is used (which is **not** what we want)\n\n // see https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.max_by_key\n\n .max_by_key(|e| e.last_date.timestamp());\n\n // So, to overcome last problem, if first filtered message has same date than last_message, we replace last by first\n\n // As RSS feeds are supposed to put the latest emitted message in first position\n\n match last_message {\n\n Some(last) => if filtered_messages.len()>1 && filtered_messages[0].last_date==last.last_date {\n\n last_message = Some(filtered_messages[0].clone());\n\n },\n\n _ => {}\n\n }\n\n \n\n let mut returned = feed.clone();\n\n if settings.do_not_save {\n\n warn!(\"do_not_save is set. 
As a consequence, feed won't be updated\");\n\n } else {\n\n match last_message {\n", "file_path": "src/feed_reader.rs", "rank": 83, "score": 23449.09996954877 }, { "content": " let trimmed:String = trim_to_chars(email, vec![\"|\", \":\", \"-\", \"<\", \">\"]);\n\n let lowercased = trimmed.to_lowercase();\n\n let tuple = (trimmed,\n\n BAD_CHARACTER_REMOVER.replace_all(&lowercased, \"_\")\n\n );\n\n return format!(\"{} <{}@{}>\", tuple.0, tuple.1, domain);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n mod email_tests {\n\n use super::super::*;\n\n\n\n #[test]\n\n fn can_create_email_from_flo() {\n\n assert_eq!(\"F(lo) <[email protected]>\", sanitize_email(&\"F(lo)\".to_string(), &\"linuxfr.org\".to_string()));\n\n }\n\n\n\n #[test]\n", "file_path": "src/feed_utils.rs", "rank": 84, "score": 23447.661418824795 }, { "content": " id,\n\n last_date: last_date?.naive_utc(),\n\n links,\n\n title: entry.title().unwrap_or(\"\").to_owned(),\n\n };\n\n Ok(message)\n\n }\n\n\n\n fn extract_messages(&self, source:&RssChannel)->Vec<Result<Message, UnparseableFeed>> {\n\n source.items()\n\n .iter()\n\n .map(|e| self.extract(e, source))\n\n .collect()\n\n }\n\n\n\n fn read_feed_date(&self, source:&RssChannel)->NaiveDateTime {\n\n let n = Utc::now();\n\n let feed_date_text = match source.pub_date() {\n\n Some(p) => p.to_owned(),\n\n None => match source.last_build_date() {\n", "file_path": "src/feed_reader.rs", "rank": 85, "score": 23447.251622838143 }, { "content": " // First step is to fix HTML, so load it using html5ever\n\n // (because there is no better html parser than a real browser one)\n\n // TODO implement image inlining\n\n .to_owned();\n\n let links = match entry.link() {\n\n Some(l) => vec![l.to_owned()],\n\n _ => vec![],\n\n };\n\n let id = if links.is_empty() {\n\n match entry.guid() {\n\n Some(g) => g.value().to_owned(),\n\n _ => \"no id\".to_owned(),\n\n }\n\n } else {\n\n links[0].clone()\n\n };\n\n let last_date = RssReader::extract_date_from_rss(entry, 
source);\n\n let message = Message {\n\n authors,\n\n content,\n", "file_path": "src/feed_reader.rs", "rank": 86, "score": 23444.796734758344 }, { "content": " );\n\n if feed.pub_date().is_some() {\n\n let pub_date = feed.pub_date().unwrap().to_owned();\n\n RssReader::try_hard_to_parse(pub_date)\n\n } else if feed.last_build_date().is_some() {\n\n let last_pub_date = feed.last_build_date().unwrap().to_owned();\n\n RssReader::try_hard_to_parse(last_pub_date)\n\n } else {\n\n Ok(DateTime::<FixedOffset>::from_utc(Feed::at_epoch(), FixedOffset::east(0)))\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Reader<RssItem, RssChannel> for RssReader {\n\n fn extract(&self, entry: &RssItem, source: &RssChannel) -> Result<Message, UnparseableFeed> {\n\n let authors = RssReader::extract_authors_from_rss(entry, source);\n\n let content = entry\n\n .content()\n\n .unwrap_or_else(|| entry.description().unwrap_or(\"\"))\n", "file_path": "src/feed_reader.rs", "rank": 87, "score": 23444.229745153185 }, { "content": " Some(content) => content.value().unwrap(),\n\n None => match entry.summary() {\n\n Some(text)=> text.as_str(),\n\n None=>\"\"\n\n }\n\n }\n\n .to_owned();\n\n let message = Message {\n\n authors,\n\n content,\n\n id: entry.id().to_owned(),\n\n last_date,\n\n links: entry.links().iter().map(|l| l.href().to_owned()).collect(),\n\n title: entry.title().as_str().to_string()\n\n };\n\n Ok(message)\n\n }\n\n\n\n fn read_feed_date(&self, source:&AtomFeed)->NaiveDateTime {\n\n source.updated().naive_utc()\n", "file_path": "src/feed_reader.rs", "rank": 88, "score": 23443.901096689813 }, { "content": " // Now do the filter\n\n // This part is not so easy.\n\n // we will first iterate over the various items and for each, check that\n\n // 1 - the message id is not the last read message one\n\n // 2 - if messages have dates, the message date is more recent than the last one\n\n for (position, message) in sorted_messages.iter().enumerate() {\n\n if !found {\n\n match &feed.last_message {\n\n 
Some(id) => if id==&message.id {\n\n tail = position; \n\n found = true;\n\n break;\n\n },\n\n None => {}\n\n };\n\n if message.last_date<feed.last_updated {\n\n tail = position; \n\n found = true;\n\n }\n\n }\n", "file_path": "src/feed_reader.rs", "rank": 89, "score": 23442.62228071596 }, { "content": "use regex::Regex;\n\n\n", "file_path": "src/feed_utils.rs", "rank": 90, "score": 23441.813682995904 }, { "content": " Ok(retry?)\n\n } else {\n\n Err(UnparseableFeed::DateIsNeitherRFC2822NorRFC3339 {value:date})\n\n }\n\n }\n\n }\n\n \n\n fn extract_date_from_rss(entry: &RssItem, feed: &RssChannel) -> Result<DateTime<FixedOffset>, UnparseableFeed> {\n\n if entry.pub_date().is_some() {\n\n let mut pub_date = entry.pub_date().unwrap().to_owned();\n\n pub_date = pub_date.replace(\"UTC\", \"UT\");\n\n RssReader::try_hard_to_parse(pub_date)\n\n } else if entry.dublin_core_ext().is_some()\n\n && !entry.dublin_core_ext().unwrap().dates().is_empty()\n\n {\n\n let pub_date = &entry.dublin_core_ext().unwrap().dates()[0];\n\n Ok(DateTime::parse_from_rfc3339(pub_date)?)\n\n } else {\n\n debug!(\"feed item {:?} date can't be parsed, as it doesn't have neither pub_date nor dc:pub_date. 
We will replace it with feed date if possible\",\n\n &entry.link()\n", "file_path": "src/feed_reader.rs", "rank": 91, "score": 23441.681535849286 }, { "content": " fn can_create_email_from_blog_a_part() {\n\n assert_eq!(\"Blog à part <[email protected]>\", sanitize_email(&\"Blog à part\".to_string(), &\"alias.erdorin.org\".to_string()));\n\n }\n\n\n\n #[test]\n\n fn can_create_email_from_xkcd() {\n\n assert_eq!(\"xkcd.com <[email protected]>\", sanitize_email(&\"xkcd.com\".to_string(), &\"xkcd.com\".to_string()));\n\n }\n\n\n\n #[test]\n\n fn can_create_email_from_sex_at_liberation() {\n\n assert_eq!(\"sexes.blogs.liberation.fr <[email protected]>\", \n\n sanitize_email(\n\n &\"sexes.blogs.liberation.fr - Derniers articles\".to_string(), \n\n &\"sexes.blogs.liberation.fr\".to_string()));\n\n }\n\n\n\n #[test]\n\n fn can_create_email_from_real_address_at_sex_at_liberation() {\n\n assert_eq!(\"Agnès Giard <[email protected]>\", \n\n sanitize_email(\n\n &\"[email protected] (Agnès Giard)\".to_string(), \n\n &\"sexes.blogs.liberation.fr\".to_string()));\n\n }\n\n }\n\n}", "file_path": "src/feed_utils.rs", "rank": 92, "score": 23441.33073213525 }, { "content": " Some(l) => l.to_owned(),\n\n None => n.to_rfc2822(),\n\n },\n\n };\n\n DateTime::parse_from_rfc2822(&feed_date_text)\n\n .unwrap()\n\n .naive_utc()\n\n \n\n }\n\n}\n", "file_path": "src/feed_reader.rs", "rank": 93, "score": 23437.272966777626 }, { "content": "#### How to use ?\n\n\n\nThe simplest way to understand what to do is just to run `rrss2imap --help`\n\n\n\nIt should output something like\n\n\n\n FLAGS:\n\n -h, --help Prints help information\n\n -V, --version Prints version information\n\n \n\n SUBCOMMANDS:\n\n add Adds a new feed given its url\n\n delete Delete the given feed\n\n email Changes email address used in feed file to be the given one\n\n export Export subscriptions as opml file\n\n help Prints this message or the help of the given subcommand(s)\n\n import import the given opml file into 
subscriptions\n\n list List all feeds configured\n\n new Creates a new feedfile with the given email address\n\n reset Reset feedfile (in other words, remove everything)\n\n run Run feed parsing and transformation\n\n\n\nWhich give you a glimpse of what will happen\n\n\n\nEach of these commands also provide some help, when run with the same `--help` flag.\n\n\n\nThe important operations to memorize are obviously\n\n\n\n#### `rrss2imap new`\n\n\n\nCreates a new `config.json` file. At init time, the config file will only contains `settings` element \n\nwith the email address set. You **have** to set \n\n\n\n* the used imap server\n\n** with user login and password\n\n** and security settings (secure should contain `{\"Yes\": secure port}` for imap/s\n\nor `{\"No\": unsecure port}` for simple imap)\n\n* the default config\n\n** folder will be the full path to an imap folder where entries will fall in\n\n** email will be the recipient email address (which may not be yours for easier filtering)\n\n** Base64 image inlining\n\n* feeds is the list of all rss feeds that can be added\n\n\n\n#### `rrss2imap add`\n\n\n\nThis command will add a new feed to your config. You can directly set here the email recipient as well as the folder\n\n(but not the base64 image inlining parameter)\n\n\n", "file_path": "README.md", "rank": 94, "score": 16723.754931253425 }, { "content": "#### `rrss2imap run`\n\n\n\nTHis is the main command. It will\n\n\n\n1. get all rss/atom feed contents\n\n2. List all new entries in these feeds\n\n3. Transform these entries into valid email messages\n\n4. Push these mail messages directly on IMAP server\n\n\n\n#### `rrss2imap list`\n\n\n\nDisplays a list of the rss feeds. 
Here is an example\n\n\n\n```\n\n0 : http://tontof.net/?rss (to: Nicolas Delsaux <[email protected]> (default)) RSS/rrss2imap (default)\n\n1 : https://www.brothers-brick.com/feed/ (to: Nicolas Delsaux <[email protected]> (default)) RSS/rrss2imap (default)\n\n2 : https://nicolas-delsaux.hd.free.fr/rss-bridge/?action=display&bridge=LesJoiesDuCode&format=AtomFormat (to: Nicolas Delsaux <[email protected]> (default)) RSS/rrss2imap (default)\n\n```\n\n\n\nPlease notice that each entry has an associated number, which is the one to enter when running `rrss2imap delete <NUMBER>`\n\n\n\n#### `config.json` format\n\n\n\nA typical feedfile will look like this\n\n\n\n```json\n\n {\n\n \"settings\": {\n\n \"email\": {\n\n \"server\": \"the imap server of your mail provider\",\n\n \"user\": \"your imap user name\",\n\n \"password\": \"your imap user password\",\n\n \"secure\": {\n\n \"Yes\": 993 // Set to \"Yes\": port for imaps or \"No\": port for unsecure imap\n\n }\n\n },\n\n // This config is to be used for all feeds\n\n \"config\": {\n\n // This is the email address written in each mail sent. It can be different from the email user\n\n \"email\": \"Nicolas Delsaux <[email protected]>\",\n\n // This is the imap folder in which mails will be written\n\n \"folder\": \"RSS/rrss2imap\"\n\n // Setting this to true will force rrss2imap to transform all images into\n\n // base64. 
This prevents images from beind downloaded (and is really cool when reading feeds from a smartphone)\n\n // But largely increase each mail size (which can be quite bothering)\n\n \"inline_image_as_data\": true\n\n }\n\n },\n\n \"feeds\": [\n\n {\n\n \"url\": \"http://tontof.net/?rss\",\n\n // This last updated is updated for each entry and should be enough to have rss items correctly read\n\n \"last_updated\": \"2019-05-04T16:53:15\",\n\n \"config\": {\n\n // each config element can be overwritten at the feed level\n\n }\n\n },\n\n```\n", "file_path": "README.md", "rank": 95, "score": 16710.91504915065 }, { "content": "### As a developer\n\n* clone this repository\n\n* run `cargo run`\n\n\n\n#### Prerequisites\n\n\n\nYou need a complete rust build chain\n\n\n\nTo perform a release, you'll also need\n\n\n\n* [cargo release](https://github.com/sunng87/cargo-release)\n\n* [git journal](https://github.com/saschagrunert/git-journal)\n\n\n\n##### Releasing\n\n\n\n1. Run `cargo release`. This will build a version of the code, push it onto crates/io and tag the repository.\n\nThanks to GitHub Actions (and more specifically the `on_tag.yml` one), once the tag is pushed to GitHub, a release is created.\n\n1. Publish the release. This will trigger the `on_release_created.yml` which will build executables for the target platforms and attach them to the release.\n\n\n\nAnd release is done! It was easy, no?\n\n\n\n#### Installing\n\n\n\n1. Dowload latest version from [Github releases page](https://github.com/Riduidel/rrss2imap/releases)\n\n1. Run `rrss2imap new` which will create the `config.json`\n\n1. Fill the missing parts (typically include email configuration)\n\n1. 
Run with `rrss2imap run`\n\n\n\n### Running the tests\n\n\n\nAutomated tests can be run with `cargo test`\n\n\n\n## Built With\n\n\n\nTake a look at Cargo dependencies\n\n\n\n## Contributing\n\n\n\nPlease read [CONTRIBUTING.md](https://gist.github.com/PurpleBooth/b24679402957c63ec426) for details on our code of conduct, and the process for submitting pull requests to us.\n\n\n\n## Versioning\n\n\n\nWe use [SemVer](http://semver.org/) for versioning. For the versions available, see the [tags on this repository](https://github.com/your/project/tags). \n\n\n\n## Authors\n\n\n\n* **Nicolas Delsaux** - *Initial work* - [Riduidel](https://github.com/Riduidel)\n\n\n\nSee also the list of [contributors](https://github.com/Riduidel/rrss2imap/contributors) who participated in this project.\n\n\n\n## License\n\n\n\nThis project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) file for details\n\n\n\n## Acknowledgments\n\n\n\n* [Rui Carno](https://github.com/rcarmo) for Python implementation of [rss2imap](https://github.com/rcarmo/rss2imap)\n\n* [Aaron Swartz](https://en.wikipedia.org/wiki/Aaron_Swartz) for [RSS](https://en.wikipedia.org/wiki/RSS) (and [rss2email](https://github.com/rss2email/rss2email))\n\n\n", "file_path": "README.md", "rank": 96, "score": 16695.204136186832 }, { "content": "# rrss2imap\n\n\n\n[![Built with cargo-make](https://sagiegurari.github.io/cargo-make/assets/badges/cargo-make.svg)](https://sagiegurari.github.io/cargo-make)\n\n[![Build Status](https://travis-ci.org/Riduidel/rrss2imap.svg?branch=master)](https://travis-ci.org/Riduidel/rrss2imap)\n\n\n\nrrss2imap is a Rust reimplementation of the classical Python script [rss2imap](https://github.com/rcarmo/rss2imap)\n\n\n\nGoals of this project include\n\n\n\n* Having a reasonably performant implementation of rss2imap\n\n* Learn Rust\n\n* Explore parallel mechanism\n\n* Maybe provide some kind of image embedding (DONE) with cache\n\n\n\n## Getting Started\n\n\n\n### Download 
rrss2imap\n\n\n\nrrss2imap can be downloaded from [**releases page**](https://github.com/Riduidel/rrss2imap/releases). \n\nIf there is no release for your platform, you can fill an issue ... or if you know Travis, you can even add your platform to `.travis.yml`.\n\n\n\n### As a user\n\n\n\n<!-- cargo-sync-readme start -->\n\n\n\nApplication transforming rss feeds into email by directly pushing the entries into IMP folders.\n\nThis application is an adaption of the rss2imap Python script to Rust.\n\n\n", "file_path": "README.md", "rank": 97, "score": 16691.36971137236 }, { "content": " \n\n\n\n<!-- cargo-sync-readme end -->\n\n\n", "file_path": "README.md", "rank": 98, "score": 16683.39444505998 }, { "content": "use super::settings::*;\n\n\n\nuse kuchiki::*;\n\n\n\n\n\n\n\n\n", "file_path": "src/image_to_data.rs", "rank": 99, "score": 9.490655000371437 } ]
Rust
src/loading/util.rs
zmbush/budgetron
3403a020abbea635e156d2245226120ee87843c6
use { crate::loading::{ alliant, generic::{Genericize, Transaction}, logix, mint, }, budgetronlib::error::{BResult, BudgetError}, csv::Reader, log::info, serde::de::DeserializeOwned, std::{ cmp::min, fmt::Display, fs::File, io::{self, Read, Seek, Stdin, StdinLock}, path::Path, }, }; fn from_reader<TransactionType, R>(file: &mut R) -> BResult<Vec<Transaction>> where TransactionType: Genericize + DeserializeOwned, R: io::Read, { let mut transactions = Vec::new(); for record in Reader::from_reader(file).deserialize() { let record: TransactionType = record?; transactions.push(record.genericize()?); } Ok(transactions) } struct StdinSource<'a> { buf: Vec<u8>, loc: usize, stdin: StdinLock<'a>, } impl<'a> StdinSource<'a> { fn new(stdin: &'a Stdin) -> StdinSource<'a> { StdinSource { buf: Vec::new(), loc: 0, stdin: stdin.lock(), } } } enum Source<'a> { File(File), Stdin(StdinSource<'a>), } impl<'a> Seek for Source<'a> { fn seek(&mut self, pos: io::SeekFrom) -> io::Result<u64> { match *self { Source::File(ref mut f) => f.seek(pos), Source::Stdin(ref mut source) => match pos { io::SeekFrom::Start(loc) => { source.loc = loc as usize; Ok(source.loc as u64) } io::SeekFrom::Current(diff) => { if diff >= 0 { source.loc += diff as usize; } else { source.loc -= (-diff) as usize; } if source.loc >= source.buf.len() { Err(io::Error::new( io::ErrorKind::UnexpectedEof, "Tried to seek past internal buffer", )) } else { Ok(source.loc as u64) } } io::SeekFrom::End(_) => Err(io::Error::new( io::ErrorKind::InvalidInput, "Stdin has no end", )), }, } } } impl<'a> Read for Source<'a> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { match *self { Source::File(ref mut f) => f.read(buf), Source::Stdin(ref mut source) => { if source.loc >= source.buf.len() { let ret = source.stdin.read(buf); if let Ok(size) = ret { source.buf.extend_from_slice(&buf[..size]); source.loc += size; Ok(size) } else { ret } } else { let len = buf.len(); let start = source.loc; let end = min(start + len, 
source.buf.len()); let readlen = end - start; buf[..readlen].copy_from_slice(&source.buf[start..end]); source.loc = end; Ok(readlen) } } } } } fn from_file_inferred<P: AsRef<Path> + Copy>(filename: P) -> BResult<Vec<Transaction>> { let stdin = io::stdin(); let mut reader = match filename.as_ref().to_str() { Some("-") => Source::Stdin(StdinSource::new(&stdin)), _ => Source::File(File::open(filename)?), }; let mut errors = Vec::new(); macro_rules! parse_exports { ($($type:path),*) => ($(match from_reader::<$type, _>(&mut reader) { Ok(result) => return Ok(result), Err(e) => { errors.push(e); reader.seek(io::SeekFrom::Start(0))?; } })*) } parse_exports!( Transaction, mint::MintExport, logix::LogixExport, alliant::AlliantExport ); Err(BudgetError::Multi(errors)) } pub fn load_from_files<P: AsRef<Path> + Display, Files: Iterator<Item = P>>( filenames: Files, ) -> BResult<Vec<Transaction>> { let mut transactions = Vec::new(); for filename in filenames { info!("Opening file: {}", filename); transactions.append(&mut from_file_inferred(&filename)?); } transactions.sort_by(|a, b| a.date.cmp(&b.date)); Ok(transactions) }
use { crate::loading::{ alliant, generic::{Genericize, Transaction}, logix, mint, }, budgetronlib::error::{BResu
read(buf), Source::Stdin(ref mut source) => { if source.loc >= source.buf.len() { let ret = source.stdin.read(buf); if let Ok(size) = ret { source.buf.extend_from_slice(&buf[..size]); source.loc += size; Ok(size) } else { ret } } else { let len = buf.len(); let start = source.loc; let end = min(start + len, source.buf.len()); let readlen = end - start; buf[..readlen].copy_from_slice(&source.buf[start..end]); source.loc = end; Ok(readlen) } } } } } fn from_file_inferred<P: AsRef<Path> + Copy>(filename: P) -> BResult<Vec<Transaction>> { let stdin = io::stdin(); let mut reader = match filename.as_ref().to_str() { Some("-") => Source::Stdin(StdinSource::new(&stdin)), _ => Source::File(File::open(filename)?), }; let mut errors = Vec::new(); macro_rules! parse_exports { ($($type:path),*) => ($(match from_reader::<$type, _>(&mut reader) { Ok(result) => return Ok(result), Err(e) => { errors.push(e); reader.seek(io::SeekFrom::Start(0))?; } })*) } parse_exports!( Transaction, mint::MintExport, logix::LogixExport, alliant::AlliantExport ); Err(BudgetError::Multi(errors)) } pub fn load_from_files<P: AsRef<Path> + Display, Files: Iterator<Item = P>>( filenames: Files, ) -> BResult<Vec<Transaction>> { let mut transactions = Vec::new(); for filename in filenames { info!("Opening file: {}", filename); transactions.append(&mut from_file_inferred(&filename)?); } transactions.sort_by(|a, b| a.date.cmp(&b.date)); Ok(transactions) }
lt, BudgetError}, csv::Reader, log::info, serde::de::DeserializeOwned, std::{ cmp::min, fmt::Display, fs::File, io::{self, Read, Seek, Stdin, StdinLock}, path::Path, }, }; fn from_reader<TransactionType, R>(file: &mut R) -> BResult<Vec<Transaction>> where TransactionType: Genericize + DeserializeOwned, R: io::Read, { let mut transactions = Vec::new(); for record in Reader::from_reader(file).deserialize() { let record: TransactionType = record?; transactions.push(record.genericize()?); } Ok(transactions) } struct StdinSource<'a> { buf: Vec<u8>, loc: usize, stdin: StdinLock<'a>, } impl<'a> StdinSource<'a> { fn new(stdin: &'a Stdin) -> StdinSource<'a> { StdinSource { buf: Vec::new(), loc: 0, stdin: stdin.lock(), } } } enum Source<'a> { File(File), Stdin(StdinSource<'a>), } impl<'a> Seek for Source<'a> { fn seek(&mut self, pos: io::SeekFrom) -> io::Result<u64> { match *self { Source::File(ref mut f) => f.seek(pos), Source::Stdin(ref mut source) => match pos { io::SeekFrom::Start(loc) => { source.loc = loc as usize; Ok(source.loc as u64) } io::SeekFrom::Current(diff) => { if diff >= 0 { source.loc += diff as usize; } else { source.loc -= (-diff) as usize; } if source.loc >= source.buf.len() { Err(io::Error::new( io::ErrorKind::UnexpectedEof, "Tried to seek past internal buffer", )) } else { Ok(source.loc as u64) } } io::SeekFrom::End(_) => Err(io::Error::new( io::ErrorKind::InvalidInput, "Stdin has no end", )), }, } } } impl<'a> Read for Source<'a> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { match *self { Source::File(ref mut f) => f.
random
[ { "content": "CREATE TABLE transactions (\n\n id SERIAL PRIMARY KEY,\n\n date DATE NOT NULL,\n\n person VARCHAR NOT NULL,\n\n description VARCHAR NOT NULL,\n\n original_description VARCHAR,\n\n amount DOUBLE PRECISION NOT NULL,\n\n transaction_type VARCHAR NOT NULL,\n\n category VARCHAR NOT NULL,\n\n original_category VARCHAR NOT NULL,\n\n account_name VARCHAR NOT NULL,\n\n labels VARCHAR NOT NULL,\n\n notes VARCHAR NOT NULL\n\n)\n", "file_path": "migrations/20160827204143_create_transactions/up.sql", "rank": 0, "score": 42406.80306585562 }, { "content": "DROP TABLE transactions\n", "file_path": "migrations/20160827204143_create_transactions/down.sql", "rank": 1, "score": 42406.80306585562 }, { "content": "export default class Transactions extends React.Component<IProps, IState> {\n\n public static defaultProps = {\n\n columns: [\"date\", \"amount\", \"person\", \"description\"],\n\n filter: () => true,\n\n transform: (t: Transaction) => t,\n\n };\n\n\n\n constructor(props: IProps) {\n\n super(props);\n\n\n\n this.state = {\n\n show: {},\n\n };\n\n }\n\n\n\n public toggleDetails(tid: string) {\n\n const { show } = this.state;\n\n show[tid] = !show[tid];\n\n this.setState({ show });\n\n }\n\n\n\n public fetchTransactionDetails(tid: string): Transaction {\n\n const transaction = this.props.transactions.get(tid);\n\n if (transaction) { return transaction; }\n\n\n\n const year = tid.slice(0, 4);\n\n const month = tid.slice(4, 6);\n\n const day = tid.slice(6, 8);\n\n const money = tid.slice(8, 18);\n\n\n\n let type = tid.slice(18, 19);\n\n if (type === \"D\") {\n\n type = \"Debit\";\n\n } else if (type === \"C\") {\n\n type = \"Credit\";\n\n } else if (type === \"T\") {\n\n type = \"Trasnfer\";\n\n }\n\n return new Transaction(\n\n \"Unknown\",\n\n `${money.slice(0, 6)}.${money.slice(6, 10)}`,\n\n \"unknown\",\n\n new Date(`${month}/${day}/${year}`),\n\n \"Unknown\",\n\n \"\",\n\n \"\",\n\n \"\",\n\n \"UNKNOWN\",\n\n \"unknown\",\n\n [\"details not exported\"],\n\n 
type,\n\n );\n\n }\n\n\n\n public renderHeaders() {\n\n return this.props.columns.map((id) => (\n\n <th key={id}>{Transaction.transactionName(id)}</th>\n\n ));\n\n }\n\n\n\n public renderRowCells(t: Transaction) {\n\n return this.props.columns.map((id) => <td key={id}>{t.render(id)}</td>);\n\n }\n\n\n\n public render() {\n\n const transactions = this.props.transaction_ids\n\n .sort()\n\n .map((tid): [string, Transaction] => [\n\n tid,\n\n this.fetchTransactionDetails(tid),\n\n ])\n\n .filter(this.props.filter)\n\n .map(([tid, t]): [string, Transaction] => [tid, this.props.transform(t)])\n\n .reverse();\n\n return (\n\n <table className={style.table}>\n\n <thead>\n\n <tr>{this.renderHeaders()}</tr>\n\n </thead>\n\n <tbody>\n\n {transactions.map(([tid, transaction]) => [\n\n <tr\n\n key={tid}\n\n onClick={() => this.toggleDetails(tid)}\n\n className={style.normal_row}\n\n >\n\n {this.renderRowCells(transaction)}\n\n </tr>,\n\n <DetailsTable\n\n colSpan={this.props.columns.length}\n\n key={`${tid} details`}\n\n show={this.state.show[tid]}\n\n transaction={transaction}\n\n />,\n\n ])}\n\n </tbody>\n\n </table>\n\n );\n\n }\n", "file_path": "web/src/components/Transactions/index.tsx", "rank": 2, "score": 39356.459835173584 }, { "content": "export function parseTransactions(transactions: {}): Map<string, Transaction> {\n\n const parsedTransactions = new Map();\n\n\n\n Object.entries(transactions).forEach(([uid, transaction]) => {\n\n if (typeof uid === \"string\") {\n\n const t = Transaction.parse(transaction);\n\n if (t) { parsedTransactions.set(uid, t); }\n\n }\n\n });\n\n\n\n return parsedTransactions;\n", "file_path": "web/src/util/data/transactions.tsx", "rank": 3, "score": 38661.22929117894 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::loading::{\n\n generic::{Genericize, Transaction, TransactionType},\n\n money::Money,\n\n },\n\n budgetronlib::{error::BResult, fintime::Date},\n\n serde::Deserialize,\n\n};\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct LogixExport {\n\n account: String,\n", "file_path": "src/loading/logix.rs", "rank": 4, "score": 36450.27616257088 }, { "content": " date: Date,\n\n amount: Money,\n\n balance: Money,\n\n category: String,\n\n description: String,\n\n memo: String,\n\n notes: String,\n\n}\n\n\n\nimpl Genericize for LogixExport {\n\n fn genericize(self) -> BResult<Transaction> {\n\n Ok(Transaction {\n\n uid: None,\n\n date: self.date,\n\n person: \"\".to_owned(),\n\n description: self.description.clone(),\n\n original_description: self.description,\n\n amount: self.amount.abs(),\n\n transaction_type: if self.amount.is_negative() {\n\n TransactionType::Debit\n", "file_path": "src/loading/logix.rs", "rank": 5, "score": 36449.68370098474 }, { "content": " } else {\n\n TransactionType::Credit\n\n },\n\n category: self.category.clone(),\n\n original_category: self.category,\n\n account_name: self.account,\n\n labels: self.memo,\n\n notes: self.notes,\n\n transfer_destination_account: None,\n\n tags: vec![],\n\n })\n\n }\n\n}\n", "file_path": "src/loading/logix.rs", "rank": 6, "score": 36446.62846685636 }, { "content": "#[derive(Debug, Deserialize)]\n\npub enum AlliantTransactionType {\n\n Debit,\n\n Credit,\n\n}\n\n\n\nimpl Into<TransactionType> for AlliantTransactionType {\n\n fn into(self) -> TransactionType {\n\n match self {\n\n AlliantTransactionType::Debit => TransactionType::Debit,\n\n AlliantTransactionType::Credit => TransactionType::Credit,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct AlliantExport {\n\n id: String,\n\n account_id: i64,\n\n reference_id: i64,\n", "file_path": "src/loading/alliant.rs", "rank": 7, "score": 
36429.47091304395 }, { "content": " Debit,\n\n Credit,\n\n}\n\n\n\nimpl Into<TransactionType> for MintTransactionType {\n\n fn into(self) -> TransactionType {\n\n match self {\n\n MintTransactionType::Debit => TransactionType::Debit,\n\n MintTransactionType::Credit => TransactionType::Credit,\n\n }\n\n }\n\n}\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"PascalCase\")]\n\npub struct MintExport {\n\n date: Date,\n\n description: String,\n\n #[serde(rename = \"Original Description\")]\n\n original_description: String,\n\n amount: Money,\n", "file_path": "src/loading/mint.rs", "rank": 8, "score": 36429.099287965044 }, { "content": " #[serde(rename = \"Transaction Type\")]\n\n transaction_type: MintTransactionType,\n\n category: String,\n\n #[serde(rename = \"Account Name\")]\n\n account_name: String,\n\n labels: String,\n\n notes: String,\n\n}\n\n\n\nimpl Genericize for MintExport {\n\n fn genericize(self) -> BResult<Transaction> {\n\n Ok(Transaction {\n\n uid: None,\n\n date: self.date,\n\n person: \"\".to_owned(),\n\n description: self.description,\n\n original_description: self.original_description,\n\n amount: self.amount,\n\n transaction_type: self.transaction_type.into(),\n\n category: self.category.clone(),\n", "file_path": "src/loading/mint.rs", "rank": 9, "score": 36428.101569845014 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::loading::{\n\n generic::{Genericize, Transaction, TransactionType},\n\n money::Money,\n\n },\n\n budgetronlib::{error::BResult, fintime::Date},\n\n serde::Deserialize,\n\n};\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum MintTransactionType {\n", "file_path": "src/loading/mint.rs", "rank": 10, "score": 36427.929809231886 }, { "content": " transaction_type: AlliantTransactionType,\n\n amount: Money,\n\n posted_at: Date,\n\n created_at: Date,\n\n nickname: String,\n\n original_name: String,\n\n merchant_id: String,\n\n updated_at: Date,\n\n check_number: Option<i32>,\n\n account_name: String,\n\n tags: String,\n\n}\n\n\n\nimpl Genericize for AlliantExport {\n\n fn genericize(self) -> BResult<Transaction> {\n\n Ok(Transaction {\n\n uid: Some(self.id),\n\n date: self.posted_at,\n\n person: \"\".to_owned(),\n\n description: self.nickname,\n", "file_path": "src/loading/alliant.rs", "rank": 11, "score": 36427.810179506516 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::loading::{\n\n generic::{Genericize, Transaction, TransactionType},\n\n money::Money,\n\n },\n\n budgetronlib::{error::BResult, fintime::Date},\n\n serde::Deserialize,\n\n};\n\n\n\n// \"id\",\"account_id\",\"reference_id\",\"transaction_type\",\"amount\",\"posted_at\",\n\n// \"created_at\",\"nickname\",\"original_name\",\"merchant_id\",\"updated_at\",\n\n// \"check_number\",\"account_name\",\"tags\"\n", "file_path": "src/loading/alliant.rs", "rank": 12, "score": 36425.75464300679 }, { "content": " original_description: self.original_name,\n\n amount: self.amount,\n\n transaction_type: self.transaction_type.into(),\n\n category: self.tags.clone(),\n\n original_category: self.tags,\n\n account_name: self.account_name,\n\n labels: \"\".to_owned(),\n\n notes: \"\".to_owned(),\n\n transfer_destination_account: None,\n\n tags: vec![],\n\n })\n\n }\n\n}\n", "file_path": "src/loading/alliant.rs", "rank": 13, "score": 36424.45784505789 }, { "content": " original_category: self.category,\n\n account_name: self.account_name,\n\n labels: self.labels,\n\n notes: self.notes,\n\n transfer_destination_account: None,\n\n tags: vec![],\n\n })\n\n }\n\n}\n", "file_path": "src/loading/mint.rs", "rank": 14, "score": 36421.40214602096 }, { "content": "ALTER TABLE transactions DROP COLUMN tags;\n", "file_path": "migrations/20170712013213_add_tags_column/down.sql", "rank": 16, "score": 30217.425085403727 }, { "content": "interface IState {\n\n show: { [uid: string]: boolean };\n", "file_path": "web/src/components/Transactions/index.tsx", "rank": 17, "score": 30217.425085403727 }, { "content": "interface IProps {\n\n columns: string[];\n\n transaction_ids: string[];\n\n transactions: Map<string, Transaction>;\n\n\n\n filter: (entry: [string, Transaction]) => boolean;\n\n transform: (t: Transaction) => Transaction;\n", "file_path": "web/src/components/Transactions/index.tsx", 
"rank": 18, "score": 30217.425085403727 }, { "content": "ALTER TABLE transactions ADD COLUMN tags TEXT[];\n", "file_path": "migrations/20170712013213_add_tags_column/up.sql", "rank": 19, "score": 29405.432287987525 }, { "content": "interface IDetailsTableIProps {\n\n show?: boolean;\n\n colSpan?: number;\n\n transaction: Transaction;\n", "file_path": "web/src/components/Transactions/index.tsx", "rank": 20, "score": 28635.936730161557 }, { "content": "ALTER TABLE transactions DROP COLUMN transfer_destination_account;\n", "file_path": "migrations/20170525020036_add_transfer_destination_account/down.sql", "rank": 21, "score": 27905.687225651076 }, { "content": "ALTER TABLE transactions ADD COLUMN transfer_destination_account VARCHAR;\n", "file_path": "migrations/20170525020036_add_transfer_destination_account/up.sql", "rank": 22, "score": 27211.75597742191 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\npub mod alliant;\n\nmod generic;\n\npub mod logix;\n\npub mod mint;\n\nmod money;\n\nmod util;\n\n\n\npub use self::{\n\n generic::{Transaction, TransactionType},\n\n money::Money,\n\n util::load_from_files,\n\n};\n", "file_path": "src/loading/mod.rs", "rank": 24, "score": 9.94637326964788 }, { "content": "extern crate dotenv;\n\nextern crate itertools;\n\n\n\nuse diesel::pg::PgConnection;\n\nuse diesel::prelude::*;\n\nuse dotenv::dotenv;\n\nuse itertools::Itertools;\n\nuse std::env;\n\n\n\npub struct Transactions {\n\n pub db: PgConnection,\n\n}\n\n\n\nimpl Transactions {\n\n pub fn new_from_env() -> Transactions {\n\n let _ = dotenv();\n\n\n\n Transactions::new(&env::var(\"DATABASE_URL\").expect(\"DATABASE_URL must be set\"))\n\n }\n\n\n", "file_path": "data_store/src/lib.rs", "rank": 26, "score": 7.357579478609384 }, { "content": " fn collate(&self, mut transactions: Vec<Transaction>) -> BResult<Vec<Transaction>> {\n\n for p in &self.processor {\n\n transactions = p.collate(transactions)?;\n\n }\n\n Ok(transactions)\n\n }\n\n}\n\n\n\nimpl Collate for Processor {\n\n fn collate(&self, mut transactions: Vec<Transaction>) -> BResult<Vec<Transaction>> {\n\n use self::Processor::*;\n\n match *self {\n\n Categorize { ref categories } => {\n\n for transaction in &mut transactions {\n\n let cat = &transaction.original_category;\n\n for (key, values) in categories {\n\n if key == cat || (!values.is_empty() && values.contains(&cat.to_owned())) {\n\n transaction.category = key.clone();\n\n }\n\n }\n", "file_path": "src/processing/config.rs", "rank": 27, "score": 6.02525071727277 }, { "content": " pub fn new(database_url: &str) -> Transactions {\n\n Transactions {\n\n db: PgConnection::establish(database_url)\n\n .expect(&format!(\"Error connecting to {}\", database_url)),\n\n }\n\n }\n\n\n\n pub fn set_transactions(&self, transactions: Vec<models::NewTransaction>) {\n\n use 
schema::transactions;\n\n\n\n diesel::delete(transactions::table)\n\n .execute(&self.db)\n\n .expect(\"Unable to delete the old transactions table\");\n\n for group in &transactions.into_iter().chunks(1000) {\n\n let group = group.collect::<Vec<models::NewTransaction>>();\n\n diesel::insert(&group)\n\n .into(transactions::table)\n\n .execute(&self.db)\n\n .expect(\"Error saving transaction\");\n\n }\n\n }\n\n}\n", "file_path": "data_store/src/lib.rs", "rank": 28, "score": 5.7697306683281315 }, { "content": "use {\n\n crate::{loading::Transaction, reporting::Reporter},\n\n budgetronlib::fintime::{Date, Timeframe},\n\n serde_json::{self, Value},\n\n std::{borrow::Cow, collections::HashMap},\n\n};\n\n\n\npub struct List;\n\nimpl Reporter for List {\n\n fn report<'a, I>(&self, transactions: I, _: Date) -> Value\n\n where\n\n I: Iterator<Item = Cow<'a, Transaction>>,\n\n {\n\n let transactions = transactions.collect::<Vec<_>>();\n\n let start_date =\n\n transactions.last().map(|t| t.date).unwrap_or_default() - Timeframe::Years(3);\n\n let transaction_map = transactions\n\n .into_iter()\n\n .filter_map(|t| {\n\n if t.date >= start_date {\n", "file_path": "src/reporting/list.rs", "rank": 29, "score": 5.744107205260351 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n loading::Transaction,\n\n num_traits::Float,\n\n ordered_float::NotNaN,\n\n reporting::Reporter,\n\n serde_json::{self, Value},\n\n std::{borrow::Cow, collections::BTreeMap},\n\n};\n\n\n\npub struct RepeatedTransactions {\n\n threshold: f64,\n\n}\n\n\n\nimpl RepeatedTransactions {\n\n pub fn new(threshold: f64) -> RepeatedTransactions {\n\n RepeatedTransactions { threshold }\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n", "file_path": "src/reporting/repeats.rs", "rank": 30, "score": 4.826637260507638 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{\n\n loading::{Transaction, TransactionType},\n\n reporting::Reporter,\n\n },\n\n budgetronlib::fintime::Date,\n\n serde_json::Value,\n\n std::borrow::Cow,\n\n};\n\n\n\npub struct OnlyType<'a, T>\n\nwhere\n", "file_path": "src/reporting/only_type.rs", "rank": 31, "score": 4.478926667113811 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{\n\n loading::{Transaction, TransactionType},\n\n processing::Collate,\n\n },\n\n budgetronlib::error::BResult,\n\n std::{\n\n cmp::min,\n\n collections::{HashMap, HashSet},\n\n i64,\n\n },\n\n};\n", "file_path": "src/processing/transfers.rs", "rank": 32, "score": 4.44275557814867 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{\n\n loading::{Transaction, TransactionType},\n\n reporting::Reporter,\n\n },\n\n data_store,\n\n serde_json::Value,\n\n std::borrow::Cow,\n\n};\n\n\n\npub struct Database;\n\nimpl Reporter for Database {\n", "file_path": "src/reporting/database.rs", "rank": 33, "score": 4.44275557814867 }, { "content": " pub original_category: String,\n\n pub account_name: String,\n\n pub labels: String,\n\n pub notes: String,\n\n pub transfer_destination_account: Option<String>,\n\n pub tags: Vec<String>,\n\n}\n\n\n\nuse super::schema::transactions;\n\n\n\n#[derive(Insertable)]\n\n#[table_name = \"transactions\"]\n\npub struct NewTransaction<'a> {\n\n pub date: NaiveDate,\n\n pub person: String,\n\n pub description: String,\n\n pub original_description: String,\n\n pub amount: f64,\n\n pub transaction_type: &'a str,\n\n pub category: String,\n\n pub original_category: String,\n\n pub account_name: String,\n\n pub labels: String,\n\n pub notes: String,\n\n pub transfer_destination_account: Option<String>,\n\n pub tags: Vec<String>,\n\n}\n", "file_path": "data_store/src/models.rs", "rank": 34, "score": 4.380923531646829 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the 
Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{\n\n loading::{Transaction, TransactionType},\n\n reporting::{by_account::ByAccount, by_timeframe::ByTimeframe},\n\n },\n\n budgetronlib::fintime::{Date, Timeframe},\n\n serde,\n\n serde_json::Value,\n\n std::{borrow::Cow, fmt},\n\n};\n\n\n", "file_path": "src/reporting/mod.rs", "rank": 35, "score": 4.372180326237697 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{\n\n loading::{Transaction, TransactionType},\n\n reporting::Reporter,\n\n },\n\n budgetronlib::fintime::Date,\n\n serde::Serialize,\n\n serde_json::{self, Value},\n\n std::{borrow::Cow, fmt},\n\n};\n\n\n\npub struct ByAccount<'a, T>\n", "file_path": "src/reporting/by_account.rs", "rank": 36, "score": 4.372180326237697 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{\n\n loading::{Money, Transaction, TransactionType},\n\n reporting::Reporter,\n\n },\n\n budgetronlib::fintime::Date,\n\n serde_json::{self, Value},\n\n std::{borrow::Cow, collections::BTreeMap},\n\n};\n\n\n\npub struct NetWorth;\n\n\n", "file_path": "src/reporting/net_worth.rs", "rank": 37, "score": 4.30386443998249 }, { "content": "// Copyright 2019 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{\n\n loading::{Money, Transaction, TransactionType},\n\n reporting::Reporter,\n\n },\n\n budgetronlib::fintime::Date,\n\n serde::Serialize,\n\n serde_json::{self, Value},\n\n std::{borrow::Cow, collections::HashMap},\n\n};\n\n\n\npub struct IncomeExpenseRatio {\n", "file_path": "src/reporting/income_expense_ratio.rs", "rank": 38, "score": 4.237698750168287 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{\n\n loading::{Money, Transaction, TransactionType},\n\n reporting::{config::ReportOptions, timeseries::Timeseries, Reporter},\n\n },\n\n budgetronlib::fintime::Date,\n\n serde::Serialize,\n\n serde_json::{self, Value},\n\n std::{borrow::Cow, fmt},\n\n};\n\n\n\npub struct Cashflow {\n", "file_path": "src/reporting/cashflow.rs", "rank": 39, "score": 4.205390060392396 }, { "content": "impl Reporter for NetWorth {\n\n fn report<'a, I>(&self, transactions: I, _: Date) -> Value\n\n where\n\n I: Iterator<Item = Cow<'a, Transaction>>,\n\n {\n\n let mut worth = BTreeMap::new();\n\n for transaction in transactions {\n\n *worth\n\n .entry(transaction.account_name.clone())\n\n .or_insert_with(Money::zero) += match transaction.transaction_type {\n\n TransactionType::Credit => transaction.amount,\n\n TransactionType::Debit | TransactionType::Transfer => -transaction.amount,\n\n };\n\n if let TransactionType::Transfer = transaction.transaction_type {\n\n *worth\n\n .entry(\n\n transaction\n\n .transfer_destination_account\n\n .clone()\n\n .expect(\"transfer records should have a transfer_destination_account\"),\n", "file_path": "src/reporting/net_worth.rs", "rank": 40, "score": 4.177181364401617 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{\n\n loading::{Money, Transaction, TransactionType},\n\n reporting::{config::ReportOptions, timeseries::Timeseries, Reporter},\n\n },\n\n budgetronlib::fintime::Date,\n\n serde::Serialize,\n\n serde_json::{self, Value},\n\n std::{borrow::Cow, collections::HashMap},\n\n};\n\n\n\npub struct Categories {\n", "file_path": "src/reporting/categories.rs", "rank": 41, "score": 4.142260261625976 }, { "content": " (*seen\n\n .entry((\n\n transaction.description.clone(),\n\n transaction.transaction_type,\n\n ))\n\n .or_insert_with(|| Vec::new()))\n\n .push(transaction);\n\n }\n\n let seen = seen\n\n .into_iter()\n\n .filter_map(|((amt, transaction_type), transactions)| {\n\n if transactions.len() > 2 {\n\n let (count, total) = transactions\n\n .windows(2)\n\n .map(|w| w[1].date - w[0].date)\n\n .fold((0, 0), |curr, d| (curr.0 + 1, curr.1 + d));\n\n Some((\n\n format!(\"{:?} {:?}\", transaction_type, amt),\n\n Report {\n\n transactions,\n", "file_path": "src/reporting/repeats.rs", "rank": 42, "score": 4.132244973745165 }, { "content": " },\n\n ..Default::default()\n\n };\n\n for transaction in transactions {\n\n {\n\n let entry: &mut CategoryEntry = report\n\n .categories\n\n .entry(transaction.category.clone())\n\n .or_insert_with(Default::default);\n\n entry.amount += match transaction.transaction_type {\n\n TransactionType::Credit => transaction.amount,\n\n TransactionType::Debit => -transaction.amount,\n\n _ => Money::zero(),\n\n };\n\n entry.transactions.push(transaction.uid());\n\n }\n\n let ts_data = report.ts_data();\n\n if let Some(ref mut ts) = report.timeseries {\n\n ts.add(transaction.date, ts_data);\n\n }\n", "file_path": "src/reporting/categories.rs", "rank": 43, "score": 4.097755690205297 }, { "content": " return Some(tag);\n\n }\n\n }\n\n None\n\n }\n\n}\n\n\n\nimpl Reporter for IncomeExpenseRatio {\n\n fn report<'a, I>(&self, transactions: I, 
_: Date) -> Value\n\n where\n\n I: Iterator<Item = Cow<'a, Transaction>>,\n\n {\n\n let mut report = IncomeExpenseReport::new(&self.income_tags, &self.expense_tags);\n\n\n\n for transaction in transactions {\n\n match transaction.transaction_type {\n\n TransactionType::Credit => report.credit.update(&transaction),\n\n TransactionType::Debit => report.debit.update(&transaction),\n\n _ => {}\n\n }\n", "file_path": "src/reporting/income_expense_ratio.rs", "rank": 44, "score": 4.095183950586107 }, { "content": " fn report<'a, I>(&self, transactions: I) -> Value\n\n where\n\n I: Iterator<Item = Cow<'a, Transaction>>,\n\n {\n\n let db = data_store::Transactions::new_from_env();\n\n let mut all_transactions = Vec::new();\n\n for t in transactions {\n\n let t = t.into_owned();\n\n all_transactions.push(data_store::models::NewTransaction {\n\n date: t.date.date.naive_utc(),\n\n person: t.person,\n\n description: t.description,\n\n original_description: t.original_description,\n\n amount: t.amount.to_f64(),\n\n transaction_type: match t.transaction_type {\n\n TransactionType::Debit => \"Debit\",\n\n TransactionType::Credit => \"Credit\",\n\n TransactionType::Transfer => \"Transfer\",\n\n },\n\n category: t.category,\n", "file_path": "src/reporting/database.rs", "rank": 45, "score": 4.059803419565711 }, { "content": "\n\nimpl Default for TransactionType {\n\n fn default() -> TransactionType {\n\n TransactionType::Credit\n\n }\n\n}\n\n\n\nimpl TransactionType {\n\n pub fn is_credit(self) -> bool {\n\n TransactionType::Credit == self\n\n }\n\n\n\n pub fn is_debit(self) -> bool {\n\n TransactionType::Debit == self\n\n }\n\n\n\n pub fn is_transfer(self) -> bool {\n\n TransactionType::Transfer == self\n\n }\n\n}\n", "file_path": "src/loading/generic.rs", "rank": 46, "score": 4.056157135565443 }, { "content": " if found_transfer.0 == i || found_transfer.1 == i {\n\n break;\n\n }\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n for (i, destination_account) in to_update {\n\n if let 
Some(transaction) = transactions.get_mut(i) {\n\n transaction.transfer_destination_account = Some(destination_account);\n\n transaction.transaction_type = TransactionType::Transfer;\n\n }\n\n }\n\n\n\n let mut to_delete: Vec<_> = to_delete.into_iter().collect();\n\n to_delete.sort();\n\n to_delete.reverse();\n\n\n\n for i in to_delete {\n\n transactions.remove(i);\n\n }\n\n Ok(transactions)\n\n }\n\n}\n", "file_path": "src/processing/transfers.rs", "rank": 47, "score": 4.023731414550925 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{\n\n loading::{Money, Transaction, TransactionType},\n\n reporting::{config::ReportOptions, timeseries::Timeseries, Reporter},\n\n },\n\n budgetronlib::fintime::{Date, Timeframe},\n\n serde::{Deserialize, Serialize},\n\n serde_json::{self, Value},\n\n std::{borrow::Cow, collections::HashMap},\n\n};\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n", "file_path": "src/reporting/rolling_budget.rs", "rank": 48, "score": 4.021634621654426 }, { "content": " where\n\n I: Iterator<Item = Cow<'b, Transaction>>,\n\n {\n\n let (transactions, _): (Vec<_>, Vec<_>) =\n\n transactions.partition(|t| t.transaction_type == self.t);\n\n\n\n self.inner.report(transactions.into_iter(), end_date)\n\n }\n\n\n\n fn key(&self) -> Option<String> {\n\n Some(format!(\"only_type_{:?}\", self.t))\n\n }\n\n}\n", "file_path": "src/reporting/only_type.rs", "rank": 49, "score": 4.017009434453479 }, { "content": " }\n\n }\n\n AssignOwners { ref owners } => {\n\n for transaction in &mut transactions {\n\n for (owner, matcher) in owners {\n\n if matcher.matches(transaction) {\n\n transaction.person = owner.clone();\n\n }\n\n }\n\n 
}\n\n }\n\n OverrideOwners { ref owner_override } => {\n\n for transaction in &mut transactions {\n\n if let Some(captures) = owner_override.captures(&transaction.notes) {\n\n if let Some(new_owner) = captures.get(1) {\n\n transaction.person = new_owner.as_str().to_owned();\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/processing/config.rs", "rank": 50, "score": 4.012681177605879 }, { "content": " }\n\n\n\n fn split_transaction(\n\n &self,\n\n transaction: &Transaction,\n\n amounts: &HashMap<String, Money>,\n\n ) -> HashMap<String, Money> {\n\n if self.should_split(transaction) {\n\n RollingBudget::proportions(amounts)\n\n .into_iter()\n\n .map(|(k, v)| (k.to_string(), transaction.amount * v))\n\n .collect()\n\n } else {\n\n let mut s = HashMap::new();\n\n s.insert(transaction.person.clone(), transaction.amount);\n\n s\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/reporting/rolling_budget.rs", "rank": 51, "score": 4.00532799719093 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{\n\n loading::{Money, Transaction, TransactionType},\n\n reporting::{rolling_budget, Cashflow, Categories, IncomeExpenseRatio, Reporter},\n\n },\n\n budgetronlib::fintime::Date,\n\n serde::{Deserialize, Serialize},\n\n serde_json::{self, Value},\n\n std::{borrow::Cow, collections::HashMap},\n\n};\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n", "file_path": "src/reporting/config.rs", "rank": 52, "score": 3.9925900923604485 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse chrono::NaiveDate;\n\n\n\n#[derive(Queryable)]\n\npub struct Transaction {\n\n pub id: i32,\n\n pub date: NaiveDate,\n\n pub person: String,\n\n pub description: String,\n\n pub original_description: String,\n\n pub amount: f32,\n\n pub transaction_type: String,\n\n pub category: String,\n", "file_path": "data_store/src/models.rs", "rank": 53, "score": 3.9925900923604485 }, { "content": " AddTags { ref tags } => {\n\n for transaction in &mut transactions {\n\n for (tag, matcher) in tags {\n\n if matcher.matches(transaction) {\n\n transaction.tags.push(tag.to_owned());\n\n }\n\n }\n\n }\n\n }\n\n OwnersForTag { ref tag_owner } => {\n\n for transaction in &mut transactions {\n\n for (tag, owner) in tag_owner {\n\n if transaction.tags.contains(tag) {\n\n transaction.person = owner.to_owned();\n\n }\n\n }\n\n }\n\n }\n\n HideAccount { ref hide_accounts } => {\n\n transactions.retain(|t| !hide_accounts.contains(&t.account_name))\n", "file_path": "src/processing/config.rs", "rank": 54, "score": 3.9891630946249967 }, { "content": " tags,\n\n 
..Default::default()\n\n }\n\n }\n\n\n\n fn update(&mut self, transaction: &Transaction) {\n\n match self.find_tag(&transaction.tags) {\n\n None => self.other += transaction.amount,\n\n Some(tag) => {\n\n *self\n\n .by_tag\n\n .entry(tag.to_owned())\n\n .or_insert_with(Money::zero) += transaction.amount\n\n }\n\n }\n\n }\n\n\n\n fn find_tag(&self, transaction_tags: &[String]) -> Option<&'a str> {\n\n for tag in self.tags {\n\n if transaction_tags.contains(&tag) {\n", "file_path": "src/reporting/income_expense_ratio.rs", "rank": 55, "score": 3.9680343665783835 }, { "content": "\n\nimpl<'a, T> Reporter for ByAccount<'a, T>\n\nwhere\n\n T: Reporter,\n\n{\n\n fn report<'b, I>(&self, transactions: I, end_date: Date) -> Value\n\n where\n\n I: Iterator<Item = Cow<'b, Transaction>>,\n\n {\n\n let (transactions, _): (Vec<_>, Vec<_>) = transactions\n\n .map(|t| {\n\n if let TransactionType::Transfer = t.transaction_type {\n\n if t.account_name == self.account {\n\n let mut t = t.into_owned();\n\n t.transaction_type = TransactionType::Debit;\n\n t.transfer_destination_account = None;\n\n Cow::Owned(t)\n\n } else if *t\n\n .transfer_destination_account\n\n .as_ref()\n", "file_path": "src/reporting/by_account.rs", "rank": 56, "score": 3.9583280678312267 }, { "content": " for (name, amount) in self.split_transaction(&transaction, &amounts) {\n\n let entry = report\n\n .budgets\n\n .entry(name.to_string())\n\n .or_insert_with(Money::zero);\n\n let breakdown_entry = report\n\n .breakdown\n\n .entry(name.to_string())\n\n .or_insert_with(Default::default);\n\n match transaction.transaction_type {\n\n TransactionType::Debit => {\n\n *entry -= amount;\n\n if split {\n\n breakdown_entry.split_transactions -= amount;\n\n } else {\n\n breakdown_entry.personal_transactions -= amount;\n\n }\n\n }\n\n TransactionType::Credit => {\n\n *entry += amount;\n", "file_path": "src/reporting/rolling_budget.rs", "rank": 57, "score": 3.915894387587243 }, { "content": " #[serde(skip_serializing_if = 
\"Option::is_none\")]\n\n timeseries: Option<Timeseries<HashMap<String, Money>>>,\n\n}\n\n\n\nimpl RollingBudget {\n\n fn should_split(&self, transaction: &Transaction) -> bool {\n\n transaction.person == self.split\n\n }\n\n\n\n fn should_include(&self, transaction: &Transaction) -> bool {\n\n self.amounts.keys().any(|&k| transaction.date >= k)\n\n && TransactionType::Transfer != transaction.transaction_type\n\n }\n\n\n\n fn proportions(amounts: &HashMap<String, Money>) -> HashMap<&str, f64> {\n\n let total = amounts.values().sum::<Money>().to_f64();\n\n amounts\n\n .iter()\n\n .map(|(k, v)| (k.as_ref(), v.to_f64() / total))\n\n .collect()\n", "file_path": "src/reporting/rolling_budget.rs", "rank": 58, "score": 3.9038966713506436 }, { "content": " }\n\n HideDescription {\n\n ref hide_description,\n\n } => transactions.retain(|t| {\n\n for d in hide_description {\n\n if d.is_match(&t.description) {\n\n return false;\n\n }\n\n }\n\n true\n\n }),\n\n Transfers { transfer_horizon } => {\n\n transactions = TransferCollator::new(transfer_horizon).collate(transactions)?;\n\n }\n\n Refunds { refund_horizon } => {\n\n transactions = RefundCollator::new(refund_horizon).collate(transactions)?;\n\n }\n\n }\n\n Ok(transactions)\n\n }\n\n}\n", "file_path": "src/processing/config.rs", "rank": 59, "score": 3.8823237941960125 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {crate::loading::Transaction, budgetronlib::error::BResult};\n\n\n\npub mod config;\n\nmod refunds;\n\nmod regex;\n\nmod transfers;\n\n\n\npub enum Collator {\n\n Transfers(transfers::TransferCollator),\n\n Refund(refunds::RefundCollator),\n\n Config(config::ConfiguredProcessors),\n\n}\n\n\n\npub use crate::processing::{\n\n config::ConfiguredProcessors, refunds::RefundCollator, transfers::TransferCollator,\n\n};\n\n\n", "file_path": "src/processing/mod.rs", "rank": 60, "score": 3.815597430293845 }, { "content": " writeln!(f, \"{}\", value)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<'a, T> Reporter for ByTimeframe<'a, T>\n\nwhere\n\n T: Reporter,\n\n{\n\n fn report<'b, I>(&self, transactions: I, end_date: Date) -> Value\n\n where\n\n I: Iterator<Item = Cow<'b, Transaction>>,\n\n {\n\n let mut transactions: Vec<_> = transactions.collect();\n\n let mut date = transactions\n\n .get(0)\n\n .map(|t| t.date)\n\n .unwrap_or_else(|| Date::ymd(2000, 1, 1));\n\n\n", "file_path": "src/reporting/by_timeframe.rs", "rank": 61, "score": 3.7997260937083097 }, { "content": " RefundCollator { horizon }\n\n }\n\n}\n\n\n\nimpl Collate for RefundCollator {\n\n fn collate(&self, mut transactions: Vec<Transaction>) -> BResult<Vec<Transaction>> {\n\n let mut to_delete = HashSet::new();\n\n for (i, t) in transactions.iter().enumerate() {\n\n loop {\n\n let candidates: Vec<_> = (i..min(transactions.len(), i + self.horizon))\n\n .filter(|&j| {\n\n let tn = &transactions[j];\n\n tn.amount == t.amount && !to_delete.contains(&i) && !to_delete.contains(&j)\n\n })\n\n .collect();\n\n\n\n if candidates.len() <= 1 {\n\n break;\n\n }\n\n\n", "file_path": "src/processing/refunds.rs", "rank": 62, "score": 3.775324068090851 }, { "content": " ) -> Value\n\n where\n\n I: Iterator<Item = Cow<'a, Transaction>> + Clone,\n\n R: Reporter,\n\n {\n\n if let Some(only_type) = self.only_type {\n\n 
self.filter_report_only_owners(&reporter.only_type(only_type), transactions, end_date)\n\n } else {\n\n self.filter_report_only_owners(reporter, transactions, end_date)\n\n }\n\n }\n\n\n\n fn filter_report_only_tags<'a, I, R>(\n\n &self,\n\n reporter: &R,\n\n transactions: I,\n\n end_date: Date,\n\n ) -> Value\n\n where\n\n I: Iterator<Item = Cow<'a, Transaction>> + Clone,\n", "file_path": "src/reporting/config.rs", "rank": 63, "score": 3.722287030716517 }, { "content": " I: Iterator<Item = Cow<'a, Transaction>> + Clone,\n\n R: Reporter,\n\n {\n\n if let Some(ref skip_tags) = self.skip_tags {\n\n self.filter_report_only_tags(\n\n &reporter.excluding_tags(skip_tags.clone()),\n\n transactions,\n\n end_date,\n\n )\n\n } else {\n\n self.filter_report_only_tags(reporter, transactions, end_date)\n\n }\n\n }\n\n\n\n fn run_report<'a, I, R>(&self, reporter: &R, transactions: I, end_date: Date) -> Value\n\n where\n\n I: Iterator<Item = Cow<'a, Transaction>> + Clone,\n\n R: Reporter,\n\n {\n\n self.filter_report_skip_tags(reporter, transactions, end_date)\n", "file_path": "src/reporting/config.rs", "rank": 64, "score": 3.6864235689420326 }, { "content": "\n\npub struct TransferCollator {\n\n pub horizon: usize,\n\n}\n\n\n\nimpl TransferCollator {\n\n pub fn new(horizon: usize) -> TransferCollator {\n\n TransferCollator { horizon }\n\n }\n\n}\n\n\n\nimpl Collate for TransferCollator {\n\n fn collate(&self, mut transactions: Vec<Transaction>) -> BResult<Vec<Transaction>> {\n\n let mut to_delete = HashSet::new();\n\n let mut to_update = HashMap::new();\n\n for (i, t) in transactions.iter().enumerate() {\n\n loop {\n\n let candidates: Vec<_> = (i..min(transactions.len(), i + self.horizon))\n\n .filter(|&j| {\n\n let tn = &transactions[j];\n", "file_path": "src/processing/transfers.rs", "rank": 65, "score": 3.66178729459126 }, { "content": " Some(Timeseries::new())\n\n } else {\n\n None\n\n },\n\n };\n\n let mut month = start_dates[amount_index].month();\n\n\n\n if let Some(ref 
mut ts) = report.timeseries {\n\n ts.add(start_dates[amount_index].clone(), amounts.clone());\n\n }\n\n for transaction in transactions {\n\n if self.should_include(&transaction) {\n\n last_date = Some(transaction.date);\n\n if start_dates.len() > amount_index + 1\n\n && transaction.date >= *start_dates[amount_index + 1]\n\n {\n\n amount_index += 1;\n\n amounts = &self.amounts[start_dates[amount_index]];\n\n }\n\n if transaction.date.month() != month {\n", "file_path": "src/reporting/rolling_budget.rs", "rank": 66, "score": 3.649592228867352 }, { "content": " if split {\n\n breakdown_entry.split_transactions += amount;\n\n } else {\n\n breakdown_entry.personal_transactions += amount;\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n report.transactions.push(transaction.uid());\n\n if let Some(ref mut ts) = report.timeseries {\n\n ts.add(transaction.date, report.budgets.clone());\n\n }\n\n }\n\n }\n\n if let Some(mut last_date) = last_date {\n\n last_date.align_to_month();\n\n last_date += Timeframe::Months(1);\n\n\n\n while end_date >= last_date {\n", "file_path": "src/reporting/rolling_budget.rs", "rank": 67, "score": 3.647941366243837 }, { "content": " let (transactions, _): (Vec<_>, Vec<_>) =\n\n transactions.partition(|t| self.owners.iter().any(|owner| t.person == *owner));\n\n self.inner.report(transactions.into_iter(), end_date)\n\n }\n\n\n\n fn key(&self) -> Option<String> {\n\n Some(format!(\"only_owners_{}\", self.owners.join(\"_\")))\n\n }\n\n}\n", "file_path": "src/reporting/only_owners.rs", "rank": 68, "score": 3.641352823854487 }, { "content": " where\n\n I: Iterator<Item = Cow<'a, Transaction>> + Clone,\n\n R: Reporter,\n\n {\n\n if let Some(ref only_owners) = self.only_owners {\n\n self.inner_run_report(\n\n &reporter.only_owners(only_owners.clone()),\n\n transactions,\n\n end_date,\n\n )\n\n } else {\n\n self.inner_run_report(reporter, transactions, end_date)\n\n }\n\n }\n\n\n\n fn filter_report_only_type<'a, I, R>(\n\n &self,\n\n reporter: &R,\n\n 
transactions: I,\n\n end_date: Date,\n", "file_path": "src/reporting/config.rs", "rank": 69, "score": 3.627367694503521 }, { "content": " let (transactions, _): (Vec<_>, Vec<_>) =\n\n transactions.partition(|t| self.tags.iter().any(|tag| t.tags.contains(tag)));\n\n\n\n self.inner.report(transactions.into_iter(), end_date)\n\n }\n\n\n\n fn key(&self) -> Option<String> {\n\n Some(format!(\"only_tags_{}\", self.tags.join(\"_\")))\n\n }\n\n}\n", "file_path": "src/reporting/only_tags.rs", "rank": 70, "score": 3.6173133159833153 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{loading::Transaction, reporting::Reporter},\n\n budgetronlib::fintime::Date,\n\n serde_json::Value,\n\n std::borrow::Cow,\n\n};\n\n\n\npub struct OnlyTags<'a, T>\n\nwhere\n\n T: 'a + Reporter,\n\n{\n\n inner: &'a T,\n", "file_path": "src/reporting/only_tags.rs", "rank": 71, "score": 3.6093848327338063 }, { "content": "// Copyright 2018 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{loading::Transaction, reporting::Reporter},\n\n budgetronlib::fintime::Date,\n\n serde_json::Value,\n\n std::borrow::Cow,\n\n};\n\n\n\npub struct OnlyOwners<'a, T>\n\nwhere\n\n T: 'a + Reporter,\n\n{\n\n inner: &'a T,\n", "file_path": "src/reporting/only_owners.rs", "rank": 72, "score": 3.6093848327338063 }, { "content": " let (_, transactions): (Vec<_>, Vec<_>) =\n\n transactions.partition(|t| self.tags.iter().any(|tag| t.tags.contains(tag)));\n\n\n\n self.inner.report(transactions.into_iter(), end_date)\n\n }\n\n\n\n fn key(&self) -> Option<String> {\n\n Some(format!(\"excluding_tags_{}\", self.tags.join(\"_\")))\n\n }\n\n}\n", "file_path": "src/reporting/excluding_tags.rs", "rank": 73, "score": 3.59358913476009 }, { "content": " let mut mindelta = i64::MAX;\n\n let mut found_transfer = (0, 0);\n\n let debits = candidates\n\n .iter()\n\n .filter(|&i| transactions[*i].transaction_type.is_debit());\n\n\n\n for debit_ix in debits {\n\n let debit = &transactions[*debit_ix];\n\n let credits = candidates\n\n .iter()\n\n .filter(|&i| transactions[*i].transaction_type.is_credit());\n\n\n\n for credit_ix in credits {\n\n let credit = &transactions[*credit_ix];\n\n if (debit.date - credit.date).abs() < mindelta\n\n && debit.account_name == credit.account_name\n\n {\n\n found_transfer = (*debit_ix, *credit_ix);\n\n mindelta = (debit.date - credit.date).abs();\n\n }\n", "file_path": "src/processing/refunds.rs", "rank": 74, "score": 3.5780942026995715 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{loading::Transaction, reporting::Reporter},\n\n budgetronlib::fintime::Date,\n\n serde_json::Value,\n\n std::borrow::Cow,\n\n};\n\n\n\npub struct ExcludingTags<'a, T>\n\nwhere\n\n T: 'a + Reporter,\n\n{\n\n inner: &'a T,\n", "file_path": "src/reporting/excluding_tags.rs", "rank": 75, "score": 3.5755291323803897 }, { "content": " timeseries: if self.options.include_graph {\n\n Some(Timeseries::new())\n\n } else {\n\n None\n\n },\n\n ..Default::default()\n\n };\n\n\n\n let cashflow: CashflowReport = transactions.fold(report, |mut report, ref t| {\n\n match t.transaction_type {\n\n TransactionType::Credit => {\n\n report.credit += t.amount;\n\n report.net += t.amount;\n\n }\n\n TransactionType::Debit => {\n\n report.debit += t.amount;\n\n report.net -= t.amount;\n\n }\n\n _ => {}\n\n }\n", "file_path": "src/reporting/cashflow.rs", "rank": 76, "score": 3.539497587853118 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{loading::Transaction, reporting::Reporter},\n\n budgetronlib::fintime::Date,\n\n serde_json::{map::Map, Value},\n\n std::borrow::Cow,\n\n};\n\n\n\nmacro_rules! 
tuple_impls {\n\n ($(\n\n $Tuple:ident {\n\n $(($idx:tt) -> $T:ident)+\n\n }\n", "file_path": "src/reporting/multi.rs", "rank": 77, "score": 3.446227918820631 }, { "content": " )+) => {\n\n $(\n\n impl<$($T:Reporter),+> Reporter for ($($T),+) {\n\n fn report<'a, It>(&self, transactions: It, end_date: Date) -> Value\n\n where It: Iterator<Item = Cow<'a, Transaction>> + Clone {\n\n let mut retval = Map::new();\n\n $(\n\n if let Some(v) = self.$idx.key() {\n\n retval.insert(v.to_owned(), self.$idx.report(transactions.clone(), end_date));\n\n } else {\n\n match self.$idx.report(transactions.clone(), end_date) {\n\n Value::Object(o) => for (k, v) in o {\n\n retval.insert(k, v);\n\n },\n\n Value::Null => {},\n\n other => {\n\n retval.insert(\"UNNAMED\".to_owned(), other);\n\n }\n\n }\n\n }\n", "file_path": "src/reporting/multi.rs", "rank": 78, "score": 3.439514182744891 }, { "content": " R: Reporter,\n\n {\n\n if let Some(ref only_tags) = self.only_tags {\n\n self.filter_report_only_type(\n\n &reporter.only_tags(only_tags.clone()),\n\n transactions,\n\n end_date,\n\n )\n\n } else {\n\n self.filter_report_only_type(reporter, transactions, end_date)\n\n }\n\n }\n\n\n\n fn filter_report_skip_tags<'a, I, R>(\n\n &self,\n\n reporter: &R,\n\n transactions: I,\n\n end_date: Date,\n\n ) -> Value\n\n where\n", "file_path": "src/reporting/config.rs", "rank": 79, "score": 3.435850733795373 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{loading::Transaction, reporting::Reporter},\n\n budgetronlib::fintime::{Date, Timeframe},\n\n serde::Serialize,\n\n serde_json::{self, Value},\n\n std::{borrow::Cow, collections::BTreeMap, fmt},\n\n};\n\n\n\npub struct ByTimeframe<'a, T>\n\nwhere\n\n T: 'a + Reporter,\n\n{\n", "file_path": "src/reporting/by_timeframe.rs", "rank": 80, "score": 3.415350740982669 }, { "content": " original_category: t.original_category,\n\n account_name: t.account_name,\n\n labels: t.labels,\n\n notes: t.notes,\n\n transfer_destination_account: t.transfer_destination_account,\n\n tags: t.tags,\n\n })\n\n }\n\n if !all_transactions.is_empty() {\n\n db.set_transactions(all_transactions);\n\n }\n\n\n\n Value::Null\n\n }\n\n\n\n fn key(&self) -> Option<String> {\n\n None\n\n }\n\n}\n", "file_path": "src/reporting/database.rs", "rank": 81, "score": 3.414440053591335 }, { "content": " Some((t.uid(), t))\n\n } else {\n\n None\n\n }\n\n })\n\n .collect::<HashMap<_, _>>();\n\n serde_json::to_value(&transaction_map).expect(\"Couldn't serialize\")\n\n }\n\n\n\n fn key(&self) -> Option<String> {\n\n Some(\"transactions\".to_owned())\n\n }\n\n}\n", "file_path": "src/reporting/list.rs", "rank": 82, "score": 3.3966662699253964 }, { "content": " T: 'a + Reporter,\n\n{\n\n inner: &'a T,\n\n t: TransactionType,\n\n}\n\n\n\nimpl<'a, T> OnlyType<'a, T>\n\nwhere\n\n T: 'a + Reporter,\n\n{\n\n pub fn new(inner: &'a T, t: TransactionType) -> Self {\n\n OnlyType { inner, t }\n\n }\n\n}\n\n\n\nimpl<'a, T> Reporter for OnlyType<'a, T>\n\nwhere\n\n T: Reporter,\n\n{\n\n fn report<'b, I>(&self, transactions: I, end_date: Date) -> Value\n", "file_path": "src/reporting/only_type.rs", "rank": 83, "score": 3.393294564341584 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or 
the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{\n\n loading::{Money, Transaction},\n\n processing::{regex::Regex, Collate, RefundCollator, TransferCollator},\n\n },\n\n budgetronlib::{error::BResult, fintime::Date},\n\n serde::Deserialize,\n\n std::collections::HashMap,\n\n};\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct ConfiguredProcessors {\n", "file_path": "src/processing/config.rs", "rank": 84, "score": 3.3259521138582433 }, { "content": "// Copyright 2017 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::loading::money::Money,\n\n budgetronlib::{error::BResult, fintime::Date},\n\n serde::{Deserialize, Serialize},\n\n};\n\n\n\n#[derive(Debug, Serialize, Copy, Deserialize, PartialEq, Clone, Eq, PartialOrd, Ord)]\n\npub enum TransactionType {\n\n Credit,\n\n Debit,\n\n Transfer,\n\n}\n", "file_path": "src/loading/generic.rs", "rank": 85, "score": 3.2971836007886437 }, { "content": "// Copyright 2018 Zachary Bush.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse {\n\n crate::{loading::Transaction, processing::Collate},\n\n budgetronlib::error::BResult,\n\n std::{cmp::min, collections::HashSet, i64},\n\n};\n\n\n\npub struct RefundCollator {\n\n pub horizon: usize,\n\n}\n\n\n\nimpl RefundCollator {\n\n pub fn new(horizon: usize) -> RefundCollator {\n", "file_path": "src/processing/refunds.rs", "rank": 86, "score": 3.2971836007886437 }, { "content": " note: Option<Vec<Regex>>,\n\n range: Option<MoneyRange>,\n\n\n\n only_before: Option<Date>,\n\n only_after: Option<Date>,\n\n}\n\n\n\nimpl TransactionMatcher {\n\n fn matches(&self, t: &Transaction) -> bool {\n\n if let Some(only_before) = self.only_before {\n\n if t.date > only_before {\n\n return false;\n\n }\n\n }\n\n\n\n if let Some(only_after) = self.only_after {\n\n if t.date < only_after {\n\n return false;\n\n }\n\n }\n", "file_path": "src/processing/config.rs", "rank": 87, "score": 3.2748589432603565 }, { "content": " tn.amount == t.amount\n\n && !to_delete.contains(&i)\n\n && !to_delete.contains(&j)\n\n && !to_update.contains_key(&i)\n\n && !to_update.contains_key(&j)\n\n })\n\n .collect();\n\n\n\n if candidates.len() <= 1 {\n\n break;\n\n }\n\n\n\n let mut mindelta = i64::MAX;\n\n let mut found_transfer = (0, 0);\n\n let debits = candidates\n\n .iter()\n\n .filter(|&i| transactions[*i].transaction_type.is_debit());\n\n\n\n for debit_ix in debits {\n\n let debit = &transactions[*debit_ix];\n", "file_path": "src/processing/transfers.rs", "rank": 88, "score": 3.2717246145191643 }, { "content": " let credits = candidates\n\n .iter()\n\n .filter(|&i| transactions[*i].transaction_type.is_credit());\n\n for credit_ix in credits {\n\n let credit = &transactions[*credit_ix];\n\n if (debit.date - credit.date).abs() < mindelta\n\n && debit.account_name != credit.account_name\n\n {\n\n found_transfer = (*debit_ix, *credit_ix);\n\n mindelta = (debit.date - 
credit.date).abs();\n\n }\n\n }\n\n }\n\n\n\n if found_transfer != (0, 0) {\n\n let tn = &transactions[found_transfer.1];\n\n\n\n to_delete.insert(found_transfer.1);\n\n to_update.insert(found_transfer.0, tn.account_name.clone());\n\n\n", "file_path": "src/processing/transfers.rs", "rank": 89, "score": 3.2555871261929408 }, { "content": "\n\n if !($(self.$name)||*) {\n\n reporter.report(transactions, end_date)\n\n } else {\n\n Value::Object(retval)\n\n }\n\n }\n\n }\n\n\n\n check_by! {\n\n by_week, by_month, by_quarter, by_year\n\n }\n\n }\n\n\n\n fn filter_report_only_owners<'a, I, R>(\n\n &self,\n\n reporter: &R,\n\n transactions: I,\n\n end_date: Date,\n\n ) -> Value\n", "file_path": "src/reporting/config.rs", "rank": 90, "score": 3.2457600180658632 }, { "content": " }\n\n}\n\n\n\nimpl Serialize for Money {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serializer.serialize_str(&format!(\"{:.02}\", self.to_f64()))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use serde_json;\n\n\n\n #[test]\n\n fn basic_operations() {\n\n assert_eq!(Money(10) + Money(10), Money(20));\n", "file_path": "src/loading/money.rs", "rank": 91, "score": 3.1630398245967704 }, { "content": " let mut count = transaction.date.month() as i32 - month as i32;\n\n if count < 0 {\n\n count += 12;\n\n }\n\n month = transaction.date.month();\n\n for (name, amount) in amounts {\n\n let entry = report\n\n .budgets\n\n .entry(name.to_string())\n\n .or_insert_with(Money::zero);\n\n *entry += (*amount) * count;\n\n if let Some(rollover_months) = self.rollover_months {\n\n let max_saved = *amount * rollover_months;\n\n if *entry > max_saved {\n\n *entry = max_saved;\n\n }\n\n }\n\n }\n\n }\n\n let split = self.should_split(&transaction);\n", "file_path": "src/reporting/rolling_budget.rs", "rank": 92, "score": 3.1225638042591424 }, { "content": " .expect(\"all transfers should have destinations\")\n\n == self.account\n\n 
{\n\n let mut t = t.into_owned();\n\n t.transaction_type = TransactionType::Credit;\n\n t.account_name = t.transfer_destination_account.take().unwrap();\n\n Cow::Owned(t)\n\n } else {\n\n t\n\n }\n\n } else {\n\n t\n\n }\n\n })\n\n .partition(|t| t.account_name == self.account);\n\n\n\n let mut retval = serde_json::map::Map::new();\n\n retval.insert(\"account\".to_owned(), Value::String(self.account.clone()));\n\n if let Some(v) = self.inner.key() {\n\n retval.insert(v, self.inner.report(transactions.into_iter(), end_date));\n", "file_path": "src/reporting/by_account.rs", "rank": 93, "score": 3.1048695864598805 }, { "content": " Cashflow { options }\n\n }\n\n}\n\n\n\nimpl CashflowReport {\n\n fn datum(&self) -> CashflowDatum {\n\n CashflowDatum {\n\n credit: self.credit,\n\n debit: self.debit,\n\n net: self.net,\n\n }\n\n }\n\n}\n\n\n\nimpl Reporter for Cashflow {\n\n fn report<'a, I>(&self, transactions: I, _: Date) -> Value\n\n where\n\n I: Iterator<Item = Cow<'a, Transaction>>,\n\n {\n\n let report = CashflowReport {\n", "file_path": "src/reporting/cashflow.rs", "rank": 94, "score": 3.055699036927135 }, { "content": " }\n\n}\n\n\n\nimpl Reporter for ConfiguredReports {\n\n fn report<'a, I>(&self, transactions: I, end_date: Date) -> Value\n\n where\n\n I: Iterator<Item = Cow<'a, Transaction>> + Clone,\n\n {\n\n let mut retval = Vec::new();\n\n for report_config in &self.report {\n\n let report_key = report_config\n\n .name\n\n .to_lowercase()\n\n .split(' ')\n\n .collect::<Vec<_>>()\n\n .join(\"_\");\n\n let value = match report_config.config {\n\n ReportType::RollingBudget(ref rolling_budget) => {\n\n report_config.run_report(rolling_budget, transactions.clone(), end_date)\n\n }\n", "file_path": "src/reporting/config.rs", "rank": 95, "score": 3.036053579294249 }, { "content": " }\n\n }\n\n\n\n if found_transfer != (0, 0) {\n\n to_delete.insert(found_transfer.1);\n\n to_delete.insert(found_transfer.0);\n\n\n\n if found_transfer.0 == i || found_transfer.1 == i 
{\n\n break;\n\n }\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n\n\n let mut to_delete: Vec<_> = to_delete.into_iter().collect();\n\n to_delete.sort();\n\n to_delete.reverse();\n\n\n\n for i in to_delete {\n\n transactions.remove(i);\n\n }\n\n\n\n Ok(transactions)\n\n }\n\n}\n", "file_path": "src/processing/refunds.rs", "rank": 96, "score": 3.0303494299391316 }, { "content": "pub struct RollingBudget {\n\n split: String,\n\n rollover_months: Option<u8>,\n\n amounts: HashMap<Date, HashMap<String, Money>>,\n\n #[serde(default)]\n\n options: ReportOptions,\n\n}\n\n\n\n#[derive(Debug, Serialize, Default)]\n\npub struct ExpenseBreakdown {\n\n split_transactions: Money,\n\n personal_transactions: Money,\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct RollingBudgetReport {\n\n budgets: HashMap<String, Money>,\n\n breakdown: HashMap<String, ExpenseBreakdown>,\n\n transactions: Vec<String>,\n\n\n", "file_path": "src/reporting/rolling_budget.rs", "rank": 97, "score": 2.9702218897004222 }, { "content": "impl Reporter for RollingBudget {\n\n fn report<'a, I>(&self, transactions: I, end_date: Date) -> Value\n\n where\n\n I: Iterator<Item = Cow<'a, Transaction>>,\n\n {\n\n let start_dates = {\n\n let mut sd = self.amounts.keys().collect::<Vec<_>>();\n\n sd.sort();\n\n sd\n\n };\n\n\n\n let mut amount_index = 0;\n\n let mut amounts = &self.amounts[start_dates[amount_index]];\n\n let mut last_date = None;\n\n\n\n let mut report = RollingBudgetReport {\n\n budgets: amounts.clone(),\n\n breakdown: HashMap::new(),\n\n transactions: Vec::new(),\n\n timeseries: if self.options.include_graph {\n", "file_path": "src/reporting/rolling_budget.rs", "rank": 98, "score": 2.9542076400113455 }, { "content": " ReportType::Cashflow { ref options } => report_config.run_report(\n\n &Cashflow::with_options((*options).clone()),\n\n transactions.clone(),\n\n end_date,\n\n ),\n\n ReportType::Categories { ref options } => report_config.run_report(\n\n 
&Categories::with_options((*options).clone()),\n\n transactions.clone(),\n\n end_date,\n\n ),\n\n ReportType::IncomeExpenseRatio {\n\n ref income_tags,\n\n ref expense_tags,\n\n ..\n\n } => report_config.run_report(\n\n &IncomeExpenseRatio::new(income_tags, expense_tags),\n\n transactions.clone(),\n\n end_date,\n\n ),\n\n };\n", "file_path": "src/reporting/config.rs", "rank": 99, "score": 2.938365149139142 } ]
Rust
guacamole-runner/src/map.rs
EllenNyan/guacamole-runner
aa17fdaf763e415ff9531e42ae946b503f9cbfb9
use crate::{ shipyard::{ *, }, consts::{ *, }, tetra::{ math::{ Vec3, Vec2, }, graphics::{ Color, }, }, }; use vermarine_lib::{ rendering::{ draw_buffer::{ DrawBuffer, DrawCommand, }, Drawables, }, }; use rand::SeedableRng; use rand::Rng; use rand::rngs::StdRng; pub struct HexTileData { pub ground_height: u8, pub wall_height: u8, pub is_tilled: bool, pub is_grown: bool, } impl HexTileData { pub fn new(height: u8) -> HexTileData { HexTileData { ground_height: height, wall_height: height, is_tilled: false, is_grown: false, } } } pub struct HexMap { pub tiles: Vec<HexTileData>, pub width: usize, pub height: usize, pub position: Vec2<f32>, pub tallest: u8, } impl HexMap { pub fn new(width: usize, height: usize) -> Self { let mut rand = StdRng::from_entropy(); let mut tiles = Vec::<HexTileData>::with_capacity(width * height); let mut tallest = 0; for _ in 0..width * height { let value = rand.gen_range(0, MAX_FLOOR_HEIGHT + 1); let tile = HexTileData::new(value); tiles.push(tile); if value > tallest { tallest = value; } } for _ in 0..5 { for section in 0..(width / 10) { let col = rand.gen_range(0, height + 1); let mut total = 0; for _ in 0..5 { total += rand.gen_range(3, 7 + 1); } total /= 5; for offset in 0..total { if let Some(tile) = tiles.get_mut((col * width) + (section * 10) + offset) { tile.is_tilled = true; } } } } let height_px = { height as f32 * FLOOR_VERT_STEP }; let position = Vec2::new( 0., 360. 
- height_px, ); HexMap { tiles, width, height, position, tallest, } } pub fn pixel_to_hex_raw(&mut self, pos: Vec2<f32>, height_offset: f32) -> (f32, f32) { let mut pos = pos; pos -= Vec2::new(18., 18.); pos.x -= self.position.x; pos.y -= self.position.y; pos.y += height_offset; let size_x = FLOOR_WIDTH / f32::sqrt(3.0); let size_y = 18.66666666666666666; let pos = Vec2::new( pos.x / size_x, pos.y / size_y, ); let b0 = f32::sqrt(3.0) / 3.0; let b1 = -1.0 / 3.0; let b2 = 0.0; let b3 = 2.0 / 3.0; let q: f32 = b0 * pos.x + b1 * pos.y; let r: f32 = b2 * pos.x + b3 * pos.y; (q, r) } #[allow(dead_code)] pub fn pixel_to_hex(&mut self, pos: Vec2<f32>) -> Option<(i32, i32)> { let mut tallest_height: Option<(u8, i32, i32)> = None; for height in 0..=self.tallest { let height_offset = height as f32 * FLOOR_DEPTH_STEP; let (q, r) = self.pixel_to_hex_raw(pos.clone(), height_offset); let (q, r, s) = (q, r, -r -q); let (x, y, _) = cube_round(q, r, s); if x < 0 || x >= self.width as i32 || y < 0 || y >= self.height as i32 { continue; } let tile = &self.tiles[self.width * y as usize + x as usize]; let tile_height = tile.wall_height; if tile_height != height { continue; } if tallest_height.is_none() || tile_height > tallest_height.unwrap().0 { tallest_height = Some((tile_height, x, y)); } } if let Some((_, x, y)) = tallest_height { return Some((x, y)); } None } #[allow(dead_code)] pub fn axial_to_pixel(&mut self, q: i32, r: i32) -> (f32, f32) { let (q, r) = (q as f32, r as f32); let size_x = FLOOR_WIDTH / f32::sqrt(3.0); let size_y = 18.66666666666666666; let x = size_x * (f32::sqrt(3.0) * q + f32::sqrt(3.0) / 2.0 * r); let y = size_y * (3.0 / 2.0 * r); ( x + 18. + self.position.x, y + 18. 
+ self.position.y, ) } } #[allow(dead_code)] pub fn cube_to_offset(q: i32, r: i32) -> (i32, i32) { let col = q + (r - (r & 1)) / 2; let row = r; (col, row) } #[allow(dead_code)] pub fn offset_to_cube(off_x: i32, off_y: i32) -> (i32, i32, i32) { let x = off_x - (off_y - (off_y as i32 & 1)) / 2; let z = off_y; let y = -x-z; (x, y, z) } pub fn cube_round(q: f32, r: f32, s: f32) -> (i32, i32, i32) { let mut qi = q.round() as i32; let mut ri = r.round() as i32; let mut si = s.round() as i32; let q_diff = f64::abs(qi as f64 - q as f64); let r_diff = f64::abs(ri as f64 - r as f64); let s_diff = f64::abs(si as f64 - s as f64); if q_diff > r_diff && q_diff > s_diff { qi = -ri - si; } else if r_diff > s_diff { ri = -qi - si; } else { si = -qi - ri; } (qi, ri, si) } pub fn render_hex_map(mut draw_buffer: UniqueViewMut<DrawBuffer>, drawables: NonSendSync<UniqueViewMut<Drawables>>, mut map: UniqueViewMut<HexMap>) { draw_buffer.new_command_pool(true); let command_pool = draw_buffer.get_command_pool(); let (q, r) = map.pixel_to_hex_raw(Vec2::zero(), 0.); let startx = (q - 40.0) .max(0.0).min(map.width as f32 - 1.0) as usize; let endx = (q + 40.0) .max(0.0).min(map.width as f32 - 1.0) as usize; let starty = (r - 20.0) .max(0.0).min(map.height as f32 - 1.0) as usize; let endy = (r + 20.0) .max(0.0).min(map.height as f32 - 1.0) as usize; let (top_tex, wall_tex, brick_tex, brick_floor_tex, grown_tex, tilled_tex) = ( drawables.alias[textures::FLOOR], drawables.alias[textures::WALL], drawables.alias[textures::WALL_BRICK], drawables.alias[textures::FLOOR_BRICK], drawables.alias[textures::FLOOR_GROWN], drawables.alias[textures::FLOOR_TILLED], ); for height in 0..=MAX_BRICK_HEIGHT { let mut wall_buffer: Vec<DrawCommand> = Vec::with_capacity(1024); let mut wall_brick_buffer: Vec<DrawCommand> = Vec::with_capacity(1024); let mut top_buffer: Vec<DrawCommand> = Vec::with_capacity(1024); let mut top_brick_buffer: Vec<DrawCommand> = Vec::with_capacity(1024); let mut top_tilled_buffer: 
Vec<DrawCommand> = Vec::with_capacity(1024); let mut top_grown_buffer: Vec<DrawCommand> = Vec::with_capacity(1024); for y in starty..=endy { for x in startx..=endx { let tile = &map.tiles[map.width * y + x]; if tile.wall_height < height { continue; } let (draw_x, draw_y) = { let offset_x = (FLOOR_WIDTH / 2.0) * y as f32; let mut x = FLOOR_WIDTH * x as f32; x += offset_x; ( x, (y as i32) as f32 * (FLOOR_VERT_STEP) ) }; let (draw_x, draw_y) = ( draw_x + map.position.x, draw_y + map.position.y, ); if height <= tile.ground_height && height != 0 { render_hex_walls(&mut wall_buffer, draw_x, draw_y, height, wall_tex); } else if height > tile.ground_height && height <= tile.wall_height { render_hex_bricks(&mut wall_brick_buffer, draw_x, draw_y, height, brick_tex); } if tile.is_grown && height == tile.ground_height { render_hex_top(&mut top_grown_buffer, draw_x, draw_y, tile.ground_height, grown_tex, Color::WHITE); } else if tile.is_tilled && height == tile.ground_height { render_hex_top(&mut top_tilled_buffer, draw_x, draw_y, tile.ground_height, tilled_tex, Color::WHITE); } else if height == tile.ground_height && height == tile.wall_height { render_hex_top(&mut top_buffer, draw_x, draw_y, tile.ground_height, top_tex, Color::WHITE); } else if height == tile.wall_height && height != tile.ground_height { render_hex_brick_top(&mut top_brick_buffer, draw_x, draw_y, tile.wall_height, brick_floor_tex, Color::WHITE); } } } command_pool.commands.extend(&wall_buffer); command_pool.commands.extend(&wall_brick_buffer); command_pool.commands.extend(&top_buffer); command_pool.commands.extend(&top_brick_buffer); command_pool.commands.extend(&top_tilled_buffer); command_pool.commands.extend(&top_grown_buffer); } /*let marker_tex = drawables.alias[textures::MARKER]; for y_tile in starty..=endy { for x_tile in startx..=endx { let (x, y) = map.axial_to_pixel(x_tile as i32, y_tile as i32); let tile = &map.tiles[map.width * y_tile + x_tile]; draw_buffer.draw( DrawCommand::new(marker_tex) 
.position(Vec3::new( x - 2.0, y - 2.0, tile.wall_height as f32 * FLOOR_DEPTH_STEP )) .draw_iso(true) ); } }*/ draw_buffer.end_command_pool(); } pub fn render_hex_top(draw_buffer: &mut Vec<DrawCommand>, x: f32, y: f32, height: u8, texture: u64, color: Color) { let mut draw_command = create_floor_draw_cmd(x, y, height as f32 * FLOOR_DEPTH_STEP, height, texture); if color != Color::WHITE { draw_command = draw_command.color(color); } draw_buffer.push(draw_command); } fn create_floor_draw_cmd(x: f32, y: f32, height: f32, color: u8, texture: u64) -> DrawCommand { let color = if color == 0 { let v = 0.55; Color::rgba(v, v, v, 1.0) } else if color == 1 { let v = 0.8; Color::rgba(v, v, v, 1.0) } else { let v = 0.95; Color::rgba(v, v, v, 1.0) }; DrawCommand::new(texture) .position(Vec3::new(x, y, height)) .draw_layer(draw_layers::FLOOR) .draw_iso(true) .color(color) } pub fn render_hex_brick_top(draw_buffer: &mut Vec<DrawCommand>, x: f32, y: f32, height: u8, texture: u64, color: Color) { let mut draw_command = create_brick_floor_draw_cmd(x, y, height as f32 * FLOOR_DEPTH_STEP, height, texture); if color != Color::WHITE { draw_command = draw_command.color(color); } draw_buffer.push(draw_command); } fn create_brick_floor_draw_cmd(x: f32, y: f32, height: f32, color: u8, texture: u64) -> DrawCommand { let color = if color == 1 { let v = 0.65; Color::rgba(v, v, v, 1.0) } else if color == 2 { let v = 0.8; Color::rgba(v, v, v, 1.0) } else if color == 3 { let v = 0.9; Color::rgba(v, v, v, 1.0) } else { let v = 1.0; Color::rgba(v, v, v, 1.0) }; DrawCommand::new(texture) .position(Vec3::new(x, y, height)) .draw_layer(draw_layers::FLOOR) .draw_iso(true) .color(color) } pub fn render_hex_walls(draw_buffer: &mut Vec<DrawCommand>, x: f32, y: f32, height: u8, wall_tex: u64) { let start_height = height as f32 * FLOOR_DEPTH_STEP - WALL_VERT_OFFSET; let color = if height % 2 == 1 { 1 } else { 2 }; draw_buffer.push( create_wall_draw_cmd(x, y, start_height, color, wall_tex) ); } fn 
create_wall_draw_cmd(x: f32, y: f32, height: f32, color: u8, texture: u64) -> DrawCommand { let color = if color == 1 { let v = 0.5; Color::rgba(v, v, v, 1.0) } else if color == 2{ let v = 0.7; Color::rgba(v, v, v, 1.0) } else { let v = 1.0; Color::rgba(v, v, v, 1.0) }; DrawCommand::new(texture) .position(Vec3::new(x, y, height)) .draw_layer(draw_layers::WALL) .draw_iso(true) .color(color) } pub fn render_hex_bricks(draw_buffer: &mut Vec<DrawCommand>, x: f32, y: f32, height: u8, brick_tex: u64) { let start_height = height as f32 * FLOOR_DEPTH_STEP - WALL_VERT_STEP; draw_buffer.push( create_wall_brick_draw_cmd(x, y, start_height, height, brick_tex) ); } fn create_wall_brick_draw_cmd(x: f32, y: f32, height: f32, color: u8, texture: u64) -> DrawCommand { let color = if color == 1 { let v = 0.3; Color::rgba(v, v, v, 1.0) } else if color == 2 { let v = 0.55; Color::rgba(v, v, v, 1.0) } else if color == 3 { let v = 0.7; Color::rgba(v, v, v, 1.0) } else { let v = 0.80; Color::rgba(v, v, v, 1.0) }; DrawCommand::new(texture) .position(Vec3::new(x, y, height)) .draw_layer(draw_layers::WALL) .draw_iso(true) .color(color) }
use crate::{ shipyard::{ *, }, consts::{ *, }, tetra::{ math::{ Vec3, Vec2, }, graphics::{ Color, }, }, }; use vermarine_lib::{ rendering::{ draw_buffer::{ DrawBuffer, DrawCommand, }, Drawables, }, }; use rand::SeedableRng; use rand::Rng; use rand::rngs::StdRng; pub struct HexTileData { pub ground_height: u8, pub wall_height: u8, pub is_tilled: bool, pub is_grown: bool, } impl HexTileData { pub fn new(height: u8) -> HexTileData { HexTileData { ground_height: height, wall_height: height, is_tilled: false, is_grown: false, } } } pub struct HexMap { pub tiles: Vec<HexTileData>, pub width: usize, pub height: usize, pub position: Vec2<f32>, pub tallest: u8, } impl HexMap { pub fn new(width: usize, height: usize) -> Self { let mut rand = StdRng::from_entropy(); let mut tiles = Vec::<HexTileData>::with_capacity(width * height); let mut tallest = 0; for _ in 0..width * height { let value = rand.gen_range(0, MAX_FLOOR_HEIGHT + 1); let tile = HexTileData::new(value); tiles.push(tile); if value > tallest { tallest = value; } } for _ in 0..5 { for section in 0..(width / 10) { let col = rand.gen_range(0, height + 1); let mut total = 0; for _ in 0..5 { total += rand.gen_range(3, 7 + 1); } total /= 5; for offset in 0..total { if let Some(tile) = tiles.get_mut((col * width) + (section * 10) + offset) { tile.is_tilled = true; } } } } let height_px = { height as f32 * FLOOR_VERT_STEP }; let position = Vec2::new( 0., 360. 
- height_px, ); HexMap { tiles, width, height, position, tallest, } } pub fn pixel_to_hex_raw(&mut self, pos: Vec2<f32>, height_offset: f32) -> (f32, f32) { let mut pos = pos; pos -= Vec2::new(18., 18.); pos.x -= self.position.x; pos.y -= self.position.y; pos.y += height_offset; let size_x = FLOOR_WIDTH / f32::sqrt(3.0); let size_y = 18.66666666666666666; let pos = Vec2::new( pos.x / size_x, pos.y / size_y, ); let b0 = f32::sqrt(3.0) / 3.0; let b1 = -1.0 / 3.0; let b2 = 0.0; let b3 = 2.0 / 3.0; let q: f32 = b0 * pos.x + b1 * pos.y; let r: f32 = b2 * pos.x + b3 * pos.y; (q, r) } #[allow(dead_code)] pub fn pixel_to_hex(&mut self, pos: Vec2<f32>) -> Option<(i32, i32)> { let mut tallest_height: Option<(u8, i32, i32)> = None; for height in 0..=self.tallest { let height_offset = height as f32 * FLOOR_DEPTH_STEP; let (q, r) = self.pixel_to_hex_raw(pos.clone(), height_offset); let (q, r, s) = (q, r, -r -q); let (x, y, _) = cube_round(q, r, s); if x < 0 || x >= self.width as i32 || y < 0 || y >= self.height as i32 { continue; } let tile = &self.tiles[self.width * y as usize + x as usize]; let tile_height = tile.wall_height; if tile_height != height { continue; } if tallest_height.is_none() || tile_height > tallest_height.unwrap().0 { tallest_height = Some((tile_height, x, y)); } } if let Some((_, x, y)) = tallest_height { return Some((x, y)); } None } #[allow(dead_code)] pub fn axial_to_pixel(&mut self, q: i32, r: i32) -> (f32, f32) { let (q, r) = (q as f32, r as f32); let size_x = FLOOR_WIDTH / f32::sqrt(3.0); let size_y = 18.66666666666666666; let x = size_x * (f32::sqrt(3.0) * q + f32::sqrt(3.0) / 2.0 * r); let y = size_y * (3.0 / 2.0 * r); ( x + 18. + self.position.x, y + 18. 
+ self.position.y, ) } } #[allow(dead_code)] pub fn cube_to_offset(q: i32, r: i32) -> (i32, i32) { let col = q + (r - (r & 1)) / 2; let row = r; (col, row) } #[allow(dead_code)] pub fn offset_to_cube(off_x: i32, off_y: i32) -> (i32, i32, i32) { let x = off_x - (off_y - (off_y as i32 & 1)) / 2; let z = off_y; let y = -x-z; (x, y, z) } pub fn cube_round(q: f32, r: f32, s: f32) -> (i32, i32, i32) { let
pub fn render_hex_map(mut draw_buffer: UniqueViewMut<DrawBuffer>, drawables: NonSendSync<UniqueViewMut<Drawables>>, mut map: UniqueViewMut<HexMap>) { draw_buffer.new_command_pool(true); let command_pool = draw_buffer.get_command_pool(); let (q, r) = map.pixel_to_hex_raw(Vec2::zero(), 0.); let startx = (q - 40.0) .max(0.0).min(map.width as f32 - 1.0) as usize; let endx = (q + 40.0) .max(0.0).min(map.width as f32 - 1.0) as usize; let starty = (r - 20.0) .max(0.0).min(map.height as f32 - 1.0) as usize; let endy = (r + 20.0) .max(0.0).min(map.height as f32 - 1.0) as usize; let (top_tex, wall_tex, brick_tex, brick_floor_tex, grown_tex, tilled_tex) = ( drawables.alias[textures::FLOOR], drawables.alias[textures::WALL], drawables.alias[textures::WALL_BRICK], drawables.alias[textures::FLOOR_BRICK], drawables.alias[textures::FLOOR_GROWN], drawables.alias[textures::FLOOR_TILLED], ); for height in 0..=MAX_BRICK_HEIGHT { let mut wall_buffer: Vec<DrawCommand> = Vec::with_capacity(1024); let mut wall_brick_buffer: Vec<DrawCommand> = Vec::with_capacity(1024); let mut top_buffer: Vec<DrawCommand> = Vec::with_capacity(1024); let mut top_brick_buffer: Vec<DrawCommand> = Vec::with_capacity(1024); let mut top_tilled_buffer: Vec<DrawCommand> = Vec::with_capacity(1024); let mut top_grown_buffer: Vec<DrawCommand> = Vec::with_capacity(1024); for y in starty..=endy { for x in startx..=endx { let tile = &map.tiles[map.width * y + x]; if tile.wall_height < height { continue; } let (draw_x, draw_y) = { let offset_x = (FLOOR_WIDTH / 2.0) * y as f32; let mut x = FLOOR_WIDTH * x as f32; x += offset_x; ( x, (y as i32) as f32 * (FLOOR_VERT_STEP) ) }; let (draw_x, draw_y) = ( draw_x + map.position.x, draw_y + map.position.y, ); if height <= tile.ground_height && height != 0 { render_hex_walls(&mut wall_buffer, draw_x, draw_y, height, wall_tex); } else if height > tile.ground_height && height <= tile.wall_height { render_hex_bricks(&mut wall_brick_buffer, draw_x, draw_y, height, brick_tex); } if 
tile.is_grown && height == tile.ground_height { render_hex_top(&mut top_grown_buffer, draw_x, draw_y, tile.ground_height, grown_tex, Color::WHITE); } else if tile.is_tilled && height == tile.ground_height { render_hex_top(&mut top_tilled_buffer, draw_x, draw_y, tile.ground_height, tilled_tex, Color::WHITE); } else if height == tile.ground_height && height == tile.wall_height { render_hex_top(&mut top_buffer, draw_x, draw_y, tile.ground_height, top_tex, Color::WHITE); } else if height == tile.wall_height && height != tile.ground_height { render_hex_brick_top(&mut top_brick_buffer, draw_x, draw_y, tile.wall_height, brick_floor_tex, Color::WHITE); } } } command_pool.commands.extend(&wall_buffer); command_pool.commands.extend(&wall_brick_buffer); command_pool.commands.extend(&top_buffer); command_pool.commands.extend(&top_brick_buffer); command_pool.commands.extend(&top_tilled_buffer); command_pool.commands.extend(&top_grown_buffer); } /*let marker_tex = drawables.alias[textures::MARKER]; for y_tile in starty..=endy { for x_tile in startx..=endx { let (x, y) = map.axial_to_pixel(x_tile as i32, y_tile as i32); let tile = &map.tiles[map.width * y_tile + x_tile]; draw_buffer.draw( DrawCommand::new(marker_tex) .position(Vec3::new( x - 2.0, y - 2.0, tile.wall_height as f32 * FLOOR_DEPTH_STEP )) .draw_iso(true) ); } }*/ draw_buffer.end_command_pool(); } pub fn render_hex_top(draw_buffer: &mut Vec<DrawCommand>, x: f32, y: f32, height: u8, texture: u64, color: Color) { let mut draw_command = create_floor_draw_cmd(x, y, height as f32 * FLOOR_DEPTH_STEP, height, texture); if color != Color::WHITE { draw_command = draw_command.color(color); } draw_buffer.push(draw_command); } fn create_floor_draw_cmd(x: f32, y: f32, height: f32, color: u8, texture: u64) -> DrawCommand { let color = if color == 0 { let v = 0.55; Color::rgba(v, v, v, 1.0) } else if color == 1 { let v = 0.8; Color::rgba(v, v, v, 1.0) } else { let v = 0.95; Color::rgba(v, v, v, 1.0) }; DrawCommand::new(texture) 
.position(Vec3::new(x, y, height)) .draw_layer(draw_layers::FLOOR) .draw_iso(true) .color(color) } pub fn render_hex_brick_top(draw_buffer: &mut Vec<DrawCommand>, x: f32, y: f32, height: u8, texture: u64, color: Color) { let mut draw_command = create_brick_floor_draw_cmd(x, y, height as f32 * FLOOR_DEPTH_STEP, height, texture); if color != Color::WHITE { draw_command = draw_command.color(color); } draw_buffer.push(draw_command); } fn create_brick_floor_draw_cmd(x: f32, y: f32, height: f32, color: u8, texture: u64) -> DrawCommand { let color = if color == 1 { let v = 0.65; Color::rgba(v, v, v, 1.0) } else if color == 2 { let v = 0.8; Color::rgba(v, v, v, 1.0) } else if color == 3 { let v = 0.9; Color::rgba(v, v, v, 1.0) } else { let v = 1.0; Color::rgba(v, v, v, 1.0) }; DrawCommand::new(texture) .position(Vec3::new(x, y, height)) .draw_layer(draw_layers::FLOOR) .draw_iso(true) .color(color) } pub fn render_hex_walls(draw_buffer: &mut Vec<DrawCommand>, x: f32, y: f32, height: u8, wall_tex: u64) { let start_height = height as f32 * FLOOR_DEPTH_STEP - WALL_VERT_OFFSET; let color = if height % 2 == 1 { 1 } else { 2 }; draw_buffer.push( create_wall_draw_cmd(x, y, start_height, color, wall_tex) ); } fn create_wall_draw_cmd(x: f32, y: f32, height: f32, color: u8, texture: u64) -> DrawCommand { let color = if color == 1 { let v = 0.5; Color::rgba(v, v, v, 1.0) } else if color == 2{ let v = 0.7; Color::rgba(v, v, v, 1.0) } else { let v = 1.0; Color::rgba(v, v, v, 1.0) }; DrawCommand::new(texture) .position(Vec3::new(x, y, height)) .draw_layer(draw_layers::WALL) .draw_iso(true) .color(color) } pub fn render_hex_bricks(draw_buffer: &mut Vec<DrawCommand>, x: f32, y: f32, height: u8, brick_tex: u64) { let start_height = height as f32 * FLOOR_DEPTH_STEP - WALL_VERT_STEP; draw_buffer.push( create_wall_brick_draw_cmd(x, y, start_height, height, brick_tex) ); } fn create_wall_brick_draw_cmd(x: f32, y: f32, height: f32, color: u8, texture: u64) -> DrawCommand { let color = if color 
== 1 { let v = 0.3; Color::rgba(v, v, v, 1.0) } else if color == 2 { let v = 0.55; Color::rgba(v, v, v, 1.0) } else if color == 3 { let v = 0.7; Color::rgba(v, v, v, 1.0) } else { let v = 0.80; Color::rgba(v, v, v, 1.0) }; DrawCommand::new(texture) .position(Vec3::new(x, y, height)) .draw_layer(draw_layers::WALL) .draw_iso(true) .color(color) }
mut qi = q.round() as i32; let mut ri = r.round() as i32; let mut si = s.round() as i32; let q_diff = f64::abs(qi as f64 - q as f64); let r_diff = f64::abs(ri as f64 - r as f64); let s_diff = f64::abs(si as f64 - s as f64); if q_diff > r_diff && q_diff > s_diff { qi = -ri - si; } else if r_diff > s_diff { ri = -qi - si; } else { si = -qi - ri; } (qi, ri, si) }
function_block-function_prefixed
[ { "content": "pub fn player_height_visualiser(player: View<Player>, height: View<Height>, mut sprite: ViewMut<Sprite>) {\n\n let (_, height, sprite) = (&player, &height, &mut sprite).iter().next().unwrap();\n\n let mut percent = height.0 / START_HEIGHT;\n\n percent *= percent;\n\n let start = 1.;\n\n let end = 3.;\n\n let offset = percent * (end - start);\n\n let lerped = start + offset;\n\n sprite.0.scale = Vec2::new(lerped, lerped);\n\n}", "file_path": "guacamole-runner/src/systems.rs", "rank": 12, "score": 112708.39337757818 }, { "content": "pub fn scroll_map(mut map: UniqueViewMut<HexMap>) {\n\n map.position.x -= SCROLL_RATE;\n\n}\n\n\n\npub struct SpawnTimer {\n\n cur: i32,\n\n max: i32,\n\n}\n\n\n\nimpl SpawnTimer {\n\n pub fn new(max: i32) -> Self {\n\n Self {\n\n cur: 0,\n\n max,\n\n }\n\n }\n\n}\n\n\n", "file_path": "guacamole-runner/src/systems.rs", "rank": 13, "score": 101369.76501768935 }, { "content": "pub fn player_platform_check(player: View<Player>, transforms: View<Transform>, colliders: View<Collider>, mut heights: ViewMut<Height>) {\n\n let (_, p_transform, p_collider, height) = (&player, &transforms, &colliders, &mut heights).iter().next().unwrap();\n\n height.0 -= FALL_SPEED;\n\n let (p_transform, p_collider) = ((*p_transform).clone(), (*p_collider).clone());\n\n\n\n for (transform, collider, _) in (&transforms, &colliders, !&player).iter() {\n\n if Collider::intersects(collider, transform, &p_collider, &p_transform) {\n\n height.0 = START_HEIGHT;\n\n return;\n\n }\n\n }\n\n}\n\n\n", "file_path": "guacamole-runner/src/systems.rs", "rank": 14, "score": 96630.09579848632 }, { "content": "pub fn grow_ground(transforms: View<Transform>, players: View<Player>, mut map: UniqueViewMut<HexMap>, mut points: UniqueViewMut<Points>) {\n\n use crate::map::cube_round;\n\n for (transform, _) in (&transforms, &players).iter() {\n\n let mut pos = Vec2::new(transform.x as f32, transform.y as f32);\n\n pos.y += 18. * 3.;\n\n pos.x += 18. 
* 3.;\n\n\n\n let adjacent = [\n\n (0, 0),\n\n (1, -1),\n\n (1, 0),\n\n (0, 1),\n\n (-1, 1),\n\n (-1, 0),\n\n (0, -1),\n\n ];\n\n\n\n let (q, r) = map.pixel_to_hex_raw(pos / 2., 0.);\n\n let (q, r, _) = cube_round(q, r, -r - q); \n\n\n", "file_path": "guacamole-runner/src/systems.rs", "rank": 15, "score": 84342.50894262113 }, { "content": "pub fn move_planes(mut transforms: ViewMut<Transform>, planes: View<Plane>) {\n\n for (transform, plane) in (&mut transforms, &planes).iter() {\n\n let movement;\n\n match plane.direction {\n\n Direction::Up => {\n\n movement = Vec2::new(-SCROLL_RATE as f64 * 2., -4.)\n\n }\n\n Direction::Down => {\n\n movement = Vec2::new(-SCROLL_RATE as f64 * 2., 4.)\n\n }\n\n }\n\n\n\n transform.x += movement.x;\n\n transform.y += movement.y;\n\n }\n\n}\n\n\n", "file_path": "guacamole-runner/src/systems.rs", "rank": 16, "score": 80644.62676891897 }, { "content": "pub fn platform_spawner(all_storages: AllStoragesViewMut) {\n\n let spawn = all_storages.run(|mut spawn_timer: UniqueViewMut<SpawnTimer>| {\n\n if spawn_timer.cur <= 0 {\n\n spawn_timer.cur = spawn_timer.max;\n\n true \n\n } else {\n\n spawn_timer.cur -= 1;\n\n false\n\n }\n\n });\n\n\n\n if spawn {\n\n use rand::prelude::*;\n\n\n\n let mut rng = rand::thread_rng();\n\n let (x, mut y) = (rng.gen_range(800, 1280), rng.gen_range(0, 2) * 720);\n\n let direction;\n\n let rotation;\n\n let collider;\n\n if y == 0 {\n", "file_path": "guacamole-runner/src/systems.rs", "rank": 17, "score": 72800.52495779181 }, { "content": "pub fn move_player(ctx: UniqueView<InputContext>, players: View<Player>, mut transforms: ViewMut<Transform>) {\n\n let mut movement: Vec2<f32> = Vec2::zero();\n\n\n\n if input::is_key_down(&ctx, Key::Down) {\n\n movement += Vec2::new(-0.5, 2.);\n\n }\n\n if input::is_key_down(&ctx, Key::Up) {\n\n movement += Vec2::new(-0.5, -2.);\n\n }\n\n\n\n if movement == Vec2::zero() {\n\n if input::is_key_down(&ctx, Key::Left) {\n\n movement += Vec2::new(-5., 0.);\n\n }\n\n else if 
input::is_key_down(&ctx, Key::Right) {\n\n movement += Vec2::new(1., 0.);\n\n }\n\n } else if input::is_key_down(&ctx, Key::Left) {\n\n movement = Vec2::new(-5., 0.);\n\n }\n", "file_path": "guacamole-runner/src/systems.rs", "rank": 18, "score": 71734.77642602562 }, { "content": "fn main() -> tetra::Result {\n\n ContextBuilder::new(\"Guacamole-Runner\", 1280, 720)\n\n .show_mouse(true)\n\n .build()?\n\n .run(|ctx| PushdownAutomaton::new(ctx, Game::new, |_| Ok(())))\n\n}\n\n\n\npub struct Game {\n\n world: World,\n\n background_canvas: Canvas,\n\n text: Text,\n\n}\n\n\n\nimpl Game {\n\n pub fn new(ctx: &mut Context, _: &mut Res) -> tetra::Result<Self> {\n\n let world = World::new();\n\n\n\n let text = Text::new(\"\", Font::vector(ctx, \"./assets/DejaVuSansMono.ttf\", 16.0).unwrap());\n\n\n\n let mut game = Game {\n", "file_path": "guacamole-runner/src/main.rs", "rank": 19, "score": 68000.59091063145 }, { "content": "struct DeadState {\n\n text: Text,\n\n}\n\n\n\nimpl PDAState<Res> for DeadState {\n\n fn update(&mut self, ctx: &mut Context, res: &mut Res) -> tetra::Result<Trans<Res>> {\n\n if input::is_key_down(ctx.input_context(), Key::Space) {\n\n return Ok(Trans::Switch(Box::new(Game::new(ctx, res)?)));\n\n }\n\n\n\n Ok(Trans::None)\n\n }\n\n\n\n fn draw(&mut self, ctx: &mut Context, _resources: &mut Res) -> tetra::Result {\n\n graphics::clear(ctx, Color::rgb(0.45, 0.65, 1.0));\n\n graphics::draw(ctx, &self.text, Vec2::new(400., 300.));\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "guacamole-runner/src/main.rs", "rank": 20, "score": 43398.07154831417 }, { "content": "pub const FLOOR_HEIGHT: f32 = 36.0;\n\npub const FLOOR_VERT_STEP: f32 = 28.0;\n\npub const FLOOR_DEPTH_STEP: f32 = 12.0;\n\n\n\npub const WALL_VERT_OFFSET: f32 = 12.0;\n\npub const WALL_VERT_STEP: f32 = 12.0;\n\n\n\npub const CAM_SPEED: f32 = 5.0;\n\npub const PLAYER_SPEED: f32 = 5.0;\n\n\n\npub const MAX_FLOOR_HEIGHT: u8 = 2;\n\npub const MAX_BRICK_HEIGHT: u8 = 4;\n\n\n\npub const WIDTH: usize = 
100000;\n\npub const HEIGHT: usize = 10;\n\n\n\npub const CLEAR_COL: crate::tetra::graphics::Color = crate::tetra::graphics::Color::rgb(0.392, 0.584, 0.929);\n\n\n\npub const SCROLL_RATE: f32 = 4.;\n\n\n\npub const FALL_SPEED: f32 = 0.01;\n\npub const START_HEIGHT: f32 = 1.0;\n\n\n\npub const POINTS_GROW: u32 = 1;", "file_path": "guacamole-runner/src/consts.rs", "rank": 21, "score": 24038.34509557577 }, { "content": "pub mod textures {\n\n pub const FLOOR: &'static str = \"hex-grass\";\n\n pub const FLOOR_BRICK: &'static str = \"hex-stone-floor\";\n\n pub const FLOOR_TILLED: &'static str = \"tilled_floor\";\n\n pub const FLOOR_GROWN: &'static str = \"grown_floor\";\n\n pub const WALL: &'static str = \"hex-dirt\";\n\n pub const WALL_BRICK: &'static str = \"hex-stone\";\n\n pub const MARKER: &'static str = \"marker\";\n\n pub const PLAYER: &'static str = \"player\";\n\n pub const AEROPLANE: &'static str = \"aeroplane\";\n\n}\n\n\n\npub mod draw_layers {\n\n pub const PLAYER: f32 = 10.0;\n\n pub const PLANE: f32 = 5.0;\n\n pub const WALL: f32 = 1.0;\n\n pub const FLOOR: f32 = 0.0;\n\n}\n\n\n\npub const FLOOR_WIDTH: f32 = 36.0;\n", "file_path": "guacamole-runner/src/consts.rs", "rank": 22, "score": 24025.438341235516 }, { "content": " for (q_mod, r_mod) in &adjacent {\n\n let r = r + r_mod;\n\n let q = q + q_mod;\n\n\n\n if q >= WIDTH as i32 || q < 0 || r >= HEIGHT as i32 || r < 0 {\n\n continue;\n\n }\n\n\n\n if let Some(tile) = map.tiles.get_mut((r * WIDTH as i32 + q) as usize) {\n\n if tile.is_tilled && !tile.is_grown{\n\n tile.is_grown = true;\n\n points.0 += POINTS_GROW;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "guacamole-runner/src/systems.rs", "rank": 33, "score": 14.701082361103637 }, { "content": "\n\n#[derive(Clone)]\n\npub struct Collider {\n\n x: i32,\n\n y: i32,\n\n width: u32,\n\n height: u32,\n\n}\n\n\n\nimpl Collider {\n\n pub fn new(x: i32, y: i32, width: u32, height: u32) -> Self {\n\n Self {\n\n x,\n\n y,\n\n width,\n\n height,\n\n }\n\n 
}\n\n\n\n pub fn intersects(this: &Collider, this_pos: &Transform, other: &Collider, other_pos: &Transform) -> bool {\n", "file_path": "guacamole-runner/src/components.rs", "rank": 34, "score": 14.12319057766602 }, { "content": " pub fn ymin(&self, pos: &Transform) -> i32 {\n\n self.y + pos.y as i32\n\n }\n\n\n\n pub fn ymax(&self, pos: &Transform) -> i32 {\n\n self.y + self.height as i32 + pos.y as i32\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Height(pub f32);\n\n\n\npub struct Points(pub u32);\n\n\n\nimpl Points {\n\n pub fn new() -> Self {\n\n Self(0)\n\n }\n\n}", "file_path": "guacamole-runner/src/components.rs", "rank": 35, "score": 12.312338898635819 }, { "content": "use crate::{\n\n shipyard::{\n\n *,\n\n },\n\n tetra::{\n\n InputContext,\n\n input::{\n\n self,\n\n Key,\n\n },\n\n math::{\n\n Vec2,\n\n },\n\n },\n\n consts::{\n\n *,\n\n },\n\n map::{\n\n HexMap,\n\n },\n", "file_path": "guacamole-runner/src/systems.rs", "rank": 36, "score": 12.170678349882857 }, { "content": "use vermarine_lib::{\n\n rendering::{\n\n RenderingWorkloadCreator,\n\n RenderingWorkloadSystems,\n\n draw_buffer::{\n\n DrawBuffer,\n\n DrawCommand,\n\n },\n\n Drawables,\n\n Sprite,\n\n },\n\n components::{\n\n Transform,\n\n },\n\n tetra::{\n\n self,\n\n ContextBuilder,\n\n Context,\n\n graphics::{\n\n Color,\n", "file_path": "guacamole-runner/src/main.rs", "rank": 38, "score": 10.47122647965988 }, { "content": " .origin(Vec2::new(20., 18.))\n\n ))\n\n .with(Transform::new(200., 360.))\n\n .with(Player {})\n\n .with(Collider::new(-20 * 3, -8 * 3, 36 * 3, 16 * 3))\n\n .with(Height(START_HEIGHT))\n\n .build();\n\n }\n\n\n\n fn draw_background(&mut self, ctx: &mut Context) {\n\n graphics::set_canvas(ctx, &self.background_canvas);\n\n graphics::clear(ctx, CLEAR_COL);\n\n\n\n self.world.run(render_hex_map);\n\n self.world.run_with_data(DrawBuffer::flush, ctx);\n\n graphics::flush(ctx);\n\n graphics::reset_canvas(ctx);\n\n\n\n graphics::clear(ctx, CLEAR_COL);\n\n\n", "file_path": 
"guacamole-runner/src/main.rs", "rank": 40, "score": 9.401055795052274 }, { "content": "use vermarine_lib::components::Transform;\n\n\n\npub struct Player();\n\n\n\npub enum Direction {\n\n Up,\n\n Down,\n\n}\n\n\n\npub struct Plane {\n\n pub direction: Direction, \n\n}\n\n\n\nimpl Plane {\n\n pub fn new(direction: Direction) -> Self {\n\n Self {\n\n direction,\n\n }\n\n }\n\n}\n", "file_path": "guacamole-runner/src/components.rs", "rank": 42, "score": 8.480110531538559 }, { "content": " .with_rendering_systems()\n\n .build();\n\n\n\n self.world.run(|mut camera: UniqueViewMut<Camera>| {\n\n camera.position = Vec2::new(640., 360.);\n\n });\n\n\n\n let (player_tex, _) = self.world.run(|drawables: NonSendSync<UniqueView<Drawables>>| {\n\n (\n\n drawables.alias[textures::PLAYER],\n\n drawables.alias[textures::AEROPLANE],\n\n )\n\n });\n\n\n\n self.world\n\n .entity_builder()\n\n .with(Sprite::from_command(\n\n DrawCommand::new(player_tex)\n\n .scale(Vec2::new(3., 3.))\n\n .draw_layer(draw_layers::PLAYER)\n", "file_path": "guacamole-runner/src/main.rs", "rank": 43, "score": 7.711274632714248 }, { "content": "\n\nimpl DeadState {\n\n pub fn new(ctx: &mut Context, points: u32, distance: u32) -> tetra::Result<Self> {\n\n Ok(Self {\n\n text: Text::new(\n\n\n\nformat!(\n\n\"\n\n You landed with {} points with a distance of {}\n\n Press <SPACEBAR> to restart\n\n\", points, distance),\n\n Font::vector(ctx, \"./assets/DejaVuSansMono.ttf\", 16.0)?\n\n )\n\n })\n\n }\n\n}", "file_path": "guacamole-runner/src/main.rs", "rank": 44, "score": 7.144909625008612 }, { "content": " components::{\n\n Player,\n\n Plane,\n\n Direction,\n\n Collider,\n\n Height,\n\n Points,\n\n },\n\n};\n\n\n\nuse vermarine_lib::{\n\n components::{\n\n Transform,\n\n },\n\n rendering::{\n\n Sprite,\n\n Drawables,\n\n draw_buffer::{\n\n DrawCommand,\n\n },\n\n },\n\n};\n\n\n", "file_path": "guacamole-runner/src/systems.rs", "rank": 45, "score": 6.9010258952312356 }, { "content": " graphics::draw(ctx, 
&self.background_canvas, \n\n DrawParams::new()\n\n .scale(Vec2::new(2., 2.))\n\n );\n\n graphics::flush(ctx);\n\n }\n\n}\n\n\n\nimpl PDAState<Res> for Game {\n\n fn update(&mut self, ctx: &mut Context, _res: &mut Res) -> tetra::Result<Trans<Res>> {\n\n let input_ctx = ctx.input_context();\n\n self.world.run(|mut ctx: UniqueViewMut<InputContext>| {\n\n *ctx = (*input_ctx).clone();\n\n });\n\n\n\n self.world.run(systems::scroll_map);\n\n self.world.run(systems::move_player);\n\n self.world.run(systems::platform_spawner);\n\n self.world.run(systems::move_planes);\n\n self.world.run(systems::grow_ground);\n", "file_path": "guacamole-runner/src/main.rs", "rank": 46, "score": 6.150378636561679 }, { "content": " Camera,\n\n self,\n\n Canvas,\n\n DrawParams,\n\n text::{\n\n Text,\n\n Font,\n\n }\n\n },\n\n input::{\n\n self,\n\n InputContext,\n\n Key,\n\n },\n\n math::{\n\n Vec2,\n\n Mat4,\n\n },\n\n },\n\n shipyard::{\n", "file_path": "guacamole-runner/src/main.rs", "rank": 47, "score": 5.8337212316951 }, { "content": " if (this.xmin(this_pos) >= other.xmin(other_pos) && this.xmin(this_pos) <= other.xmax(other_pos)) ||\n\n (this.xmax(this_pos) >= other.xmin(other_pos) && this.xmax(this_pos) <= other.xmax(other_pos)) || \n\n (this.xmin(this_pos) <= other.xmin(other_pos) && this.xmax(this_pos) >= other.xmax(other_pos)) {\n\n if (this.ymin(this_pos) >= other.ymin(other_pos) && this.ymin(this_pos) <= other.ymax(other_pos)) ||\n\n (this.ymax(this_pos) >= other.ymin(other_pos) && this.ymax(this_pos) <= other.ymax(other_pos)) || \n\n (this.ymin(this_pos) <= other.ymin(other_pos) && this.ymax(this_pos) >= other.ymax(other_pos)) {\n\n return true;\n\n }\n\n }\n\n false\n\n }\n\n\n\n pub fn xmin(&self, pos: &Transform) -> i32 {\n\n self.x + pos.x as i32\n\n }\n\n\n\n pub fn xmax(&self, pos: &Transform) -> i32 {\n\n self.x + self.width as i32 + pos.x as i32\n\n }\n\n\n", "file_path": "guacamole-runner/src/components.rs", "rank": 48, "score": 5.638525724693191 }, { "content": " fn 
draw(&mut self, ctx: &mut Context, _res: &mut Res) -> tetra::Result {\n\n self.world.run(|mut draw_buff: UniqueViewMut<DrawBuffer>| {\n\n draw_buff.transform_mat = Mat4::identity();\n\n });\n\n\n\n self.draw_background(ctx);\n\n\n\n self.world.run(|mut camera: UniqueViewMut<Camera>, mut draw_buff: UniqueViewMut<DrawBuffer>| {\n\n camera.update();\n\n draw_buff.transform_mat = camera.as_matrix();\n\n });\n\n\n\n self.world.run_workload(\"Rendering\");\n\n self.world.run_with_data(DrawBuffer::flush, ctx);\n\n\n\n self.world.run_with_data(|text: &mut Text, points: UniqueView<Points>| {\n\n text.set_content(format!(\"Points: {}\", points.0))\n\n }, &mut self.text);\n\n graphics::draw(ctx, &self.text, Vec2::new(40., 20.));\n\n\n", "file_path": "guacamole-runner/src/main.rs", "rank": 49, "score": 5.625422152746414 }, { "content": "mod systems;\n\n#[allow(dead_code)]\n\nmod consts;\n\nmod map;\n\nmod components;\n\n\n\nuse components::{\n\n Player,\n\n Height,\n\n Collider,\n\n Points,\n\n};\n\n\n\nuse map::{\n\n render_hex_map,\n\n HexMap,\n\n};\n\n\n\nuse consts::*;\n\n\n", "file_path": "guacamole-runner/src/main.rs", "rank": 50, "score": 5.613326627057319 }, { "content": " world,\n\n background_canvas: Canvas::new(ctx, 640, 360)\n\n .expect(\"Could not make canvas\"),\n\n text,\n\n };\n\n\n\n game.init_world(ctx);\n\n\n\n Ok(game)\n\n }\n\n\n\n fn init_world(&mut self, ctx: &mut Context) {\n\n self.world.add_unique(map::HexMap::new(WIDTH, HEIGHT));\n\n self.world.add_unique((*ctx.input_context()).clone());\n\n self.world.add_unique(systems::SpawnTimer::new(70));\n\n self.world.add_unique(Points::new());\n\n self.world.add_unique_non_send_sync(Drawables::new(ctx).unwrap());\n\n\n\n self.world\n\n .add_rendering_workload(ctx)\n", "file_path": "guacamole-runner/src/main.rs", "rank": 52, "score": 5.1358501457595676 }, { "content": " self.world.run(systems::player_platform_check);\n\n self.world.run(systems::player_height_visualiser);\n\n\n\n let trans = 
self.world.run(|player: View<Player>, height: View<Height>| {\n\n let (_, height) = (&player, &height).iter().next().unwrap();\n\n if height.0 <= 0. {\n\n let trans = self.world.run(|points: UniqueView<Points>, map: UniqueView<HexMap>| {\n\n let x = -map.position.x / FLOOR_WIDTH;\n\n let trans = Trans::Replace(Box::new(DeadState::new(ctx, points.0, x as u32).unwrap()));\n\n trans\n\n });\n\n trans\n\n } else {\n\n Trans::None\n\n }\n\n });\n\n\n\n Ok(trans)\n\n }\n\n\n", "file_path": "guacamole-runner/src/main.rs", "rank": 53, "score": 5.131973610864078 }, { "content": "### ARROW KEYS TO MOVE\n\n\n\n### [Guacamole Runner][guacamole]\n\n\n\n![Gameplay image](/guacamole.gif)\n\n\n\n[Guacamole Runner][guacamole] is a small game made with\n\n[Tetra][tetra] and [Shipyard][shipyard] in approximately 2 days by [@EllenNyan][twitter]. \n\nThe game's concept is that the player is constantly falling \n\nand must jump off planes to stay in the air. \n\nWhen they go over the top of the dirt tiles \n\nthey plant flowers which gives them points.\n\n\n\n[twitter]: https://twitter.com/EllenNyan0214\n\n[tetra]: https://github.com/17cupsofcoffee/Tetra\n\n[shipyard]: https://github.com/leudz/shipyard\n\n[guacamole]: https://github.com/EllenNyan/guacamole-runner\n\n\n\n### Building\n\nTo build Guacamole Runner you should follow [Tetra's installation guide][tetra_install]. \n\nClone this repository recursively. \n\nRun ``cargo run --release``. 
\n\n\n\n[tetra_install]: https://tetra.seventeencups.net/installation/\n", "file_path": "README.md", "rank": 54, "score": 5.0686694877123575 }, { "content": " y = -36;\n\n direction = Direction::Down;\n\n rotation = std::f32::consts::PI;\n\n collider = Collider::new(-32 * 2, -10 * 2, 64 * 2, 26 * 2);\n\n } else {\n\n direction = Direction::Up;\n\n rotation = 0.;\n\n y += 36;\n\n collider = Collider::new(-32 * 2, -16 * 2, 64 * 2, 26 * 2);\n\n }\n\n\n\n let tex = all_storages.run(|drawables: NonSendSync<UniqueView<Drawables>>| {\n\n drawables.alias[textures::AEROPLANE]\n\n });\n\n\n\n all_storages\n\n .entity_builder()\n\n .with(Transform::new(x as f64, y as f64))\n\n .with(Sprite::from_command(\n\n DrawCommand::new(tex)\n", "file_path": "guacamole-runner/src/systems.rs", "rank": 55, "score": 3.4275863200904517 }, { "content": "\n\n if movement != Vec2::new(0.0, 0.0) {\n\n movement *= PLAYER_SPEED;\n\n movement.x = movement.x.floor();\n\n movement.y = movement.y.floor(); \n\n }\n\n\n\n if let Some((_, transform)) = (&players, &mut transforms).iter().next() {\n\n transform.x += movement.x as f64;\n\n transform.y += movement.y as f64;\n\n\n\n transform.x = transform.x.max(-8. + -72. + (20. * 3.)).min(1240. - 62. + (20. * 3.));\n\n transform.y = transform.y.max(-35. + (18. * 3.)).min(647. + (18. 
* 3.));\n\n }\n\n}\n\n\n", "file_path": "guacamole-runner/src/systems.rs", "rank": 56, "score": 2.7211614730784737 }, { "content": " tetra::window::set_title(\n\n ctx,\n\n &format!(\n\n \"Guacamole-Runner - {:.0} FPS\",\n\n tetra::time::get_fps(ctx)\n\n ),\n\n );\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n\n", "file_path": "guacamole-runner/src/main.rs", "rank": 57, "score": 2.4803643773094066 }, { "content": " .scale(Vec2::new(2., 2.))\n\n .draw_layer(draw_layers::PLANE)\n\n .rotation(rotation)\n\n .origin(Vec2::new(36., 36.))\n\n ))\n\n .with(Plane::new(direction))\n\n .with(collider)\n\n .build();\n\n }\n\n}\n\n\n", "file_path": "guacamole-runner/src/systems.rs", "rank": 58, "score": 2.3039196453791515 } ]
Rust
src/grammar/grammar.rs
zuyumi/compiler-course-helper
093b0d37073703d8e87e29e140a7bec29008c93b
use std::collections::{HashMap, HashSet}; #[derive(Debug, Clone)] pub struct NonTerminal { pub index: usize, pub name: String, pub first: HashSet<usize>, pub follow: HashSet<usize>, pub nullable: bool, pub productions: Vec<Vec<usize>>, } impl NonTerminal { pub fn new(index: usize, name: String) -> Self { Self { index, name, first: HashSet::new(), follow: HashSet::new(), nullable: false, productions: Vec::new(), } } } #[derive(Debug, Clone)] pub enum Symbol { NonTerminal(NonTerminal), Terminal(String), } impl Symbol { pub fn non_terminal(&self) -> Option<&NonTerminal> { match self { Symbol::NonTerminal(e) => Some(e), Symbol::Terminal(_) => None, } } pub fn mut_non_terminal(&mut self) -> Option<&mut NonTerminal> { match self { Symbol::NonTerminal(e) => Some(e), Symbol::Terminal(_) => None, } } } #[derive(Debug, Clone)] pub struct Grammar { valid_nullable_first_follow: bool, pub symbols: Vec<Symbol>, pub symbol_table: HashMap<String, usize>, pub start_symbol: Option<usize>, } impl Grammar { pub fn new() -> Self { let mut g = Self { valid_nullable_first_follow: false, symbols: Vec::new(), symbol_table: HashMap::new(), start_symbol: None, }; let e_idx = g.add_non_terminal(super::EPSILON); g.symbols[e_idx].mut_non_terminal().unwrap().nullable = true; g.symbol_table.insert("ε".to_string(), e_idx); g.add_terminal(super::END_MARK.to_string()); g } pub fn get_symbol_by_name(&self, name: &str) -> &Symbol { &self.symbols[self.symbol_table[name]] } pub fn terminal_iter(&self) -> impl Iterator<Item = &String> { self.symbols.iter().filter_map(|s| { if let Symbol::Terminal(name) = s { Some(name) } else { None } }) } pub fn non_terminal_iter(&self) -> impl Iterator<Item = &NonTerminal> { self.symbols.iter().filter_map(|s| s.non_terminal()).skip(1) } pub fn non_terminal_iter_mut(&mut self) -> impl Iterator<Item = &mut NonTerminal> { self.symbols .iter_mut() .filter_map(|s| s.mut_non_terminal()) .skip(1) } pub fn get_symbol_index(&self, name: &str) -> Option<usize> { 
self.symbol_table.get(name).cloned() } pub fn add_non_terminal(&mut self, name: &str) -> usize { let idx = self.symbols.len(); self.symbols .push(Symbol::NonTerminal(NonTerminal::new(idx, name.to_string()))); self.symbol_table.insert(name.to_string(), idx); idx } pub fn add_terminal(&mut self, name: String) -> usize { let idx = self.symbols.len(); self.symbols.push(Symbol::Terminal(name.clone())); self.symbol_table.insert(name, idx); idx } pub fn add_production(&mut self, left: usize, right: Vec<usize>) { self.symbols[left] .mut_non_terminal() .unwrap() .productions .push(right); } pub fn get_symbol_name(&self, index: usize) -> &str { match &self.symbols[index] { Symbol::NonTerminal(e) => e.name.as_str(), Symbol::Terminal(e) => e.as_str(), } } pub fn get_symbol_prime_name(&self, mut name: String) -> String { while self.symbol_table.contains_key(&name) { name.push('\''); } name } pub fn invalidate_nullable_first_follow(&mut self) { self.valid_nullable_first_follow = false; self.reset_nullable_first_follow(); } pub fn is_nullable_first_follow_valid(&self) -> bool { self.valid_nullable_first_follow } pub fn validate_nullable_first_follow(&mut self) { self.valid_nullable_first_follow = true; } pub fn production_to_vec_str(&self, production: &Vec<usize>) -> Vec<&str> { production .iter() .map(|idx| self.get_symbol_name(*idx)) .collect() } }
use std::collections::{HashMap, HashSet}; #[derive(Debug, Clone)] pub struct NonTerminal { pub index: usize, pub name: String, pub first: HashSet<usize>, pub follow: HashSet<usize>, pub nullable: bool, pub productions: Vec<Vec<usize>>, } impl NonTerminal { pub fn new(index: usize, name: String) -> Self { Self { index, name, first: HashSet::new(), follow: HashSet::new(), nullable: false, productions: Vec::new(), } } } #[derive(Debug, Clone)] pub enum Symbol { NonTerminal(NonTerminal), Terminal(String), } impl Symbol { pub fn non_terminal(&self) -> Option<&NonTerminal> { match self { Symbol::NonTerminal(e) => Some(e), Symbol::Terminal(_) => None, } } pub fn mut_non_terminal(&mut self) -> Option<&mut NonTerminal> { match self { Symbol::NonTerminal(e) => Some(e), Symbol::Terminal(_) => None, } } } #[derive(Debug, Clone)] pub struct Grammar { valid_nullable_first_follow: bool, pub symbols: Vec<Symbol>, pub symbol_table: HashMap<String, usize>, pub start_symbol: Option<usize>, } impl Grammar { pub fn new() -> Self { let mut g = Self { valid_nullable_first_follow: false, symbols: Vec::new(), symbol_table: HashMap::new(), start_symbol: None, }; let e_idx = g.add_non_terminal(super::EPSILON); g.symbols[e_idx].mut_non_terminal().unwrap().nullable = true; g.symbol_table.insert("ε".to_string(), e_idx); g.add_terminal(super::END_MARK.to_string()); g } pub fn get_symbol_by_name(&self, name: &str) -> &Symbol { &self.symbols[self.symbol_table[name]] } pub fn terminal_iter(&self) -> impl Iterator<Item = &String> { self.symbols.iter().filter_map(|s| { if let Symbol::Terminal(name) = s { Some(name) } else { None } }) } pub fn non_terminal_iter(&self) -> impl Iterator<Item = &NonTerminal> { self.symbols.iter().filter_map(|s| s.non_terminal()).skip(1) } pub fn non_terminal_iter_mut(&mut self) -> impl Iterator<Item = &mut NonTerminal> { self.symbols .iter_mut() .filter_map(|s| s.mut_non_terminal()) .skip(1) } pub fn get_symbol_index(&self, name: &str) -> Option<usize> { 
self.symbol_table.get(name).cloned() } pub fn add_non_terminal(&mut self, name: &str) -> usize { let idx = self.symbols.len(); self.symbols .push(Symbol::NonTerminal(NonTerminal::new(idx, name.to_string()))); self.symbol_table.insert(name.to_string(), idx); idx } pub fn add_terminal(&mut self, name: String) -> usize { let idx = self.symbols.len(); self.symbols.push(Symbol::Terminal(name.clone())); self.symbol_table.insert(name, idx); idx } pub fn add_production(&mut self, left: usize, right: Vec<usize>) { self.symbols[left] .mut_non_terminal() .unwrap() .productions .push(right); } pub fn get_symbol_name(&self, index: usize) -> &str { match &self.symbols[index] { Symbol::NonTerminal(e) => e.name.as_str(), Symbol::Terminal(e) => e.as_str(), } } pub fn get_symbol_prime_name(&self, mut name: String) -> String { while self.symbol_table.contains_key(&name) { name.push('\''); } name } pub fn invalidate_nullable_first_follow(&mut self) { self.valid_nullable_first_follow = false; self.reset_nullable_first_follow(); } pub fn is_nullable_first_follow_valid(&self) -> bool { self.valid_nullable_first_follow } pub fn validate_nullable_first_follow(&mut self) { self.valid_nullable_first_follow = true; }
}
pub fn production_to_vec_str(&self, production: &Vec<usize>) -> Vec<&str> { production .iter() .map(|idx| self.get_symbol_name(*idx)) .collect() }
function_block-full_function
[ { "content": "#[wasm_bindgen]\n\npub fn wasm_grammar_to_output(json: &str) -> String {\n\n let args: WasmArgs = serde_json::from_str(json).unwrap();\n\n let result = grammar_to_output(&args.grammar, &args.actions, &args.outputs);\n\n serde_json::to_string(&result).unwrap()\n\n}\n\n\n\n#[derive(Clone, Copy, Serialize, Deserialize)]\n\npub enum Action {\n\n EliminateLeftRecursion,\n\n}\n\n\n\n#[derive(Clone, Copy, Serialize, Deserialize)]\n\npub enum Format {\n\n Plain,\n\n LaTeX,\n\n JSON,\n\n}\n\n\n\n#[derive(Clone, Copy, Serialize, Deserialize)]\n\npub enum Output {\n", "file_path": "src/lib.rs", "rank": 0, "score": 105170.03309091684 }, { "content": " production.iter().all(|s| match &self.symbols[*s] {\n\n Symbol::Terminal(_) => false,\n\n Symbol::NonTerminal(e) => e.nullable,\n\n })\n\n })\n\n }\n\n };\n\n\n\n if nullable {\n\n self.symbols[i].mut_non_terminal().unwrap().nullable = true;\n\n changed = true;\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn calculate_first_for_production(&self, production: &[usize]) -> HashSet<usize> {\n\n let mut first: HashSet<usize> = HashSet::new();\n\n for (idx, symbol) in production.iter().map(|i| (*i, &self.symbols[*i])) {\n\n match symbol {\n", "file_path": "src/grammar/nullable_first_follow.rs", "rank": 1, "score": 84242.24467947884 }, { "content": "use std::collections::HashSet;\n\n\n\nuse super::{grammar::Symbol, Grammar, END_MARK};\n\n\n\nimpl Grammar {\n\n pub fn calculate_nullable_first_follow(&mut self) {\n\n if let Some(start_idx) = self.start_symbol {\n\n self.symbols[start_idx]\n\n .mut_non_terminal()\n\n .unwrap()\n\n .follow\n\n .insert(self.symbol_table[END_MARK]);\n\n self.calculate_nullable();\n\n self.calculate_first();\n\n self.calculate_follow();\n\n\n\n self.validate_nullable_first_follow();\n\n }\n\n }\n\n\n", "file_path": "src/grammar/nullable_first_follow.rs", "rank": 2, "score": 84241.41573663833 }, { "content": " }\n\n\n\n fn calculate_follow(&mut self) {\n\n let mut changed = true;\n\n while changed {\n\n 
changed = false;\n\n for i in 0..self.symbols.len() {\n\n if let Symbol::Terminal(_) = self.symbols[i] {\n\n continue;\n\n }\n\n\n\n let productions = self.symbols[i].non_terminal().unwrap().productions.clone();\n\n for production in productions {\n\n let mut first: HashSet<usize> = HashSet::new();\n\n let mut left_follow =\n\n Some(self.symbols[i].non_terminal().unwrap().follow.clone());\n\n\n\n for i in (0..production.len()).rev() {\n\n match &mut self.symbols[production[i]] {\n\n Symbol::Terminal(_) => {\n", "file_path": "src/grammar/nullable_first_follow.rs", "rank": 3, "score": 84239.57113772481 }, { "content": " }\n\n }\n\n\n\n pub fn calculate_follow_for_production(&self, production: &Vec<usize>) -> HashSet<usize> {\n\n let mut follow = HashSet::new();\n\n for idx in production.iter().rev() {\n\n match &self.symbols[*idx] {\n\n Symbol::Terminal(_) => {\n\n follow.insert(*idx);\n\n break;\n\n }\n\n Symbol::NonTerminal(nt) => {\n\n follow.extend(nt.follow.iter().cloned());\n\n if !nt.nullable {\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n follow\n", "file_path": "src/grammar/nullable_first_follow.rs", "rank": 4, "score": 84239.32652011527 }, { "content": " pub fn reset_nullable_first_follow(&mut self) {\n\n for nt in self.non_terminal_iter_mut() {\n\n nt.nullable = false;\n\n nt.first = HashSet::new();\n\n nt.follow = HashSet::new();\n\n }\n\n }\n\n\n\n fn calculate_nullable(&mut self) {\n\n let mut changed = true;\n\n while changed {\n\n changed = false;\n\n for i in 0..self.symbols.len() {\n\n let nullable: bool = match &self.symbols[i] {\n\n Symbol::Terminal(_) => continue,\n\n Symbol::NonTerminal(nt) => {\n\n if nt.nullable {\n\n continue;\n\n }\n\n nt.productions.iter().any(|production| {\n", "file_path": "src/grammar/nullable_first_follow.rs", "rank": 5, "score": 84238.14333068431 }, { "content": " let first: HashSet<usize> = match &self.symbols[i] {\n\n Symbol::Terminal(_) => continue,\n\n Symbol::NonTerminal(nt) => {\n\n nt.productions\n\n .iter()\n\n 
.fold(HashSet::new(), |mut first, production| {\n\n first.extend(\n\n self.calculate_first_for_production(production).into_iter(),\n\n );\n\n first\n\n })\n\n }\n\n };\n\n\n\n let nt = self.symbols[i].mut_non_terminal().unwrap();\n\n if nt.first.len() != first.len() {\n\n changed = true;\n\n nt.first = first;\n\n }\n\n }\n", "file_path": "src/grammar/nullable_first_follow.rs", "rank": 6, "score": 84232.59220189748 }, { "content": " first = HashSet::new();\n\n first.insert(production[i]);\n\n left_follow = None;\n\n }\n\n Symbol::NonTerminal(nt) => {\n\n let len = nt.follow.len();\n\n\n\n if let Some(left_follow) = &left_follow {\n\n nt.follow.extend(left_follow.iter().cloned());\n\n }\n\n nt.follow.extend(first.iter().cloned());\n\n changed |= len != nt.follow.len();\n\n\n\n if !nt.nullable {\n\n first = nt.first.clone();\n\n left_follow = None;\n\n } else {\n\n first.extend(nt.first.iter().cloned());\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/grammar/nullable_first_follow.rs", "rank": 7, "score": 84232.28031021214 }, { "content": " Symbol::Terminal(_) => {\n\n first.insert(idx);\n\n break;\n\n }\n\n Symbol::NonTerminal(nt) => {\n\n first.extend(nt.first.iter().cloned());\n\n if !nt.nullable {\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n first\n\n }\n\n\n\n fn calculate_first(&mut self) {\n\n let mut changed = true;\n\n while changed {\n\n changed = false;\n\n for i in 0..self.symbols.len() {\n", "file_path": "src/grammar/nullable_first_follow.rs", "rank": 8, "score": 84230.04478405757 }, { "content": "fn production_right_to_latex<'a>(\n\n production: impl Iterator<Item = &'a str>,\n\n terminal_set: &HashSet<&str>,\n\n) -> String {\n\n production\n\n .map(|s| {\n\n if terminal_set.contains(s) {\n\n format!(\"\\\\text{{{}}}\", escape::tex(s))\n\n } else {\n\n escape::tex(s).to_string()\n\n }\n\n })\n\n .collect::<Vec<_>>()\n\n .join(\" \\\\ \")\n\n .replace(super::EPSILON, \"\\\\epsilon\")\n\n}\n\n\n\n#[derive(Debug, Clone, 
Serialize)]\n\npub struct ProductionOutput<'a> {\n\n pub left: &'a str,\n", "file_path": "src/grammar/pretty_print.rs", "rank": 9, "score": 83720.11330314579 }, { "content": "pub fn grammar_to_output(\n\n grammar: &str,\n\n actions: &[Action],\n\n outputs: &[Output],\n\n) -> Result<Vec<Result<String, String>>, String> {\n\n let mut ret: Vec<Result<String, String>> = Vec::new();\n\n\n\n let mut g = match Grammar::parse(grammar) {\n\n Ok(g) => g,\n\n Err(e) => {\n\n return Err(e);\n\n }\n\n };\n\n\n\n for action in actions {\n\n match action {\n\n Action::EliminateLeftRecursion => g.eliminate_left_recursion(),\n\n }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 10, "score": 74785.47396929943 }, { "content": "#[derive(Serialize)]\n\nstruct NonTerminalOutput<'a> {\n\n name: &'a str,\n\n nullable: bool,\n\n first: Vec<&'a str>,\n\n follow: Vec<&'a str>,\n\n}\n\n\n\nimpl NonTerminalOutput<'_> {\n\n fn to_plaintext(&self) -> String {\n\n format!(\n\n \"{} | {} | {} | {}\",\n\n self.name,\n\n self.nullable,\n\n self.first.join(\", \"),\n\n self.follow.join(\", \")\n\n )\n\n }\n\n fn to_latex(&self) -> String {\n\n fn f(a: &Vec<&str>) -> String {\n\n a.iter()\n", "file_path": "src/grammar/pretty_print.rs", "rank": 11, "score": 41982.30876001238 }, { "content": "use std::collections::{HashMap, HashSet};\n\n\n\nuse super::{grammar::NonTerminal, Grammar, EPSILON};\n\n\n\nimpl Grammar {\n\n pub fn eliminate_left_recursion(&mut self) {\n\n if !self.is_nullable_first_follow_valid() {\n\n self.calculate_nullable_first_follow();\n\n }\n\n\n\n let epsilon_idx = self.get_symbol_index(EPSILON).unwrap();\n\n let offset = self.symbols.len();\n\n\n\n let mut non_terminals = self.non_terminal_iter_mut().collect::<Vec<_>>();\n\n let map: HashMap<usize, usize> =\n\n non_terminals\n\n .iter()\n\n .enumerate()\n\n .fold(HashMap::new(), |mut map, (i, nt)| {\n\n map.insert(nt.index, i);\n", "file_path": "src/grammar/eliminate_left_recursion.rs", "rank": 12, "score": 38694.857672171245 }, { 
"content": " }\n\n\n\n for mut nt in new_non_terminals {\n\n nt.name = self.get_symbol_prime_name(nt.name);\n\n self.symbol_table.insert(nt.name.clone(), nt.index);\n\n self.symbols.push(super::grammar::Symbol::NonTerminal(nt));\n\n }\n\n\n\n self.invalidate_nullable_first_follow();\n\n self.calculate_nullable_first_follow();\n\n }\n\n}\n", "file_path": "src/grammar/eliminate_left_recursion.rs", "rank": 13, "score": 38689.883525911755 }, { "content": " }\n\n\n\n if recursive_productions.len() > 0 {\n\n let nt_prime_idx = offset + new_non_terminals.len();\n\n for production in &mut nt.productions {\n\n production.push(nt_prime_idx);\n\n }\n\n for production in &mut recursive_productions {\n\n production.push(nt_prime_idx);\n\n }\n\n recursive_productions.push(vec![epsilon_idx]);\n\n new_non_terminals.push(NonTerminal {\n\n index: nt_prime_idx,\n\n nullable: false,\n\n name: nt.name.clone(),\n\n first: HashSet::new(),\n\n follow: HashSet::new(),\n\n productions: recursive_productions,\n\n });\n\n }\n", "file_path": "src/grammar/eliminate_left_recursion.rs", "rank": 14, "score": 38685.5058429721 }, { "content": " map\n\n });\n\n\n\n let mut new_non_terminals: Vec<NonTerminal> = Vec::new();\n\n\n\n for i in 0..non_terminals.len() {\n\n let (replace, b) = non_terminals.split_at_mut(i);\n\n let (nt, _) = b.split_first_mut().unwrap();\n\n let replace = &replace[..];\n\n\n\n let old_productions = std::mem::replace(&mut nt.productions, Vec::new());\n\n let mut recursive_productions: Vec<Vec<usize>> = Vec::new();\n\n for mut production in old_productions {\n\n if let Some(idx) = production.first() {\n\n if let Some(&arr_idx) = map.get(idx) {\n\n match arr_idx.cmp(&i) {\n\n std::cmp::Ordering::Less => {\n\n for prefix in &replace[arr_idx].productions {\n\n let new_production =\n\n prefix.iter().chain(production.iter().skip(1)).cloned();\n", "file_path": "src/grammar/eliminate_left_recursion.rs", "rank": 15, "score": 38684.36450623644 }, { "content": "\n\n if Some(&nt.index) 
== prefix.first() {\n\n recursive_productions.push(new_production.skip(1).collect())\n\n } else {\n\n nt.productions.push(new_production.collect())\n\n }\n\n }\n\n }\n\n std::cmp::Ordering::Equal => {\n\n production.remove(0);\n\n recursive_productions.push(production);\n\n }\n\n std::cmp::Ordering::Greater => {\n\n nt.productions.push(production);\n\n }\n\n };\n\n } else {\n\n nt.productions.push(production);\n\n }\n\n }\n", "file_path": "src/grammar/eliminate_left_recursion.rs", "rank": 16, "score": 38673.655154224994 }, { "content": "fn main() {\n\n let mut actions: Vec<Action> = Vec::new();\n\n let mut outputs: Vec<Output> = Vec::new();\n\n let args = std::env::args().skip(1).collect::<Vec<String>>();\n\n\n\n let action_map: HashMap<&str, Action> = [(\"elf\", Action::EliminateLeftRecursion)]\n\n .iter()\n\n .cloned()\n\n .collect();\n\n let output_map: HashMap<&str, Output> = [\n\n (\"prod\", Output::Production(Format::Plain)),\n\n (\"nff\", Output::NonTerminal(Format::Plain)),\n\n (\"ll1\", Output::LL1ParsingTable(Format::Plain)),\n\n (\"lr0fsm\", Output::LRFSM(LRFSMType::LR0, Format::Plain)),\n\n (\"lr1fsm\", Output::LRFSM(LRFSMType::LR1, Format::Plain)),\n\n (\"lalrfsm\", Output::LRFSM(LRFSMType::LALR, Format::Plain)),\n\n (\n\n \"lr0table\",\n\n Output::LRParsingTable(LRFSMType::LR0, Format::Plain),\n\n ),\n", "file_path": "src/main.rs", "rank": 17, "score": 34079.230863228106 }, { "content": "fn print_help() {\n\n println!(\"Usage: compiler-course-helper [action]... output... 
[option] [grammar file]\");\n\n println!(\"action:\");\n\n println!(\" elf: Eliminate left recursion\");\n\n println!(\"output:\");\n\n println!(\" prod: Productions\");\n\n println!(\" nff: Nullable first and follow\");\n\n println!(\" ll1: LL(1) parsing table\");\n\n println!(\" lr0fsm: LR(0) Automata\");\n\n println!(\" lr1fsm: LR(1) Automata\");\n\n println!(\" lalrfsm: LALR Automata\");\n\n println!(\" lr0table: LR(0) parsing table\");\n\n println!(\" lr1table: LR(1) parsing table\");\n\n println!(\" lalrtable: LALR parsing table\");\n\n println!(\"option:\");\n\n println!(\" -h: Print this help\");\n\n println!(\" -l: Print in LaTeX format\");\n\n println!(\" -j: Print in JSON format\");\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 18, "score": 32730.668666746584 }, { "content": "use crate::Grammar;\n\n\n\nimpl Grammar {\n\n pub fn parse(grammar: &str) -> Result<Self, String> {\n\n let mut g = Self::new();\n\n\n\n let mut raw_productions: Vec<(usize, &str)> = Vec::new();\n\n\n\n let mut previous_left: Option<usize> = None;\n\n for (i, line) in grammar.lines().enumerate() {\n\n if line.chars().all(|c| c.is_whitespace()) {\n\n continue;\n\n }\n\n let parts: Vec<&str> = line.split(\"->\").collect();\n\n if parts.len() > 2 {\n\n return Err(format!(\"Line {}: too many \\\"->\\\"\", i + 1));\n\n }\n\n let (left, rights): (usize, &str) = if parts.len() == 2 {\n\n let left_str = parts[0].trim();\n\n if left_str.split_whitespace().count() != 1 {\n", "file_path": "src/grammar/parse.rs", "rank": 27, "score": 17434.315522044224 }, { "content": "\n\n previous_left = Some(left);\n\n\n\n raw_productions.push((left, rights));\n\n }\n\n\n\n for (left, rights) in raw_productions {\n\n for right in rights.split(\"|\") {\n\n let symbols = right\n\n .split_whitespace()\n\n .map(|s| {\n\n if let Some(idx) = g.get_symbol_index(s) {\n\n idx\n\n } else {\n\n g.add_terminal(s.to_string())\n\n }\n\n })\n\n .collect();\n\n g.add_production(left, symbols);\n\n }\n", "file_path": 
"src/grammar/parse.rs", "rank": 28, "score": 17425.239522307933 }, { "content": "pub mod eliminate_left_recursion;\n\npub mod grammar;\n\npub mod ll1_parsing_table;\n\npub mod lr_fsm;\n\npub mod nullable_first_follow;\n\npub mod parse;\n\npub mod pretty_print;\n\npub use grammar::Grammar;\n\n\n\npub const EPSILON: &str = \"ϵ\";\n\npub const END_MARK: &str = \"$\";\n", "file_path": "src/grammar/mod.rs", "rank": 29, "score": 17424.11505533288 }, { "content": " return Err(format!(\"Line {}: left side contains whitespace\", i + 1));\n\n } else if left_str.is_empty() {\n\n return Err(format!(\"Line {}: empty left side\", i + 1));\n\n } else {\n\n (\n\n if let Some(idx) = g.get_symbol_index(left_str) {\n\n idx\n\n } else {\n\n g.add_non_terminal(left_str)\n\n },\n\n parts[1].trim(),\n\n )\n\n }\n\n } else {\n\n if let Some(idx) = previous_left {\n\n (idx, parts[0].trim()[1..].trim())\n\n } else {\n\n return Err(format!(\"Line {}: cannot find left side\", i + 1));\n\n }\n\n };\n", "file_path": "src/grammar/parse.rs", "rank": 30, "score": 17418.790054293124 }, { "content": " }\n\n\n\n let start_symbol: Option<usize> = if let Some(nt) = g.non_terminal_iter().next() {\n\n Some(g.symbol_table[&nt.name])\n\n } else {\n\n None\n\n };\n\n g.start_symbol = start_symbol;\n\n\n\n Ok(g)\n\n }\n\n}\n", "file_path": "src/grammar/parse.rs", "rank": 31, "score": 17416.282305242174 }, { "content": "\n\n if t == LRFSMType::LR0 && !self.is_nullable_first_follow_valid() {\n\n self.calculate_nullable_first_follow();\n\n }\n\n\n\n let real_start = self.get_symbol_name(self.start_symbol.unwrap()).to_string();\n\n let dummy_start = self.get_symbol_prime_name(real_start.clone());\n\n let mut start_state = LRItem::new(vec![DotProduction::new(\n\n dummy_start.clone(),\n\n vec![real_start],\n\n if t == LRFSMType::LR1 || t == LRFSMType::LALR {\n\n Some(vec![END_MARK.to_string()])\n\n } else {\n\n None\n\n },\n\n )]);\n\n start_state.calculate_extend(self);\n\n let mut states = vec![start_state];\n\n 
let mut q: VecDeque<usize> = VecDeque::new();\n\n q.push_back(0);\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 32, "score": 16512.291251558196 }, { "content": "use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet, VecDeque};\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::Grammar;\n\n\n\nuse super::{grammar::Symbol, END_MARK, EPSILON};\n\n\n\n#[derive(PartialEq, Eq, Hash, Debug, PartialOrd, Ord, Clone, Serialize)]\n\npub struct DotProduction {\n\n pub left: String,\n\n pub production: Vec<String>,\n\n pub position: usize,\n\n pub lookahead: Option<Vec<String>>,\n\n}\n\n\n\nimpl DotProduction {\n\n pub fn new(left: String, production: Vec<String>, lookahead: Option<Vec<String>>) -> Self {\n\n let mut i = 0;\n\n while i < production.len() && production[i] == EPSILON {\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 33, "score": 16512.193375185638 }, { "content": " + &content\n\n + \"\\\\\\\\\\n\\\\end{tabular}\"\n\n }\n\n}\n\n\n\nimpl Grammar {\n\n pub fn to_non_terminal_output_vec(&mut self) -> NonTerminalOutputVec {\n\n if !self.is_nullable_first_follow_valid() {\n\n self.calculate_nullable_first_follow();\n\n }\n\n\n\n let mut data = Vec::new();\n\n for symbol in self.symbols.iter().skip(1) {\n\n // skip(1): skip epsilon\n\n if let Some(non_terminal) = symbol.non_terminal() {\n\n let mut t = NonTerminalOutput {\n\n name: non_terminal.name.as_str(),\n\n nullable: non_terminal.nullable,\n\n first: non_terminal\n\n .first\n", "file_path": "src/grammar/pretty_print.rs", "rank": 34, "score": 16510.663826740078 }, { "content": " // iteratively calculate self.extend\n\n while let Some(s_idx) = q.pop_front() {\n\n for production in &g.symbols[s_idx].non_terminal().unwrap().productions {\n\n if let Symbol::NonTerminal(nt) = &g.symbols[production[0]] {\n\n if !extend.contains_key(&nt.index) {\n\n extend.insert(nt.index, if is_lr1 { Some(HashSet::new()) } else { None });\n\n q.push_back(nt.index);\n\n }\n\n\n\n if is_lr1 {\n\n let lookahead = if 
production.len() > 1 {\n\n g.calculate_first_for_production(&production[1..])\n\n } else {\n\n extend[&s_idx].as_ref().unwrap().clone()\n\n };\n\n extend\n\n .get_mut(&nt.index)\n\n .unwrap()\n\n .as_mut()\n\n .unwrap()\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 35, "score": 16510.280473746025 }, { "content": " lookahead: self.lookahead.clone(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, Debug, Clone, Serialize)]\n\npub struct LRItem {\n\n pub kernel: Vec<DotProduction>,\n\n pub extend: Vec<DotProduction>,\n\n pub edges: BTreeMap<String, usize>,\n\n}\n\n\n\nimpl LRItem {\n\n fn calculate_extend(&mut self, g: &Grammar) {\n\n let is_lr1 = self.kernel[0].lookahead.is_some();\n\n let mut extend: HashMap<usize, Option<HashSet<usize>>> = HashMap::new();\n\n let mut q: VecDeque<usize> = VecDeque::new();\n\n\n\n let calculate_first = |production: &[String]| -> Vec<usize> {\n\n g.calculate_first_for_production(\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 36, "score": 16509.37070610322 }, { "content": " nt.name.clone(),\n\n g.production_to_vec_str(production)\n\n .iter()\n\n .map(|s| s.to_string())\n\n .collect(),\n\n lookahead.clone(),\n\n ));\n\n }\n\n\n\n self.extend.sort();\n\n }\n\n }\n\n}\n\n\n\nimpl LRItem {\n\n fn new(mut kernel: Vec<DotProduction>) -> Self {\n\n kernel.sort();\n\n Self {\n\n kernel,\n\n extend: Vec::new(),\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 37, "score": 16508.46355743944 }, { "content": " std::iter::once(\"\\\\[\\\\begin{array}{cll}\".to_string())\n\n .chain(\n\n self.productions\n\n .iter()\n\n .map(|s| s.to_latex(true, &self.terminal_set)),\n\n )\n\n .chain(std::iter::once(\"\\\\end{array}\\\\]\".to_string()))\n\n .collect::<Vec<String>>()\n\n .join(\"\\\\\\\\\\n\")\n\n }\n\n}\n\n\n\nimpl Grammar {\n\n pub fn to_production_output_vec(&self) -> ProductionOutputVec {\n\n let mut productions = Vec::new();\n\n for symbol in self.symbols.iter().skip(1) {\n\n // skip(1): skip epsilon\n\n if let Some(non_terminal) = 
symbol.non_terminal() {\n\n let mut rights = Vec::new();\n\n for production in &non_terminal.productions {\n", "file_path": "src/grammar/pretty_print.rs", "rank": 38, "score": 16508.04404900525 }, { "content": " pub rights: Vec<Vec<&'a str>>,\n\n}\n\n\n\nimpl ProductionOutput<'_> {\n\n pub fn to_plaintext(&self, left_width: usize, multiline: bool) -> String {\n\n self.rights\n\n .iter()\n\n .map(|right| right.join(\" \"))\n\n .enumerate()\n\n .map(|(i, right)| {\n\n if i == 0 {\n\n format!(\"{:>width$} -> {}\", self.left, right, width = left_width)\n\n } else {\n\n if multiline {\n\n format!(\"{:>width$} | {}\", \"\", right, width = left_width)\n\n } else {\n\n format!(\" | {}\", right)\n\n }\n\n }\n\n })\n", "file_path": "src/grammar/pretty_print.rs", "rank": 39, "score": 16507.74840045646 }, { "content": " LALR,\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct LRFSM {\n\n pub t: LRFSMType,\n\n pub(super) terminals: Vec<String>,\n\n pub(super) non_terminals: Vec<String>,\n\n\n\n pub states: Vec<LRItem>,\n\n pub start: usize,\n\n pub end: usize,\n\n pub follow: Option<HashMap<String, Vec<String>>>,\n\n}\n\n\n\nimpl Grammar {\n\n pub fn to_lr_fsm(&mut self, t: LRFSMType) -> Result<LRFSM, String> {\n\n if self.start_symbol.is_none() {\n\n return Err(\"start symbol is not set\".to_string());\n\n }\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 40, "score": 16506.32472192528 }, { "content": " &production\n\n .iter()\n\n .map(|s| g.get_symbol_index(s).unwrap())\n\n .collect::<Vec<_>>(),\n\n )\n\n .into_iter()\n\n .collect()\n\n };\n\n\n\n // use self.kernel to initialize self.extend\n\n for c in &self.kernel {\n\n if let Some(symbol) = c.production.get(c.position) {\n\n if let Symbol::NonTerminal(nt) = g.get_symbol_by_name(symbol.as_str()) {\n\n if !extend.contains_key(&nt.index) {\n\n extend.insert(nt.index, if is_lr1 { Some(HashSet::new()) } else { None });\n\n q.push_back(nt.index);\n\n }\n\n\n\n if is_lr1 {\n\n let lookahead = if c.position + 1 < 
c.production.len() {\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 41, "score": 16506.287483057018 }, { "content": " pub goto: Vec<Vec<Option<usize>>>,\n\n}\n\n\n\nimpl LRFSM {\n\n pub fn to_parsing_table(&self) -> LRParsingTable {\n\n let dummy_start = &self.states[0].kernel[0].left;\n\n\n\n let mut terminal_idx_map: HashMap<&str, usize> = HashMap::new();\n\n for (i, s) in self.terminals.iter().enumerate() {\n\n terminal_idx_map.insert(s, i);\n\n }\n\n\n\n let mut non_terminal_idx_map: HashMap<&str, usize> = HashMap::new();\n\n for (i, s) in self.non_terminals.iter().enumerate() {\n\n non_terminal_idx_map.insert(s, i);\n\n }\n\n\n\n let mut table = LRParsingTable {\n\n t: self.t,\n\n terminals: self.terminals.clone(),\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 42, "score": 16505.931333977267 }, { "content": " .collect::<Vec<_>>()\n\n .join(if multiline { \"\\n\" } else { \"\" })\n\n }\n\n pub fn to_latex(&self, and_sign: bool, terminal_set: &HashSet<&str>) -> String {\n\n if self.rights.len() == 0 {\n\n return String::new();\n\n }\n\n\n\n let right = self\n\n .rights\n\n .iter()\n\n .map(|right| production_right_to_latex(right.iter().cloned(), terminal_set))\n\n .collect::<Vec<_>>()\n\n .join(\" \\\\mid \");\n\n\n\n if and_sign {\n\n format!(\"{} & \\\\rightarrow & {}\", escape::tex(self.left), right)\n\n } else {\n\n format!(\"{} \\\\rightarrow {}\", escape::tex(self.left), right)\n\n }\n", "file_path": "src/grammar/pretty_print.rs", "rank": 43, "score": 16505.865591214824 }, { "content": " }\n\n}\n\n\n\n#[derive(Serialize)]\n\npub struct ProductionOutputVec<'a> {\n\n productions: Vec<ProductionOutput<'a>>,\n\n terminal_set: HashSet<&'a str>,\n\n}\n\n\n\nimpl ProductionOutputVec<'_> {\n\n pub fn to_plaintext(&self) -> String {\n\n let left_max_len = self.productions.iter().map(|p| p.left.len()).max().unwrap();\n\n self.productions\n\n .iter()\n\n .map(|s| s.to_plaintext(left_max_len, true))\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\")\n\n }\n\n\n\n 
pub fn to_latex(&self) -> String {\n", "file_path": "src/grammar/pretty_print.rs", "rank": 44, "score": 16505.80446127307 }, { "content": " terminal_set: self.terminal_iter().map(|s| s.as_str()).collect(),\n\n }\n\n }\n\n}\n\n\n\nimpl DotProduction {\n\n pub fn to_plaintext(&self) -> String {\n\n let mut output = String::new();\n\n output.push_str(&self.left);\n\n output.push_str(\" -> \");\n\n for (i, s) in self.production.iter().enumerate() {\n\n if i != 0 {\n\n output.push_str(\" \");\n\n }\n\n\n\n if i == self.position {\n\n output.push_str(\".\");\n\n }\n\n output.push_str(s);\n\n }\n", "file_path": "src/grammar/pretty_print.rs", "rank": 45, "score": 16505.073110306657 }, { "content": " .extend(lookahead);\n\n }\n\n }\n\n }\n\n }\n\n\n\n for (nt_idx, lookahead) in extend {\n\n let nt = g.symbols[nt_idx].non_terminal().unwrap();\n\n\n\n let lookahead: Option<Vec<String>> = lookahead.and_then(|lookahead| {\n\n let mut lookahead = lookahead\n\n .iter()\n\n .map(|&i| g.get_symbol_name(i).to_string())\n\n .collect::<Vec<_>>();\n\n lookahead.sort();\n\n Some(lookahead)\n\n });\n\n\n\n for production in &nt.productions {\n\n self.extend.push(DotProduction::new(\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 46, "score": 16504.81535906914 }, { "content": " i += 1;\n\n }\n\n Self {\n\n left,\n\n production,\n\n position: i,\n\n lookahead,\n\n }\n\n }\n\n\n\n pub fn generate_next(&self) -> Self {\n\n let mut i = self.position + 1;\n\n while i < self.production.len() && self.production[i] == EPSILON {\n\n i += 1;\n\n }\n\n\n\n Self {\n\n left: self.left.clone(),\n\n production: self.production.clone(),\n\n position: i,\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 47, "score": 16504.667621598 }, { "content": " .map(|s| s.as_str())\n\n .skip(self.position),\n\n );\n\n let right = production_right_to_latex(right, terminal_set);\n\n\n\n if let Some(lookahead) = &self.lookahead {\n\n let lookahead = lookahead\n\n .iter()\n\n .map(|s| escape::tex(s))\n\n 
.collect::<Vec<_>>()\n\n .join(\" \");\n\n format!(\"${} \\\\rightarrow {}$, {}\", self.left, right, lookahead)\n\n } else {\n\n format!(\"${} \\\\rightarrow {}$\", self.left, right)\n\n }\n\n }\n\n}\n\n\n\nimpl LRItem {\n\n pub fn to_plaintext(&self, is_end: bool) -> String {\n", "file_path": "src/grammar/pretty_print.rs", "rank": 48, "score": 16504.565032889688 }, { "content": " calculate_first(&c.production[c.position + 1..])\n\n } else {\n\n c.lookahead\n\n .as_ref()\n\n .unwrap()\n\n .iter()\n\n .map(|s| g.get_symbol_index(s).unwrap())\n\n .collect()\n\n };\n\n extend\n\n .get_mut(&nt.index)\n\n .unwrap()\n\n .as_mut()\n\n .unwrap()\n\n .extend(lookahead.into_iter());\n\n }\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 49, "score": 16503.03486306082 }, { "content": " non_terminals: self.non_terminals.clone(),\n\n action: Vec::new(),\n\n goto: Vec::new(),\n\n };\n\n\n\n for state in &self.states {\n\n let mut action_row: Vec<Vec<LRParsingTableAction>> =\n\n vec![Vec::new(); self.terminals.len()];\n\n let mut goto_row: Vec<Option<usize>> = vec![None; self.non_terminals.len()];\n\n for prodcution in state.kernel.iter().chain(state.extend.iter()) {\n\n if prodcution.production.len() == prodcution.position {\n\n if &prodcution.left == dummy_start {\n\n action_row[terminal_idx_map[END_MARK]].push(LRParsingTableAction::Accept);\n\n continue;\n\n }\n\n\n\n let lookahead = if let Some(lookahead) = &prodcution.lookahead {\n\n lookahead\n\n } else {\n\n &self.follow.as_ref().unwrap()[&prodcution.left]\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 50, "score": 16502.66597589353 }, { "content": "}\n\n\n\nimpl NonTerminalOutputVec<'_> {\n\n pub fn to_plaintext(&self) -> String {\n\n self.non_terminals\n\n .iter()\n\n .map(|s| s.to_plaintext())\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\")\n\n }\n\n pub fn to_latex(&self) -> String {\n\n let content = self\n\n .non_terminals\n\n .iter()\n\n .map(|e| e.to_latex())\n\n .collect::<Vec<_>>()\n\n 
.join(\"\\\\\\\\\\n \");\n\n\n\n \"\\\\begin{tabular}{c|c|c|c}\\n\".to_string()\n\n + \"Symbol & Nullable & First & Follow\\\\\\\\\\\\hline\\n\"\n", "file_path": "src/grammar/pretty_print.rs", "rank": 51, "score": 16502.634387793427 }, { "content": " Ok(LRFSM {\n\n t,\n\n terminals: self.terminal_iter().cloned().collect(),\n\n non_terminals: self.non_terminal_iter().map(|nt| nt.name.clone()).collect(),\n\n states,\n\n start: 0,\n\n end,\n\n follow: if t == LRFSMType::LR0 {\n\n let mut r: HashMap<String, Vec<String>> = HashMap::new();\n\n r.insert(dummy_start, vec![END_MARK.to_string()]);\n\n for nt in self.non_terminal_iter() {\n\n r.insert(\n\n nt.name.clone(),\n\n nt.follow\n\n .iter()\n\n .map(|i| self.get_symbol_name(*i).to_string())\n\n .collect(),\n\n );\n\n }\n\n Some(r)\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 52, "score": 16502.389823811107 }, { "content": "\n\n pub fn to_latex(&self, terminal_set: &HashSet<&str>) -> String {\n\n match self {\n\n LRParsingTableAction::Reduce(r) => {\n\n format!(\n\n \"reduce ${} \\\\rightarrow {}$\",\n\n escape::tex(&r.0),\n\n production_right_to_latex(r.1.iter().map(|s| s.as_str()), terminal_set)\n\n )\n\n }\n\n LRParsingTableAction::Shift(s) => {\n\n format!(\"shift {}\", s)\n\n }\n\n LRParsingTableAction::Accept => \"accept\".to_string(),\n\n }\n\n }\n\n}\n\n\n\nimpl LRParsingTable {\n\n pub fn to_plaintext(&self) -> String {\n", "file_path": "src/grammar/pretty_print.rs", "rank": 53, "score": 16501.670199335807 }, { "content": " rights.push(self.production_to_vec_str(&production));\n\n }\n\n productions.push(ProductionOutput {\n\n left: non_terminal.name.as_str(),\n\n rights,\n\n });\n\n }\n\n }\n\n ProductionOutputVec {\n\n productions,\n\n terminal_set: self.terminal_iter().map(|s| s.as_str()).collect(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/grammar/pretty_print.rs", "rank": 54, "score": 16501.25671741179 }, { "content": "\n\n let mut end: usize = 0;\n\n\n\n while let Some(u) = q.pop_front() {\n\n let 
mut edges: BTreeMap<String, BTreeSet<DotProduction>> = BTreeMap::new();\n\n\n\n let productions = states[u].kernel.iter().chain(states[u].extend.iter());\n\n for production in productions {\n\n if production.production.len() == 1\n\n && production.position == 1\n\n && production.left == dummy_start\n\n {\n\n end = u;\n\n }\n\n\n\n if production.position < production.production.len() {\n\n let e = production.production[production.position].clone();\n\n let item = edges.entry(e).or_insert(BTreeSet::new());\n\n item.insert(production.generate_next());\n\n }\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 55, "score": 16500.947731734497 }, { "content": " }\n\n\n\n pub fn edge_to_latex(&self, id: usize) -> String {\n\n self.edges\n\n .iter()\n\n .map(|(e, v)| {\n\n format!(\n\n \"\\\\path [->] (I_{}) edge {} node [above]{{{}}} (I_{});\",\n\n id,\n\n if id == *v { \"[loop left]\" } else { \"[right]\" },\n\n e,\n\n v\n\n )\n\n })\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\")\n\n }\n\n}\n\n\n\nimpl LRFSM {\n", "file_path": "src/grammar/pretty_print.rs", "rank": 56, "score": 16500.877544480834 }, { "content": " .iter()\n\n .map(|idx| self.get_symbol_name(*idx))\n\n .collect(),\n\n follow: non_terminal\n\n .follow\n\n .iter()\n\n .map(|idx| self.get_symbol_name(*idx))\n\n .collect(),\n\n };\n\n t.first.sort();\n\n t.follow.sort();\n\n\n\n if non_terminal.nullable {\n\n t.first.push(EPSILON);\n\n }\n\n data.push(t);\n\n }\n\n }\n\n NonTerminalOutputVec {\n\n non_terminals: data,\n", "file_path": "src/grammar/pretty_print.rs", "rank": 57, "score": 16500.834028939007 }, { "content": " if self.position == self.production.len() {\n\n output.push_str(\".\");\n\n }\n\n if let Some(lookahead) = &self.lookahead {\n\n output.push_str(\", \");\n\n output.push_str(&lookahead.join(\"/\"));\n\n }\n\n\n\n output\n\n }\n\n pub fn to_latex(&self, terminal_set: &HashSet<&str>) -> String {\n\n let right = self\n\n .production\n\n .iter()\n\n .map(|s| s.as_str())\n\n .take(self.position)\n\n 
.chain(std::iter::once(\".\"))\n\n .chain(\n\n self.production\n\n .iter()\n", "file_path": "src/grammar/pretty_print.rs", "rank": 58, "score": 16499.06718656223 }, { "content": " } else {\n\n None\n\n },\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize)]\n\npub enum LRParsingTableAction {\n\n Shift(usize),\n\n Reduce((String, Vec<String>)),\n\n Accept,\n\n}\n\n\n\n#[derive(Serialize)]\n\npub struct LRParsingTable {\n\n pub t: LRFSMType,\n\n pub terminals: Vec<String>,\n\n pub non_terminals: Vec<String>,\n\n pub action: Vec<Vec<Vec<LRParsingTableAction>>>,\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 59, "score": 16498.995452108564 }, { "content": " };\n\n for terminal in lookahead {\n\n action_row[terminal_idx_map[terminal.as_str()]].push(\n\n LRParsingTableAction::Reduce((\n\n prodcution.left.clone(),\n\n prodcution.production.clone(),\n\n )),\n\n );\n\n }\n\n }\n\n }\n\n for (e, v) in &state.edges {\n\n if let Some(idx) = terminal_idx_map.get(e.as_str()) {\n\n action_row[*idx].push(LRParsingTableAction::Shift(*v));\n\n }\n\n if let Some(idx) = non_terminal_idx_map.get(e.as_str()) {\n\n goto_row[*idx] = Some(*v);\n\n }\n\n }\n\n table.action.push(action_row);\n\n table.goto.push(goto_row);\n\n }\n\n\n\n table\n\n }\n\n}\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 60, "score": 16498.690926874362 }, { "content": " x.lookahead\n\n .as_mut()\n\n .unwrap()\n\n .extend(y.lookahead.as_ref().unwrap().iter().cloned());\n\n x.lookahead.as_mut().unwrap().sort();\n\n x.lookahead.as_mut().unwrap().dedup();\n\n }\n\n\n\n for (e, v) in s.edges {\n\n let to = accum.edges.entry(e).or_insert(new_id[v].unwrap());\n\n assert_eq!(*to, new_id[v].unwrap());\n\n }\n\n\n\n accum\n\n })\n\n .unwrap()\n\n })\n\n .collect();\n\n }\n\n\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 61, "score": 16498.37749320419 }, { "content": " edges: BTreeMap::new(),\n\n }\n\n }\n\n\n\n fn core_eq(&self, rhs: &LRItem) -> bool {\n\n if self.kernel.len() != rhs.kernel.len() || 
self.extend.len() != rhs.extend.len() {\n\n return false;\n\n }\n\n let a = self.kernel.iter().chain(self.extend.iter());\n\n let b = rhs.kernel.iter().chain(rhs.extend.iter());\n\n a.zip(b).all(|(x, y)| {\n\n x.left == y.left && x.production == y.production && x.position == y.position\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)]\n\npub enum LRFSMType {\n\n LR0,\n\n LR1,\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 62, "score": 16498.18025185059 }, { "content": " }\n\n\n\n if t == LRFSMType::LALR {\n\n let mut new_id: Vec<Option<usize>> = vec![None; states.len()];\n\n let mut cnt: usize = 0;\n\n for i in 0..states.len() {\n\n if new_id[i].is_some() {\n\n continue;\n\n }\n\n let id = cnt;\n\n cnt += 1;\n\n new_id[i] = Some(id);\n\n for j in i + 1..states.len() {\n\n if states[i].core_eq(&states[j]) {\n\n assert_eq!(new_id[j], None);\n\n new_id[j] = Some(id);\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 63, "score": 16498.11997815625 }, { "content": " let edges = if self.edges.len() > 0 || is_end {\n\n format!(\n\n \"\\n===\\n{}\",\n\n self.edges\n\n .iter()\n\n .map(|(k, v)| format!(\"- {} -> {}\", k, v))\n\n .chain(std::iter::once(\"- $ -> accept\".to_string()))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\")\n\n )\n\n } else {\n\n String::new()\n\n };\n\n\n\n format!(\"{}{}{}\", kernel, extend, edges)\n\n }\n\n\n\n pub fn node_to_latex(&self, id: usize, terminal_set: &HashSet<&str>) -> String {\n\n let content = self\n\n .kernel\n", "file_path": "src/grammar/pretty_print.rs", "rank": 64, "score": 16497.743368785636 }, { "content": " .map(|s| escape::tex(*s))\n\n .collect::<Vec<_>>()\n\n .join(r\"\\ \")\n\n .replace(EPSILON, r\"$\\epsilon$\")\n\n }\n\n\n\n format!(\n\n \"{} & {} & {} & {}\",\n\n escape::tex(self.name),\n\n self.nullable,\n\n f(&self.first),\n\n f(&self.follow)\n\n )\n\n }\n\n}\n\n\n\n#[derive(Serialize)]\n\npub struct NonTerminalOutputVec<'a> {\n\n non_terminals: 
Vec<NonTerminalOutput<'a>>,\n\n terminal_set: HashSet<&'a str>,\n", "file_path": "src/grammar/pretty_print.rs", "rank": 65, "score": 16497.26318738964 }, { "content": " pub fn to_plaintext(&self) -> String {\n\n let states = self\n\n .states\n\n .iter()\n\n .enumerate()\n\n .map(|(i, s)| format!(\"I{}\\n{}\", i, s.to_plaintext(i == self.end)))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\\n\");\n\n\n\n states\n\n }\n\n\n\n pub fn to_latex(&self) -> String {\n\n let terminal_set: HashSet<&str> = self.terminals.iter().map(|s| s.as_str()).collect();\n\n format!(\n\n \"\\\\begin{{tikzpicture}}[node distance=5cm,block/.style={{state, rectangle, text width=6em}}]\\n{}\\n\\\\node (accept) [right of = I_1] {{accept}};\\n\\\\path [->] (I_{}) edge [right] node [above right]{{\\\\$}} (accept);\\n\\\\end{{tikzpicture}}\",\n\n self.states\n\n .iter()\n\n .enumerate()\n\n .map(|(i, s)| s.node_to_latex(i, &terminal_set))\n", "file_path": "src/grammar/pretty_print.rs", "rank": 66, "score": 16496.649584890998 }, { "content": " })\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\")\n\n }\n\n\n\n pub fn to_latex(&self) -> String {\n\n let header: String = format!(\n\n \"\\\\begin{{tabular}}{{c{}}}\\n & \\\\multicolumn{{{}}}{{c}}{{action}} & \\\\multicolumn{{{}}}{{|c}}{{goto}}\\\\\\\\\",\n\n \"|l\".repeat(self.terminals.len() + self.non_terminals.len()),\n\n self.terminals.len(),\n\n self.non_terminals.len(),\n\n );\n\n\n\n let mut content: Vec<Vec<String>> = Vec::new();\n\n\n\n let mut first_row: Vec<String> = vec![String::new()];\n\n for s in self.terminals.iter().chain(self.non_terminals.iter()) {\n\n first_row.push(escape::tex(s).to_string());\n\n }\n\n let first_row = first_row.join(\" & \");\n", "file_path": "src/grammar/pretty_print.rs", "rank": 67, "score": 16496.129336389724 }, { "content": " let mut new_states: Vec<Vec<LRItem>> = vec![Vec::new(); cnt];\n\n for (i, s) in states.into_iter().enumerate() {\n\n new_states[new_id[i].unwrap()].push(s);\n\n }\n\n\n\n states = new_states\n\n 
.into_iter()\n\n .map(|mut arr| {\n\n for (_, v) in arr[0].edges.iter_mut() {\n\n *v = new_id[*v].unwrap();\n\n }\n\n\n\n arr.into_iter()\n\n .reduce(|mut accum, s| {\n\n for (x, y) in accum\n\n .kernel\n\n .iter_mut()\n\n .chain(accum.extend.iter_mut())\n\n .zip(s.kernel.iter().chain(s.extend.iter()))\n\n {\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 68, "score": 16495.85470159721 }, { "content": " }\n\n\n\n for (e, kernel) in edges {\n\n let mut s = LRItem::new(kernel.into_iter().collect());\n\n s.calculate_extend(self);\n\n\n\n let mut entry_or_insert = |s: LRItem| {\n\n for (i, state) in states.iter().enumerate() {\n\n if state.kernel == s.kernel && state.extend == s.extend {\n\n return i;\n\n }\n\n }\n\n states.push(s);\n\n q.push_back(states.len() - 1);\n\n states.len() - 1\n\n };\n\n\n\n let v_idx = entry_or_insert(s);\n\n states[u].edges.insert(e.clone(), v_idx);\n\n }\n", "file_path": "src/grammar/lr_fsm.rs", "rank": 69, "score": 16495.791781685377 }, { "content": " .chain(self.states.iter().enumerate().map(|(i,s)| s.edge_to_latex(i)))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\"),\n\n self.end\n\n )\n\n }\n\n}\n\n\n\nimpl LRParsingTableAction {\n\n pub fn to_plaintext(&self) -> String {\n\n match self {\n\n LRParsingTableAction::Reduce(r) => {\n\n format!(\"r({} -> {})\", r.0, r.1.join(\" \"))\n\n }\n\n LRParsingTableAction::Shift(s) => {\n\n format!(\"s{}\", s)\n\n }\n\n LRParsingTableAction::Accept => \"acc\".to_string(),\n\n }\n\n }\n", "file_path": "src/grammar/pretty_print.rs", "rank": 70, "score": 16495.418468068696 }, { "content": " let mut output: Vec<Vec<String>> = Vec::new();\n\n\n\n output.push(vec![String::new()]);\n\n for s in self.terminals.iter().chain(self.non_terminals.iter()) {\n\n output[0].push(s.clone());\n\n }\n\n\n\n for (r1, r2) in self.action.iter().zip(self.goto.iter()) {\n\n let i = output.len() - 1;\n\n let row: Vec<String> = std::iter::once(i.to_string())\n\n .chain(r1.iter().map(|actions| {\n\n actions\n\n .iter()\n\n 
.map(|action| action.to_plaintext())\n\n .collect::<Vec<_>>()\n\n .join(\"; \")\n\n }))\n\n .chain(r2.iter().map(|goto| {\n\n if let Some(goto) = goto {\n\n goto.to_string()\n", "file_path": "src/grammar/pretty_print.rs", "rank": 71, "score": 16495.329607646014 }, { "content": "use std::collections::HashSet;\n\n\n\nuse super::{\n\n lr_fsm::{DotProduction, LRItem, LRParsingTable, LRParsingTableAction, LRFSM},\n\n Grammar, EPSILON,\n\n};\n\nuse crowbook_text_processing::escape;\n\nuse serde::Serialize;\n\n\n", "file_path": "src/grammar/pretty_print.rs", "rank": 72, "score": 16494.813656192673 }, { "content": " } else {\n\n String::new()\n\n }\n\n }))\n\n .collect::<Vec<_>>();\n\n output.push(row);\n\n }\n\n\n\n let width: Vec<usize> = (0..output[0].len())\n\n .map(|j| output.iter().map(|row| row[j].len()).max().unwrap())\n\n .collect();\n\n\n\n output\n\n .iter()\n\n .map(|line| {\n\n line.iter()\n\n .enumerate()\n\n .map(|(i, s)| format!(\"{:>width$}\", s, width = width[i]))\n\n .collect::<Vec<_>>()\n\n .join(\" | \")\n", "file_path": "src/grammar/pretty_print.rs", "rank": 73, "score": 16494.764643297654 }, { "content": " .iter()\n\n .chain(self.extend.iter())\n\n .map(|e| e.to_latex(terminal_set))\n\n .collect::<Vec<_>>()\n\n .join(\" \\\\\\\\ \\n\");\n\n format!(\n\n \"\\\\node [block] (I_{}){}\\n{{\\n$I_{}$\\\\\\\\\\n{}\\n}};\",\n\n id,\n\n if id > 0 {\n\n if id % 2 == 0 {\n\n format!(\" [below of = I_{}] \", id - 2)\n\n } else {\n\n format!(\" [right of = I_{}] \", id - 1)\n\n }\n\n } else {\n\n String::new()\n\n },\n\n id,\n\n content\n\n )\n", "file_path": "src/grammar/pretty_print.rs", "rank": 74, "score": 16493.69047490814 }, { "content": " goto.to_string()\n\n } else {\n\n String::new()\n\n }\n\n }))\n\n .collect::<Vec<_>>();\n\n content.push(row);\n\n }\n\n\n\n let content = content\n\n .iter()\n\n .map(|row| row.join(\" & \"))\n\n .collect::<Vec<_>>();\n\n let content = content.join(\" \\\\\\\\\\n\");\n\n\n\n format!(\n\n \"{}\\n{} 
\\\\\\\\\\\\hline\\n{}\\n\\\\end{{tabular}}\",\n\n header, first_row, content\n\n )\n\n }\n\n}\n", "file_path": "src/grammar/pretty_print.rs", "rank": 75, "score": 16491.952577489374 }, { "content": "\n\n let terminal_set: HashSet<&str> = self.terminals.iter().map(|s| s.as_str()).collect();\n\n\n\n for (r1, r2) in self.action.iter().zip(self.goto.iter()) {\n\n let i = content.len();\n\n let row: Vec<String> = std::iter::once(i.to_string())\n\n .chain(r1.iter().map(|actions| {\n\n let r = actions\n\n .iter()\n\n .map(|action| action.to_latex(&terminal_set))\n\n .collect::<Vec<_>>()\n\n .join(\"; \");\n\n if actions.len() > 1 {\n\n format!(\"{{\\\\color{{red}}{}}}\", r)\n\n } else {\n\n r\n\n }\n\n }))\n\n .chain(r2.iter().map(|goto| {\n\n if let Some(goto) = goto {\n", "file_path": "src/grammar/pretty_print.rs", "rank": 76, "score": 16491.84018682257 }, { "content": " let kernel = self\n\n .kernel\n\n .iter()\n\n .map(|c| c.to_plaintext())\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\");\n\n\n\n let extend = if self.extend.len() > 0 {\n\n format!(\n\n \"\\n---\\n{}\",\n\n self.extend\n\n .iter()\n\n .map(|c| c.to_plaintext())\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\")\n\n )\n\n } else {\n\n String::new()\n\n };\n\n\n", "file_path": "src/grammar/pretty_print.rs", "rank": 77, "score": 16491.66752479748 }, { "content": " let terminals: Vec<&str> = self.terminal_iter().map(|t| t.as_str()).collect();\n\n let map: HashMap<usize, usize> = terminals\n\n .iter()\n\n .enumerate()\n\n .map(|(i, t)| (self.get_symbol_index(t).unwrap(), i))\n\n .collect();\n\n\n\n let mut rows: Vec<(&str, Vec<ProductionOutput>)> = Vec::new();\n\n for nt in self.non_terminal_iter() {\n\n let left = nt.name.as_str();\n\n let mut row: Vec<ProductionOutput> = vec![\n\n ProductionOutput {\n\n left,\n\n rights: Vec::new()\n\n };\n\n terminals.len()\n\n ];\n\n for production in &nt.productions {\n\n let first = self.calculate_first_for_production(production);\n\n\n", "file_path": 
"src/grammar/ll1_parsing_table.rs", "rank": 78, "score": 15685.279436744124 }, { "content": " let production_string_iter =\n\n production.iter().map(|idx| self.get_symbol_name(*idx));\n\n\n\n for col in first.iter().map(|idx| map[idx]) {\n\n row[col]\n\n .rights\n\n .push(production_string_iter.clone().collect::<Vec<_>>());\n\n }\n\n }\n\n\n\n if nt.nullable {\n\n for idx in &nt.follow {\n\n row[map[idx]].rights.push(vec![EPSILON]);\n\n }\n\n }\n\n\n\n rows.push((left, row));\n\n }\n\n\n\n LL1ParsingTable { terminals, rows }\n\n }\n\n}\n", "file_path": "src/grammar/ll1_parsing_table.rs", "rank": 79, "score": 15682.423560565834 }, { "content": " let mut line: Vec<String> = vec![format!(\"{}\", escape_tex(*left))];\n\n line.extend(\n\n row.iter()\n\n .map(|productions| productions.to_latex(false, &termintal_set)),\n\n );\n\n output.push(line.join(\" & \"));\n\n }\n\n\n\n let output = output.join(\"\\\\\\\\\\n\");\n\n\n\n header + \"\\\\\\\\\\\\hline\\n\" + &output + \"\\n\\\\end{array}\\\\]\"\n\n }\n\n}\n\n\n\nimpl Grammar {\n\n pub fn generate_ll1_parsing_table(&mut self) -> LL1ParsingTable {\n\n if !self.is_nullable_first_follow_valid() {\n\n self.calculate_nullable_first_follow();\n\n }\n\n\n", "file_path": "src/grammar/ll1_parsing_table.rs", "rank": 80, "score": 15681.345469491547 }, { "content": "use crowbook_text_processing::escape::tex as escape_tex;\n\nuse serde::Serialize;\n\nuse std::collections::{HashMap, HashSet};\n\n\n\nuse crate::Grammar;\n\n\n\nuse super::{pretty_print::ProductionOutput, EPSILON};\n\n\n\n#[derive(Serialize)]\n\npub struct LL1ParsingTable<'a> {\n\n terminals: Vec<&'a str>,\n\n rows: Vec<(&'a str, Vec<ProductionOutput<'a>>)>,\n\n}\n\n\n\nimpl LL1ParsingTable<'_> {\n\n pub fn to_plaintext(&self) -> String {\n\n let mut header: Vec<String> = vec![String::new()];\n\n header.extend(self.terminals.iter().map(|&t| t.to_string()));\n\n let mut output: Vec<Vec<String>> = vec![header];\n\n for (left, row) in &self.rows {\n", "file_path": 
"src/grammar/ll1_parsing_table.rs", "rank": 81, "score": 15680.564985935418 }, { "content": " })\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\")\n\n }\n\n\n\n pub fn to_latex(&self) -> String {\n\n let mut header: Vec<String> = vec![format!(\n\n \"\\\\[\\\\begin{{array}}{{c{}}}\\n\",\n\n \"|l\".repeat(self.terminals.len()),\n\n )];\n\n header.extend(\n\n self.terminals\n\n .iter()\n\n .map(|&t| format!(\"\\\\text{{{}}}\", escape_tex(t))),\n\n );\n\n let header = header.join(\" & \");\n\n\n\n let mut output: Vec<String> = Vec::new();\n\n let termintal_set: HashSet<&str> = self.terminals.iter().cloned().collect();\n\n for (left, row) in &self.rows {\n", "file_path": "src/grammar/ll1_parsing_table.rs", "rank": 82, "score": 15674.625041065261 }, { "content": " let mut line: Vec<String> = vec![left.to_string()];\n\n line.extend(\n\n row.iter()\n\n .map(|productions| productions.to_plaintext(left.len(), false)),\n\n );\n\n output.push(line);\n\n }\n\n\n\n let mut width = vec![0; self.terminals.len() + 1];\n\n for j in 0..output[0].len() {\n\n width[j] = output.iter().map(|line| line[j].len()).max().unwrap();\n\n }\n\n output\n\n .iter()\n\n .map(|line| {\n\n line.iter()\n\n .enumerate()\n\n .map(|(i, s)| format!(\"{:>width$}\", s, width = width[i]))\n\n .collect::<Vec<_>>()\n\n .join(\" | \")\n", "file_path": "src/grammar/ll1_parsing_table.rs", "rank": 83, "score": 15671.843981136304 }, { "content": " assert_eq!(g.get_symbol_name(a), \"a\");\n\n\n\n assert_eq!(g.symbols[epsilon].non_terminal().unwrap().nullable, true);\n\n\n\n assert_eq!(g.symbols[s].non_terminal().unwrap().productions[0], vec![a]);\n\n }\n\n\n\n #[test]\n\n fn simple_parse_with_space() {\n\n let g = crate::Grammar::parse(\" S -> a \").unwrap();\n\n\n\n let s = g.symbol_table.get(\"S\").unwrap().clone();\n\n let a = g.symbol_table.get(\"a\").unwrap().clone();\n\n\n\n assert_eq!(g.get_symbol_name(s), \"S\");\n\n assert_eq!(g.get_symbol_name(a), \"a\");\n\n\n\n 
assert_eq!(g.symbols[s].non_terminal().unwrap().productions[0], vec![a]);\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 84, "score": 18.45020199757264 }, { "content": " #[test]\n\n fn simple_parse_with_space_and_newline() {\n\n let g = crate::Grammar::parse(\" S -> a \\n | b c\").unwrap();\n\n\n\n let s = g.symbol_table.get(\"S\").unwrap().clone();\n\n let a = g.symbol_table.get(\"a\").unwrap().clone();\n\n let b = g.symbol_table.get(\"b\").unwrap().clone();\n\n let c = g.symbol_table.get(\"c\").unwrap().clone();\n\n\n\n assert_eq!(g.get_symbol_name(s), \"S\");\n\n assert_eq!(g.get_symbol_name(a), \"a\");\n\n assert_eq!(g.get_symbol_name(b), \"b\");\n\n assert_eq!(g.get_symbol_name(c), \"c\");\n\n assert_eq!(g.symbols[s].non_terminal().unwrap().productions[0], vec![a]);\n\n assert_eq!(\n\n g.symbols[s].non_terminal().unwrap().productions[1],\n\n vec![b, c]\n\n );\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 85, "score": 16.021608893158174 }, { "content": " })),\n\n }\n\n }\n\n\n\n Ok(ret)\n\n}\n\n\n\n#[cfg(test)]\n\nmod parse_tests {\n\n use crate::grammar::EPSILON;\n\n\n\n #[test]\n\n fn simple_parse() {\n\n let g = crate::Grammar::parse(\"S -> a\").unwrap();\n\n\n\n let s = g.symbol_table.get(\"S\").unwrap().clone();\n\n let a = g.symbol_table.get(\"a\").unwrap().clone();\n\n let epsilon = g.symbol_table.get(EPSILON).unwrap().clone();\n\n\n\n assert_eq!(g.get_symbol_name(s), \"S\");\n", "file_path": "src/lib.rs", "rank": 86, "score": 15.561819066393559 }, { "content": " let _g = crate::Grammar::parse(\"| a b\\n S -> a\").unwrap();\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn left_contain_space() {\n\n let _g = crate::Grammar::parse(\"S a S -> x\").unwrap();\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod nullable_first_follow_test {}\n\n\n\n#[cfg(test)]\n\nmod generate_ll1_parsing_table_test {\n\n #[test]\n\n fn expression_test() {\n\n let mut g = crate::Grammar::parse(\n\n \"\n\n E -> T E'\n", "file_path": "src/lib.rs", "rank": 87, "score": 14.813018943620904 
}, { "content": "## WebAssembly Library\n\n\n\n```rust\n\n#[wasm_bindgen]\n\npub fn wasm_grammar_to_output(json: &str) -> String {\n\n let args: WasmArgs = serde_json::from_str(json).unwrap();\n\n let result = grammar_to_output(&args.grammar, &args.actions, &args.outputs);\n\n serde_json::to_string(&result).unwrap()\n\n}\n\n```\n\n\n\nExample argument:\n\n\n\n```json\n\n{\n\n \"grammar\": \"E -> E + a | a\",\n\n \"actions\": [\"EliminateLeftRecursion\"],\n\n \"outputs\": [\n\n {\"Production\": \"Plain\"},\n\n {\"LL1ParsingTable\": \"LaTeX\"},\n\n {\"LRParsingTable\": [\"LR0\", \"JSON\"]}\n\n ]\n\n}\n\n```\n\n\n\nExample outputs:\n\n\n\n```json\n\n{\n\n \"Ok\": [\n\n {\n\n \"Ok\": \" E -> a E'\\nE' -> + a E'\\n | ϵ\"\n\n },\n\n {\n\n \"Ok\": \"\\\\[\\\\begin{array}{c|l|l|l}\\n & \\\\text{\\\\$} & \\\\text{+} & \\\\text{a}\\\\\\\\\\\\hline\\nE & & & E \\\\rightarrow \\\\text{a} \\\\ E'\\\\\\\\\\nE' & E' \\\\rightarrow \\\\epsilon & E' \\\\rightarrow \\\\text{+} \\\\ \\\\text{a} \\\\ E' & \\n\\\\end{array}\\\\]\"\n\n },\n\n {\n\n \"Ok\": \"{\\\"t\\\":\\\"LR0\\\",\\\"terminals\\\":[\\\"$\\\",\\\"+\\\",\\\"a\\\"],\\\"non_terminals\\\":[\\\"E\\\",\\\"E'\\\"],\\\"action\\\":[[[],[],[{\\\"Shift\\\":2}]],[[\\\"Accept\\\"],[],[]],[[{\\\"Reduce\\\":[\\\"E'\\\",[\\\"ϵ\\\"]]}],[{\\\"Shift\\\":3}],[]],[[],[],[{\\\"Shift\\\":5}]],[[{\\\"Reduce\\\":[\\\"E\\\",[\\\"a\\\",\\\"E'\\\"]]}],[],[]],[[{\\\"Reduce\\\":[\\\"E'\\\",[\\\"ϵ\\\"]]}],[{\\\"Shift\\\":3}],[]],[[{\\\"Reduce\\\":[\\\"E'\\\",[\\\"+\\\",\\\"a\\\",\\\"E'\\\"]]}],[],[]]],\\\"goto\\\":[[1,null],[null,null],[null,4],[null,null],[null,null],[null,6],[null,null]]}\"\n\n }\n\n ]\n\n}\n\n```\n\n\n\n## Rust Library\n\n\n\n```rust\n\nuse compiler_course_helper::{Grammar, LRFSMType};\n\n\n\nfn main() {\n\n let mut g = Grammar::parse(\n\n \"\n\n E -> E + T | T\n\n T -> T * F | F\n\n F -> ( E ) | id\",\n\n )\n\n .unwrap();\n\n \n\n g.eliminate_left_recursion();\n\n\n\n println!(\"{}\", 
g.to_production_output_vec().to_plaintext());\n\n println!(\"{}\", g.to_production_output_vec().to_latex());\n\n\n\n println!(\"{}\", g.to_non_terminal_output_vec().to_plaintext());\n", "file_path": "README.md", "rank": 88, "score": 14.620847272853513 }, { "content": "extern crate wasm_bindgen;\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse wasm_bindgen::prelude::*;\n\n\n\nmod grammar;\n\npub use grammar::lr_fsm::LRFSMType;\n\npub use grammar::Grammar;\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct WasmArgs {\n\n pub grammar: String,\n\n pub actions: Vec<Action>,\n\n pub outputs: Vec<Output>,\n\n}\n\n\n\n// This function is intended to be called from JavaScript.\n\n// Example:\n\n// {\n\n// \"grammar\": \"E -> E + T | T\\nT -> T * F | F\\nF -> ( E ) | id\",\n\n// \"actions\": [\"EliminateLeftRecursion\"],\n\n// \"outputs\": [\n\n// {\"NonTerminal\": \"JSON\"},\n\n// {\"Production\": \"JSON\"},\n\n// {\"LL1ParsingTable\": \"JSON\"},\n\n// {\"LRParsingTable\": [\"LR0\", \"JSON\"]}\n\n// ]\n\n// }\n\n#[wasm_bindgen]\n", "file_path": "src/lib.rs", "rank": 89, "score": 12.654615903422568 }, { "content": " o\n\n })\n\n .collect();\n\n\n\n if i + 1 < args.len() || outputs.len() < 1 {\n\n print_help();\n\n return;\n\n }\n\n\n\n let grammar: String = if i == args.len() {\n\n std::io::stdin()\n\n .lock()\n\n .lines()\n\n .map(|l| l.unwrap())\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\")\n\n } else {\n\n fs::read_to_string(args[i].as_str()).expect(\"Failed to read file\")\n\n };\n\n\n", "file_path": "src/main.rs", "rank": 90, "score": 9.205662788678897 }, { "content": " #[test]\n\n fn empty_parse() {\n\n let _g = crate::Grammar::parse(\" \\n \").unwrap();\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn two_rightarrows_parse() {\n\n let _g = crate::Grammar::parse(\"S -> a -> b\").unwrap();\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn no_left_parse() {\n\n let _g = crate::Grammar::parse(\"-> a -> b\").unwrap();\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n 
fn no_previous_left_parse() {\n", "file_path": "src/lib.rs", "rank": 91, "score": 9.185174184960584 }, { "content": " E' -> + T E' | ε\n\n T -> F T'\n\n T' -> * F T' | ε\n\n F -> ( E ) | id\n\n \",\n\n )\n\n .unwrap();\n\n\n\n g.calculate_nullable_first_follow();\n\n let result = g.generate_ll1_parsing_table();\n\n println!(\"{}\", result.to_plaintext());\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 92, "score": 9.051329987411517 }, { "content": " for output in outputs {\n\n match output {\n\n Output::Production(format) => {\n\n let t = g.to_production_output_vec();\n\n ret.push(Ok(match format {\n\n Format::Plain => t.to_plaintext(),\n\n Format::LaTeX => t.to_latex(),\n\n Format::JSON => serde_json::to_string(&t).unwrap(),\n\n }));\n\n }\n\n Output::NonTerminal(format) => {\n\n let t = g.to_non_terminal_output_vec();\n\n ret.push(Ok(match format {\n\n Format::Plain => t.to_plaintext(),\n\n Format::LaTeX => t.to_latex(),\n\n Format::JSON => serde_json::to_string(&t).unwrap(),\n\n }));\n\n }\n\n Output::LL1ParsingTable(format) => {\n\n let t = g.generate_ll1_parsing_table();\n", "file_path": "src/lib.rs", "rank": 93, "score": 9.018408739950127 }, { "content": " Production(Format),\n\n NonTerminal(Format),\n\n LL1ParsingTable(Format),\n\n LRFSM(LRFSMType, Format),\n\n LRParsingTable(LRFSMType, Format),\n\n}\n\n\n\nimpl Output {\n\n pub fn format(&mut self, f: Format) {\n\n match self {\n\n Output::Production(format) => *format = f,\n\n Output::NonTerminal(format) => *format = f,\n\n Output::LL1ParsingTable(format) => *format = f,\n\n Output::LRFSM(_, format) => *format = f,\n\n Output::LRParsingTable(_, format) => *format = f,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 94, "score": 8.812745991392772 }, { "content": "# Compiler Course Helper\n\n\n\nSupport:\n\n- eliminate left recursion (require grammar with no cycles or ϵ-production)\n\n- calculate nullable, first sets, follow, sets\n\n- generate LL(1) parsing table\n\n- generate LR(0) automata, 
parsing table\n\n- generate LR(1) automata, parsing table\n\n- generate LALR automata, parsing table\n\n- **output format: plaintext JSON LaTeX**\n\n- **WebAssembly**\n\n\n\n## Build\n\n\n\n```\n\n$ cargo run\n\n$ cargo build --release\n\n```\n\n\n\n```\n\n$ wasm-pack build --help\n\n```\n\n\n\n## CLI\n\n\n\n### Usage\n\n\n\n```\n\n$ ./compiler-course-helper\n\nUsage: compiler-course-helper [action]... output... [option] [grammar file]\n\naction:\n\n elf: Eliminate left recursion\n\noutput:\n\n prod: Productions\n\n nff: Nullable first and follow\n\n ll1: LL(1) parsing table\n\n lr0fsm: LR(0) Automata\n\n lr1fsm: LR(1) Automata\n\n lalrfsm: LALR Automata\n\n lr0table: LR(0) parsing table\n\n lr1table: LR(1) parsing table\n\n lalrtable: LALR parsing table\n\noption:\n\n -h: Print this help\n\n -l: Print in LaTeX format\n\n -j: Print in JSON format\n\n```\n\n\n\n### Example\n\n\n\n```\n\n$ ./compiler-course-helper elf prod ll1 -l\n\nE -> E a | a (this is input)\n\n\\[\\begin{array}{cll}\\\\\n\nE & \\rightarrow & \\text{a} \\ E'\\\\\n\nE' & \\rightarrow & \\text{a} \\ E' \\mid \\epsilon\\\\\n\n\\end{array}\\]\n\n\\[\\begin{array}{c|l|l}\n\n & \\text{\\$} & \\text{a}\\\\\\hline\n\nE & & E \\rightarrow \\text{a} \\ E'\\\\\n\nE' & E' \\rightarrow \\epsilon & E' \\rightarrow \\text{a} \\ E'\n\n\\end{array}\\]\n", "file_path": "README.md", "rank": 95, "score": 8.159508964241718 }, { "content": " (\n\n \"lr1table\",\n\n Output::LRParsingTable(LRFSMType::LR1, Format::Plain),\n\n ),\n\n (\n\n \"lalrtable\",\n\n Output::LRParsingTable(LRFSMType::LALR, Format::Plain),\n\n ),\n\n ]\n\n .iter()\n\n .cloned()\n\n .collect();\n\n\n\n let mut i: usize = 0;\n\n while i < args.len() && action_map.contains_key(args[i].as_str()) {\n\n actions.push(action_map[args[i].as_str()]);\n\n i += 1;\n\n }\n\n\n\n while i < args.len() && output_map.contains_key(args[i].as_str()) {\n", "file_path": "src/main.rs", "rank": 96, "score": 7.941795977146286 }, { "content": " ret.push(Ok(match format 
{\n\n Format::Plain => t.to_plaintext(),\n\n Format::LaTeX => t.to_latex(),\n\n Format::JSON => serde_json::to_string(&t).unwrap(),\n\n }));\n\n }\n\n Output::LRFSM(typ, format) => ret.push(g.to_lr_fsm(*typ).and_then(|t| {\n\n Ok(match format {\n\n Format::Plain => t.to_plaintext(),\n\n Format::LaTeX => t.to_latex(),\n\n Format::JSON => serde_json::to_string(&t).unwrap(),\n\n })\n\n })),\n\n Output::LRParsingTable(typ, format) => ret.push(g.to_lr_fsm(*typ).and_then(|t| {\n\n let t = t.to_parsing_table();\n\n Ok(match format {\n\n Format::Plain => t.to_plaintext(),\n\n Format::LaTeX => t.to_latex(),\n\n Format::JSON => serde_json::to_string(&t).unwrap(),\n\n })\n", "file_path": "src/lib.rs", "rank": 97, "score": 7.4452189914240545 }, { "content": " match grammar_to_output(&grammar, &actions, &outputs) {\n\n Ok(v) => {\n\n for (i, e) in v.into_iter().enumerate() {\n\n match e {\n\n Ok(o) => println!(\"{}\", o),\n\n Err(e) => println!(\"Error {}-th output: {}\", i, e),\n\n }\n\n }\n\n }\n\n Err(e) => {\n\n println!(\"ERROR! {}\", e);\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 98, "score": 6.309494899529301 }, { "content": "use compiler_course_helper::{grammar_to_output, Action, Format, LRFSMType, Output};\n\nuse std::{collections::HashMap, fs, io::BufRead};\n\n\n", "file_path": "src/main.rs", "rank": 99, "score": 5.641065260293004 } ]
Rust
src/services/paste.rs
zeroqn/pastebin-actix
269693f99be1d9a7cc010bf7e4392f61909659b8
use std::time::SystemTime; use actix::prelude::*; use diesel::{self, prelude::*}; use crate::common::error::ServerError; use crate::models::{ executor::DatabaseExecutor as DbExecutor, paste::{NewPaste, Paste}, }; pub struct CreatePasteMsg { pub title: String, pub body: String, pub created_at: SystemTime, } impl Message for CreatePasteMsg { type Result = Result<Paste, ServerError>; } impl Handler<CreatePasteMsg> for DbExecutor { type Result = Result<Paste, ServerError>; fn handle(&mut self, msg: CreatePasteMsg, _: &mut Self::Context) -> Self::Result { use crate::models::schema::pastes::dsl::*; let new_paste = NewPaste { title: &msg.title, body: &msg.body, created_at: &msg.created_at, modified_at: &msg.created_at, }; diesel::insert_into(pastes) .values(&new_paste) .get_result(&self.0.get().map_err(ServerError::R2d2)?) .map_err(ServerError::Database) } } pub struct UpdatePasteMsg { pub id: i64, pub title: String, pub body: String, pub modified_at: SystemTime, } impl Message for UpdatePasteMsg { type Result = Result<Paste, ServerError>; } impl Handler<UpdatePasteMsg> for DbExecutor { type Result = Result<Paste, ServerError>; fn handle(&mut self, msg: UpdatePasteMsg, _: &mut Self::Context) -> Self::Result { use crate::models::schema::pastes::dsl::*; diesel::update(pastes.find(msg.id)) .set(( title.eq(msg.title), body.eq(msg.body), modified_at.eq(msg.modified_at), )).get_result(&self.0.get().map_err(ServerError::R2d2)?) .map_err(ServerError::Database) } } pub struct GetPasteByIdMsg { pub id: i64, } impl Message for GetPasteByIdMsg { type Result = Result<Paste, ServerError>; } impl Handler<GetPasteByIdMsg> for DbExecutor { type Result = Result<Paste, ServerError>; fn handle(&mut self, msg: GetPasteByIdMsg, _: &mut Self::Context) -> Self::Result { use crate::models::schema::pastes::dsl::*; pastes .find(msg.id) .get_result(&self.0.get().map_err(ServerError::R2d2)?) 
.map_err(ServerError::Database) } } #[derive(Debug)] pub enum Item { Title, Body, CreatedAt, ModifiedAt, } #[derive(Debug)] pub enum Order { Ascend, Decrease, } #[derive(Debug)] pub enum CmpOp { GT, EQ, LT, GE, LE, } #[derive(Debug)] pub struct Orderby { pub item: Item, pub order: Order, } #[derive(Debug)] pub struct TimeCondition { pub op: CmpOp, pub time: SystemTime, } macro_rules! cmp { ($query:expr, $column:expr, $cmp:expr, $cond:expr) => { match $cmp { CmpOp::GT => $query.filter($column.gt($cond)), CmpOp::EQ => $query.filter($column.eq($cond)), CmpOp::LT => $query.filter($column.lt($cond)), CmpOp::GE => $query.filter($column.ge($cond)), CmpOp::LE => $query.filter($column.le($cond)), } }; } macro_rules! order { ($query:expr, $column:expr, $order:expr) => { match $order { Order::Ascend => $query.order($column.asc()), Order::Decrease => $query.order($column.desc()), } }; } macro_rules! orderby { ($query:expr, $column:expr, $order:expr) => { match $column { Item::Title => order!($query, title, $order), Item::Body => order!($query, body, $order), Item::CreatedAt => order!($query, created_at, $order), Item::ModifiedAt => order!($query, modified_at, $order), } }; } pub struct GetPasteListMsg { pub title_pat: Option<String>, pub body_pat: Option<String>, pub created_at: Option<TimeCondition>, pub modified_at: Option<TimeCondition>, pub orderby_list: Option<Vec<Orderby>>, pub limit: Option<i64>, pub offset: Option<i64>, } impl Default for GetPasteListMsg { fn default() -> Self { GetPasteListMsg { title_pat: None, body_pat: None, created_at: None, modified_at: None, orderby_list: None, limit: Some(20), offset: Some(0), } } } impl Message for GetPasteListMsg { type Result = Result<Vec<Paste>, ServerError>; } impl Handler<GetPasteListMsg> for DbExecutor { type Result = Result<Vec<Paste>, ServerError>; fn handle(&mut self, msg: GetPasteListMsg, _: &mut Self::Context) -> Self::Result { use crate::models::schema::pastes::dsl::*; let mut query = pastes.into_boxed(); if let 
Some(title_pat) = msg.title_pat { query = query.filter(title.ilike(title_pat.to_owned() + "%")); } if let Some(body_pat) = msg.body_pat { query = query.filter(body.ilike(body_pat.to_owned() + "%")); } if let Some(cond) = msg.created_at { query = cmp!(query, created_at, cond.op, cond.time); } if let Some(cond) = msg.modified_at { query = cmp!(query, modified_at, cond.op, cond.time); } if let Some(orderby_list) = msg.orderby_list { for orderby in orderby_list { query = orderby!(query, orderby.item, orderby.order); } } if let Some(limit) = msg.limit { query = query.limit(limit); } if let Some(offset) = msg.offset { query = query.offset(offset); } query .load::<Paste>(&self.0.get().map_err(ServerError::R2d2)?) .map_err(ServerError::Database) } } pub struct DelPasteByIdMsg { pub id: i64, } impl Message for DelPasteByIdMsg { type Result = Result<usize, ServerError>; } impl Handler<DelPasteByIdMsg> for DbExecutor { type Result = Result<usize, ServerError>; fn handle(&mut self, msg: DelPasteByIdMsg, _: &mut Self::Context) -> Self::Result { use crate::models::schema::pastes::dsl::*; diesel::delete(pastes) .filter(id.eq(msg.id)) .execute(&self.0.get().map_err(ServerError::R2d2)?) .map_err(ServerError::Database) } }
use std::time::SystemTime; use actix::prelude::*; use diesel::{self, prelude::*}; use crate::common::error::ServerError; use crate::models::{ executor::DatabaseExecutor as DbExecutor, paste::{NewPaste, Paste}, }; pub struct CreatePasteMsg { pub title: String, pub body: String, pub created_at: SystemTime, } impl Message for CreatePasteMsg { type Result = Result<Paste, ServerError>; } impl Handler<CreatePasteMsg> for DbExecutor { type Result = Result<Paste, ServerError>; fn handle(&mut self, msg: CreatePasteMsg, _: &mut Self::Context) -> Self::Result { use crate::models::schema::pastes::dsl::*; let new_paste = NewPaste { title: &msg.title, body: &msg.body, created_at: &msg.created_at, modified_at: &msg.created_at, }; diesel::insert_into(pastes) .values(&new_paste) .get_result(&self.0.get().map_err(ServerError::R2d2)?) .map_err(ServerError::Database) } } pub struct UpdatePasteMsg { pub id: i64, pub title: String, pub body: String, pub modified_at: SystemTime, } impl Message for UpdatePasteMsg { type Result = Result<Paste, ServerError>; } impl Handler<UpdatePasteMsg> for DbExecutor { type Result = Result<Paste, ServerError>; fn handle(&mut self, msg: UpdatePasteMsg, _: &mut Self::Context) -> Self::Result { use crate::models::schema::pastes::dsl::*; diesel::update(pastes.find(msg.id)) .set(( title.eq(msg.title), body.eq(msg.body), modified_at.eq(msg.modified_at), )).get_result(&self.0.get().map_err(ServerError::R2d2)?) .map_err(ServerError::Database) } } pub struct GetPasteByIdMsg { pub id: i64, } impl Message for GetPasteByIdMsg { type Result = Result<Paste, ServerError>; } impl Handler<GetPasteByIdMsg> for DbExecutor { type Result = Result<Paste, ServerError>; fn handle(&mut self, msg: GetPasteByIdMsg, _: &mut Self::Context) -> Self::Result { use crate::models::schema::pastes::dsl::*; pastes .find(msg.id) .get_result(&self.0.get().map_err(ServerError::R2d2)?) 
.map_err(ServerError::Database) } } #[derive(Debug)] pub enum Item { Title, Body, CreatedAt, ModifiedAt, } #[derive(Debug)] pub enum Order { Ascend, Decrease, } #[derive(Debug)] pub enum CmpOp { GT, EQ, LT, GE, LE, } #[derive(Debug)] pub struct Orderby { pub item: Item, pub order: Order, } #[derive(Debug)] pub struct TimeCondition { pub op: CmpOp, pub time: SystemTime, } macro_rules! cmp { ($query:expr, $column:expr, $cmp:expr, $cond:expr) => { match $cmp { CmpOp::GT => $query.filter($column.gt($cond)), CmpOp::EQ => $query.filter($column.eq($cond)), CmpOp::LT => $query.filter($column.lt($cond)), CmpOp::GE => $query.filter($column.ge($cond)), CmpOp::LE => $query.filter($column.le($cond)), } }; } macro_rules! order { ($query:expr, $column:expr, $order:expr) => { match $order { Order::Ascend => $query.order($column.asc()), Order::Decrease => $query.order($column.desc()), } }; } macro_rules! orderby { ($query:expr, $column:expr, $order:expr) => { match $column { Item::Title => order!($query, title, $order), Item::Body => order!($query, body, $order), Item::CreatedAt => order!($query, created_at, $order), Item::ModifiedAt => order!($query, modified_at, $order), } }; } pub struct GetPasteListMsg { pub title_pat: Option<String>, pub body_pat: Option<String>, pub created_at: Option<TimeCondition>, pub modified_at: Option<TimeCondition>, pub orderby_list: Option<Vec<Orderby>>, pub limit: Option<i64>, pub offset: Option<i64>, } impl Default for GetPasteListMsg { fn default() -> Self { GetPasteListMsg { title_pat: None, body_pat: None, created_at: None, modified_at: None, orderby_list: None, limit: Some(20), offset: Some(0), } } } impl Message for GetPasteListMsg { type Result = Result<Vec<Paste>, ServerError>; } impl Handler<GetPasteListMsg> for DbExecutor { type Result = Result<Vec<Paste>, ServerError>;
} pub struct DelPasteByIdMsg { pub id: i64, } impl Message for DelPasteByIdMsg { type Result = Result<usize, ServerError>; } impl Handler<DelPasteByIdMsg> for DbExecutor { type Result = Result<usize, ServerError>; fn handle(&mut self, msg: DelPasteByIdMsg, _: &mut Self::Context) -> Self::Result { use crate::models::schema::pastes::dsl::*; diesel::delete(pastes) .filter(id.eq(msg.id)) .execute(&self.0.get().map_err(ServerError::R2d2)?) .map_err(ServerError::Database) } }
fn handle(&mut self, msg: GetPasteListMsg, _: &mut Self::Context) -> Self::Result { use crate::models::schema::pastes::dsl::*; let mut query = pastes.into_boxed(); if let Some(title_pat) = msg.title_pat { query = query.filter(title.ilike(title_pat.to_owned() + "%")); } if let Some(body_pat) = msg.body_pat { query = query.filter(body.ilike(body_pat.to_owned() + "%")); } if let Some(cond) = msg.created_at { query = cmp!(query, created_at, cond.op, cond.time); } if let Some(cond) = msg.modified_at { query = cmp!(query, modified_at, cond.op, cond.time); } if let Some(orderby_list) = msg.orderby_list { for orderby in orderby_list { query = orderby!(query, orderby.item, orderby.order); } } if let Some(limit) = msg.limit { query = query.limit(limit); } if let Some(offset) = msg.offset { query = query.offset(offset); } query .load::<Paste>(&self.0.get().map_err(ServerError::R2d2)?) .map_err(ServerError::Database) }
function_block-full_function
[ { "content": "pub fn get_paste_list(\n\n (req, conds): (HttpRequest<State>, Query<GetPasteListConds>),\n\n) -> FutureJsonResponse {\n\n let db_chan = req.state().db_chan.clone();\n\n let created_at = conds\n\n .cmp_created_at\n\n .to_owned()\n\n .map_or(Ok(None), |cmp_created_at| {\n\n parse_time_cond(&cmp_created_at).map(Option::from)\n\n });\n\n let modified_at = conds\n\n .cmp_modified_at\n\n .to_owned()\n\n .map_or(Ok(None), |cmp_modified_at| {\n\n parse_time_cond(&cmp_modified_at).map(Option::from)\n\n });\n\n let orderby_list = conds\n\n .orderby_list\n\n .to_owned()\n\n .map_or(Ok(None), |orderby_list| {\n", "file_path": "src/controllers/paste.rs", "rank": 0, "score": 93025.04365637255 }, { "content": "// format: \"Title/Body/CreatedAt/ModifiedAt:asc/decs\"\n\nfn parse_orderby(orderby_str: &str) -> Result<Vec<paste_srv::Orderby>, UserError> {\n\n use self::paste_srv::{Item, Order, Orderby};\n\n\n\n let default_err = Err(UserError::PayloadError(\n\n constant::ERR_MSG_PAYLOAD_PARSE_ORDERBY_FAIL.to_owned(),\n\n ));\n\n let comps: Vec<&str> = orderby_str.split(',').collect();\n\n if comps.is_empty() {\n\n return default_err;\n\n }\n\n let mut orderby_list: Vec<Orderby> = vec![];\n\n\n\n for comp in comps {\n\n let item_order: Vec<&str> = comp.split(':').collect();\n\n if item_order.len() != 2 {\n\n return default_err;\n\n }\n\n\n\n let item = match item_order[0] {\n\n \"Title\" => Ok(Item::Title),\n", "file_path": "src/controllers/paste.rs", "rank": 1, "score": 92291.41736069453 }, { "content": "#[test]\n\nfn test_get_paste_by_none_exist_id() {\n\n let mut srv = init_server();\n\n\n\n let req = srv\n\n .client(Method::GET, &format!(\"/pastes/{}\", 99999999))\n\n .finish()\n\n .unwrap();\n\n\n\n assert_res_err_msg!(srv, req, 404, ERR_MSG_DATA_NOT_FOUND);\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 2, "score": 90272.40221908962 }, { "content": "// format: \"GT/EQ/LT/GE/LE,seconds_since_UNIX_EPOCH\"\n\nfn parse_time_cond(cond_str: &str) -> 
Result<paste_srv::TimeCondition, UserError> {\n\n use self::paste_srv::{CmpOp, TimeCondition};\n\n use std::time::{Duration, UNIX_EPOCH};\n\n\n\n let default_err = Err(UserError::PayloadError(\n\n constant::ERR_MSG_PAYLOAD_PARSE_TIME_COND_FAIL.to_owned(),\n\n ));\n\n let op_secs: Vec<&str> = cond_str.split(',').collect();\n\n if op_secs.len() != 2 {\n\n return default_err;\n\n }\n\n\n\n let op = match op_secs[0] {\n\n \"GT\" => Ok(CmpOp::GT),\n\n \"EQ\" => Ok(CmpOp::EQ),\n\n \"LT\" => Ok(CmpOp::LT),\n\n \"GE\" => Ok(CmpOp::GE),\n\n \"LE\" => Ok(CmpOp::LE),\n\n _ => Err(()),\n\n };\n", "file_path": "src/controllers/paste.rs", "rank": 3, "score": 87185.39543606492 }, { "content": "pub fn del_paste_by_id(req: &HttpRequest<State>) -> FutureJsonResponse {\n\n let db_chan = req.state().db_chan.clone();\n\n\n\n call_ctrl!(|| future::ok(req.clone())\n\n .and_then(|req| req.match_info()[\"id\"].parse::<i64>())\n\n .from_err()\n\n .and_then(move |id| db_chan\n\n .send(paste_srv::DelPasteByIdMsg { id })\n\n .map_err(ServerError::MailBox)\n\n .from_err()).map(|res| res.map(|_| \"ok\")))\n\n}\n\n\n", "file_path": "src/controllers/paste.rs", "rank": 4, "score": 84316.41227541605 }, { "content": "pub fn update_paste_by_id(req: &HttpRequest<State>) -> FutureJsonResponse {\n\n use std::time::SystemTime;\n\n\n\n let db_chan = req.state().db_chan.clone();\n\n\n\n call_ctrl!(|| req\n\n .json()\n\n .from_err()\n\n .and_then(move |updated_paste: UpdatePaste| db_chan\n\n .send(paste_srv::UpdatePasteMsg {\n\n id: updated_paste.id,\n\n title: updated_paste.title,\n\n body: updated_paste.body,\n\n modified_at: SystemTime::now(),\n\n }).map_err(ServerError::MailBox)\n\n .from_err()))\n\n}\n\n\n", "file_path": "src/controllers/paste.rs", "rank": 5, "score": 84316.41227541605 }, { "content": "pub fn get_paste_by_id(req: &HttpRequest<State>) -> FutureJsonResponse {\n\n let db_chan = req.state().db_chan.clone();\n\n\n\n call_ctrl!(|| future::ok(req.clone())\n\n .and_then(|req| 
req.match_info()[\"id\"].parse::<i64>())\n\n .from_err()\n\n .and_then(move |id| db_chan\n\n .send(paste_srv::GetPasteByIdMsg { id })\n\n .map_err(ServerError::MailBox)\n\n .from_err()))\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug)]\n\npub struct GetPasteListConds {\n\n title_pat: Option<String>,\n\n body_pat: Option<String>,\n\n cmp_created_at: Option<String>,\n\n cmp_modified_at: Option<String>,\n\n orderby_list: Option<String>,\n\n limit: Option<i64>,\n\n offset: Option<i64>,\n\n}\n\n\n", "file_path": "src/controllers/paste.rs", "rank": 6, "score": 84316.41227541605 }, { "content": "#[test]\n\nfn test_del_paste_by_id() {\n\n let _lock = TEST_SUIT.begin_isolated_test();\n\n let paste_list = TEST_SUIT.data();\n\n let paste = paste_list.first().unwrap();\n\n\n\n let mut srv = init_server();\n\n\n\n let req = srv\n\n .client(Method::DELETE, &format!(\"/pastes/{}\", paste.id))\n\n .finish()\n\n .unwrap();\n\n\n\n assert_res!(srv, req, String, |res: String| {\n\n assert_eq!(res, \"ok\");\n\n });\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 7, "score": 76745.39693444653 }, { "content": "#[test]\n\nfn test_get_paste_by_id() {\n\n let _lock = TEST_SUIT.begin_isolated_test();\n\n let paste_list = TEST_SUIT.data();\n\n let paste = paste_list.first().unwrap();\n\n\n\n let mut srv = init_server();\n\n\n\n let req = srv\n\n .client(Method::GET, &format!(\"/pastes/{}\", paste.id))\n\n .finish()\n\n .unwrap();\n\n\n\n assert_res!(srv, req, Paste, |fetched_paste: Paste| {\n\n assert_eq!(fetched_paste.id, paste.id);\n\n assert_eq!(fetched_paste.title, paste.title);\n\n assert_eq!(fetched_paste.body, paste.body);\n\n });\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 8, "score": 76745.39693444653 }, { "content": "#[test]\n\nfn test_del_paste_by_bad_id() {\n\n let mut srv = init_server();\n\n\n\n let req = srv\n\n .client(Method::DELETE, &format!(\"/pastes/{}\", \"dddd\"))\n\n .finish()\n\n .unwrap();\n\n\n\n assert_res_err_msg!(srv, req, 400, 
ERR_MSG_BAD_ID);\n\n}\n", "file_path": "src/tests/paste.rs", "rank": 9, "score": 74258.05233039716 }, { "content": "#[test]\n\nfn test_get_paste_by_bad_id() {\n\n let mut srv = init_server();\n\n\n\n let req = srv\n\n .client(Method::GET, &format!(\"/pastes/{}\", \"dddd\"))\n\n .finish()\n\n .unwrap();\n\n\n\n assert_res_err_msg!(srv, req, 400, ERR_MSG_BAD_ID);\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 10, "score": 74258.05233039716 }, { "content": "pub fn create(state: State) -> App<State> {\n\n App::with_state(state)\n\n .prefix(\"/pastes\")\n\n .resource(\"/{id}\", |r| {\n\n r.route().filter(pred::Get()).a(get_paste_by_id);\n\n r.route().filter(pred::Post()).a(update_paste_by_id);\n\n r.route().filter(pred::Delete()).a(del_paste_by_id);\n\n }).resource(\"\", |r| {\n\n r.route().filter(pred::Post()).a(create_paste);\n\n r.route().filter(pred::Get()).with(get_paste_list);\n\n })\n\n}\n", "file_path": "src/apps/paste.rs", "rank": 11, "score": 72225.29835307552 }, { "content": "pub fn create_paste(req: &HttpRequest<State>) -> FutureJsonResponse {\n\n use std::time::SystemTime;\n\n\n\n let db_chan = req.state().db_chan.clone();\n\n\n\n // this requires correct content type\n\n call_ctrl!(|| req\n\n .json()\n\n .from_err()\n\n .and_then(move |new_paste: NewPaste| db_chan\n\n .send(paste_srv::CreatePasteMsg {\n\n title: new_paste.title,\n\n body: new_paste.body,\n\n created_at: SystemTime::now(),\n\n }).map_err(ServerError::MailBox)\n\n .from_err()))\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug)]\n\npub struct UpdatePaste {\n\n pub id: i64,\n\n pub title: String,\n\n pub body: String,\n\n}\n\n\n", "file_path": "src/controllers/paste.rs", "rank": 12, "score": 71168.36763013284 }, { "content": "#[test]\n\nfn test_get_paste_list_with_bad_orderby_list() {\n\n let mut srv = init_server();\n\n\n\n let req = srv\n\n .client(\n\n Method::GET,\n\n &format!(\n\n \"/pastes?title_pat={}&body_pat={}&limit={}&orderby_list={}\",\n\n \"test\", \"test body\", 5, 
\"BAD%3Aasc\"\n\n ),\n\n ).finish()\n\n .unwrap();\n\n\n\n assert_res_err_msg!(srv, req, 400, ERR_MSG_PAYLOAD_PARSE_ORDERBY_FAIL);\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 13, "score": 70159.24152413358 }, { "content": "#[test]\n\nfn test_get_paste_list_with_bad_cmp_modified_at() {\n\n let mut srv = init_server();\n\n\n\n let req = srv\n\n .client(\n\n Method::GET,\n\n &format!(\n\n \"/pastes?title_pat={}&body_pat={}&limit={}&cmp_modified_at={}\",\n\n \"test\", \"test body\", 5, \"DD%2C100000\"\n\n ),\n\n ).finish()\n\n .unwrap();\n\n\n\n assert_res_err_msg!(srv, req, 400, ERR_MSG_PAYLOAD_PARSE_TIME_COND_FAIL);\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 14, "score": 70159.24152413358 }, { "content": "#[test]\n\nfn test_get_paste_list_with_bad_cmp_created_at() {\n\n let mut srv = init_server();\n\n\n\n let req = srv\n\n .client(\n\n Method::GET,\n\n &format!(\n\n \"/pastes?title_pat={}&body_pat={}&limit={}&cmp_created_at={}\",\n\n \"test\", \"test body\", 5, \"DD%2C100000\"\n\n ),\n\n ).finish()\n\n .unwrap();\n\n\n\n assert_res_err_msg!(srv, req, 400, ERR_MSG_PAYLOAD_PARSE_TIME_COND_FAIL);\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 15, "score": 70159.24152413358 }, { "content": "#[test]\n\nfn test_creat_paste() {\n\n let _lock = TEST_SUIT.begin_isolated_test();\n\n let mut srv = init_server();\n\n\n\n let req = srv\n\n .client(Method::POST, \"/pastes\")\n\n .content_type(CONTENT_TYPE_JSON)\n\n .body(\n\n serde_json::to_vec(&NewPaste {\n\n title: \"test new paste\".to_string(),\n\n body: \"my new paste\".to_string(),\n\n }).unwrap(),\n\n ).unwrap();\n\n\n\n assert_res!(srv, req, Paste, |created_paste: Paste| {\n\n assert!(created_paste.id > 0);\n\n assert_eq!(created_paste.title, \"test new paste\");\n\n assert_eq!(created_paste.body, \"my new paste\");\n\n });\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 16, "score": 58842.364020524736 }, { "content": "#[test]\n\nfn test_update_paste() {\n\n let _lock = 
TEST_SUIT.begin_isolated_test();\n\n let paste_list = TEST_SUIT.data();\n\n let paste = paste_list.first().unwrap();\n\n\n\n let mut srv = init_server();\n\n\n\n let req = srv\n\n .client(Method::POST, &format!(\"/pastes/{}\", paste.id))\n\n .content_type(CONTENT_TYPE_JSON)\n\n .json(UpdatePaste {\n\n id: paste.id,\n\n title: \"test updated paste\".to_string(),\n\n body: \"test updated ddd body\".to_string(),\n\n }).unwrap();\n\n\n\n assert_res!(srv, req, Paste, |updated_paste: Paste| {\n\n assert!(updated_paste.id == paste.id);\n\n assert_eq!(updated_paste.title, \"test updated paste\");\n\n assert_eq!(updated_paste.body, \"test updated ddd body\");\n\n });\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 17, "score": 58842.364020524736 }, { "content": "#[test]\n\nfn test_get_paste_list() {\n\n let _lock = TEST_SUIT.begin_isolated_test();\n\n let assert_pastes = |pastes: Vec<Paste>| {\n\n for (idx, paste) in pastes.iter().enumerate() {\n\n assert!(paste.title.contains(\"test\"));\n\n assert!(paste.body.contains(\"test body\"));\n\n assert_eq!(\n\n paste.title,\n\n \"test title \".to_string() + &(idx + 1).to_string()\n\n );\n\n assert_eq!(\n\n paste.body,\n\n \"test body \".to_string() + &(idx + 1).to_string()\n\n );\n\n }\n\n };\n\n\n\n let mut srv = init_server();\n\n\n\n // fetch without query string\n", "file_path": "src/tests/paste.rs", "rank": 18, "score": 57108.800613934975 }, { "content": "#[test]\n\nfn test_create_paste_with_bad_payload() {\n\n let mut srv = init_server();\n\n\n\n let req = srv\n\n .client(Method::POST, \"/pastes\")\n\n .content_type(CONTENT_TYPE_JSON)\n\n .body(\"{\\\"bad\\\": \\\"bad payload\\\"}\")\n\n .unwrap();\n\n\n\n assert_res_err!(srv, req, 400, |res: ResponseError| {\n\n assert!(res.msg.contains(\"Json deserialize error\"));\n\n });\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 19, "score": 55510.87460326565 }, { "content": "#[test]\n\nfn test_update_paste_with_bad_payload() {\n\n let paste_list = 
TEST_SUIT.data();\n\n let paste = paste_list.first().unwrap();\n\n\n\n let mut srv = init_server();\n\n\n\n let req = srv\n\n .client(Method::POST, &format!(\"/pastes/{}\", paste.id))\n\n .content_type(CONTENT_TYPE_JSON)\n\n .body(\"{\\\"id\\\": \\\"dddd\\\"}\")\n\n .unwrap();\n\n\n\n assert_res_err!(srv, req, 400, |res: ResponseError| {\n\n assert!(res.msg.contains(\"Json deserialize error\"));\n\n });\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 20, "score": 55510.87460326565 }, { "content": "type FutureJsonResponse = Box<Future<Item = HttpResponse, Error = UserError>>;\n\n\n\n#[macro_use]\n\npub mod macros;\n\npub mod paste;\n", "file_path": "src/controllers/mod.rs", "rank": 21, "score": 54088.81790268631 }, { "content": "fn init_server() -> TestServer {\n\n TestServer::with_factory(create_app)\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 22, "score": 48928.85367303597 }, { "content": "fn create_app() -> App<State> {\n\n paste_app::create(State {\n\n db_chan: TEST_SUIT.executor(),\n\n })\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 23, "score": 47620.85599891905 }, { "content": "-- Sets up a trigger for the given table to automatically set a column called\n", "file_path": "migrations/00000000000000_diesel_initial_setup/up.sql", "rank": 24, "score": 46496.45429366939 }, { "content": "struct TestSuit {\n\n database_url: String,\n\n data: Vec<Paste>,\n\n executor: Addr<DBExecutor>,\n\n locker: Arc<Mutex<()>>,\n\n}\n\n\n\nimpl TestSuit {\n\n pub fn new() -> Self {\n\n let config = Config::load(constant::TEST_CONFIG_FILENAME);\n\n let database_url = format!(\n\n \"postgres://{}:{}@{}/{}\",\n\n config.postgres.username,\n\n config.postgres.password,\n\n config.postgres.host,\n\n config.postgres.database,\n\n );\n\n\n\n let data = Self::create_data(&database_url);\n\n let pool = Self::create_pool(&database_url);\n", "file_path": "src/tests/mod.rs", "rank": 25, "score": 42987.80104417047 }, { "content": "fn main() {\n\n let config = 
Config::load(CONFIG_FILENAME);\n\n let server = Server::new(&config).unwrap();\n\n\n\n exit(server.start());\n\n}\n", "file_path": "src/main.rs", "rank": 26, "score": 38082.45171764477 }, { "content": "-- This file should undo anything in `up.sql`\n\nDROP TABLE pastes\n", "file_path": "migrations/2018-04-12-070219_create_pastes/down.sql", "rank": 27, "score": 25079.039285114522 }, { "content": "-- Your SQL goes here\n\nCREATE TABLE pastes (\n\n id BIGSERIAL PRIMARY KEY,\n\n title VARCHAR(200) NOT NULL,\n\n body TEXT NOT NULL,\n\n created_at TIMESTAMP NOT NULL DEFAULT NOW(),\n\n modified_at TIMESTAMP NOT NULL DEFAULT NOW()\n\n)\n", "file_path": "migrations/2018-04-12-070219_create_pastes/up.sql", "rank": 28, "score": 25079.039285114522 }, { "content": "-- Sets up a trigger for the given table to automatically set a column called\n", "file_path": "migrations/00000000000000_diesel_initial_setup/up.sql", "rank": 29, "score": 22147.783757715002 }, { "content": "use std::time::SystemTime;\n\n\n\nuse crate::models::schema::pastes;\n\n\n\n#[derive(Queryable, Debug, PartialEq, Serialize, Deserialize)]\n\npub struct Paste {\n\n pub id: i64,\n\n pub title: String,\n\n pub body: String,\n\n pub created_at: SystemTime,\n\n pub modified_at: SystemTime,\n\n}\n\n\n\n#[derive(Insertable)]\n\n#[table_name = \"pastes\"]\n\npub struct NewPaste<'a> {\n\n pub title: &'a str,\n\n pub body: &'a str,\n\n pub created_at: &'a SystemTime,\n\n pub modified_at: &'a SystemTime,\n\n}\n", "file_path": "src/models/paste.rs", "rank": 40, "score": 20848.845180433083 }, { "content": " parse_orderby(&orderby_list).map(Option::from)\n\n });\n\n let msg = paste_srv::GetPasteListMsg {\n\n title_pat: conds.title_pat.to_owned(),\n\n body_pat: conds.body_pat.to_owned(),\n\n limit: conds.limit,\n\n offset: conds.offset,\n\n ..Default::default()\n\n };\n\n\n\n call_ctrl!(|| future::ok(msg)\n\n .and_then(move |mut msg| created_at.map(|created_at| {\n\n msg.created_at = created_at;\n\n msg\n\n })).from_err()\n\n 
.and_then(move |mut msg| modified_at.map(|modified_at| {\n\n msg.modified_at = modified_at;\n\n msg\n\n })).from_err()\n\n .and_then(move |mut msg| orderby_list.map(|orderby_list| {\n", "file_path": "src/controllers/paste.rs", "rank": 43, "score": 20846.699221624916 }, { "content": " msg.orderby_list = orderby_list;\n\n msg\n\n })).and_then(move |msg| db_chan.send(msg).map_err(ServerError::MailBox).from_err()))\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug)]\n\npub struct NewPaste {\n\n pub title: String,\n\n pub body: String,\n\n}\n\n\n", "file_path": "src/controllers/paste.rs", "rank": 44, "score": 20844.63732005982 }, { "content": "\n\n // try apply some conditions\n\n let req = srv\n\n .client(\n\n Method::GET,\n\n &format!(\n\n \"/pastes?title_pat={}&body_pat={}&limit={}&cmp_created_at={}&orderby_list={}\",\n\n \"test\", \"test body\", 5, \"GT%2C100000\", \"Title%3Aasc%2CBody%3Aasc\"\n\n ),\n\n ).finish()\n\n .unwrap();\n\n\n\n assert_res!(srv, req, Vec<Paste>, |pastes: Vec<Paste>| {\n\n assert_eq!(pastes.len(), 5);\n\n assert_pastes(pastes);\n\n });\n\n}\n\n\n", "file_path": "src/tests/paste.rs", "rank": 45, "score": 20843.948480881892 }, { "content": " \"Body\" => Ok(Item::Body),\n\n \"CreatedAt\" => Ok(Item::CreatedAt),\n\n \"ModifiedAt\" => Ok(Item::ModifiedAt),\n\n _ => Err(()),\n\n };\n\n let order = match item_order[1] {\n\n \"asc\" => Ok(Order::Ascend),\n\n \"decs\" => Ok(Order::Decrease),\n\n _ => Err(()),\n\n };\n\n\n\n if let (Ok(item), Ok(order)) = (item, order) {\n\n orderby_list.push(Orderby { item, order })\n\n } else {\n\n return default_err;\n\n }\n\n }\n\n Ok(orderby_list)\n\n}\n", "file_path": "src/controllers/paste.rs", "rank": 46, "score": 20843.834461292612 }, { "content": " let req = srv.client(Method::GET, \"/pastes\").finish().unwrap();\n\n assert_res!(srv, req, Vec<Paste>, |pastes: Vec<Paste>| {\n\n assert_eq!(pastes.len(), 9);\n\n assert_pastes(pastes);\n\n });\n\n\n\n // exactly fetch\n\n let req = srv\n\n .client(\n\n 
Method::GET,\n\n &format!(\n\n \"/pastes?title_pat={}&body_pat={}\",\n\n \"test title\", \"test body 1\"\n\n ),\n\n ).finish()\n\n .unwrap();\n\n assert_res!(srv, req, Vec<Paste>, |mut pastes: Vec<Paste>| {\n\n assert_eq!(pastes.len(), 1);\n\n assert_eq!(pastes.pop().unwrap().title, \"test title 1\");\n\n });\n", "file_path": "src/tests/paste.rs", "rank": 47, "score": 20840.290926903413 }, { "content": "use actix_web::test::TestServer;\n\nuse actix_web::{http::Method, App, HttpMessage};\n\nuse serde_json;\n\n\n\nuse crate::apps::paste as paste_app;\n\nuse crate::common::{constant::*, error::ResponseError};\n\nuse crate::controllers::paste::{NewPaste, UpdatePaste};\n\nuse crate::models::paste::Paste;\n\nuse crate::server::State;\n\nuse crate::tests::{constant::*, TEST_SUIT};\n\n\n", "file_path": "src/tests/paste.rs", "rank": 48, "score": 20837.16825463363 }, { "content": "use actix_web::{AsyncResponder, HttpMessage, HttpRequest, HttpResponse, Query};\n\nuse futures::future::{self, Future};\n\n\n\nuse crate::common::{\n\n constant,\n\n error::{ServerError, UserError},\n\n};\n\nuse crate::controllers::FutureJsonResponse;\n\nuse crate::server::State;\n\nuse crate::services::paste as paste_srv;\n\n\n", "file_path": "src/controllers/paste.rs", "rank": 49, "score": 20836.29332898968 }, { "content": " let secs = op_secs[1].parse::<u64>();\n\n\n\n if let (Ok(op), Ok(secs)) = (op, secs) {\n\n Ok(TimeCondition {\n\n op,\n\n time: UNIX_EPOCH + Duration::from_secs(secs),\n\n })\n\n } else {\n\n default_err\n\n }\n\n}\n\n\n", "file_path": "src/controllers/paste.rs", "rank": 50, "score": 20835.69768919878 }, { "content": "use actix_web::{pred, App};\n\n\n\nuse crate::controllers::paste::*;\n\nuse crate::server::State;\n\n\n", "file_path": "src/apps/paste.rs", "rank": 51, "score": 20835.15727407316 }, { "content": "-- This file should undo anything in `up.sql`\n", "file_path": "migrations/2018-04-12-070219_create_pastes/down.sql", "rank": 52, "score": 20832.715501301394 }, { 
"content": "-- Your SQL goes here\n", "file_path": "migrations/2018-04-12-070219_create_pastes/up.sql", "rank": 53, "score": 20829.76684810176 }, { "content": "DROP FUNCTION IF EXISTS diesel_set_updated_at();\n", "file_path": "migrations/00000000000000_diesel_initial_setup/down.sql", "rank": 54, "score": 18256.05259808213 }, { "content": "CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$\n\nBEGIN\n\n IF (\n\n NEW IS DISTINCT FROM OLD AND\n\n NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at\n\n ) THEN\n\n NEW.updated_at := current_timestamp;\n\n END IF;\n\n RETURN NEW;\n\nEND;\n\n$$ LANGUAGE plpgsql;\n", "file_path": "migrations/00000000000000_diesel_initial_setup/up.sql", "rank": 55, "score": 16800.11175174187 }, { "content": " use std::time::SystemTime;\n\n\n\n let conn = PgConnection::establish(database_url).unwrap();\n\n\n\n let now = SystemTime::now();\n\n let paste_list = (1..10)\n\n .map(|n| {\n\n (\n\n \"test title \".to_owned() + &n.to_string(),\n\n \"test body \".to_owned() + &n.to_string(),\n\n )\n\n }).collect::<Vec<(_, _)>>();\n\n let new_paste_list = (0..9)\n\n .map(|i| {\n\n let paste = paste_list.get(i).unwrap();\n\n NewPaste {\n\n title: &paste.0,\n\n body: &paste.1,\n\n created_at: &now,\n\n modified_at: &now,\n", "file_path": "src/tests/mod.rs", "rank": 56, "score": 12.555877335849342 }, { "content": "\n\npub mod constant {\n\n pub const TEST_CONFIG_FILENAME: &str = \"test_config.toml\";\n\n pub const ERR_MSG_BAD_ID: &str = \"bad id\";\n\n pub const ERR_MSG_DATA_NOT_FOUND: &str = \"data not found\";\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct TestTxConnCustomizer;\n\n\n\nimpl CustomizeConnection<PgConnection, R2d2Error> for TestTxConnCustomizer {\n\n fn on_acquire(&self, conn: &mut PgConnection) -> Result<(), R2d2Error> {\n\n conn.begin_test_transaction().map_err(R2d2Error::QueryError)\n\n }\n\n}\n\n\n\npub struct ResetPool();\n\n\n\nimpl Message for ResetPool {\n\n type Result = Result<(), ()>;\n", "file_path": 
"src/tests/mod.rs", "rank": 57, "score": 11.330697211161095 }, { "content": " UserError::BadID(err)\n\n }\n\n}\n\n\n\nimpl From<PayloadError> for UserError {\n\n fn from(err: PayloadError) -> Self {\n\n UserError::PayloadError(err.to_string())\n\n }\n\n}\n\n\n\nimpl From<JsonPayloadError> for UserError {\n\n fn from(err: JsonPayloadError) -> Self {\n\n UserError::PayloadError(err.to_string())\n\n }\n\n}\n\n\n\n#[derive(Deserialize, Serialize, Debug)]\n\npub struct ResponseError {\n\n pub code: u16,\n\n pub msg: String,\n", "file_path": "src/common/error.rs", "rank": 58, "score": 10.907217771991803 }, { "content": "pub const CONFIG_FILENAME: &str = \"config.toml\";\n\npub const CONTENT_TYPE_JSON: &str = \"application/json\";\n\npub const ERR_MSG_PAYLOAD_PARSE_ORDERBY_FAIL: &str = \"parse orderby list fail\";\n\npub const ERR_MSG_PAYLOAD_PARSE_TIME_COND_FAIL: &str = \"parse time condition fail\";\n", "file_path": "src/common/constant.rs", "rank": 59, "score": 10.734318106917677 }, { "content": "}\n\n\n\nimpl Handler<ResetPool> for DBExecutor {\n\n type Result = Result<(), ()>;\n\n\n\n fn handle(&mut self, _msg: ResetPool, _: &mut Self::Context) -> Self::Result {\n\n // replace old pool with newly created one\n\n self.0 = TestSuit::create_pool(&TEST_SUIT.database_url);\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 60, "score": 10.469633917071175 }, { "content": "use actix::prelude::*;\n\nuse diesel::{\n\n pg::PgConnection,\n\n r2d2::{ConnectionManager, Pool},\n\n};\n\n\n\npub struct DatabaseExecutor(pub Pool<ConnectionManager<PgConnection>>);\n\n\n\nimpl Actor for DatabaseExecutor {\n\n type Context = SyncContext<Self>;\n\n}\n", "file_path": "src/models/executor.rs", "rank": 61, "score": 9.715340322695074 }, { "content": "table! 
{\n\n pastes (id) {\n\n id -> Int8,\n\n title -> Varchar,\n\n body -> Text,\n\n created_at -> Timestamp,\n\n modified_at -> Timestamp,\n\n }\n\n}\n", "file_path": "src/models/schema.rs", "rank": 62, "score": 9.705500079556757 }, { "content": " R2d2(#[cause] R2d2Error),\n\n}\n\n\n\n#[derive(Debug, Fail)]\n\npub enum UserError {\n\n #[fail(display = \"an internal error occurred. please try again later\")]\n\n InternalError,\n\n #[fail(display = \"bad payload: {}\", _0)]\n\n PayloadError(String),\n\n #[fail(display = \"bad id\")]\n\n BadID(#[cause] ParseIntError),\n\n #[fail(display = \"data not found\")]\n\n NotFound,\n\n #[fail(display = \"code: {}, msg: {}\", code, msg)]\n\n Custom { code: u16, msg: String },\n\n}\n\n\n\nimpl UserError {\n\n pub fn bad_request(msg: &str) -> Self {\n\n UserError::Custom {\n", "file_path": "src/common/error.rs", "rank": 63, "score": 9.520796013061574 }, { "content": " pub host: String,\n\n pub username: String,\n\n pub password: String,\n\n pub database: String,\n\n}\n\n\n\nimpl Config {\n\n pub fn load(conf_fname: &str) -> Config {\n\n use std::fs::read_to_string;\n\n use toml;\n\n\n\n let config_string =\n\n read_to_string(conf_fname).expect(&format!(\"fail to read config: {}\", conf_fname));\n\n toml::from_str(&config_string).unwrap()\n\n }\n\n}\n", "file_path": "src/common/config.rs", "rank": 64, "score": 9.391822112118886 }, { "content": "use std::sync::{Arc, Mutex, MutexGuard};\n\n\n\nuse actix::prelude::*;\n\n// prelude is required for PgConnection::establish()\n\nuse diesel::{\n\n pg::PgConnection,\n\n prelude::*,\n\n r2d2::{ConnectionManager, CustomizeConnection, Error as R2d2Error, Pool},\n\n};\n\n\n\nuse crate::common::config::Config;\n\nuse crate::models::{executor::DatabaseExecutor as DBExecutor, paste::Paste};\n\n\n\n#[macro_use]\n\npub mod macros;\n\npub mod paste;\n\n\n\nlazy_static! 
{\n\n static ref TEST_SUIT: TestSuit = TestSuit::new();\n\n}\n", "file_path": "src/tests/mod.rs", "rank": 65, "score": 8.569780008084745 }, { "content": "}\n\n\n\nimpl ActixResponseError for UserError {\n\n fn error_response(&self) -> HttpResponse {\n\n let res_err = match *self {\n\n UserError::InternalError => ResponseError {\n\n code: StatusCode::INTERNAL_SERVER_ERROR.as_u16(),\n\n msg: self.to_string(),\n\n },\n\n UserError::PayloadError(ref msg) => ResponseError {\n\n code: StatusCode::BAD_REQUEST.as_u16(),\n\n msg: msg.to_string(),\n\n },\n\n UserError::BadID(_) => ResponseError {\n\n code: StatusCode::BAD_REQUEST.as_u16(),\n\n msg: self.to_string(),\n\n },\n\n UserError::NotFound => ResponseError {\n\n code: StatusCode::NOT_FOUND.as_u16(),\n\n msg: self.to_string(),\n", "file_path": "src/common/error.rs", "rank": 66, "score": 8.01464506603663 }, { "content": "#[derive(Clone, Default, Deserialize)]\n\npub struct Config {\n\n pub server: ServerConfig,\n\n pub actix: ActixConfig,\n\n pub postgres: PostgresConfig,\n\n}\n\n\n\n#[derive(Clone, Default, Deserialize)]\n\npub struct ServerConfig {\n\n pub ip: String,\n\n pub port: String,\n\n}\n\n\n\n#[derive(Clone, Default, Deserialize)]\n\npub struct ActixConfig {\n\n pub connections: usize,\n\n}\n\n\n\n#[derive(Clone, Default, Deserialize)]\n\npub struct PostgresConfig {\n", "file_path": "src/common/config.rs", "rank": 67, "score": 7.954563457567398 }, { "content": "-- Sets up a trigger for the given table to automatically set a column called\n\n-- `updated_at` whenever the row is modified (unless `updated_at` was included\n\n-- in the modified columns)\n\n--\n\n-- # Example\n\n--\n\n-- ```sql\n\n-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());\n\n--\n\n-- SELECT diesel_manage_updated_at('users');\n\n-- ```\n\nCREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$\n\nBEGIN\n\n EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON 
%s\n\n FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);\n\nEND;\n\n$$ LANGUAGE plpgsql;\n\n\n", "file_path": "migrations/00000000000000_diesel_initial_setup/up.sql", "rank": 68, "score": 7.6515354132319136 }, { "content": " code: StatusCode::BAD_REQUEST.as_u16(),\n\n msg: msg.to_owned(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<ServerError> for UserError {\n\n fn from(err: ServerError) -> Self {\n\n match err {\n\n ServerError::Database(ref cause) => match cause {\n\n &DieselError::NotFound => UserError::NotFound,\n\n _ => UserError::InternalError,\n\n },\n\n _ => UserError::InternalError,\n\n }\n\n }\n\n}\n\n\n\nimpl From<ParseIntError> for UserError {\n\n fn from(err: ParseIntError) -> Self {\n", "file_path": "src/common/error.rs", "rank": 69, "score": 7.558533680788305 }, { "content": "macro_rules! call_ctrl {\n\n ($ctrl_fn: expr) => {\n\n $ctrl_fn()\n\n .and_then(|result| match result {\n\n Ok(result) => Ok(HttpResponse::Ok().json(result)),\n\n Err(err) => Err(UserError::from(err)),\n\n }).responder()\n\n };\n\n}\n", "file_path": "src/controllers/macros.rs", "rank": 70, "score": 7.4772457344047885 }, { "content": "use failure::Error;\n\n\n\nuse actix::{prelude::*, SystemRunner};\n\nuse actix_web::server;\n\nuse diesel::{\n\n pg::PgConnection,\n\n r2d2::{ConnectionManager, Pool},\n\n};\n\n\n\nuse crate::common::config::Config;\n\nuse crate::models::executor::DatabaseExecutor;\n\n\n\npub struct State {\n\n pub db_chan: Addr<DatabaseExecutor>,\n\n}\n\n\n\npub struct Server {\n\n runner: SystemRunner,\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 71, "score": 6.694928234764227 }, { "content": " let executor = Self::create_executor(pool);\n\n\n\n TestSuit {\n\n database_url: database_url.to_owned(),\n\n data,\n\n executor,\n\n locker: Arc::new(Mutex::new(())),\n\n }\n\n }\n\n\n\n pub fn data(&self) -> &Vec<Paste> {\n\n &self.data\n\n }\n\n\n\n pub fn executor(&self) -> Addr<DBExecutor> {\n\n self.executor.clone()\n\n }\n\n\n\n pub fn 
begin_isolated_test(&self) -> MutexGuard<()> {\n\n use futures::future::Future;\n", "file_path": "src/tests/mod.rs", "rank": 72, "score": 6.490459516812074 }, { "content": "-- This file was automatically created by Diesel to setup helper functions\n\n-- and other internal bookkeeping. This file is safe to edit, any future\n\n-- changes will be added to existing projects as new migrations.\n\n\n\n\n\n\n\n\n\n-- Sets up a trigger for the given table to automatically set a column called\n\n-- `updated_at` whenever the row is modified (unless `updated_at` was included\n", "file_path": "migrations/00000000000000_diesel_initial_setup/up.sql", "rank": 73, "score": 6.189886050435126 }, { "content": " },\n\n UserError::Custom { ref code, ref msg } => ResponseError {\n\n code: *code,\n\n msg: msg.to_string(),\n\n },\n\n };\n\n\n\n let status_code =\n\n StatusCode::from_u16(res_err.code).unwrap_or(StatusCode::INTERNAL_SERVER_ERROR);\n\n\n\n HttpResponse::build(status_code)\n\n .content_type(constant::CONTENT_TYPE_JSON)\n\n .json(res_err)\n\n }\n\n}\n", "file_path": "src/common/error.rs", "rank": 74, "score": 6.002424787797594 }, { "content": " pub fn create_executor(pool: Pool<ConnectionManager<PgConnection>>) -> Addr<DBExecutor> {\n\n use std::sync::mpsc::channel;\n\n use std::thread;\n\n\n\n let (tx, rx) = channel();\n\n\n\n thread::spawn(move || {\n\n let sys = actix::System::new(\"pastebin-test\");\n\n\n\n let addr = SyncArbiter::start(1, move || DBExecutor(pool.clone()));\n\n tx.send(addr).unwrap();\n\n\n\n sys.run();\n\n });\n\n\n\n rx.recv().unwrap()\n\n }\n\n\n\n pub fn create_data(database_url: &str) -> Vec<Paste> {\n\n use crate::models::{paste::NewPaste, schema::pastes::dsl::*};\n", "file_path": "src/tests/mod.rs", "rank": 75, "score": 5.853034788864895 }, { "content": "-- Sets up a trigger for the given table to automatically set a column called\n", "file_path": "migrations/00000000000000_diesel_initial_setup/up.sql", "rank": 76, "score": 5.7500811318749125 }, { 
"content": "pub mod executor;\n\npub mod paste;\n\npub mod schema;\n", "file_path": "src/models/mod.rs", "rank": 77, "score": 5.602445061726661 }, { "content": "pub mod paste;\n", "file_path": "src/apps/mod.rs", "rank": 78, "score": 5.565061619779782 }, { "content": "pub mod paste;\n", "file_path": "src/services/mod.rs", "rank": 79, "score": 5.565061619779782 }, { "content": "use std::num::ParseIntError;\n\n\n\nuse actix::MailboxError;\n\nuse actix_web::{\n\n error::{JsonPayloadError, PayloadError, ResponseError as ActixResponseError},\n\n http::StatusCode,\n\n HttpResponse,\n\n};\n\nuse diesel::result::Error as DieselError;\n\nuse r2d2::Error as R2d2Error;\n\n\n\nuse crate::common::constant;\n\n\n\n#[derive(Debug, Fail)]\n\npub enum ServerError {\n\n #[fail(display = \"database error\")]\n\n Database(#[cause] DieselError),\n\n #[fail(display = \"actor mailbox error\")]\n\n MailBox(#[cause] MailboxError),\n\n #[fail(display = \"r2d2 error\")]\n", "file_path": "src/common/error.rs", "rank": 80, "score": 4.575870375067121 }, { "content": " }\n\n }).collect::<Vec<_>>();\n\n\n\n diesel::delete(pastes)\n\n .execute(&conn)\n\n .expect(\"fail to clear table\");\n\n\n\n diesel::insert_into(pastes)\n\n .values(&new_paste_list)\n\n .get_results(&conn)\n\n .expect(\"fail to insert test data\")\n\n }\n\n}\n", "file_path": "src/tests/mod.rs", "rank": 81, "score": 4.519670594852322 }, { "content": "macro_rules! sync_send {\n\n ($db_addr: expr, $msg: expr) => {\n\n $db_addr.send($msg).wait().unwrap().unwrap()\n\n };\n\n}\n\n\n\nmacro_rules! parse_json {\n\n ($srv: expr, $res: expr, $type: ty) => {{\n\n let body = $srv.execute($res.body()).unwrap();\n\n serde_json::from_slice::<$type>(&body).unwrap()\n\n }};\n\n}\n\n\n\nmacro_rules! 
assert_res_err {\n\n ($srv: expr, $req: expr, $code: expr, $assertion: expr) => {{\n\n let res = $srv.execute($req.send()).unwrap();\n\n assert!(res.status().is_client_error());\n\n\n\n let err_res = parse_json!($srv, res, ResponseError);\n\n assert_eq!(err_res.code, $code);\n", "file_path": "src/tests/macros.rs", "rank": 82, "score": 4.466571489197841 }, { "content": "impl Server {\n\n /// Create a new server instance\n\n pub fn new(config: &Config) -> Result<Self, Error> {\n\n let database_url = format!(\n\n \"postgres://{}:{}@{}/{}\",\n\n config.postgres.username,\n\n config.postgres.password,\n\n config.postgres.host,\n\n config.postgres.database,\n\n );\n\n\n\n let runner = actix::System::new(\"pastebin-actix\");\n\n\n\n let manager = ConnectionManager::<PgConnection>::new(database_url);\n\n let pool = Pool::builder()\n\n .build(manager)\n\n .expect(\"cannot build database connection pool\");\n\n let addr = SyncArbiter::start(config.actix.connections, move || {\n\n DatabaseExecutor(pool.clone())\n\n });\n", "file_path": "src/server.rs", "rank": 83, "score": 4.163541831376531 }, { "content": " $assertion(err_res);\n\n }};\n\n}\n\n\n\nmacro_rules! assert_res_err_msg {\n\n ($srv: expr, $req: expr, $code: expr, $msg: expr) => {{\n\n assert_res_err!($srv, $req, $code, |res_err: ResponseError| {\n\n assert_eq!(res_err.msg, $msg);\n\n });\n\n }};\n\n}\n\n\n\nmacro_rules! 
assert_res {\n\n ($srv: expr, $req: expr, $res_ty: ty, $assertion: expr) => {{\n\n let res = $srv.execute($req.send()).unwrap();\n\n assert!(res.status().is_success());\n\n\n\n $assertion(parse_json!($srv, res, $res_ty));\n\n }};\n\n}\n", "file_path": "src/tests/macros.rs", "rank": 84, "score": 3.4621659457281035 }, { "content": "pub mod config;\n\npub mod constant;\n\npub mod error;\n", "file_path": "src/common/mod.rs", "rank": 85, "score": 3.21403890096623 }, { "content": "use actix_web::HttpResponse;\n\nuse futures::future::Future;\n\n\n\nuse crate::common::error::UserError;\n\n\n", "file_path": "src/controllers/mod.rs", "rank": 86, "score": 3.1486778304439653 }, { "content": "#![feature(rust_2018_preview)]\n\n\n\n#[macro_use]\n\nextern crate diesel;\n\n#[cfg(test)]\n\n#[macro_use]\n\nextern crate lazy_static;\n\n#[macro_use]\n\nextern crate serde_derive;\n\n#[macro_use]\n\nextern crate failure_derive;\n\n\n\nmod apps;\n\nmod common;\n\nmod controllers;\n\nmod models;\n\nmod server;\n\nmod services;\n\n#[cfg(test)]\n\nmod tests;\n\n\n\nuse std::process::exit;\n\n\n\nuse crate::common::{config::Config, constant::CONFIG_FILENAME};\n\nuse crate::server::Server;\n\n\n", "file_path": "src/main.rs", "rank": 87, "score": 2.9460720618865883 }, { "content": "\n\n let server = server::new(move || {\n\n crate::apps::paste::create(State {\n\n db_chan: addr.clone(),\n\n })\n\n });\n\n let server_url = format!(\"{}:{}\", config.server.ip, config.server.port);\n\n\n\n server.bind(server_url)?.start();\n\n\n\n Ok(Server { runner })\n\n }\n\n\n\n pub fn start(self) -> i32 {\n\n self.runner.run()\n\n }\n\n}\n", "file_path": "src/server.rs", "rank": 88, "score": 2.891046609983616 }, { "content": "-- This file was automatically created by Diesel to setup helper functions\n\n-- and other internal bookkeeping. 
This file is safe to edit, any future\n\n-- changes will be added to existing projects as new migrations.\n\n\n", "file_path": "migrations/00000000000000_diesel_initial_setup/down.sql", "rank": 89, "score": 2.7657449773706215 }, { "content": "\n\n let guard = match self.locker.lock() {\n\n Ok(guard) => guard,\n\n Err(poisoned) => poisoned.into_inner(),\n\n };\n\n\n\n sync_send!(self.executor, ResetPool {});\n\n\n\n guard\n\n }\n\n\n\n pub fn create_pool(database_url: &str) -> Pool<ConnectionManager<PgConnection>> {\n\n let manager = ConnectionManager::<PgConnection>::new(database_url.to_owned());\n\n Pool::builder()\n\n .max_size(1)\n\n .connection_customizer(Box::new(TestTxConnCustomizer))\n\n .build(manager)\n\n .expect(\"cannot build database connection pool\")\n\n }\n\n\n", "file_path": "src/tests/mod.rs", "rank": 90, "score": 2.2855473389740264 } ]
Rust
day-21/src/main.rs
Shriram-Balaji/rust-advent-of-code-2020
a1002a2f50d12eff744f7cb0db1f7271b9020c3a
#[macro_use] extern crate lazy_static; use regex::Regex; use std::{ collections::{BTreeMap, HashMap, HashSet}, env, fs, }; #[derive(Debug)] struct Food<'a> { ingredients: HashSet<&'a str>, allergens: Vec<&'a str>, } fn process_food(food: &str) -> Food { lazy_static! { static ref FOOD_REGEX: Regex = Regex::new(r"(\w.*)\(contains\s+(\w+.*)\)").unwrap(); } let food = food.trim(); let captures = match FOOD_REGEX.captures(food) { Some(captures) => captures, None => panic!("Invalid food item {}", food), }; let ingredients: HashSet<&str> = captures .get(1) .unwrap() .as_str() .split_ascii_whitespace() .collect(); let allergens: Vec<&str> = captures.get(2).unwrap().as_str().split(", ").collect(); Food { ingredients, allergens, } } struct Processed<'a> { ingredients_without_allergens: HashMap<&'a str, u32>, ingredients_with_allergens: Vec<&'a str>, } fn process_food_items(input: &str) -> Processed { let lines: Vec<&str> = input .lines() .filter(|x| !x.is_empty()) .map(|x| x.trim()) .collect(); let mut foods: Vec<Food> = lines.iter().map(|x| process_food(*x)).collect(); let mut unknown_allergens: HashSet<&str> = foods .iter() .map(|f| f.allergens.iter()) .flatten() .cloned() .collect(); let mut known_allergens: BTreeMap<&str, &str> = BTreeMap::new(); 'outer: loop { for allergen in &unknown_allergens.clone() { let foods_with_allergen: Vec<&Food> = foods .iter() .filter(|f| f.allergens.contains(&allergen)) .collect(); let init: HashSet<&str> = foods_with_allergen[0].ingredients.clone(); let candidate_ingredients: HashSet<&str> = foods_with_allergen.iter().fold(init.to_owned(), |i, f| { i.intersection(&f.ingredients).cloned().collect() }); if candidate_ingredients.len() == 1 { let ingredient = candidate_ingredients.iter().next().unwrap(); for f in foods.iter_mut() { f.ingredients.remove(ingredient); } known_allergens.insert(allergen, ingredient); unknown_allergens.remove(allergen); } if unknown_allergens.is_empty() { break 'outer; } } } let ingredients_without_allergens: 
HashMap<&str, u32> = foods .iter() .map(|f| f.ingredients.clone()) .flatten() .fold(HashMap::new(), |mut acc, value| { acc.entry(value).and_modify(|e| *e += 1).or_insert(1); acc }); let danger_list: Vec<&str> = known_allergens.values().cloned().collect(); Processed { ingredients_with_allergens: danger_list, ingredients_without_allergens, } } fn main() { let args: Vec<String> = env::args().collect::<Vec<String>>(); let filepath = args.get(1).expect("Input filepath cannot be empty!"); let input = fs::read_to_string(filepath).expect("Something went wrong while reading the input file"); let Processed { ingredients_without_allergens, ingredients_with_allergens, } = process_food_items(&input); let sum: u32 = ingredients_without_allergens .values() .map(|x| x.to_owned()) .sum(); println!("Sum of ingredients without allergens: {}", sum); println!( "Canonical Dangerous List: {:?}", ingredients_with_allergens.join(",") ); } #[cfg(test)] mod tests { use super::*; #[test] fn should_process_food() { process_food("mxmxvkd kfcds sqjhc nhms (contains dairy, fish)"); } #[test] fn should_process_food_items_list() { let input = r#"mxmxvkd kfcds sqjhc nhms (contains dairy, fish) trh fvjkl sbzzf mxmxvkd (contains dairy) sqjhc fvjkl (contains soy) sqjhc mxmxvkd sbzzf (contains fish)"#; process_food_items(input); } }
#[macro_use] extern crate lazy_static; use regex::Regex; use std::{ collections::{BTreeMap, HashMap, HashSet}, env, fs, }; #[derive(Debug)] struct Food<'a> { ingredients: HashSet<&'a str>, allergens: Vec<&'a str>, } fn process_food(food: &str) -> Food { lazy_static! { static ref FOOD_REGEX: Regex = Regex::new(r"(\w.*)\(contains\s+(\w+.*)\)").unwrap(); } let food = food.trim(); let captures = match FOOD_REGEX.captures(food) { Some(captures) => captures, None => panic!("Invalid food item {}", food), }; let ingredients: HashSet<&str> = captures .get(1) .unwrap() .as_str() .split_ascii_whitespace() .collect(); let allergens: Vec<&str> = captures.get(2).unwrap().as_str().split(", ").collect(); Food { ingredients, allergens, } } struct Processed<'a> { ingredients_without_allergens: HashMap<&'a str, u32>, ingredients_with_allergens: Vec<&'a str>, } fn process_food_items(input: &str) -> Processed { let lines: Vec<&str> = input .lines() .filter(|x| !x.is_empty()) .map(|x| x.trim()) .collect(); let mut foods: Vec<Food> = lines.iter().map(|x| process_food(*x)).collect(); let mut unknown_allergens: HashSet<&str> = foods .iter() .map(|f| f.allergens.iter()) .flatten() .cloned() .collect(); let mut known_allergens: BTreeMap<&str, &str> = BTreeMap::new(); 'outer: loop { for allergen in &unknown_allergens.clone() { let foods_with_allergen: Vec<&Food> = foods .iter() .filter(|f| f.allergens.contains(&allergen)) .collect(); let init: HashSet<&str> = foods_with_allergen[0].ingredients.clone(); let candidate_ingredients: HashSet<&str> = foods_with_allergen.iter().fold(init.to_owned(), |i, f| { i.intersection(&f.ingredients).cloned().collect() }); if candidate_ingredients.len() == 1 { let ingredient = candidate_ingredients.iter().next().unwrap(); for f in foods.iter_mut() { f.ingredients.remove(ingredient); } known_allergens.insert(allergen, ingredient); unknown_allergens.remove(allergen); } if unknown_allergens.is_empty() { break 'outer; } } } let ingredients_without_allergens: 
HashMap<&str, u32> = foods .iter() .map(|f| f.ingredients.clone()) .flatten() .fold(HashMap::new(), |mut acc, value| { acc.entry(value).and_modify(|e| *e += 1).or_insert(1); acc }); let danger_list: Vec<&str> = known_allergens.values().cloned().collect(); Processed { ingredients_with_allergens: danger_list, ingredients_without_allergens, } } fn main() { let args: Vec<String> = env::args().collect::<Vec<String>>(); let filepath = args.get(1).expect("Input filepath cannot be empty!"); let input = fs::read_to_string(filepath).expect("Something went wrong while reading the input file"); let Processed { ingredients_without_allergens, ingredients_with_allergens, } = process_food_items(&input); let sum: u32 = ingredients_without_allergens .values() .map(|x| x.to_owned()) .sum(); println!("Sum of ingredients without allergens: {}", sum); println!( "Canonical Dangerous List: {:?}", ingredients_with_allergens.join(",") ); } #[cfg(test)] mod tests { use super::*; #[test] fn should_process_food() { process_food("mxmxvkd kfcds sqjhc nhms (contains dairy, fish)"); } #[test]
}
fn should_process_food_items_list() { let input = r#"mxmxvkd kfcds sqjhc nhms (contains dairy, fish) trh fvjkl sbzzf mxmxvkd (contains dairy) sqjhc fvjkl (contains soy) sqjhc mxmxvkd sbzzf (contains fish)"#; process_food_items(input); }
function_block-full_function
[ { "content": "fn process(input: &str) -> (u64, Vec<&str>) {\n\n let notes: Vec<&str> = input.split('\\n').collect();\n\n let timestamp = notes[0].parse::<u64>().expect(\"Invalid timestamp\");\n\n let bus_ids: Vec<&str> = notes[1].split(',').collect();\n\n (timestamp, bus_ids)\n\n}\n\n\n", "file_path": "day-13/src/main.rs", "rank": 1, "score": 200824.56008193013 }, { "content": "fn process(input: &str, direction: &str) -> i32 {\n\n let mut grid = create_grid(&input);\n\n let count = get_character_count_along_slope(&mut grid, direction, TREE);\n\n count\n\n}\n\n\n", "file_path": "day-03/src/main.rs", "rank": 2, "score": 200824.56008193013 }, { "content": "fn count_answers_by_everyone(input: &str) -> u32 {\n\n let mut empty_index = 0;\n\n let mut sum: u32 = 0;\n\n let lines = input.lines().collect::<Vec<&str>>();\n\n\n\n for (index, line) in lines.iter().enumerate() {\n\n if line.is_empty() {\n\n // lines from the previous empty_index till the current index\n\n let mut until = lines[empty_index..index].to_vec();\n\n until.retain(|x| !x.is_empty());\n\n if until.len() == 1 {\n\n let deduped = dedup_chars(until.join(\"\"));\n\n sum = sum + deduped.len() as u32;\n\n } else {\n\n let count = count_common_answers(until);\n\n sum = sum + count;\n\n }\n\n empty_index = index;\n\n }\n\n }\n\n\n\n return sum;\n\n}\n\n\n", "file_path": "day-06/src/main.rs", "rank": 4, "score": 195052.94311234728 }, { "content": "fn process(input: &str) -> (i32, i32) {\n\n let ids: Vec<i32> = input\n\n .lines()\n\n .map(|line| {\n\n let seat = match parse_seat(line) {\n\n Ok(seat) => seat,\n\n Err(e) => {\n\n eprintln!(\"Error: {}\", e);\n\n process::exit(1);\n\n }\n\n };\n\n\n\n get_seat_id(seat)\n\n })\n\n .collect::<Vec<i32>>();\n\n\n\n let max = *ids.iter().max().unwrap();\n\n return (max, get_missing_id(ids));\n\n}\n\n\n", "file_path": "day-05/src/main.rs", "rank": 5, "score": 193079.82037623116 }, { "content": "fn read_input_to_vec(filepath: &str) -> Vec<String> {\n\n let mut input: 
Vec<String> = Vec::new();\n\n let contents = fs::read_to_string(filepath).expect(&format!(\n\n \"Something went wrong file reading the file at {}\",\n\n filepath\n\n ));\n\n for line in contents.lines() {\n\n input.push(line.to_owned())\n\n }\n\n return input;\n\n}\n\n\n", "file_path": "day-02/src/main.rs", "rank": 6, "score": 190635.49140849285 }, { "content": "fn get_decks(input: &str) -> (VecDeque<u32>, VecDeque<u32>) {\n\n let lines: Vec<&str> = input.lines().collect();\n\n let empty_index = lines\n\n .iter()\n\n .position(|line| line.is_empty())\n\n .expect(\"Invalid Game Input! Expected Player decks to be separated by an empty line.\");\n\n\n\n // 0th element is the Player 1: title\n\n let player1_deck = lines[1..empty_index]\n\n .iter()\n\n .map(|x| x.parse::<u32>().unwrap())\n\n .collect::<VecDeque<u32>>();\n\n\n\n // empty_index + 1 has the Player 2: title\n\n let player2_deck = lines[empty_index + 2..]\n\n .iter()\n\n .map(|x| x.parse::<u32>().unwrap())\n\n .collect::<VecDeque<u32>>();\n\n\n\n (player1_deck, player2_deck)\n\n}\n\n\n", "file_path": "day-22/src/main.rs", "rank": 7, "score": 184678.14113449468 }, { "content": "fn process(input: &str, should_validate_fields: bool) -> i32 {\n\n let mut passports: Vec<String> = Vec::new();\n\n let mut empty_index = 0;\n\n let lines: Vec<&str> = input.lines().collect::<Vec<&str>>();\n\n\n\n for (index, line) in lines.iter().enumerate() {\n\n if line.is_empty() {\n\n let mut until = lines[empty_index..index].to_vec();\n\n until.retain(|x| !x.is_empty());\n\n passports.push(until.join(\" \"));\n\n empty_index = index;\n\n }\n\n }\n\n\n\n passports.retain(|x| !x.is_empty());\n\n\n\n let count = passports\n\n .iter()\n\n .filter(|x| validate_passport(x, should_validate_fields))\n\n .count();\n\n\n\n count as i32\n\n}\n\n\n", "file_path": "day-04/src/main.rs", "rank": 8, "score": 180982.66620281187 }, { "content": "fn parse_instruction(line: &str) -> (&str, i32) {\n\n let instruction = 
line.split_whitespace().collect::<Vec<&str>>();\n\n let operation = match instruction.get(0) {\n\n Some(operation) => operation,\n\n None => {\n\n panic!(\"Invalid operation\")\n\n }\n\n };\n\n\n\n let argument = match instruction.get(1) {\n\n Some(argument) => argument.parse::<i32>().unwrap(),\n\n None => {\n\n panic!(\"Invalid argument\");\n\n }\n\n };\n\n\n\n (operation, argument)\n\n}\n\n\n", "file_path": "day-08/src/main.rs", "rank": 9, "score": 158009.92090954684 }, { "content": "fn parse_input(input: &str) -> Notes {\n\n let clumps: Vec<&str> = input.trim().split(\"\\n\\n\").collect();\n\n let rules: Vec<Rule> = clumps[0].trim().lines().map(parse_rule).collect();\n\n let my_ticket: Ticket = parse_ticket(clumps[1].trim().lines().nth(1).unwrap());\n\n let other_tickets: Vec<Ticket> = clumps[2].trim().lines().skip(1).map(parse_ticket).collect();\n\n\n\n Notes {\n\n rules,\n\n my_ticket,\n\n other_tickets,\n\n }\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 10, "score": 156436.40983918088 }, { "content": "fn parse(input: &str) -> Vec<(&str, i32)> {\n\n let lines = input\n\n .lines()\n\n .map(|l| l.trim())\n\n .filter(|l| !l.is_empty())\n\n .map(parse_instruction)\n\n .collect::<Vec<(&str, i32)>>();\n\n\n\n lines\n\n}\n\n\n", "file_path": "day-08/src/main.rs", "rank": 11, "score": 153450.62430460117 }, { "content": "fn parse_as_instruction(line: &str) -> NavigationInstruction {\n\n lazy_static! {\n\n static ref NAV_REGEX: Regex = Regex::new(r\"(\\w{1})(\\d+)\").unwrap();\n\n }\n\n\n\n let captures = NAV_REGEX.captures(line).unwrap();\n\n\n\n let direction_str = &captures[1];\n\n let units_str = &captures[2];\n\n\n\n let direction = match direction_str {\n\n \"N\" => Directions::North,\n\n \"E\" => Directions::East,\n\n \"W\" => Directions::West,\n\n \"S\" => Directions::South,\n\n \"L\" => Directions::Left,\n\n \"R\" => Directions::Right,\n\n \"F\" => Directions::Forward,\n\n _ => {\n\n panic!(\"Invalid instruction Direction. 
Should be one of N, E, W, S, L, R, F\")\n", "file_path": "day-12/src/main.rs", "rank": 12, "score": 145875.9742861558 }, { "content": "fn count_answers_by_anyone(input: &str) -> i32 {\n\n let mut answered: Vec<String> = Vec::new();\n\n let mut empty_index = 0;\n\n let lines = input.lines().collect::<Vec<&str>>();\n\n\n\n for (index, line) in lines.iter().enumerate() {\n\n if line.is_empty() {\n\n // lines from the previous empty_index till the current index\n\n let until = lines[empty_index..index].to_vec();\n\n let deduped = dedup_chars(until.join(\"\"));\n\n if !deduped.is_empty() {\n\n answered.push(deduped);\n\n }\n\n\n\n empty_index = index;\n\n }\n\n }\n\n\n\n let sum_of_counts: i32 = answered\n\n .iter()\n\n .fold(0, |acc, group| acc + group.len() as i32);\n\n\n\n sum_of_counts\n\n}\n\n\n", "file_path": "day-06/src/main.rs", "rank": 13, "score": 144906.94527066904 }, { "content": "fn parse(input: &str) -> Vec<Command> {\n\n lazy_static! {\n\n static ref MEM_REGEX: Regex = Regex::new(r\"mem\\[(\\d+)\\]\").unwrap();\n\n }\n\n\n\n let mut commands = Vec::new();\n\n let lines: Vec<&str> = input\n\n .lines()\n\n .map(|x| x.trim())\n\n .filter(|x| !x.is_empty())\n\n .collect();\n\n\n\n for line in lines {\n\n let (command_name, command_value): (&str, &str) =\n\n line.splitn(2, \" = \").collect_tuple().unwrap();\n\n\n\n if command_name.starts_with(\"mask\") {\n\n commands.push(Command::Mask(command_value))\n\n } else if command_name.starts_with(\"mem\") {\n\n let captures = MEM_REGEX\n", "file_path": "day-14/src/main.rs", "rank": 14, "score": 143427.62481981245 }, { "content": "fn create_graph(input: &str) -> BaggyColorGraph {\n\n lazy_static! 
{\n\n static ref COLOR_BAG_REGEX: Regex = Regex::new(r\"(\\d+)\\s+(\\w.*)bag\").unwrap();\n\n }\n\n\n\n let mut graph = BaggyColorGraph::new(HashMap::new());\n\n let lines: Vec<&str> = input\n\n .split(\"\\n\")\n\n .filter(|x| !x.is_empty())\n\n .map(|x| x.trim())\n\n .collect::<Vec<&str>>();\n\n\n\n lines.iter().for_each(|line| {\n\n let rules: Vec<&str> = line\n\n .split(\"bags contain\")\n\n .filter(|x| !x.is_empty())\n\n .collect();\n\n\n\n if !rules.is_empty() {\n\n let color = rules[0].trim();\n", "file_path": "day-07/src/main.rs", "rank": 15, "score": 142342.59124482147 }, { "content": "fn get_cups(input: &str) -> Vec<u8> {\n\n let labels: Vec<u8> = input\n\n .chars()\n\n .map(|x| x.to_digit(10).unwrap() as u8)\n\n .collect();\n\n\n\n labels\n\n}\n\n\n", "file_path": "day-23/src/main.rs", "rank": 16, "score": 140692.02244530205 }, { "content": "fn parse(input: &str) -> Vec<NavigationInstruction> {\n\n input\n\n .lines()\n\n .map(|x| x.trim())\n\n .filter(|x| !x.is_empty())\n\n .map(parse_as_instruction)\n\n .collect()\n\n}\n\n\n", "file_path": "day-12/src/main.rs", "rank": 17, "score": 140692.02244530205 }, { "content": "fn count_common_answers(answers: Vec<&str>) -> u32 {\n\n if answers.is_empty() {\n\n return 0;\n\n }\n\n\n\n let starting_answers: Vec<char> = answers[0].chars().collect();\n\n answers\n\n .iter()\n\n .fold(starting_answers, |common_answers, answer_by_person| {\n\n common_answers.intersect(answer_by_person.chars().collect())\n\n })\n\n .len() as u32\n\n}\n\n\n", "file_path": "day-06/src/main.rs", "rank": 18, "score": 138616.90027466312 }, { "content": "fn create_seat_layout(input: &str) -> Vec<Vec<char>> {\n\n let mut grid: Vec<Vec<char>> = Vec::new();\n\n\n\n for line in input.lines().filter(|l| !l.is_empty()) {\n\n let chars: Vec<char> = line.trim().chars().collect();\n\n grid.push(chars);\n\n }\n\n\n\n grid\n\n}\n\n\n", "file_path": "day-11/src/main.rs", "rank": 19, "score": 132032.8366217349 }, { "content": "// Creates a 2D Vector of 
Characters\n\nfn create_grid<'a>(input: &str) -> Vec<Vec<char>> {\n\n let mut grid = Vec::new();\n\n for line in input.lines() {\n\n let elements_in_row: Vec<char> = line.chars().collect();\n\n grid.push(elements_in_row);\n\n }\n\n\n\n grid\n\n}\n\n\n", "file_path": "day-03/src/main.rs", "rank": 20, "score": 131190.55308851 }, { "content": "fn validate_field(field: &str, value: &str) -> bool {\n\n lazy_static! {\n\n static ref HEIGHT_REGEX: Regex = Regex::new(r\"(\\d+)(\\w+)\").unwrap();\n\n }\n\n\n\n let eye_colors = vec![\"amb\", \"blu\", \"brn\", \"gry\", \"grn\", \"hzl\", \"oth\"];\n\n match field {\n\n \"byr\" => {\n\n value.len() == 4 && {\n\n let year = value.parse::<i32>().unwrap();\n\n return year >= 1920 && year <= 2002;\n\n }\n\n }\n\n \"iyr\" => {\n\n value.len() == 4 && {\n\n let year = value.parse::<i32>().unwrap();\n\n return year >= 2010 && year <= 2020;\n\n }\n\n }\n\n \"eyr\" => {\n", "file_path": "day-04/src/main.rs", "rank": 21, "score": 130985.33338831895 }, { "content": "fn is_valid_ticket_value(rules: &[Rule], ticket_value: u32) -> bool {\n\n rules.iter().any(|rule| rule.check(ticket_value))\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 23, "score": 120773.17982208793 }, { "content": "fn apply_mask(mask: &str, value: u64) -> u64 {\n\n let mut bits: Vec<char> = format!(\"{:036b}\", value).chars().collect();\n\n\n\n for (index, mask_bit) in mask.chars().enumerate() {\n\n if mask_bit == '0' || mask_bit == '1' {\n\n bits[index] = mask_bit;\n\n }\n\n }\n\n\n\n let masked_value: String = bits.into_iter().collect::<String>();\n\n u64::from_str_radix(&masked_value, 2).unwrap()\n\n}\n\n\n", "file_path": "day-14/src/main.rs", "rank": 24, "score": 114462.11217513107 }, { "content": "fn get_score(deck: &VecDeque<u32>) -> u32 {\n\n let num_cards = deck.len() as u32;\n\n deck.iter()\n\n .enumerate()\n\n .fold(0, |acc, (idx, card)| acc + card * (num_cards - idx as u32))\n\n}\n\n\n", "file_path": "day-22/src/main.rs", "rank": 25, "score": 
109966.83016071859 }, { "content": "fn crop_letters(s: &str, pos: usize) -> &str {\n\n match s.char_indices().nth(pos) {\n\n Some((pos, _)) => &s[pos..],\n\n None => \"\",\n\n }\n\n}\n\n\n", "file_path": "day-04/src/main.rs", "rank": 26, "score": 106735.92513578222 }, { "content": "fn crop_letters_after(s: &str, pos: usize) -> &str {\n\n match s.char_indices().nth(pos) {\n\n Some((pos, _)) => &s[..pos],\n\n None => \"\",\n\n }\n\n}\n", "file_path": "day-02/src/main.rs", "rank": 27, "score": 106735.92513578222 }, { "content": "fn process_instructions(instructions: &Vec<(&str, i32)>) -> Result<i32, i32> {\n\n let mut processed: HashSet<isize> = HashSet::new();\n\n let mut accumulator = 0;\n\n let mut curr: isize = 0;\n\n\n\n loop {\n\n // bounds check\n\n if curr > instructions.len() as isize || curr < 0 {\n\n panic!(\"Invalid index. Out of bounds of Instruction Set\");\n\n }\n\n\n\n // if the instruction has already been processed, then its an infinite loop. So break with an error, with the acc's value\n\n if processed.contains(&curr) {\n\n break Err(accumulator);\n\n }\n\n\n\n // we have reached the end of the bootcode. 
so the program can terminate.\n\n if curr == instructions.len() as isize {\n\n break Ok(accumulator);\n\n }\n", "file_path": "day-08/src/main.rs", "rank": 28, "score": 105765.33074743839 }, { "content": "/// Returns the number of characters found while traversing along a given slope.\n\nfn get_character_count_along_slope(grid: &mut Vec<Vec<char>>, slope: &str, character_to_count: char) -> i32 {\n\n let mut count = 0;\n\n let jump = parse_slope(slope);\n\n\n\n let col_len = grid[0].len();\n\n\n\n let mut col_index = 0;\n\n let mut row_index = 0;\n\n\n\n for row in grid.iter() {\n\n col_index = col_index + jump.column as usize;\n\n\n\n if row_index >= row.len() {\n\n row_index = row_index + jump.row as usize % row.len();\n\n } else {\n\n row_index = row_index + jump.row as usize;\n\n }\n\n\n\n if let Some(new_row) = grid.get(row_index) {\n\n if let Some(value) = new_row.get(col_index % col_len) {\n\n if *value == character_to_count {\n\n count += 1\n\n }\n\n }\n\n }\n\n }\n\n\n\n count\n\n}\n\n\n", "file_path": "day-03/src/main.rs", "rank": 29, "score": 104301.83273130495 }, { "content": "// Part 01\n\nfn combat(deck1: &mut Deck, deck2: &mut Deck) -> Winner {\n\n if deck1.len() != deck2.len() {\n\n panic!(\"Players don't have equal number of cards. 
Invalid Game!\")\n\n }\n\n\n\n loop {\n\n if deck1.is_empty() {\n\n break Winner::Player2;\n\n } else if deck2.is_empty() {\n\n break Winner::Player1;\n\n }\n\n\n\n let card1 = deck1.pop_front().unwrap();\n\n let card2 = deck2.pop_front().unwrap();\n\n\n\n if card1 > card2 {\n\n deck1.push_back(card1);\n\n deck1.push_back(card2);\n\n } else {\n\n deck2.push_back(card2);\n\n deck2.push_back(card1);\n\n }\n\n }\n\n}\n\n\n", "file_path": "day-22/src/main.rs", "rank": 30, "score": 102155.61182960555 }, { "content": "// Part 02\n\nfn recursive_combat(deck1: &mut Deck, deck2: &mut Deck) -> Winner {\n\n // Store the hashes of decks from previous rounds, used for later comparison\n\n let mut previous_rounds = HashSet::new();\n\n\n\n // Start the Game Loop\n\n loop {\n\n let deck1_hash = get_hash(&deck1);\n\n let deck2_hash = get_hash(&deck2);\n\n\n\n // Check if the current decks have been seen in previous rounds\n\n if previous_rounds.contains(&deck1_hash) || previous_rounds.contains(&deck2_hash) {\n\n break Winner::Player1;\n\n } else {\n\n previous_rounds.insert(deck1_hash);\n\n previous_rounds.insert(deck2_hash);\n\n }\n\n\n\n // Get the cards from the top of the deck for comparison\n\n let card1 = deck1.pop_front().unwrap();\n\n let card2 = deck2.pop_front().unwrap();\n", "file_path": "day-22/src/main.rs", "rank": 31, "score": 100642.9704821475 }, { "content": "/// Returns the number of columns and rows to Jump while moving along a Slope\n\nfn parse_slope(slope: &str) -> Jump {\n\n let mut jump = Jump { column: 0, row: 0 };\n\n\n\n let navigation_instructions: Vec<&str> = slope.split(\",\").collect();\n\n for instruction in navigation_instructions.iter() {\n\n let slope_vec: Vec<&str> = instruction.split_whitespace().collect();\n\n let direction = slope_vec.get(0).expect(&format!(\"Invalid slope {}\", slope));\n\n\n\n let step = slope_vec.get(1).expect(&format!(\"Invalid step {}\", slope));\n\n\n\n let step = step.parse::<i32>().unwrap();\n\n\n\n match *direction 
{\n\n \"up\" => jump.row = -step,\n\n \"right\" => jump.column = step,\n\n \"down\" => jump.row = step,\n\n \"left\" => jump.column = -step,\n\n _ => {}\n\n }\n\n }\n\n\n\n jump\n\n}\n\n\n", "file_path": "day-03/src/main.rs", "rank": 32, "score": 97131.43023148511 }, { "content": "fn parse_rule(rule: &str) -> Rule {\n\n lazy_static! {\n\n static ref RULE_REGEX: Regex =\n\n Regex::new(r\"(.*\\w+):\\s(\\d+)-(\\d+)\\sor\\s(\\d+)\\-(\\d+)\").unwrap();\n\n }\n\n\n\n let captures = RULE_REGEX.captures(rule).unwrap();\n\n let name: String = captures[1].to_string();\n\n let range_one = RuleRange {\n\n min: captures[2].parse::<u32>().unwrap(),\n\n max: captures[3].parse::<u32>().unwrap(),\n\n };\n\n let range_two = RuleRange {\n\n min: captures[4].parse::<u32>().unwrap(),\n\n max: captures[5].parse::<u32>().unwrap(),\n\n };\n\n\n\n let ranges = (range_one, range_two);\n\n Rule { name, ranges }\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 33, "score": 97131.43023148511 }, { "content": "fn parse_ticket(string: &str) -> Ticket {\n\n string\n\n .trim()\n\n .split(',')\n\n .map(|x| x.trim().parse::<u32>().unwrap())\n\n .collect()\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 34, "score": 97131.43023148511 }, { "content": "fn navigate(navigator: &mut Navigator) -> Coordinates {\n\n let mut direction = Directions::East;\n\n match navigator.target {\n\n NavigationTarget::Ship => {\n\n let mut ship_coordinates = navigator.coordinates.clone();\n\n for instruction in navigator.instructions.iter() {\n\n let units = instruction.units as i32;\n\n let current_direction = &instruction.direction;\n\n\n\n // Turn if the current direction mentioned is either left or right, skip to the next instruction.\n\n if *current_direction == Directions::Left || *current_direction == Directions::Right\n\n {\n\n direction = turn(&direction, current_direction, units).unwrap();\n\n continue;\n\n }\n\n\n\n if *current_direction == Directions::Forward {\n\n move_along(&direction, units, 
&mut ship_coordinates);\n\n } else {\n\n move_along(current_direction, units, &mut ship_coordinates);\n", "file_path": "day-12/src/main.rs", "rank": 36, "score": 96565.20275661623 }, { "content": "fn get_missing_id(mut ids: Vec<i32>) -> i32 {\n\n ids.sort();\n\n\n\n let mut prev = ids[0];\n\n let ids: Vec<i32> = ids[1..].to_vec();\n\n for curr in ids {\n\n if prev != curr - 1 {\n\n break;\n\n }\n\n prev += 1;\n\n }\n\n prev + 1\n\n}\n\n\n", "file_path": "day-05/src/main.rs", "rank": 37, "score": 88860.7483582595 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input file\");\n\n\n\n println!(\" -- Part 01 -- \");\n\n let count = count_answers_by_anyone(&input);\n\n println!(\"Sum of Counts: {:?}\", count);\n\n\n\n println!(\" -- Part 02 -- \");\n\n let count = count_answers_by_everyone(&input);\n\n println!(\"Sum of Counts: {:?}\", count);\n\n}\n\n\n\n#[cfg(test)]\n\n\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "day-06/src/main.rs", "rank": 38, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input!\");\n\n\n\n let instructions = parse(&input);\n\n\n\n // -- Part 01 --\n\n let mut ship_navigator = Navigator {\n\n target: NavigationTarget::Ship,\n\n instructions: &instructions,\n\n coordinates: Coordinates { x: 0, y: 0 },\n\n };\n\n\n\n let destination = navigate(&mut ship_navigator);\n\n let manhattan_distance = get_manhattan_distance(&Coordinates { x: 0, y: 0 }, &destination);\n\n println!(\"Manhattan Distance: {}\", manhattan_distance);\n\n\n\n // -- Part 02 --\n", "file_path": "day-12/src/main.rs", "rank": 
39, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let input_path = match args.get(1) {\n\n Some(path) => path,\n\n None => panic!(\"Input cannot be empty!\"),\n\n };\n\n\n\n let policies_and_passwords = read_input_to_vec(input_path);\n\n\n\n let old_count = find_valid_password_count(&policies_and_passwords, PolicyType::Old);\n\n println!(\"Number of valid passwords by Old Policy: {}\", old_count);\n\n\n\n let count = find_valid_password_count(&policies_and_passwords, PolicyType::New);\n\n println!(\"Number of valid passwords by New Policy: {}\", count);\n\n}\n", "file_path": "day-02/src/main.rs", "rank": 40, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filepath = args.get(1).expect(\"Input file cannot be empty!\");\n\n let input = fs::read_to_string(filepath).expect(\"Something went wrong while reading input\");\n\n\n\n // -- Part one --\n\n println!(\"-- Part one --\");\n\n let tree_count_r3_d1 = process(&input, \"right 3, down 1\");\n\n println!(\n\n \"Number of trees for Slope - right 3; down 1: {}\",\n\n tree_count_r3_d1\n\n );\n\n\n\n // -- Part Two --\n\n let slopes = vec![\n\n \"right 1, down 1\",\n\n \"right 3, down 1\",\n\n \"right 5, down 1\",\n\n \"right 7, down 1\",\n\n \"right 1, down 2\",\n", "file_path": "day-03/src/main.rs", "rank": 41, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input!\");\n\n\n\n let (timestamp, bus_ids) = process(&input);\n\n\n\n // -- Part 01 --\n\n let (chosen_bus, wait) = find_earliest(timestamp, bus_ids);\n\n println!(\"Chosen Bus ID: {} * Wait Time In Minutes: {} = {}\", chosen_bus, wait, chosen_bus * wait);\n\n}\n\n\n", "file_path": "day-13/src/main.rs", 
"rank": 43, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input file\");\n\n\n\n let mut graph = create_graph(&input);\n\n let bag_color = \"shiny gold\";\n\n let count = graph.count_edges_to(bag_color);\n\n if count > 0 {\n\n println!(\"Number of bags which can contain {}: {}\", bag_color, count)\n\n } else {\n\n println!(\"No bags contain the {}\", bag_color)\n\n }\n\n\n\n let bags_inside = graph.count_bags_inside(\"shiny gold\", 0);\n\n println!(\"{} can contain {} other bags\", bag_color, bags_inside);\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "day-07/src/main.rs", "rank": 44, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input file\");\n\n\n\n // Part 01\n\n let sequence = get_cups(&input);\n\n let result = play_game(&sequence, 100, sequence.len());\n\n let labels = get_labels_after(1, result);\n\n\n\n println!(\"Labels of Cups after 1: {:?}\", labels);\n\n\n\n // Part 02\n\n let cups = play_game(&sequence, 10_000_000, 1_000_000);\n\n let cup1 = cups[1];\n\n let cup2 = cups[cup1];\n\n println!(\"Product of Cup1 and Cup2 {:?}\", cup1 * cup2);\n\n}\n\n\n", "file_path": "day-23/src/main.rs", "rank": 45, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input!\");\n\n\n\n let mut seats = create_seat_layout(&input);\n\n\n\n // -- Part 01 --\n\n let 
part_01_arrangement = SeatingArrangement {\n\n rule_type: SeatingRuleTypes::One,\n\n };\n\n\n\n let count = count_occupied_seats_after_chaos(&mut seats, &part_01_arrangement);\n\n println!(\n\n \"Number of occupied seats after chaos stabilises by Seating Rule 01: {}\",\n\n count\n\n );\n\n\n\n // -- Part 02 --\n", "file_path": "day-11/src/main.rs", "rank": 46, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let filepath = args.get(1).expect(\"Input file cannot be empty!\");\n\n let input = fs::read_to_string(filepath).expect(\"Something went wrong while reading input\");\n\n\n\n // -- Part 01 --\n\n let valid = process(&input, false);\n\n println!(\"Number of valid passports: {}\", valid);\n\n\n\n // -- Part 02 --\n\n let valid = process(&input, true);\n\n println!(\"Number of valid passports after stricter validation: {}\", valid);\n\n}\n\n\n\n#[cfg(test)]\n\n\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "day-04/src/main.rs", "rank": 47, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input!\");\n\n\n\n let starting_numbers: Vec<u64> = input\n\n .split(',')\n\n .map(|x| x.parse::<u64>().unwrap())\n\n .collect();\n\n\n\n // -- Part 01 --\n\n let n = 2020;\n\n let nth_number = find_nth_number_in_game(&starting_numbers, n);\n\n println!(\"{}th number: {}\", n, nth_number);\n\n\n\n // -- Part 02 --\n\n let n = 30_000_000;\n\n let nth_number = find_nth_number_in_game(&starting_numbers, n);\n\n println!(\"{}th number: {}\", n, nth_number);\n\n}\n\n\n", "file_path": "day-15/src/main.rs", "rank": 48, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect::<Vec<String>>();\n\n let filepath = 
args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input file\");\n\n\n\n let public_keys: Vec<usize> = input\n\n .split('\\n')\n\n .map(|x| x.parse::<usize>().unwrap())\n\n .collect();\n\n\n\n let encryption_key = find_encryption_key(public_keys[0], public_keys[1]);\n\n println!(\"Encryption Key: {:?}\", encryption_key);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "day-25/src/main.rs", "rank": 49, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input file\");\n\n\n\n let numbers: Vec<u64> = input\n\n .lines()\n\n .map(|x| x.parse::<u64>().unwrap())\n\n .collect::<Vec<u64>>();\n\n\n\n let invalid_number = find_number_that_disobeys_preamble(&numbers, 25);\n\n println!(\"Invalid number: {}\", invalid_number);\n\n\n\n let weakness_score = get_encryption_weakness_score(invalid_number, &numbers);\n\n println!(\"Encryption Weakness Score {}\", weakness_score);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "day-09/src/main.rs", "rank": 50, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input = fs::read_to_string(filepath).expect(\"Something went wrong while reading the file\");\n\n\n\n let (max, seat_id) = process(&input);\n\n println!(\"{:?} {}\", max, seat_id);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn should_parse_seat() {\n\n let seat = parse_seat(\"FBFBBFFRLR\").unwrap();\n\n assert_eq!(seat.row, 44);\n\n assert_eq!(seat.column, 5);\n\n }\n\n}\n", "file_path": "day-05/src/main.rs", 
"rank": 51, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args = env::args().collect::<Vec<String>>();\n\n let filepath = &args[1];\n\n\n\n println!(\"--- Part 1 ---\");\n\n let mut entries: Vec<i32> = Vec::new();\n\n let mut map: HashMap<i32, usize> = HashMap::new();\n\n let input = fs::read_to_string(filepath).unwrap();\n\n for line in input.lines() {\n\n let entry = line.parse::<i32>().unwrap();\n\n entries.push(entry);\n\n }\n\n\n\n // Hint: Two Sum\n\n // Given an array of numbers, find its pair that equals a target\n\n for (index, entry) in entries.iter().enumerate() {\n\n let complement = TARGET_SUM - entry;\n\n if map.contains_key(&complement) {\n\n let chosen_one_index = map.get(&complement).unwrap();\n\n let chosen_two_index = &index;\n", "file_path": "day-01/src/main.rs", "rank": 52, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input file\");\n\n\n\n // -- Part 01: Combat ---\n\n let (mut player1_deck, mut player2_deck) = get_decks(&input);\n\n let winner = combat(&mut player1_deck, &mut player2_deck);\n\n let score = match winner {\n\n Winner::Player1 => get_score(&player1_deck),\n\n Winner::Player2 => get_score(&player2_deck),\n\n };\n\n\n\n println!(\"Combat Winner: {:?}, Score: {:?}\", winner, score);\n\n\n\n // -- Part 02: Recursive Combat --\n\n let (mut player1_deck, mut player2_deck) = get_decks(&input);\n\n let winner = recursive_combat(&mut player1_deck, &mut player2_deck);\n\n let score = match winner {\n", "file_path": "day-22/src/main.rs", "rank": 53, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n 
fs::read_to_string(filepath).expect(\"Something went wrong while reading the input file\");\n\n\n\n let adapters: Vec<u64> = input\n\n .lines()\n\n .map(|x| {\n\n x.parse::<u64>()\n\n .expect(\"invalid adapter joltage - expected to be a number\")\n\n })\n\n .collect();\n\n\n\n let device_joltage = get_input_device_joltage(&adapters);\n\n let differences = get_joltage_differences(device_joltage, &adapters);\n\n\n\n println!(\n\n \"There are {} differences by 1 jolts and {} differences by 3 jolts\",\n\n differences.one, differences.three\n", "file_path": "day-10/src/main.rs", "rank": 54, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input file\");\n\n\n\n // -- Part 01 --\n\n let instructions = parse(&input);\n\n let accumulator = process_instructions(&instructions);\n\n println!(\n\n \"The accumulator value right before going into an infinite loop: {}\",\n\n accumulator.unwrap_err()\n\n );\n\n\n\n // -- Part 02 --\n\n let accumulator = fix_bootcode_by_swap(&instructions);\n\n println!(\n\n \"The accumulator value after the program terminates is: {}\",\n\n accumulator\n\n );\n", "file_path": "day-08/src/main.rs", "rank": 55, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input!\");\n\n\n\n // -- Part 01 --\n\n let sum = input\n\n .lines()\n\n .map(|expr| evaluate(expr, false))\n\n .fold(0, |acc, (result, _)| acc + result);\n\n\n\n println!(\"Part 01 Sum: {}\", sum);\n\n\n\n // -- Part 02 --\n\n let sum = input\n\n .lines()\n\n .map(|expr| evaluate(expr, true))\n\n .fold(0, |acc, (result, _)| 
acc + result);\n\n\n", "file_path": "day-18/src/main.rs", "rank": 56, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input!\");\n\n\n\n let commands = parse(&input);\n\n let memory = execute(commands);\n\n\n\n let sum: u64 = memory.values().sum();\n\n println!(\"Sum of all values left in the memory: {:?}\", sum);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn read_input<'a>() -> &'a str {\n\n r#\"\n\nmask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X\n", "file_path": "day-14/src/main.rs", "rank": 57, "score": 88669.08638997881 }, { "content": "fn main() {\n\n let args = env::args().collect::<Vec<String>>();\n\n let filepath = args.get(1).expect(\"Input filepath cannot be empty!\");\n\n let input =\n\n fs::read_to_string(filepath).expect(\"Something went wrong while reading the input!\");\n\n\n\n let notes = parse_input(&input);\n\n let nearby = &notes.other_tickets;\n\n\n\n // -- Part 01 --\n\n let error_rate = find_ticket_scanning_error_rate(&notes.rules, &nearby);\n\n println!(\"Ticket Scanning Error Rate: {}\", error_rate);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n /// used to compare vectors of the same type\n\n fn vec_compare<T: std::cmp::PartialEq>(vec1: &[T], vec2: &[T]) -> bool {\n\n (vec1.len() == vec2.len()) && vec1.iter().zip(vec2).all(|(a, b)| *a == *b)\n", "file_path": "day-16/src/main.rs", "rank": 58, "score": 88669.08638997881 }, { "content": "fn parse_seat(seat: &str) -> Result<Seat, String> {\n\n lazy_static! 
{\n\n static ref SEAT_REGEX: Regex = Regex::new(r\"(\\w{7})(\\w{3})\").unwrap();\n\n }\n\n\n\n if seat.len() < 10 {\n\n return Err(\"Invalid seat\".to_owned());\n\n }\n\n\n\n let captures = SEAT_REGEX.captures(seat).unwrap();\n\n let rows: &str = &captures[1];\n\n let columns: &str = &captures[2];\n\n\n\n if rows.len() < 7 || columns.len() < 3 {\n\n return Err(\"Invalid seat\".to_owned());\n\n }\n\n\n\n let row_range_delimiters = RangeDelimiters {\n\n upper: 'B',\n\n lower: 'F',\n", "file_path": "day-05/src/main.rs", "rank": 59, "score": 88552.95354479634 }, { "content": "fn fix_bootcode_by_swap(instructions: &Vec<(&str, i32)>) -> i32 {\n\n // iterate through all instructions\n\n // swap out a single nop -> jmp, and a jmp -> nop\n\n // if the program is able to terminate sucssefully, we get an Ok(acc) with the accumulator value.\n\n for (index, &instruction) in instructions.iter().enumerate() {\n\n match instruction {\n\n (\"acc\", _) => continue,\n\n (\"nop\", val) => {\n\n let mut instructions = instructions.clone();\n\n instructions[index] = (\"jmp\", val);\n\n if let Ok(accumulator) = process_instructions(&instructions) {\n\n return accumulator;\n\n }\n\n }\n\n (\"jmp\", val) => {\n\n let mut instructions = instructions.clone();\n\n instructions[index] = (\"nop\", val);\n\n if let Ok(accumulator) = process_instructions(&instructions) {\n\n return accumulator;\n\n }\n\n }\n\n _ => continue,\n\n }\n\n }\n\n return 0;\n\n}\n\n\n", "file_path": "day-08/src/main.rs", "rank": 60, "score": 86920.22862836417 }, { "content": "fn validate_passport(passport: &str, should_validate_fields: bool) -> bool {\n\n let passport_fields: Vec<&str> = passport.split_whitespace().collect();\n\n let mut required_field_map: HashMap<&str, (i32, &str)> = [\n\n (\"byr\", (0, \"\")),\n\n (\"iyr\", (0, \"\")),\n\n (\"eyr\", (0, \"\")),\n\n (\"hgt\", (0, \"\")),\n\n (\"hcl\", (0, \"\")),\n\n (\"ecl\", (0, \"\")),\n\n (\"pid\", (0, \"\")),\n\n ]\n\n .iter()\n\n .cloned()\n\n 
.collect();\n\n\n\n for passport_field in passport_fields.iter() {\n\n let passport_field_vec = passport_field.split(':').collect::<Vec<&str>>();\n\n let field = passport_field_vec.get(0).unwrap();\n\n let value = passport_field_vec.get(1).unwrap();\n\n required_field_map.entry(field).and_modify(|e| {\n", "file_path": "day-04/src/main.rs", "rank": 61, "score": 86920.22862836417 }, { "content": "fn find_ticket_scanning_error_rate(rules: &[Rule], tickets: &[Ticket]) -> u32 {\n\n tickets\n\n .iter()\n\n .flatten()\n\n .filter(|ticket| !is_valid_ticket_value(rules, **ticket))\n\n .copied()\n\n .sum()\n\n}\n", "file_path": "day-16/src/main.rs", "rank": 62, "score": 85721.57763046527 }, { "content": "fn is_valid(policy_password: &str, policy_type: &PolicyType) -> bool {\n\n let values: Vec<&str> = policy_password.split(\" \").collect::<Vec<&str>>();\n\n let allowed_password_range = values[0].split(\"-\").collect::<Vec<&str>>();\n\n let letter = crop_letters_after(&values[1], 1);\n\n let letter = letter\n\n .parse::<char>()\n\n .expect(\"policy letter should be a char.\");\n\n\n\n let password = &values[2];\n\n\n\n match *policy_type {\n\n PolicyType::Old => {\n\n let range_min = allowed_password_range[0]\n\n .parse::<i32>()\n\n .expect(\"invalid range min\");\n\n\n\n let range_max = allowed_password_range[1]\n\n .parse::<i32>()\n\n .expect(\"invalid range max\");\n\n\n", "file_path": "day-02/src/main.rs", "rank": 63, "score": 85383.69077910086 }, { "content": "fn count_occupied_seats(seats: &mut Vec<Vec<char>>) -> usize {\n\n seats\n\n .iter()\n\n .flatten()\n\n .filter(|seat| **seat == OCCUPIED_SEAT)\n\n .count()\n\n}\n\n\n", "file_path": "day-11/src/main.rs", "rank": 64, "score": 85178.77880753324 }, { "content": "fn evaluate(expression: &str, add_has_precedence: bool) -> (u64, usize) {\n\n let mut result = 0;\n\n let mut apply_operation = |operand, operator: Option<u8>| match operator {\n\n Some(b'+') => result += operand,\n\n Some(b'*') => result *= operand,\n\n None 
=> result = operand,\n\n _ => {}\n\n };\n\n\n\n let mut operator = None;\n\n let mut idx = 0;\n\n\n\n // idx is incremented when we encounter an empty space, +, *\n\n while idx < expression.len() {\n\n let value = expression.as_bytes()[idx];\n\n match value {\n\n b' ' => idx += 1,\n\n b'+' => {\n\n operator = Some(value);\n\n idx += 1;\n", "file_path": "day-18/src/main.rs", "rank": 65, "score": 84868.7119184211 }, { "content": "/// recursively find the sum of all the possible arrangements\n\nfn sum_possible_arrangements(\n\n joltage: u64,\n\n adapters: &Vec<u64>,\n\n memo: &mut HashMap<u64, u64>,\n\n) -> u64 {\n\n let max_joltage = adapters.iter().max().unwrap();\n\n // there's only one possible arrangement when the joltage of a given adapter, equals the maximum possible adapter joltage itself.\n\n if joltage == *max_joltage {\n\n return 1;\n\n }\n\n\n\n // if the adapter has already been used, the number of possible arrangements when it was used is it's memoized and we return the memoized value instead.\n\n if memo.contains_key(&joltage) {\n\n return *memo.get(&joltage).unwrap();\n\n }\n\n\n\n // possible adapters are adapters whose joltages are <= current_joltage + 3\n\n // for eg. 
if the adapters are [1, 4, 5, 6, 7] when the current joltage is 4, the possible adapters are 5, 6 and 7.\n\n let possible_adapters = adapters\n\n .iter()\n", "file_path": "day-10/src/main.rs", "rank": 66, "score": 84774.70869322061 }, { "content": "#[test]\n\nfn should_process_and_find_earliest() {\n\n let input = r#\"939\n\n7,13,'x','x',59,'x',31,19\"#;\n\n\n\n let (timestamp, bus_ids) = process(input);\n\n let (chosen_bus, wait) = find_earliest(timestamp, bus_ids);\n\n assert_eq!(chosen_bus, 59);\n\n assert_eq!(wait, 5);\n\n}\n", "file_path": "day-13/src/main.rs", "rank": 67, "score": 84621.01525098148 }, { "content": "fn move_along(direction: &Directions, units: i32, coordinates: &mut Coordinates) {\n\n match direction {\n\n Directions::North => coordinates.y += units,\n\n Directions::East => coordinates.x += units,\n\n Directions::West => coordinates.x += -units,\n\n Directions::South => coordinates.y += -units,\n\n _ => {}\n\n }\n\n}\n\n\n", "file_path": "day-12/src/main.rs", "rank": 68, "score": 83290.75749287003 }, { "content": "fn find_earliest(timestamp: u64, bus_ids: Vec<&str>) -> (u64, u64) {\n\n let (chosen_bus, wait) = bus_ids\n\n .iter()\n\n .filter(|id| **id != \"\\'x\\'\")\n\n .map(|id| {\n\n let bus_id = id.parse::<u64>().unwrap();\n\n (bus_id, bus_id - timestamp % bus_id)\n\n })\n\n .min_by_key(|(_id, offset)| *offset)\n\n .unwrap();\n\n\n\n (chosen_bus, wait)\n\n}\n\n\n", "file_path": "day-13/src/main.rs", "rank": 69, "score": 80060.73565995108 }, { "content": "fn play_game(cup_labels: &[u8], moves: u32, total_cups: usize) -> Vec<usize> {\n\n // The number of values needs to be +1 of total_cups because the head of the LinkedList is 0 and acts as a dummy node.\n\n let mut cups: CupplyLinkedList = vec![0usize; total_cups + 1];\n\n let mut prev = cup_labels[0] as usize;\n\n\n\n // Populate the Cupply LinkedList By setting the value at index n to point to its neighbour\n\n for &label in cup_labels.iter().skip(1) {\n\n cups[prev] = label as usize;\n\n 
prev = label as usize;\n\n }\n\n\n\n // If there are more total cups than cup labels, populate the rest of the values in the CLL\n\n // This is to handle the specific usecase in Part 02\n\n for value in cup_labels.len() + 1..total_cups + 1 {\n\n cups[prev] = value;\n\n prev = value;\n\n }\n\n\n\n // Reset cups[prev] to point to first value once CLL is populated\n\n cups[prev] = cup_labels[0] as usize;\n", "file_path": "day-23/src/main.rs", "rank": 70, "score": 77336.20872906555 }, { "content": "fn get_range_from_seat(seat: &str, delimeters: RangeDelimiters, max_count: i32) -> SeatRange {\n\n let mut range = SeatRange {\n\n start: 0,\n\n end: max_count,\n\n };\n\n\n\n let RangeDelimiters { lower, upper } = delimeters;\n\n\n\n for char in seat.chars() {\n\n if char == lower {\n\n range.end = (range.start + range.end) / 2;\n\n } else if char == upper {\n\n range.start = range.start + (range.end - range.start) / 2;\n\n }\n\n }\n\n\n\n range\n\n}\n\n\n", "file_path": "day-05/src/main.rs", "rank": 71, "score": 76172.98373918112 }, { "content": "fn get_loop_size(subject: usize, candidate_public_key: usize) -> LoopSize {\n\n let mut value = 1;\n\n let mut loop_size: LoopSize = 1;\n\n\n\n loop {\n\n value *= subject;\n\n value %= TRANSFORMATION_CONSTANT;\n\n if value == candidate_public_key {\n\n break loop_size;\n\n }\n\n loop_size += 1;\n\n }\n\n}\n\n\n", "file_path": "day-25/src/main.rs", "rank": 72, "score": 73119.54001863574 }, { "content": "/// checks if a target sum is present when adding any of the two numbers in a given list\n\nfn has_target_sum(target: u64, numbers: &[u64]) -> bool {\n\n for num in numbers {\n\n let complement = (target as i64 - *num as i64) as u64;\n\n // TODO: `.contains` is likely a O(n) search, could this probably be optimized?\n\n if numbers.contains(&complement) && &complement != num {\n\n return true;\n\n } else {\n\n continue;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "day-09/src/main.rs", "rank": 73, "score": 69951.97323936499 }, { 
"content": "fn get_input_device_joltage(joltages: &Vec<u64>) -> u64 {\n\n let max_joltage = joltages.iter().max().unwrap();\n\n return max_joltage + 3;\n\n}\n\n\n", "file_path": "day-10/src/main.rs", "rank": 74, "score": 68984.50859725932 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Bag {\n\n color: String,\n\n count: u32,\n\n}\n\n\n", "file_path": "day-07/src/main.rs", "rank": 75, "score": 67476.54065814072 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Rule {\n\n name: String,\n\n ranges: (RuleRange, RuleRange),\n\n}\n\n\n\nimpl Rule {\n\n fn check(&self, ticket_value: u32) -> bool {\n\n let (range_one, range_two) = &self.ranges;\n\n ticket_value >= range_one.min && ticket_value <= range_one.max\n\n || (ticket_value >= range_two.min && ticket_value <= range_two.max)\n\n }\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 76, "score": 67476.54065814072 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Coordinates {\n\n x: i32,\n\n y: i32,\n\n}\n\n\n", "file_path": "day-12/src/main.rs", "rank": 77, "score": 67476.54065814072 }, { "content": "#[derive(Debug)]\n\nstruct Notes {\n\n rules: Vec<Rule>,\n\n my_ticket: Ticket,\n\n other_tickets: Vec<Ticket>,\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 78, "score": 67473.20960087559 }, { "content": "#[derive(Debug)]\n\nstruct Seat {\n\n row: i32,\n\n column: i32,\n\n}\n\n\n", "file_path": "day-05/src/main.rs", "rank": 79, "score": 67473.20960087559 }, { "content": "#[derive(Debug, PartialEq)]\n\nstruct Jump {\n\n column: i32,\n\n row: i32,\n\n}\n\n\n", "file_path": "day-03/src/main.rs", "rank": 80, "score": 67473.20960087559 }, { "content": "#[derive(Debug, Clone)]\n\nstruct RuleRange {\n\n min: u32,\n\n max: u32,\n\n}\n", "file_path": "day-16/src/main.rs", "rank": 81, "score": 66067.18750915676 }, { "content": "struct SeatingArrangement {\n\n rule_type: SeatingRuleTypes,\n\n}\n\n\n", "file_path": "day-11/src/main.rs", "rank": 82, "score": 66063.85645189165 }, { "content": "struct 
JoltageDifference {\n\n one: u64,\n\n two: u64,\n\n three: u64,\n\n}\n\n\n", "file_path": "day-10/src/main.rs", "rank": 83, "score": 66063.85645189165 }, { "content": "#[derive(Debug)]\n\nstruct NavigationInstruction {\n\n direction: Directions,\n\n units: u32,\n\n}\n\n\n", "file_path": "day-12/src/main.rs", "rank": 84, "score": 66063.85645189165 }, { "content": "#[derive(Debug)]\n\nstruct RangeDelimiters {\n\n upper: char,\n\n lower: char,\n\n}\n\n\n", "file_path": "day-05/src/main.rs", "rank": 85, "score": 66063.85645189165 }, { "content": "struct SeatRange {\n\n start: i32,\n\n end: i32,\n\n}\n\n\n\nconst TOTAL_ROWS: i32 = 128;\n\nconst TOTAL_COLUMNS: i32 = 7;\n\n\n", "file_path": "day-05/src/main.rs", "rank": 86, "score": 66063.85645189165 }, { "content": "// Part 01\n\nfn get_labels_after(cup_label: usize, cups: CupplyLinkedList) -> String {\n\n let mut labels = String::new();\n\n let mut candidate = cups[cup_label];\n\n for _ in 0..8 {\n\n labels.push((b'0' + candidate as u8) as char);\n\n let next = cups[candidate];\n\n candidate = next;\n\n }\n\n\n\n labels\n\n}\n\n\n", "file_path": "day-23/src/main.rs", "rank": 87, "score": 65280.10019660444 }, { "content": "fn transform_subject_number(subject_number: usize, loop_size: usize) -> usize {\n\n let mut value: usize = 1;\n\n for _ in 0..loop_size {\n\n value *= subject_number;\n\n value %= TRANSFORMATION_CONSTANT;\n\n }\n\n\n\n value\n\n}\n\n\n", "file_path": "day-25/src/main.rs", "rank": 88, "score": 65259.065756761636 }, { "content": "struct Navigator<'a> {\n\n target: NavigationTarget,\n\n instructions: &'a Vec<NavigationInstruction>,\n\n coordinates: Coordinates,\n\n}\n\n\n", "file_path": "day-12/src/main.rs", "rank": 89, "score": 65003.512725086104 }, { "content": "struct NewPasswordPolicy {\n\n first_position: usize,\n\n last_position: usize,\n\n letter: char,\n\n}\n\n\n\nimpl ValidatePassword for NewPasswordPolicy {\n\n fn validate(&self, password: &str) -> bool {\n\n let mut has_char_at_first_position = 
false;\n\n let mut has_char_at_last_position = false;\n\n\n\n match password.chars().nth(self.first_position) {\n\n Some(char_at_first_position) => {\n\n has_char_at_first_position = char_at_first_position == self.letter;\n\n }\n\n None => {}\n\n }\n\n\n\n match password.chars().nth(self.last_position) {\n\n Some(char_at_last_position) => {\n", "file_path": "day-02/src/main.rs", "rank": 90, "score": 64755.355263499034 }, { "content": "struct OldPasswordPolicy {\n\n min: i32,\n\n max: i32,\n\n letter: char,\n\n}\n\n\n\nimpl ValidatePassword for OldPasswordPolicy {\n\n fn validate(&self, password: &str) -> bool {\n\n let mut occurences: HashMap<char, i32> = HashMap::new();\n\n\n\n for character in password.chars() {\n\n // increments entry by 1, if exists else inserts 1.\n\n occurences\n\n .entry(character)\n\n .and_modify(|e| *e += 1)\n\n .or_insert(1);\n\n }\n\n\n\n let has_letter = occurences.contains_key(&self.letter);\n\n\n", "file_path": "day-02/src/main.rs", "rank": 91, "score": 64755.355263499034 }, { "content": "#[derive(Debug)]\n\nstruct BaggyColorGraph {\n\n adjacency_list: HashMap<String, Vec<Bag>>,\n\n}\n\n\n\nimpl BaggyColorGraph {\n\n fn new(adjacency_list: HashMap<String, Vec<Bag>>) -> BaggyColorGraph {\n\n BaggyColorGraph { adjacency_list }\n\n }\n\n\n\n #[allow(dead_code)]\n\n fn len(&self) -> usize {\n\n self.adjacency_list.len()\n\n }\n\n\n\n fn add_vertex(&mut self, bag: &Bag) {\n\n let color = &bag.color;\n\n self.adjacency_list.insert(color.to_owned(), vec![]);\n\n }\n\n\n\n fn add_edge(&mut self, bag1: &Bag, bag2: &Bag) {\n", "file_path": "day-07/src/main.rs", "rank": 92, "score": 64755.355263499034 }, { "content": "/// checks if a contigous sequence of numbers sum up to a target sum\n\n/// essentially a two-pointer with start and stop pointers, incremented based on whether the sum is less than or greater than target.\n\nfn find_contiguos_sum(target: u64, numbers: &Vec<u64>) -> Vec<u64> {\n\n let mut start = 0;\n\n let mut end = 1;\n\n loop 
{\n\n let sum: u64 = numbers[start..=end].iter().sum();\n\n if sum == target {\n\n // TODO: can we return a slice instead of a .to_vec?\n\n break numbers[start..=end].to_vec();\n\n } else if sum > target {\n\n start += 1;\n\n } else {\n\n end += 1;\n\n }\n\n\n\n continue;\n\n }\n\n}\n\n\n", "file_path": "day-09/src/main.rs", "rank": 93, "score": 63552.40080688112 }, { "content": "fn execute(commands: Vec<Command>) -> HashMap<u64, u64> {\n\n let mut memory: HashMap<u64, u64> = HashMap::new();\n\n let mut bitmask = \"XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX\";\n\n for command in commands {\n\n match command {\n\n Command::Mask(value) => bitmask = value,\n\n Command::Malloc(address, value) => {\n\n let masked = apply_mask(bitmask, value);\n\n memory.insert(address, masked);\n\n }\n\n }\n\n }\n\n\n\n memory\n\n}\n\n\n", "file_path": "day-14/src/main.rs", "rank": 94, "score": 61906.648064813446 }, { "content": "type Ticket = Vec<u32>;\n\n\n", "file_path": "day-16/src/main.rs", "rank": 95, "score": 60856.865837390695 }, { "content": "type Deck = VecDeque<u32>;\n\n\n", "file_path": "day-22/src/main.rs", "rank": 96, "score": 59630.59462509671 }, { "content": "#[test]\n\nfn should_find_nth_number() {\n\n assert_eq!(find_nth_number_in_game(&[0, 3, 6], 2020), 436);\n\n assert_eq!(find_nth_number_in_game(&[1, 3, 2], 2020), 1);\n\n assert_eq!(find_nth_number_in_game(&[2, 1, 3], 2020), 10);\n\n assert_eq!(find_nth_number_in_game(&[3, 1, 2], 2020), 1836);\n\n assert_eq!(find_nth_number_in_game(&[0, 3, 6], 30_000_000), 175594);\n\n}\n", "file_path": "day-15/src/main.rs", "rank": 97, "score": 55157.82190656825 }, { "content": "fn move_and_turn_waypoint(\n\n waypoint_coordinates: &mut Coordinates,\n\n turn_direction: &Directions,\n\n angle: i32,\n\n) {\n\n let waypoint_x = waypoint_coordinates.x;\n\n let waypoint_y = waypoint_coordinates.y;\n\n\n\n let x_direction = if waypoint_x >= 0 {\n\n Directions::East\n\n } else {\n\n Directions::West\n\n };\n\n\n\n let y_direction = if waypoint_y 
>= 0 {\n\n Directions::North\n\n } else {\n\n Directions::South\n\n };\n\n\n", "file_path": "day-12/src/main.rs", "rank": 98, "score": 55154.360790668106 }, { "content": "fn count_occupied_seats_after_chaos(\n\n mut seats: &mut Vec<Vec<char>>,\n\n rules: &SeatingArrangement,\n\n) -> usize {\n\n let (mut new_arrangement, is_modified) = rules.apply_seating_rules(&mut seats);\n\n if !is_modified {\n\n return count_occupied_seats(&mut new_arrangement);\n\n } else {\n\n return count_occupied_seats_after_chaos(&mut new_arrangement, &rules);\n\n }\n\n}\n\n\n", "file_path": "day-11/src/main.rs", "rank": 99, "score": 54021.210044947176 } ]
Rust
src/macos/aarch64/vcpu.rs
RWTH-OS/uhyve
14e8ea129a82910c13e5a12b7893cd1badb5f380
#![allow(non_snake_case)] #![allow(clippy::identity_op)] use crate::aarch64::{ mair, tcr_size, MT_DEVICE_nGnRE, MT_DEVICE_nGnRnE, MT_DEVICE_GRE, MT_NORMAL, MT_NORMAL_NC, PSR, TCR_FLAGS, TCR_TG1_4K, VA_BITS, }; use crate::consts::*; use crate::vm::HypervisorResult; use crate::vm::SysExit; use crate::vm::VcpuStopReason; use crate::vm::VirtualCPU; use log::debug; use std::ffi::OsString; use std::path::Path; use std::path::PathBuf; use xhypervisor; use xhypervisor::{Register, SystemRegister, VirtualCpuExitReason}; pub struct UhyveCPU { id: u32, kernel_path: PathBuf, args: Vec<OsString>, vcpu: xhypervisor::VirtualCpu, vm_start: usize, } impl UhyveCPU { pub fn new(id: u32, kernel_path: PathBuf, args: Vec<OsString>, vm_start: usize) -> UhyveCPU { Self { id, kernel_path, args, vcpu: xhypervisor::VirtualCpu::new().unwrap(), vm_start, } } } impl VirtualCPU for UhyveCPU { fn init(&mut self, entry_point: u64) -> HypervisorResult<()> { debug!("Initialize VirtualCPU"); /* pstate = all interrupts masked */ let pstate: PSR = PSR::D_BIT | PSR::A_BIT | PSR::I_BIT | PSR::F_BIT | PSR::MODE_EL1H; self.vcpu.write_register(Register::CPSR, pstate.bits())?; self.vcpu.write_register(Register::PC, entry_point)?; self.vcpu.write_register(Register::X0, BOOT_INFO_ADDR)?; /* * Setup memory attribute type tables * * Memory regioin attributes for LPAE: * * n = AttrIndx[2:0] * n MAIR * DEVICE_nGnRnE 000 00000000 (0x00) * DEVICE_nGnRE 001 00000100 (0x04) * DEVICE_GRE 010 00001100 (0x0c) * NORMAL_NC 011 01000100 (0x44) * NORMAL 100 11111111 (0xff) */ let mair_el1 = mair(0x00, MT_DEVICE_nGnRnE) | mair(0x04, MT_DEVICE_nGnRE) | mair(0x0c, MT_DEVICE_GRE) | mair(0x44, MT_NORMAL_NC) | mair(0xff, MT_NORMAL); self.vcpu .write_system_register(SystemRegister::MAIR_EL1, mair_el1)?; /* * Setup translation control register (TCR) */ let aa64mmfr0_el1 = self .vcpu .read_system_register(SystemRegister::ID_AA64MMFR0_EL1)?; let tcr = ((aa64mmfr0_el1 & 0xF) << 32) | (tcr_size(VA_BITS) | TCR_TG1_4K | TCR_FLAGS); let 
tcr_el1 = (tcr & 0xFFFFFFF0FFFFFFFFu64) | ((aa64mmfr0_el1 & 0xFu64) << 32); self.vcpu .write_system_register(SystemRegister::TCR_EL1, tcr_el1)?; /* * Enable FP/ASIMD in Architectural Feature Access Control Register, */ let cpacr_el1 = self.vcpu.read_system_register(SystemRegister::CPACR_EL1)? | (3 << 20); self.vcpu .write_system_register(SystemRegister::CPACR_EL1, cpacr_el1)?; /* * Reset debug control register */ self.vcpu .write_system_register(SystemRegister::MDSCR_EL1, 0)?; self.vcpu .write_system_register(SystemRegister::TTBR1_EL1, 0)?; self.vcpu .write_system_register(SystemRegister::TTBR0_EL1, BOOT_PGT)?; /* * Prepare system control register (SCTRL) * Todo: - Verify if all of these bits actually should be explicitly set - Link origin of this documentation and check to which instruction set versions it applies (if applicable) - Fill in the missing Documentation for some of the bits and verify if we care about them or if loading and not setting them would be the appropriate action. */ let sctrl_el1: u64 = 0 | (1 << 26) /* UCI Enables EL0 access in AArch64 for DC CVAU, DC CIVAC, DC CVAC and IC IVAU instructions */ | (0 << 25) /* EE Explicit data accesses at EL1 and Stage 1 translation table walks at EL1 & EL0 are little-endian */ | (0 << 24) /* EOE Explicit data accesses at EL0 are little-endian */ | (1 << 23) | (1 << 22) | (1 << 20) | (0 << 19) /* WXN Regions with write permission are not forced to XN */ | (1 << 18) /* nTWE WFE instructions are executed as normal */ | (0 << 17) | (1 << 16) /* nTWI WFI instructions are executed as normal */ | (1 << 15) /* UCT Enables EL0 access in AArch64 to the CTR_EL0 register */ | (1 << 14) /* DZE Execution of the DC ZVA instruction is allowed at EL0 */ | (0 << 13) | (1 << 12) /* I Instruction caches enabled at EL0 and EL1 */ | (1 << 11) | (0 << 10) | (0 << 9) /* UMA Disable access to the interrupt masks from EL0 */ | (1 << 8) /* SED The SETEND instruction is available */ | (0 << 7) /* ITD The IT instruction functionality is 
available */ | (0 << 6) /* THEE ThumbEE is disabled */ | (0 << 5) /* CP15BEN CP15 barrier operations disabled */ | (1 << 4) /* SA0 Stack Alignment check for EL0 enabled */ | (1 << 3) /* SA Stack Alignment check enabled */ | (1 << 2) /* C Data and unified enabled */ | (0 << 1) /* A Alignment fault checking disabled */ | (1 << 0) /* M MMU enable */ ; self.vcpu .write_system_register(SystemRegister::SCTLR_EL1, sctrl_el1)?; Ok(()) } fn kernel_path(&self) -> &Path { self.kernel_path.as_path() } fn args(&self) -> &[OsString] { self.args.as_slice() } fn host_address(&self, addr: usize) -> usize { addr + self.vm_start } fn virt_to_phys(&self, _addr: usize) -> usize { 0 } fn r#continue(&mut self) -> HypervisorResult<VcpuStopReason> { loop { self.vcpu.run()?; let reason = self.vcpu.exit_reason(); match reason { VirtualCpuExitReason::Exception { exception } => { let ec = (exception.syndrome >> 26) & 0x3f; if ec == 0b100100u64 || ec == 0b100101u64 { let addr: u16 = exception.physical_address.try_into().unwrap(); let pc = self.vcpu.read_register(Register::PC)?; match addr { UHYVE_UART_PORT => { let x8 = (self.vcpu.read_register(Register::X8)? & 0xFF) as u8; self.uart(&[x8]).unwrap(); self.vcpu.write_register(Register::PC, pc + 4)?; } UHYVE_PORT_EXIT => { let data_addr = self.vcpu.read_register(Register::X8)?; let sysexit = unsafe { &*(self.host_address(data_addr as usize) as *const SysExit) }; return Ok(VcpuStopReason::Exit(self.exit(sysexit))); } _ => { error!("Unable to handle exception {:?}", exception); self.print_registers(); return Err(xhypervisor::Error::Error); } } } else { error!("Unsupported exception class: 0x{:x}", ec); self.print_registers(); return Err(xhypervisor::Error::Error); } } _ => { error!("Unknown exit reason: {:?}", reason); return Err(xhypervisor::Error::Error); } } } } fn run(&mut self) -> HypervisorResult<Option<i32>> { match self.r#continue()? 
{ VcpuStopReason::Debug(_) => { unreachable!("reached debug exit without running in debugging mode") } VcpuStopReason::Exit(code) => Ok(Some(code)), VcpuStopReason::Kick => Ok(None), } } fn print_registers(&self) { println!("\nDump state of CPU {}", self.id); let pc = self.vcpu.read_register(Register::PC).unwrap(); let cpsr = self.vcpu.read_register(Register::CPSR).unwrap(); let sp = self .vcpu .read_system_register(SystemRegister::SP_EL1) .unwrap(); let sctlr = self .vcpu .read_system_register(SystemRegister::SCTLR_EL1) .unwrap(); let ttbr0 = self .vcpu .read_system_register(SystemRegister::TTBR0_EL1) .unwrap(); let lr = self.vcpu.read_register(Register::LR).unwrap(); let x0 = self.vcpu.read_register(Register::X0).unwrap(); let x1 = self.vcpu.read_register(Register::X1).unwrap(); let x2 = self.vcpu.read_register(Register::X2).unwrap(); let x3 = self.vcpu.read_register(Register::X3).unwrap(); let x4 = self.vcpu.read_register(Register::X4).unwrap(); let x5 = self.vcpu.read_register(Register::X5).unwrap(); let x6 = self.vcpu.read_register(Register::X6).unwrap(); let x7 = self.vcpu.read_register(Register::X7).unwrap(); let x8 = self.vcpu.read_register(Register::X8).unwrap(); let x9 = self.vcpu.read_register(Register::X9).unwrap(); let x10 = self.vcpu.read_register(Register::X10).unwrap(); let x11 = self.vcpu.read_register(Register::X11).unwrap(); let x12 = self.vcpu.read_register(Register::X12).unwrap(); let x13 = self.vcpu.read_register(Register::X13).unwrap(); let x14 = self.vcpu.read_register(Register::X14).unwrap(); let x15 = self.vcpu.read_register(Register::X15).unwrap(); let x16 = self.vcpu.read_register(Register::X16).unwrap(); let x17 = self.vcpu.read_register(Register::X17).unwrap(); let x18 = self.vcpu.read_register(Register::X18).unwrap(); let x19 = self.vcpu.read_register(Register::X19).unwrap(); let x20 = self.vcpu.read_register(Register::X20).unwrap(); let x21 = self.vcpu.read_register(Register::X21).unwrap(); let x22 = 
self.vcpu.read_register(Register::X22).unwrap(); let x23 = self.vcpu.read_register(Register::X23).unwrap(); let x24 = self.vcpu.read_register(Register::X24).unwrap(); let x25 = self.vcpu.read_register(Register::X25).unwrap(); let x26 = self.vcpu.read_register(Register::X26).unwrap(); let x27 = self.vcpu.read_register(Register::X27).unwrap(); let x28 = self.vcpu.read_register(Register::X28).unwrap(); let x29 = self.vcpu.read_register(Register::X29).unwrap(); println!("\nRegisters:"); println!("----------"); println!( "PC : {:016x} LR : {:016x} CPSR : {:016x}\n\ SP : {:016x} SCTLR : {:016x} TTBR0 : {:016x}", pc, lr, cpsr, sp, sctlr, ttbr0, ); print!( "x0 : {:016x} x1 : {:016x} x2 : {:016x}\n\ x3 : {:016x} x4 : {:016x} x5 : {:016x}\n\ x6 : {:016x} x7 : {:016x} x8 : {:016x}\n\ x9 : {:016x} x10 : {:016x} x11 : {:016x}\n\ x12 : {:016x} x13 : {:016x} x14 : {:016x}\n\ x15 : {:016x} x16 : {:016x} x17 : {:016x}\n\ x18 : {:016x} x19 : {:016x} x20 : {:016x}\n\ x21 : {:016x} x22 : {:016x} x23 : {:016x}\n\ x24 : {:016x} x25 : {:016x} x26 : {:016x}\n\ x27 : {:016x} x28 : {:016x} x29 : {:016x}\n", x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, x29, ); } } impl Drop for UhyveCPU { fn drop(&mut self) { debug!("Drop virtual CPU {}", self.id); let _ = self.vcpu.destroy(); } }
#![allow(non_snake_case)] #![allow(clippy::identity_op)] use crate::aarch64::{ mair, tcr_size, MT_DEVICE_nGnRE, MT_DEVICE_nGnRnE, MT_DEVICE_GRE, MT_NORMAL, MT_NORMAL_NC, PSR, TCR_FLAGS, TCR_TG1_4K, VA_BITS, }; use crate::consts::*; use crate::vm::HypervisorResult; use crate::vm::SysExit; use crate::vm::VcpuStopReason; use crate::vm::VirtualCPU; use log::debug; use std::ffi::OsString; use std::path::Path; use std::path::PathBuf; use xhypervisor; use xhypervisor::{Register, SystemRegister, VirtualCpuExitReason}; pub struct UhyveCPU { id: u32, kernel_path: PathBuf, args: Vec<OsString>, vcpu: xhypervisor::VirtualCpu, vm_start: usize, } impl UhyveCPU {
} // closes the inherent `impl UhyveCPU` block begun before this chunk

impl VirtualCPU for UhyveCPU {
	/// Prepares the vCPU to begin executing the guest kernel at `entry_point`.
	///
	/// Sets the initial PSTATE (all interrupt classes masked, EL1h), the
	/// program counter, the boot-info pointer in X0, and the EL1 memory
	/// attribute / translation-control / access-control / system-control
	/// registers required before the guest's MMU is enabled.
	fn init(&mut self, entry_point: u64) -> HypervisorResult<()> {
		debug!("Initialize VirtualCPU");

		/* pstate = all interrupts masked */
		let pstate: PSR = PSR::D_BIT | PSR::A_BIT | PSR::I_BIT | PSR::F_BIT | PSR::MODE_EL1H;
		self.vcpu.write_register(Register::CPSR, pstate.bits())?;
		self.vcpu.write_register(Register::PC, entry_point)?;
		// X0 carries the address of the boot-info structure into the guest.
		self.vcpu.write_register(Register::X0, BOOT_INFO_ADDR)?;

		/*
		 * Setup memory attribute type tables
		 *
		 * Memory region attributes for LPAE:
		 *
		 *   n = AttrIndx[2:0]
		 *                 n    MAIR
		 *   DEVICE_nGnRnE 000  00000000 (0x00)
		 *   DEVICE_nGnRE  001  00000100 (0x04)
		 *   DEVICE_GRE    010  00001100 (0x0c)
		 *   NORMAL_NC     011  01000100 (0x44)
		 *   NORMAL        100  11111111 (0xff)
		 */
		let mair_el1 = mair(0x00, MT_DEVICE_nGnRnE)
			| mair(0x04, MT_DEVICE_nGnRE)
			| mair(0x0c, MT_DEVICE_GRE)
			| mair(0x44, MT_NORMAL_NC)
			| mair(0xff, MT_NORMAL);
		self.vcpu
			.write_system_register(SystemRegister::MAIR_EL1, mair_el1)?;

		/*
		 * Setup translation control register (TCR)
		 */
		let aa64mmfr0_el1 = self
			.vcpu
			.read_system_register(SystemRegister::ID_AA64MMFR0_EL1)?;
		let tcr = ((aa64mmfr0_el1 & 0xF) << 32) | (tcr_size(VA_BITS) | TCR_TG1_4K | TCR_FLAGS);
		// NOTE(review): the low nibble of ID_AA64MMFR0_EL1 is OR-ed into
		// bits 34:32 both above and below; the second line looks redundant
		// with the first — confirm against the original intent.
		let tcr_el1 = (tcr & 0xFFFFFFF0FFFFFFFFu64) | ((aa64mmfr0_el1 & 0xFu64) << 32);
		self.vcpu
			.write_system_register(SystemRegister::TCR_EL1, tcr_el1)?;

		/*
		 * Enable FP/ASIMD in Architectural Feature Access Control Register,
		 */
		let cpacr_el1 = self.vcpu.read_system_register(SystemRegister::CPACR_EL1)? | (3 << 20);
		self.vcpu
			.write_system_register(SystemRegister::CPACR_EL1, cpacr_el1)?;

		/*
		 * Reset debug control register
		 */
		self.vcpu
			.write_system_register(SystemRegister::MDSCR_EL1, 0)?;

		self.vcpu
			.write_system_register(SystemRegister::TTBR1_EL1, 0)?;
		// TTBR0 points at the boot page table prepared by the loader.
		self.vcpu
			.write_system_register(SystemRegister::TTBR0_EL1, BOOT_PGT)?;

		/*
		 * Prepare system control register (SCTRL)
		 * Todo:
		 *  - Verify if all of these bits actually should be explicitly set
		 *  - Link origin of this documentation and check to which instruction set
		 *    versions it applies (if applicable)
		 *  - Fill in the missing Documentation for some of the bits and verify if
		 *    we care about them or if loading and not setting them would be the
		 *    appropriate action.
		 */
		let sctrl_el1: u64 = 0
			| (1 << 26) /* UCI     Enables EL0 access in AArch64 for DC CVAU, DC CIVAC, DC CVAC and IC IVAU instructions */
			| (0 << 25) /* EE      Explicit data accesses at EL1 and Stage 1 translation table walks at EL1 & EL0 are little-endian */
			| (0 << 24) /* EOE     Explicit data accesses at EL0 are little-endian */
			| (1 << 23)
			| (1 << 22)
			| (1 << 20)
			| (0 << 19) /* WXN     Regions with write permission are not forced to XN */
			| (1 << 18) /* nTWE    WFE instructions are executed as normal */
			| (0 << 17)
			| (1 << 16) /* nTWI    WFI instructions are executed as normal */
			| (1 << 15) /* UCT     Enables EL0 access in AArch64 to the CTR_EL0 register */
			| (1 << 14) /* DZE     Execution of the DC ZVA instruction is allowed at EL0 */
			| (0 << 13)
			| (1 << 12) /* I       Instruction caches enabled at EL0 and EL1 */
			| (1 << 11)
			| (0 << 10)
			| (0 << 9)  /* UMA     Disable access to the interrupt masks from EL0 */
			| (1 << 8)  /* SED     The SETEND instruction is available */
			| (0 << 7)  /* ITD     The IT instruction functionality is available */
			| (0 << 6)  /* THEE    ThumbEE is disabled */
			| (0 << 5)  /* CP15BEN CP15 barrier operations disabled */
			| (1 << 4)  /* SA0     Stack Alignment check for EL0 enabled */
			| (1 << 3)  /* SA      Stack Alignment check enabled */
			| (1 << 2)  /* C       Data and unified enabled */
			| (0 << 1)  /* A       Alignment fault checking disabled */
			| (1 << 0)  /* M       MMU enable */
			;
		self.vcpu
			.write_system_register(SystemRegister::SCTLR_EL1, sctrl_el1)?;

		Ok(())
	}

	/// Host path of the kernel binary this vCPU runs.
	fn kernel_path(&self) -> &Path {
		self.kernel_path.as_path()
	}

	/// Arguments passed to the guest kernel.
	fn args(&self) -> &[OsString] {
		self.args.as_slice()
	}

	/// Translates a guest-physical address into a host-virtual address by
	/// offsetting with the base of the mapped guest memory.
	fn host_address(&self, addr: usize) -> usize {
		addr + self.vm_start
	}

	// NOTE(review): page-table walking is not implemented on this target;
	// every guest-virtual address translates to 0. Confirm callers on this
	// platform never rely on a real translation.
	fn virt_to_phys(&self, _addr: usize) -> usize {
		0
	}

	/// Runs the vCPU until it stops, translating hypervisor exit reasons
	/// into [`VcpuStopReason`]s. Guest exceptions with exception class
	/// 0b100100 / 0b100101 are treated as MMIO accesses to uhyve's
	/// hypercall "ports"; anything else is fatal.
	fn r#continue(&mut self) -> HypervisorResult<VcpuStopReason> {
		loop {
			self.vcpu.run()?;

			let reason = self.vcpu.exit_reason();
			match reason {
				VirtualCpuExitReason::Exception { exception } => {
					// Exception class lives in syndrome bits [31:26].
					let ec = (exception.syndrome >> 26) & 0x3f;

					if ec == 0b100100u64 || ec == 0b100101u64 {
						// The faulting physical address selects the hypercall
						// port; only the low 16 bits are expected to be used
						// (try_into panics otherwise).
						let addr: u16 = exception.physical_address.try_into().unwrap();
						let pc = self.vcpu.read_register(Register::PC)?;

						match addr {
							UHYVE_UART_PORT => {
								// Guest passes one output byte in X8.
								let x8 = (self.vcpu.read_register(Register::X8)? & 0xFF) as u8;

								self.uart(&[x8]).unwrap();
								// Step past the faulting (4-byte) instruction.
								self.vcpu.write_register(Register::PC, pc + 4)?;
							}
							UHYVE_PORT_EXIT => {
								// X8 holds the guest-physical address of the
								// SysExit structure.
								let data_addr = self.vcpu.read_register(Register::X8)?;
								let sysexit = unsafe {
									&*(self.host_address(data_addr as usize) as *const SysExit)
								};
								return Ok(VcpuStopReason::Exit(self.exit(sysexit)));
							}
							_ => {
								error!("Unable to handle exception {:?}", exception);
								self.print_registers();
								return Err(xhypervisor::Error::Error);
							}
						}
					} else {
						error!("Unsupported exception class: 0x{:x}", ec);
						self.print_registers();
						return Err(xhypervisor::Error::Error);
					}
				}
				_ => {
					error!("Unknown exit reason: {:?}", reason);
					return Err(xhypervisor::Error::Error);
				}
			}
		}
	}

	/// Runs the vCPU to completion, returning the guest exit code (`Some`)
	/// or `None` when the vCPU was kicked from another thread.
	fn run(&mut self) -> HypervisorResult<Option<i32>> {
		match self.r#continue()? {
			VcpuStopReason::Debug(_) => {
				unreachable!("reached debug exit without running in debugging mode")
			}
			VcpuStopReason::Exit(code) => Ok(Some(code)),
			VcpuStopReason::Kick => Ok(None),
		}
	}

	/// Dumps the general-purpose and key system registers of this vCPU to
	/// stdout for diagnostics (used on unhandled exceptions).
	fn print_registers(&self) {
		println!("\nDump state of CPU {}", self.id);

		let pc = self.vcpu.read_register(Register::PC).unwrap();
		let cpsr = self.vcpu.read_register(Register::CPSR).unwrap();
		let sp = self
			.vcpu
			.read_system_register(SystemRegister::SP_EL1)
			.unwrap();
		let sctlr = self
			.vcpu
			.read_system_register(SystemRegister::SCTLR_EL1)
			.unwrap();
		let ttbr0 = self
			.vcpu
			.read_system_register(SystemRegister::TTBR0_EL1)
			.unwrap();
		let lr = self.vcpu.read_register(Register::LR).unwrap();
		let x0 = self.vcpu.read_register(Register::X0).unwrap();
		let x1 = self.vcpu.read_register(Register::X1).unwrap();
		let x2 = self.vcpu.read_register(Register::X2).unwrap();
		let x3 = self.vcpu.read_register(Register::X3).unwrap();
		let x4 = self.vcpu.read_register(Register::X4).unwrap();
		let x5 = self.vcpu.read_register(Register::X5).unwrap();
		let x6 = self.vcpu.read_register(Register::X6).unwrap();
		let x7 = self.vcpu.read_register(Register::X7).unwrap();
		let x8 = self.vcpu.read_register(Register::X8).unwrap();
		let x9 = self.vcpu.read_register(Register::X9).unwrap();
		let x10 = self.vcpu.read_register(Register::X10).unwrap();
		let x11 = self.vcpu.read_register(Register::X11).unwrap();
		let x12 = self.vcpu.read_register(Register::X12).unwrap();
		let x13 = self.vcpu.read_register(Register::X13).unwrap();
		let x14 = self.vcpu.read_register(Register::X14).unwrap();
		let x15 = self.vcpu.read_register(Register::X15).unwrap();
		let x16 = self.vcpu.read_register(Register::X16).unwrap();
		let x17 = self.vcpu.read_register(Register::X17).unwrap();
		let x18 = self.vcpu.read_register(Register::X18).unwrap();
		let x19 = self.vcpu.read_register(Register::X19).unwrap();
		let x20 = self.vcpu.read_register(Register::X20).unwrap();
		let x21 = self.vcpu.read_register(Register::X21).unwrap();
		let x22 = self.vcpu.read_register(Register::X22).unwrap();
		let x23 = self.vcpu.read_register(Register::X23).unwrap();
		let x24 = self.vcpu.read_register(Register::X24).unwrap();
		let x25 = self.vcpu.read_register(Register::X25).unwrap();
		let x26 = self.vcpu.read_register(Register::X26).unwrap();
		let x27 = self.vcpu.read_register(Register::X27).unwrap();
		let x28 = self.vcpu.read_register(Register::X28).unwrap();
		let x29 = self.vcpu.read_register(Register::X29).unwrap();

		println!("\nRegisters:");
		println!("----------");
		println!(
			"PC : {:016x} LR : {:016x} CPSR : {:016x}\n\
			SP : {:016x} SCTLR : {:016x} TTBR0 : {:016x}",
			pc, lr, cpsr, sp, sctlr, ttbr0,
		);
		print!(
			"x0 : {:016x} x1 : {:016x} x2 : {:016x}\n\
			x3 : {:016x} x4 : {:016x} x5 : {:016x}\n\
			x6 : {:016x} x7 : {:016x} x8 : {:016x}\n\
			x9 : {:016x} x10 : {:016x} x11 : {:016x}\n\
			x12 : {:016x} x13 : {:016x} x14 : {:016x}\n\
			x15 : {:016x} x16 : {:016x} x17 : {:016x}\n\
			x18 : {:016x} x19 : {:016x} x20 : {:016x}\n\
			x21 : {:016x} x22 : {:016x} x23 : {:016x}\n\
			x24 : {:016x} x25 : {:016x} x26 : {:016x}\n\
			x27 : {:016x} x28 : {:016x} x29 : {:016x}\n",
			x0, x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17,
			x18, x19, x20, x21, x22, x23, x24, x25, x26, x27, x28, x29,
		);
	}
}

impl Drop for UhyveCPU {
	fn drop(&mut self) {
		debug!("Drop virtual CPU {}", self.id);
		// Best-effort teardown: a failed destroy is ignored during drop.
		let _ = self.vcpu.destroy();
	}
}
/// Creates a new vCPU backed by a freshly allocated xhypervisor virtual CPU.
///
/// * `id` - logical index of this vCPU
/// * `kernel_path` - host path of the kernel binary to run
/// * `args` - arguments passed to the guest kernel
/// * `vm_start` - host-virtual base address of the mapped guest memory
///
/// # Panics
///
/// Panics if the hypervisor framework cannot allocate a virtual CPU.
pub fn new(id: u32, kernel_path: PathBuf, args: Vec<OsString>, vm_start: usize) -> UhyveCPU {
	// State the invariant instead of a bare unwrap: vCPU allocation only
	// fails when the hypervisor framework itself is unavailable, which is
	// unrecoverable at this point.
	let vcpu = xhypervisor::VirtualCpu::new()
		.expect("Unable to create xhypervisor virtual CPU");

	Self {
		id,
		kernel_path,
		args,
		vcpu,
		vm_start,
	}
}
function_block-full_function
[ { "content": "/// Uses Cargo to build a kernel in the `tests/test-kernels` directory.\n\n/// Returns a path to the build binary.\n\npub fn build_hermit_bin(kernel: impl AsRef<Path>) -> PathBuf {\n\n\tlet kernel = kernel.as_ref();\n\n\tprintln!(\"Building Kernel {}\", kernel.display());\n\n\tlet kernel_src_path = Path::new(\"tests/test-kernels\");\n\n\tlet cmd = Command::new(\"cargo\")\n\n\t\t.arg(\"build\")\n\n\t\t.arg(\"--bin\")\n\n\t\t.arg(kernel)\n\n\t\t// Remove environment variables related to the current cargo instance (toolchain version, coverage flags)\n\n\t\t.env_clear()\n\n\t\t// Retain PATH since it is used to find cargo and cc\n\n\t\t.env(\"PATH\", env::var_os(\"PATH\").unwrap())\n\n\t\t.current_dir(kernel_src_path)\n\n\t\t.status()\n\n\t\t.expect(\"failed to execute `cargo build`\");\n\n\tassert!(cmd.success(), \"Test binaries could not be build\");\n\n\t[\n\n\t\tkernel_src_path,\n\n\t\tPath::new(\"target/x86_64-unknown-hermit/debug\"),\n\n\t\tPath::new(kernel),\n\n\t]\n\n\t.iter()\n\n\t.collect()\n\n}\n\n\n", "file_path": "tests/common.rs", "rank": 0, "score": 79241.1504160406 }, { "content": "pub fn detect_freq_from_cpuid(cpuid: &CpuId) -> std::result::Result<u32, FrequencyDetectionFailed> {\n\n\tdebug!(\"Trying to detect CPU frequency by tsc info\");\n\n\n\n\tlet has_invariant_tsc = cpuid\n\n\t\t.get_advanced_power_mgmt_info()\n\n\t\t.map_or(false, |apm_info| apm_info.has_invariant_tsc());\n\n\tif !has_invariant_tsc {\n\n\t\twarn!(\"TSC frequency varies with speed-stepping\")\n\n\t}\n\n\n\n\tlet tsc_frequency_hz = cpuid.get_tsc_info().map(|tinfo| {\n\n\t\tif tinfo.tsc_frequency().is_some() {\n\n\t\t\ttinfo.tsc_frequency()\n\n\t\t} else {\n\n\t\t\t// Skylake and Kabylake don't report the crystal clock, approximate with base frequency:\n\n\t\t\tcpuid\n\n\t\t\t\t.get_processor_frequency_info()\n\n\t\t\t\t.map(|pinfo| pinfo.processor_base_frequency() as u64 * MHZ_TO_HZ)\n\n\t\t\t\t.map(|cpu_base_freq_hz| {\n\n\t\t\t\t\tlet crystal_hz =\n", 
"file_path": "src/arch/x86_64/mod.rs", "rank": 1, "score": 73529.33683890832 }, { "content": "/// Small wrapper around [`Uhyve::run`] with default parameters for a small and\n\n/// simple uhyve vm\n\npub fn run_simple_vm(kernel_path: PathBuf) {\n\n\tlet params = Params {\n\n\t\tcpu_count: 2.try_into().unwrap(),\n\n\t\tmemory_size: Byte::from_bytes(32 * 1024 * 1024).try_into().unwrap(),\n\n\t\t..Default::default()\n\n\t};\n\n\tlet code = Uhyve::new(kernel_path, params).unwrap().run(None);\n\n\tassert_eq!(0, code);\n\n}\n", "file_path": "tests/common.rs", "rank": 2, "score": 68135.06552738413 }, { "content": "pub fn write(regs: &X86_64CoreRegs, vcpu: &VcpuFd) -> Result<(), kvm_ioctls::Error> {\n\n\tlet X86_64CoreRegs {\n\n\t\tregs,\n\n\t\teflags,\n\n\t\trip,\n\n\t\tsegments,\n\n\t\tst,\n\n\t\tfpu,\n\n\t\txmm,\n\n\t\tmxcsr,\n\n\t} = regs.clone();\n\n\n\n\tlet kvm_regs = Regs { regs, eflags, rip }.into();\n\n\tvcpu.set_regs(&kvm_regs)?;\n\n\n\n\tlet mut kvm_sregs = vcpu.get_sregs()?;\n\n\tSregs { segments }.update(&mut kvm_sregs);\n\n\tvcpu.set_sregs(&kvm_sregs)?;\n\n\n\n\tlet kvm_fpu = Fpu {\n", "file_path": "src/linux/gdb/regs.rs", "rank": 3, "score": 64797.4679311263 }, { "content": "pub fn read(vcpu: &VcpuFd, regs: &mut X86_64CoreRegs) -> Result<(), kvm_ioctls::Error> {\n\n\t// TODO: Rewrite using destructuring assignment once stabilized\n\n\n\n\tlet Regs {\n\n\t\tregs: gp_regs,\n\n\t\teflags,\n\n\t\trip,\n\n\t} = vcpu.get_regs()?.into();\n\n\tregs.regs = gp_regs;\n\n\tregs.eflags = eflags;\n\n\tregs.rip = rip;\n\n\n\n\tlet Sregs { segments } = vcpu.get_sregs()?.into();\n\n\tregs.segments = segments;\n\n\n\n\tlet Fpu {\n\n\t\tst,\n\n\t\tfpu,\n\n\t\txmm,\n\n\t\tmxcsr,\n\n\t} = vcpu.get_fpu()?.into();\n\n\tregs.st = st;\n\n\tregs.fpu = fpu;\n\n\tregs.xmm = xmm;\n\n\tregs.mxcsr = mxcsr;\n\n\n\n\tOk(())\n\n}\n\n\n", "file_path": "src/linux/gdb/regs.rs", "rank": 4, "score": 62529.811571549326 }, { "content": "pub fn get_cpu_frequency_from_os() -> 
std::result::Result<u32, FrequencyDetectionFailed> {\n\n\t// Determine TSC frequency by measuring it (loop for a second, record ticks)\n\n\tlet duration = Duration::from_millis(10);\n\n\tlet now = Instant::now();\n\n\tlet start = unsafe { crate::x86_64::rdtsc() };\n\n\tif start > 0 {\n\n\t\tloop {\n\n\t\t\tif now.elapsed() >= duration {\n\n\t\t\t\tbreak;\n\n\t\t\t}\n\n\t\t}\n\n\t\tlet end = unsafe { rdtsc() };\n\n\t\tOk((((end - start) * 100) / MHZ_TO_HZ).try_into().unwrap())\n\n\t} else {\n\n\t\tErr(FrequencyDetectionFailed)\n\n\t}\n\n}\n\n\n\nmod tests {\n\n\t// test is derived from\n", "file_path": "src/arch/x86_64/mod.rs", "rank": 5, "score": 60856.769280643086 }, { "content": "fn get_used_ring_offset() -> usize {\n\n\talign(\n\n\t\tget_available_ring_offset() + size_of::<u16>() * (QUEUE_LIMIT + 3),\n\n\t\tPAGE_SIZE,\n\n\t)\n\n}\n\n\n\nimpl Virtqueue {\n\n\tpub unsafe fn new(mem: *mut u8, queue_size: usize) -> Self {\n\n\t\t#[allow(clippy::cast_ptr_alignment)]\n\n\t\tlet descriptor_table = mem as *mut VringDescriptor;\n\n\t\tlet available_ring_ptr = mem.add(get_available_ring_offset());\n\n\t\tlet used_ring_ptr = mem.add(get_used_ring_offset());\n\n\t\tlet available_ring = VringAvailable::new(available_ring_ptr);\n\n\t\tlet used_ring = VringUsed::new(used_ring_ptr);\n\n\t\tVirtqueue {\n\n\t\t\tdescriptor_table,\n\n\t\t\tavailable_ring,\n\n\t\t\tused_ring,\n\n\t\t\tlast_seen_available: 0,\n", "file_path": "src/linux/virtqueue.rs", "rank": 6, "score": 50409.99356535133 }, { "content": "/// The signal for kicking vCPUs out of KVM_RUN.\n\n///\n\n/// It is used to stop a vCPU from another thread.\n\nstruct KickSignal;\n\n\n\nimpl KickSignal {\n\n\tconst RTSIG_OFFSET: libc::c_int = 0;\n\n\n\n\tfn get() -> Signal {\n\n\t\tlet kick_signal = SIGRTMIN() + Self::RTSIG_OFFSET;\n\n\t\tassert!(kick_signal <= SIGRTMAX());\n\n\t\t// TODO: Remove the transmute once realtime signals are properly supported by nix\n\n\t\t// https://github.com/nix-rust/nix/issues/495\n\n\t\tunsafe 
{ mem::transmute(kick_signal) }\n\n\t}\n\n\n\n\tfn register_handler() -> nix::Result<()> {\n\n\t\textern \"C\" fn handle_signal(_signal: libc::c_int) {}\n\n\t\t// SAFETY: We don't use the `signal`'s return value.\n\n\t\tunsafe {\n\n\t\t\tsignal(Self::get(), SigHandler::Handler(handle_signal))?;\n\n\t\t}\n\n\t\tOk(())\n", "file_path": "src/linux/mod.rs", "rank": 7, "score": 48473.958713397085 }, { "content": "/// [`kvm_sregs`]-related [`X86_64CoreRegs`] fields.\n\nstruct Sregs {\n\n\tsegments: X86SegmentRegs,\n\n}\n\n\n\nimpl From<kvm_sregs> for Sregs {\n\n\tfn from(kvm_sregs: kvm_sregs) -> Self {\n\n\t\tlet segments = X86SegmentRegs {\n\n\t\t\tcs: kvm_sregs.cs.selector.into(),\n\n\t\t\tss: kvm_sregs.ss.selector.into(),\n\n\t\t\tds: kvm_sregs.ds.selector.into(),\n\n\t\t\tes: kvm_sregs.es.selector.into(),\n\n\t\t\tfs: kvm_sregs.fs.selector.into(),\n\n\t\t\tgs: kvm_sregs.gs.selector.into(),\n\n\t\t};\n\n\t\tSelf { segments }\n\n\t}\n\n}\n\n\n\nimpl Sregs {\n\n\tfn update(self, kvm_sregs: &mut kvm_sregs) {\n\n\t\tkvm_sregs.cs.selector = self.segments.cs.try_into().unwrap();\n\n\t\tkvm_sregs.ss.selector = self.segments.ss.try_into().unwrap();\n\n\t\tkvm_sregs.ds.selector = self.segments.ds.try_into().unwrap();\n\n\t\tkvm_sregs.es.selector = self.segments.es.try_into().unwrap();\n\n\t\tkvm_sregs.fs.selector = self.segments.fs.try_into().unwrap();\n\n\t\tkvm_sregs.gs.selector = self.segments.gs.try_into().unwrap();\n\n\t}\n\n}\n\n\n", "file_path": "src/linux/gdb/regs.rs", "rank": 8, "score": 48470.352612953946 }, { "content": "/// [`kvm_fpu`]-related [`X86_64CoreRegs`] fields.\n\nstruct Fpu {\n\n\tst: [F80; 8],\n\n\tfpu: X87FpuInternalRegs,\n\n\txmm: [u128; 16],\n\n\tmxcsr: u32,\n\n}\n\n\n\nimpl From<kvm_fpu> for Fpu {\n\n\tfn from(kvm_fpu: kvm_fpu) -> Self {\n\n\t\t// For details on `kvm_fpu` see:\n\n\t\t// * https://elixir.bootlin.com/linux/v5.13.1/source/arch/x86/include/uapi/asm/kvm.h#L163\n\n\t\t// * 
https://elixir.bootlin.com/linux/v5.13.1/source/arch/x86/kvm/x86.c#L10181\n\n\t\t// * https://elixir.bootlin.com/linux/v5.13.1/source/arch/x86/include/asm/fpu/types.h#L34\n\n\n\n\t\tlet st = kvm_fpu.fpr.map(|fpr| fpr[..10].try_into().unwrap());\n\n\n\n\t\tlet fpu = X87FpuInternalRegs {\n\n\t\t\tfctrl: kvm_fpu.fcw.into(),\n\n\t\t\tfstat: kvm_fpu.fsw.into(),\n\n\t\t\tftag: kvm_fpu.ftwx.into(),\n", "file_path": "src/linux/gdb/regs.rs", "rank": 9, "score": 48470.352612953946 }, { "content": "/// [`kvm_regs`]-related [`X86_64CoreRegs`] fields.\n\nstruct Regs {\n\n\tregs: [u64; 16],\n\n\teflags: u32,\n\n\trip: u64,\n\n}\n\n\n\nimpl From<kvm_regs> for Regs {\n\n\tfn from(kvm_regs: kvm_regs) -> Self {\n\n\t\tlet regs = [\n\n\t\t\tkvm_regs.rax,\n\n\t\t\tkvm_regs.rbx,\n\n\t\t\tkvm_regs.rcx,\n\n\t\t\tkvm_regs.rdx,\n\n\t\t\tkvm_regs.rsi,\n\n\t\t\tkvm_regs.rdi,\n\n\t\t\tkvm_regs.rbp,\n\n\t\t\tkvm_regs.rsp,\n\n\t\t\tkvm_regs.r8,\n\n\t\t\tkvm_regs.r9,\n\n\t\t\tkvm_regs.r10,\n", "file_path": "src/linux/gdb/regs.rs", "rank": 10, "score": 48470.352612953946 }, { "content": "#[derive(Debug)]\n\nstruct MmapMemory {\n\n\tflags: u32,\n\n\tmemory_size: usize,\n\n\tguest_address: usize,\n\n\thost_address: usize,\n\n}\n\n\n\nimpl MmapMemory {\n\n\tpub fn new(\n\n\t\tflags: u32,\n\n\t\tmemory_size: usize,\n\n\t\tguest_address: u64,\n\n\t\thuge_pages: bool,\n\n\t\tmergeable: bool,\n\n\t) -> MmapMemory {\n\n\t\tlet host_address = unsafe {\n\n\t\t\tmmap(\n\n\t\t\t\tstd::ptr::null_mut(),\n\n\t\t\t\tmemory_size,\n\n\t\t\t\tProtFlags::PROT_READ | ProtFlags::PROT_WRITE,\n", "file_path": "src/linux/uhyve.rs", "rank": 11, "score": 48470.352612953946 }, { "content": "#[derive(Debug)]\n\nstruct UhyveNetwork {\n\n\t#[allow(dead_code)]\n\n\treader: std::thread::JoinHandle<()>,\n\n\t#[allow(dead_code)]\n\n\twriter: std::thread::JoinHandle<()>,\n\n\ttx: std::sync::mpsc::SyncSender<usize>,\n\n}\n\n\n\nimpl UhyveNetwork {\n\n\tpub fn new(evtfd: EventFd, name: String, start: usize) -> Self {\n\n\t\tlet iface 
= Arc::new(\n\n\t\t\tIface::without_packet_info(&name, Mode::Tap).expect(\"Unable to creat TUN/TAP device\"),\n\n\t\t);\n\n\n\n\t\tlet iface_writer = Arc::clone(&iface);\n\n\t\tlet iface_reader = Arc::clone(&iface);\n\n\t\tlet (tx, rx) = sync_channel(1);\n\n\n\n\t\tlet writer = thread::spawn(move || {\n\n\t\t\tlet tx_queue = unsafe {\n", "file_path": "src/linux/uhyve.rs", "rank": 12, "score": 48470.352612953946 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct RedirectionTable {\n\n\treg: u32,\n\n}\n\n\n\nimpl RedirectionTable {\n\n\tpub fn new() -> Self {\n\n\t\tRedirectionTable { reg: 0 }\n\n\t}\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct IoApic {\n\n\tselector: usize,\n\n\trtbl: [RedirectionTable; IOAPIC_REG_TABLE + 2 * REDIR_ENTRIES],\n\n}\n\n\n\nimpl IoApic {\n\n\tpub fn new() -> Self {\n\n\t\tlet mut ioapic = IoApic {\n\n\t\t\tselector: 0,\n", "file_path": "src/macos/x86_64/ioapic.rs", "rank": 13, "score": 47103.58543468948 }, { "content": "pub trait Vm {\n\n\t/// Returns the number of cores for the vm.\n\n\tfn num_cpus(&self) -> u32;\n\n\t/// Returns a pointer to the address of the guest memory and the size of the memory in bytes.\n\n\tfn guest_mem(&self) -> (*mut u8, usize);\n\n\t#[doc(hidden)]\n\n\tfn set_offset(&mut self, offset: u64);\n\n\t/// Returns the section offsets relative to their base addresses\n\n\tfn get_offset(&self) -> u64;\n\n\t/// Sets the elf entry point.\n\n\tfn set_entry_point(&mut self, entry: u64);\n\n\tfn get_entry_point(&self) -> u64;\n\n\tfn kernel_path(&self) -> &Path;\n\n\tfn create_cpu(&self, id: u32) -> HypervisorResult<UhyveCPU>;\n\n\tfn set_boot_info(&mut self, header: *const BootInfo);\n\n\tfn cpu_online(&self) -> u32;\n\n\tfn get_ip(&self) -> Option<Ipv4Addr>;\n\n\tfn get_gateway(&self) -> Option<Ipv4Addr>;\n\n\tfn get_mask(&self) -> Option<Ipv4Addr>;\n\n\tfn verbose(&self) -> bool;\n", "file_path": "src/vm.rs", "rank": 14, "score": 46191.61621110595 }, { "content": "pub trait VirtualCPU {\n\n\t/// Initialize the cpu 
to start running the code ad entry_point.\n\n\tfn init(&mut self, entry_point: u64) -> HypervisorResult<()>;\n\n\n\n\t/// Continues execution.\n\n\tfn r#continue(&mut self) -> HypervisorResult<VcpuStopReason>;\n\n\n\n\t/// Start the execution of the CPU. The function will run until it crashes (`Err`) or terminate with an exit code (`Ok`).\n\n\tfn run(&mut self) -> HypervisorResult<Option<i32>>;\n\n\n\n\t/// Prints the VCPU's registers to stdout.\n\n\tfn print_registers(&self);\n\n\n\n\t/// Translates an address from the VM's physical space into the hosts virtual space.\n\n\tfn host_address(&self, addr: usize) -> usize;\n\n\n\n\t/// Looks up the guests pagetable and translates a guest's virtual address to a guest's physical address.\n\n\tfn virt_to_phys(&self, addr: usize) -> usize;\n\n\n\n\t/// Returns the (host) path of the kernel binary.\n", "file_path": "src/vm.rs", "rank": 15, "score": 44687.85848901557 }, { "content": "pub trait PciDevice {\n\n\tfn handle_read(&self, address: u32, dest: &mut [u8]);\n\n\tfn handle_write(&mut self, address: u32, src: &[u8]);\n\n}\n\n\n", "file_path": "src/linux/virtio.rs", "rank": 16, "score": 43326.4340690765 }, { "content": "fn align(addr: usize, boundary: usize) -> usize {\n\n\t(addr + boundary - 1) & !(boundary - 1)\n\n}\n\n\n", "file_path": "src/linux/virtqueue.rs", "rank": 17, "score": 39430.17060815546 }, { "content": "pub fn detect_freq_from_cpuid_hypervisor_info(\n\n\tcpuid: &CpuId,\n\n) -> std::result::Result<u32, FrequencyDetectionFailed> {\n\n\tdebug!(\"Trying to detect CPU frequency by hypervisor info\");\n\n\tlet hypervisor_info = cpuid\n\n\t\t.get_hypervisor_info()\n\n\t\t.ok_or(FrequencyDetectionFailed)?;\n\n\tdebug!(\n\n\t\t\"cpuid detected hypervisor: {:?}\",\n\n\t\thypervisor_info.identify()\n\n\t);\n\n\tlet hz = hypervisor_info\n\n\t\t.tsc_frequency()\n\n\t\t.ok_or(FrequencyDetectionFailed)? 
as u64\n\n\t\t* KHZ_TO_HZ;\n\n\tlet mhz: u32 = (hz / MHZ_TO_HZ).try_into().unwrap();\n\n\tif mhz > 0 {\n\n\t\tOk(mhz)\n\n\t} else {\n\n\t\tErr(FrequencyDetectionFailed)\n\n\t}\n\n}\n\n\n", "file_path": "src/arch/x86_64/mod.rs", "rank": 18, "score": 38964.2378294409 }, { "content": "pub fn run_rusty_demo(c: &mut Criterion) {\n\n\tlet uhyve_path = [env!(\"CARGO_MANIFEST_DIR\"), \"target/release/uhyve\"]\n\n\t\t.iter()\n\n\t\t.collect::<PathBuf>();\n\n\tassert!(\n\n\t\tuhyve_path.exists(),\n\n\t\t\"uhyve release build is required to run this benchmark\"\n\n\t);\n\n\n\n\tlet rusty_demo_path = [env!(\"CARGO_MANIFEST_DIR\"), \"benches_data/rusty_demo\"]\n\n\t\t.iter()\n\n\t\t.collect::<PathBuf>();\n\n\tassert!(\n\n\t\tPath::new(&rusty_demo_path).exists(),\n\n\t\t\"rusty_demo executable missing from bench_data\"\n\n\t);\n\n\n\n\tlet mut group = c.benchmark_group(\"rusty_demo\");\n\n\tgroup.measurement_time(Duration::from_secs(60));\n\n\n", "file_path": "benches/complete_binary/mod.rs", "rank": 19, "score": 35438.9093370517 }, { "content": "pub fn load_vm_hello_world(c: &mut Criterion) {\n\n\tlet path = [env!(\"CARGO_MANIFEST_DIR\"), \"benches_data/hello_world\"]\n\n\t\t.iter()\n\n\t\t.collect();\n\n\tlet params = Params {\n\n\t\tmemory_size: Byte::from_bytes(1024 * 100000).try_into().unwrap(),\n\n\t\t..Default::default()\n\n\t};\n\n\tlet mut vm = Uhyve::new(path, params).expect(\"Unable to create VM\");\n\n\n\n\tc.bench_function(\"vm::load_kernel(hello world)\", |b| {\n\n\t\tb.iter(|| unsafe {\n\n\t\t\tvm.load_kernel().unwrap();\n\n\t\t})\n\n\t});\n\n}\n\n\n\ncriterion_group!(load_kernel_benchmark_group, load_vm_hello_world);\n", "file_path": "benches/vm/mod.rs", "rank": 20, "score": 35438.9093370517 }, { "content": "pub fn run_hello_world(c: &mut Criterion) {\n\n\tlet uhyve_path = [env!(\"CARGO_MANIFEST_DIR\"), \"target/release/uhyve\"]\n\n\t\t.iter()\n\n\t\t.collect::<PathBuf>();\n\n\tassert!(\n\n\t\tuhyve_path.exists(),\n\n\t\t\"uhyve release build is required to run 
this benchmark\"\n\n\t);\n\n\n\n\tlet hello_world_path = [env!(\"CARGO_MANIFEST_DIR\"), \"benches_data/hello_world\"]\n\n\t\t.iter()\n\n\t\t.collect::<PathBuf>();\n\n\tassert!(\n\n\t\thello_world_path.exists(),\n\n\t\t\"hello_world executable missing from bench_data\"\n\n\t);\n\n\n\n\tlet mut group = c.benchmark_group(\"hello_world\");\n\n\tgroup.sample_size(30);\n\n\n", "file_path": "benches/complete_binary/mod.rs", "rank": 21, "score": 35438.9093370517 }, { "content": "use std::slice;\n\nuse std::sync::{Arc, Mutex};\n\nuse x86_64::registers::control::{Cr0Flags, Cr4Flags};\n\nuse x86_64::structures::paging::PageTableFlags;\n\n\n\nconst CPUID_EXT_HYPERVISOR: u32 = 1 << 31;\n\nconst CPUID_TSC_DEADLINE: u32 = 1 << 24;\n\nconst CPUID_ENABLE_MSR: u32 = 1 << 5;\n\nconst MSR_IA32_MISC_ENABLE: u32 = 0x000001a0;\n\nconst PCI_CONFIG_DATA_PORT: u16 = 0xCFC;\n\nconst PCI_CONFIG_ADDRESS_PORT: u16 = 0xCF8;\n\n\n\npub struct UhyveCPU {\n\n\tid: u32,\n\n\tvcpu: VcpuFd,\n\n\tvm_start: usize,\n\n\tkernel_path: PathBuf,\n\n\targs: Vec<OsString>,\n\n\ttx: Option<std::sync::mpsc::SyncSender<usize>>,\n\n\tvirtio_device: Arc<Mutex<VirtioNetPciDevice>>,\n", "file_path": "src/linux/vcpu.rs", "rank": 22, "score": 30215.173099856424 }, { "content": "\tpci_addr: Option<u32>,\n\n}\n\n\n\nimpl UhyveCPU {\n\n\tpub unsafe fn memory(&mut self, start_addr: u64, len: usize) -> &mut [u8] {\n\n\t\tlet phys = self.virt_to_phys(start_addr.try_into().unwrap());\n\n\t\tlet host = self.host_address(phys);\n\n\t\tslice::from_raw_parts_mut(host as *mut u8, len)\n\n\t}\n\n\n\n\tpub fn new(\n\n\t\tid: u32,\n\n\t\tkernel_path: PathBuf,\n\n\t\targs: Vec<OsString>,\n\n\t\tvcpu: VcpuFd,\n\n\t\tvm_start: usize,\n\n\t\ttx: Option<std::sync::mpsc::SyncSender<usize>>,\n\n\t\tvirtio_device: Arc<Mutex<VirtioNetPciDevice>>,\n\n\t) -> UhyveCPU {\n\n\t\tUhyveCPU {\n", "file_path": "src/linux/vcpu.rs", "rank": 23, "score": 30214.889240132943 }, { "content": "\t\tself.setup_msrs()?;\n\n\n\n\t\tOk(())\n\n\t}\n\n\n\n\tfn 
kernel_path(&self) -> &Path {\n\n\t\tself.kernel_path.as_path()\n\n\t}\n\n\n\n\tfn args(&self) -> &[OsString] {\n\n\t\tself.args.as_slice()\n\n\t}\n\n\n\n\tfn host_address(&self, addr: usize) -> usize {\n\n\t\taddr + self.vm_start\n\n\t}\n\n\n\n\tfn virt_to_phys(&self, addr: usize) -> usize {\n\n\t\t/// Number of Offset bits of a virtual address for a 4 KiB page, which are shifted away to get its Page Frame Number (PFN).\n\n\t\tpub const PAGE_BITS: usize = 12;\n", "file_path": "src/linux/vcpu.rs", "rank": 24, "score": 30207.443249892283 }, { "content": "\tpub fn get_vcpu(&self) -> &VcpuFd {\n\n\t\t&self.vcpu\n\n\t}\n\n\n\n\tpub fn get_vcpu_mut(&mut self) -> &mut VcpuFd {\n\n\t\t&mut self.vcpu\n\n\t}\n\n}\n\n\n\nimpl VirtualCPU for UhyveCPU {\n\n\tfn init(&mut self, entry_point: u64) -> HypervisorResult<()> {\n\n\t\tself.setup_long_mode(entry_point)?;\n\n\t\tself.setup_cpuid()?;\n\n\n\n\t\t// be sure that the multiprocessor is runable\n\n\t\tlet mp_state = kvm_mp_state {\n\n\t\t\tmp_state: KVM_MP_STATE_RUNNABLE,\n\n\t\t};\n\n\t\tself.vcpu.set_mp_state(mp_state)?;\n\n\n", "file_path": "src/linux/vcpu.rs", "rank": 25, "score": 30207.09353252655 }, { "content": "\t\t\t\t\t\t\t\tmatch &self.tx {\n\n\t\t\t\t\t\t\t\t\tSome(tx_channel) => tx_channel.send(1).unwrap(),\n\n\n\n\t\t\t\t\t\t\t\t\tNone => {}\n\n\t\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tUHYVE_PORT_EXIT => {\n\n\t\t\t\t\t\t\t\tlet data_addr: usize =\n\n\t\t\t\t\t\t\t\t\tunsafe { (*(addr.as_ptr() as *const u32)) as usize };\n\n\t\t\t\t\t\t\t\tlet sysexit =\n\n\t\t\t\t\t\t\t\t\tunsafe { &*(self.host_address(data_addr) as *const SysExit) };\n\n\t\t\t\t\t\t\t\treturn Ok(VcpuStopReason::Exit(self.exit(sysexit)));\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tUHYVE_PORT_OPEN => {\n\n\t\t\t\t\t\t\t\tlet data_addr: usize =\n\n\t\t\t\t\t\t\t\t\tunsafe { (*(addr.as_ptr() as *const u32)) as usize };\n\n\t\t\t\t\t\t\t\tlet sysopen =\n\n\t\t\t\t\t\t\t\t\tunsafe { &mut *(self.host_address(data_addr) as *mut SysOpen) 
};\n\n\t\t\t\t\t\t\t\tself.open(sysopen);\n\n\t\t\t\t\t\t\t}\n", "file_path": "src/linux/vcpu.rs", "rank": 26, "score": 30206.495467995934 }, { "content": "use crate::consts::*;\n\nuse crate::linux::virtio::*;\n\nuse crate::linux::KVM;\n\nuse crate::vm::HypervisorResult;\n\nuse crate::vm::SysClose;\n\nuse crate::vm::SysCmdsize;\n\nuse crate::vm::SysCmdval;\n\nuse crate::vm::SysExit;\n\nuse crate::vm::SysLseek;\n\nuse crate::vm::SysOpen;\n\nuse crate::vm::SysRead;\n\nuse crate::vm::SysUnlink;\n\nuse crate::vm::SysWrite;\n\nuse crate::vm::VcpuStopReason;\n\nuse crate::vm::VirtualCPU;\n\nuse kvm_bindings::*;\n\nuse kvm_ioctls::{VcpuExit, VcpuFd};\n\nuse std::ffi::OsString;\n\nuse std::path::Path;\n\nuse std::path::PathBuf;\n", "file_path": "src/linux/vcpu.rs", "rank": 27, "score": 30205.29604953487 }, { "content": "\t\t\t\t\t\t\tUHYVE_PORT_WRITE => {\n\n\t\t\t\t\t\t\t\tlet data_addr: usize =\n\n\t\t\t\t\t\t\t\t\tunsafe { (*(addr.as_ptr() as *const u32)) as usize };\n\n\t\t\t\t\t\t\t\tlet syswrite =\n\n\t\t\t\t\t\t\t\t\tunsafe { &*(self.host_address(data_addr) as *const SysWrite) };\n\n\t\t\t\t\t\t\t\tself.write(syswrite)?;\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tUHYVE_PORT_READ => {\n\n\t\t\t\t\t\t\t\tlet data_addr: usize =\n\n\t\t\t\t\t\t\t\t\tunsafe { (*(addr.as_ptr() as *const u32)) as usize };\n\n\t\t\t\t\t\t\t\tlet sysread =\n\n\t\t\t\t\t\t\t\t\tunsafe { &mut *(self.host_address(data_addr) as *mut SysRead) };\n\n\t\t\t\t\t\t\t\tself.read(sysread);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tUHYVE_PORT_UNLINK => {\n\n\t\t\t\t\t\t\t\tlet data_addr: usize =\n\n\t\t\t\t\t\t\t\t\tunsafe { (*(addr.as_ptr() as *const u32)) as usize };\n\n\t\t\t\t\t\t\t\tlet sysunlink = unsafe {\n\n\t\t\t\t\t\t\t\t\t&mut *(self.host_address(data_addr) as *mut SysUnlink)\n\n\t\t\t\t\t\t\t\t};\n", "file_path": "src/linux/vcpu.rs", "rank": 28, "score": 30205.107180485476 }, { "content": "\t\t}\n\n\n\n\t\t(entry & ((!0usize) << PAGE_BITS)) | (addr & !((!0usize) << PAGE_BITS))\n\n\t}\n\n\n\n\tfn 
r#continue(&mut self) -> HypervisorResult<VcpuStopReason> {\n\n\t\tloop {\n\n\t\t\tmatch self.vcpu.run() {\n\n\t\t\t\tOk(vcpu_stop_reason) => match vcpu_stop_reason {\n\n\t\t\t\t\tVcpuExit::Hlt => {\n\n\t\t\t\t\t\t// Ignore `VcpuExit::Hlt`\n\n\t\t\t\t\t\tdebug!(\"{:?}\", VcpuExit::Hlt);\n\n\t\t\t\t\t}\n\n\t\t\t\t\tVcpuExit::Shutdown => {\n\n\t\t\t\t\t\treturn Ok(VcpuStopReason::Exit(0));\n\n\t\t\t\t\t}\n\n\t\t\t\t\tVcpuExit::IoIn(port, addr) => match port {\n\n\t\t\t\t\t\tPCI_CONFIG_DATA_PORT => {\n\n\t\t\t\t\t\t\tif let Some(pci_addr) = self.pci_addr {\n\n\t\t\t\t\t\t\t\tif pci_addr & 0x1ff800 == 0 {\n", "file_path": "src/linux/vcpu.rs", "rank": 29, "score": 30204.880647017195 }, { "content": "\t\t\t\t\t\t\t\tself.unlink(sysunlink);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tUHYVE_PORT_LSEEK => {\n\n\t\t\t\t\t\t\t\tlet data_addr: usize =\n\n\t\t\t\t\t\t\t\t\tunsafe { (*(addr.as_ptr() as *const u32)) as usize };\n\n\t\t\t\t\t\t\t\tlet syslseek = unsafe {\n\n\t\t\t\t\t\t\t\t\t&mut *(self.host_address(data_addr) as *mut SysLseek)\n\n\t\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\t\tself.lseek(syslseek);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tUHYVE_PORT_CLOSE => {\n\n\t\t\t\t\t\t\t\tlet data_addr: usize =\n\n\t\t\t\t\t\t\t\t\tunsafe { (*(addr.as_ptr() as *const u32)) as usize };\n\n\t\t\t\t\t\t\t\tlet sysclose = unsafe {\n\n\t\t\t\t\t\t\t\t\t&mut *(self.host_address(data_addr) as *mut SysClose)\n\n\t\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\t\tself.close(sysclose);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t//TODO:\n\n\t\t\t\t\t\t\tPCI_CONFIG_DATA_PORT => {\n", "file_path": "src/linux/vcpu.rs", "rank": 30, "score": 30204.552511230075 }, { "content": "\t\t\t\t\t\tmatch port {\n\n\t\t\t\t\t\t\tUHYVE_UART_PORT => {\n\n\t\t\t\t\t\t\t\tself.uart(addr)?;\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tUHYVE_PORT_CMDSIZE => {\n\n\t\t\t\t\t\t\t\tlet data_addr: usize =\n\n\t\t\t\t\t\t\t\t\tunsafe { (*(addr.as_ptr() as *const u32)) as usize };\n\n\t\t\t\t\t\t\t\tlet syssize = unsafe {\n\n\t\t\t\t\t\t\t\t\t&mut 
*(self.host_address(data_addr) as *mut SysCmdsize)\n\n\t\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\t\tself.cmdsize(syssize);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tUHYVE_PORT_CMDVAL => {\n\n\t\t\t\t\t\t\t\tlet data_addr: usize =\n\n\t\t\t\t\t\t\t\t\tunsafe { (*(addr.as_ptr() as *const u32)) as usize };\n\n\t\t\t\t\t\t\t\tlet syscmdval =\n\n\t\t\t\t\t\t\t\t\tunsafe { &*(self.host_address(data_addr) as *const SysCmdval) };\n\n\t\t\t\t\t\t\t\tself.cmdval(syscmdval);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tUHYVE_PORT_NETWRITE => {\n", "file_path": "src/linux/vcpu.rs", "rank": 31, "score": 30204.51701542809 }, { "content": "\t\t\tid,\n\n\t\t\tvcpu,\n\n\t\t\tvm_start,\n\n\t\t\tkernel_path,\n\n\t\t\targs,\n\n\t\t\ttx,\n\n\t\t\tvirtio_device,\n\n\t\t\tpci_addr: None,\n\n\t\t}\n\n\t}\n\n\n\n\tfn setup_cpuid(&self) -> Result<(), kvm_ioctls::Error> {\n\n\t\t//debug!(\"Setup cpuid\");\n\n\n\n\t\tlet mut kvm_cpuid = KVM.get_supported_cpuid(KVM_MAX_CPUID_ENTRIES)?;\n\n\t\tlet kvm_cpuid_entries = kvm_cpuid.as_mut_slice();\n\n\t\tlet i = kvm_cpuid_entries\n\n\t\t\t.iter()\n\n\t\t\t.position(|&r| r.function == 0x80000002)\n\n\t\t\t.unwrap();\n", "file_path": "src/linux/vcpu.rs", "rank": 32, "score": 30204.44255280667 }, { "content": "\n\n\t\t// create own processor string (first part)\n\n\t\tlet mut id_reg_values: [u32; 4] = [0; 4];\n\n\t\tlet id = b\"uhyve - unikerne\";\n\n\t\tunsafe {\n\n\t\t\tstd::ptr::copy_nonoverlapping(\n\n\t\t\t\tid.as_ptr(),\n\n\t\t\t\tid_reg_values.as_mut_ptr() as *mut u8,\n\n\t\t\t\tid.len(),\n\n\t\t\t);\n\n\t\t}\n\n\t\tkvm_cpuid_entries[i].eax = id_reg_values[0];\n\n\t\tkvm_cpuid_entries[i].ebx = id_reg_values[1];\n\n\t\tkvm_cpuid_entries[i].ecx = id_reg_values[2];\n\n\t\tkvm_cpuid_entries[i].edx = id_reg_values[3];\n\n\n\n\t\tlet i = kvm_cpuid_entries\n\n\t\t\t.iter()\n\n\t\t\t.position(|&r| r.function == 0x80000003)\n\n\t\t\t.unwrap();\n", "file_path": "src/linux/vcpu.rs", "rank": 33, "score": 30204.06251496689 }, { "content": "\n\n\t\t/// Number of bits of the index 
in each table (PML4, PDPT, PDT, PGT).\n\n\t\tpub const PAGE_MAP_BITS: usize = 9;\n\n\n\n\t\tlet executable_disable_mask = !usize::try_from(PageTableFlags::NO_EXECUTE.bits()).unwrap();\n\n\t\tlet mut page_table = self.host_address(BOOT_PML4 as usize) as *const usize;\n\n\t\tlet mut page_bits = 39;\n\n\t\tlet mut entry: usize = 0;\n\n\n\n\t\tfor _i in 0..4 {\n\n\t\t\tlet index = (addr >> page_bits) & ((1 << PAGE_MAP_BITS) - 1);\n\n\t\t\tentry = unsafe { *page_table.add(index) & executable_disable_mask };\n\n\n\n\t\t\t// bit 7 is set if this entry references a 1 GiB (PDPT) or 2 MiB (PDT) page.\n\n\t\t\tif entry & usize::try_from(PageTableFlags::HUGE_PAGE.bits()).unwrap() != 0 {\n\n\t\t\t\treturn (entry & ((!0usize) << page_bits)) | (addr & !((!0usize) << page_bits));\n\n\t\t\t} else {\n\n\t\t\t\tpage_table = self.host_address(entry & !((1 << PAGE_BITS) - 1)) as *const usize;\n\n\t\t\t\tpage_bits -= PAGE_MAP_BITS;\n\n\t\t\t}\n", "file_path": "src/linux/vcpu.rs", "rank": 34, "score": 30203.266377545126 }, { "content": "\t\t\tVcpuStopReason::Kick => Ok(None),\n\n\t\t}\n\n\t}\n\n\n\n\tfn print_registers(&self) {\n\n\t\tlet regs = self.vcpu.get_regs().unwrap();\n\n\t\tlet sregs = self.vcpu.get_sregs().unwrap();\n\n\n\n\t\tprintln!();\n\n\t\tprintln!(\"Dump state of CPU {}\", self.id);\n\n\t\tprintln!();\n\n\t\tprintln!(\"Registers:\");\n\n\t\tprintln!(\"----------\");\n\n\t\tprintln!(\"{:?}{:?}\", regs, sregs);\n\n\n\n\t\tprintln!(\"Segment registers:\");\n\n\t\tprintln!(\"------------------\");\n\n\t\tprintln!(\"register selector base limit type p dpl db s l g avl\");\n\n\t\tUhyveCPU::show_segment(\"cs \", &sregs.cs);\n\n\t\tUhyveCPU::show_segment(\"ss \", &sregs.ss);\n", "file_path": "src/linux/vcpu.rs", "rank": 35, "score": 30203.093823614843 }, { "content": "\n\n\t\t// create own processor string (second part)\n\n\t\tlet id = b\"l hypervisor\\0\";\n\n\t\tunsafe {\n\n\t\t\tstd::ptr::copy_nonoverlapping(\n\n\t\t\t\tid.as_ptr(),\n\n\t\t\t\tid_reg_values.as_mut_ptr() as 
*mut u8,\n\n\t\t\t\tid.len(),\n\n\t\t\t);\n\n\t\t}\n\n\t\tkvm_cpuid_entries[i].eax = id_reg_values[0];\n\n\t\tkvm_cpuid_entries[i].ebx = id_reg_values[1];\n\n\t\tkvm_cpuid_entries[i].ecx = id_reg_values[2];\n\n\t\tkvm_cpuid_entries[i].edx = id_reg_values[3];\n\n\n\n\t\tlet i = kvm_cpuid_entries\n\n\t\t\t.iter()\n\n\t\t\t.position(|&r| r.function == 0x80000004)\n\n\t\t\t.unwrap();\n\n\n", "file_path": "src/linux/vcpu.rs", "rank": 36, "score": 30201.967973836905 }, { "content": "\t\t\t\t\t\tpanic!(\"{:?}\", VcpuExit::InternalError)\n\n\t\t\t\t\t}\n\n\t\t\t\t\tvcpu_exit => {\n\n\t\t\t\t\t\tunimplemented!(\"{:?}\", vcpu_exit)\n\n\t\t\t\t\t}\n\n\t\t\t\t},\n\n\t\t\t\tErr(err) => match err.errno() {\n\n\t\t\t\t\tlibc::EINTR => return Ok(VcpuStopReason::Kick),\n\n\t\t\t\t\t_ => return Err(err),\n\n\t\t\t\t},\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\tfn run(&mut self) -> HypervisorResult<Option<i32>> {\n\n\t\tmatch self.r#continue()? {\n\n\t\t\tVcpuStopReason::Debug(_) => {\n\n\t\t\t\tunreachable!(\"reached debug exit without running in debugging mode\")\n\n\t\t\t}\n\n\t\t\tVcpuStopReason::Exit(code) => Ok(Some(code)),\n", "file_path": "src/linux/vcpu.rs", "rank": 37, "score": 30201.670876404114 }, { "content": "\t\tself.vcpu.set_sregs(&sregs)?;\n\n\n\n\t\tlet mut regs = self.vcpu.get_regs()?;\n\n\t\tregs.rflags = 2;\n\n\t\tregs.rip = entry_point;\n\n\t\tregs.rdi = BOOT_INFO_ADDR;\n\n\n\n\t\tself.vcpu.set_regs(&regs)?;\n\n\n\n\t\tOk(())\n\n\t}\n\n\n\n\tfn show_dtable(name: &str, dtable: &kvm_dtable) {\n\n\t\tprintln!(\"{} {:?}\", name, dtable);\n\n\t}\n\n\n\n\tfn show_segment(name: &str, seg: &kvm_segment) {\n\n\t\tprintln!(\"{} {:?}\", name, seg);\n\n\t}\n\n\n", "file_path": "src/linux/vcpu.rs", "rank": 38, "score": 30201.09405050306 }, { "content": "\t\tUhyveCPU::show_segment(\"ds \", &sregs.ds);\n\n\t\tUhyveCPU::show_segment(\"es \", &sregs.es);\n\n\t\tUhyveCPU::show_segment(\"fs \", &sregs.fs);\n\n\t\tUhyveCPU::show_segment(\"gs \", 
&sregs.gs);\n\n\t\tUhyveCPU::show_segment(\"tr \", &sregs.tr);\n\n\t\tUhyveCPU::show_segment(\"ldt\", &sregs.ldt);\n\n\t\tUhyveCPU::show_dtable(\"gdt\", &sregs.gdt);\n\n\t\tUhyveCPU::show_dtable(\"idt\", &sregs.idt);\n\n\n\n\t\tprintln!();\n\n\t\tprintln!(\"\\nAPIC:\");\n\n\t\tprintln!(\"-----\");\n\n\t\tprintln!(\n\n\t\t\t\"efer: {:016x} apic base: {:016x}\",\n\n\t\t\tsregs.efer, sregs.apic_base\n\n\t\t);\n\n\t}\n\n}\n\n\n\nimpl Drop for UhyveCPU {\n\n\tfn drop(&mut self) {\n\n\t\tdebug!(\"Drop vCPU {}\", self.id);\n\n\t\t//self.print_registers();\n\n\t}\n\n}\n", "file_path": "src/linux/vcpu.rs", "rank": 39, "score": 30200.978000018233 }, { "content": "\t\t\t\t\t\t\t\tvirtio_device.handle_notify_output(addr, self);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tVIRTIO_PCI_QUEUE_SEL => {\n\n\t\t\t\t\t\t\t\tlet mut virtio_device = self.virtio_device.lock().unwrap();\n\n\t\t\t\t\t\t\t\tvirtio_device.write_selected_queue(addr);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tVIRTIO_PCI_QUEUE_PFN => {\n\n\t\t\t\t\t\t\t\tlet mut virtio_device = self.virtio_device.lock().unwrap();\n\n\t\t\t\t\t\t\t\tvirtio_device.write_pfn(addr, self);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t_ => {\n\n\t\t\t\t\t\t\t\tpanic!(\"Unhandled IO exit: 0x{:x}\", port);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\tVcpuExit::Debug(debug) => {\n\n\t\t\t\t\t\tinfo!(\"Caught Debug Interrupt!\");\n\n\t\t\t\t\t\treturn Ok(VcpuStopReason::Debug(debug));\n\n\t\t\t\t\t}\n\n\t\t\t\t\tVcpuExit::InternalError => {\n", "file_path": "src/linux/vcpu.rs", "rank": 40, "score": 30200.79890681317 }, { "content": "\t\t\t\t..Default::default()\n\n\t\t\t})\n\n\t\t\t.collect::<Vec<_>>();\n\n\n\n\t\t// enable fast string operations\n\n\t\tmsr_entries[0].index = MSR_IA32_MISC_ENABLE;\n\n\t\tmsr_entries[0].data = 1;\n\n\n\n\t\tlet msrs = Msrs::from_entries(&msr_entries)\n\n\t\t\t.expect(\"Unable to create initial values for the machine specific registers\");\n\n\t\tself.vcpu.set_msrs(&msrs)?;\n\n\n\n\t\tOk(())\n\n\t}\n\n\n\n\tfn 
setup_long_mode(&self, entry_point: u64) -> Result<(), kvm_ioctls::Error> {\n\n\t\t//debug!(\"Setup long mode\");\n\n\n\n\t\tlet mut sregs = self.vcpu.get_sregs()?;\n\n\n", "file_path": "src/linux/vcpu.rs", "rank": 41, "score": 30200.40727995902 }, { "content": "\t\t\t\t\t\t\t\t\tlet virtio_device = self.virtio_device.lock().unwrap();\n\n\t\t\t\t\t\t\t\t\tvirtio_device.handle_read(pci_addr & 0x3ff, addr);\n\n\t\t\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\t\t\tunsafe { *(addr.as_ptr() as *mut u32) = 0xffffffff };\n\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\t\tunsafe { *(addr.as_ptr() as *mut u32) = 0xffffffff };\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tPCI_CONFIG_ADDRESS_PORT => {}\n\n\t\t\t\t\t\tVIRTIO_PCI_STATUS => {\n\n\t\t\t\t\t\t\tlet virtio_device = self.virtio_device.lock().unwrap();\n\n\t\t\t\t\t\t\tvirtio_device.read_status(addr);\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tVIRTIO_PCI_HOST_FEATURES => {\n\n\t\t\t\t\t\t\tlet virtio_device = self.virtio_device.lock().unwrap();\n\n\t\t\t\t\t\t\tvirtio_device.read_host_features(addr);\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tVIRTIO_PCI_GUEST_FEATURES => {\n\n\t\t\t\t\t\t\tlet mut virtio_device = self.virtio_device.lock().unwrap();\n", "file_path": "src/linux/vcpu.rs", "rank": 42, "score": 30200.22460491114 }, { "content": "\t\t\tdb: 0,\n\n\t\t\ts: 1,\n\n\t\t\tl: 1,\n\n\t\t\tg: 1,\n\n\t\t\t..Default::default()\n\n\t\t};\n\n\n\n\t\tsregs.cs = seg;\n\n\n\n\t\tseg.type_ = 3;\n\n\t\tseg.selector = 2 << 3;\n\n\t\tseg.l = 0;\n\n\t\tsregs.ds = seg;\n\n\t\tsregs.es = seg;\n\n\t\tsregs.ss = seg;\n\n\t\t//sregs.fs = seg;\n\n\t\t//sregs.gs = seg;\n\n\t\tsregs.gdt.base = BOOT_GDT;\n\n\t\tsregs.gdt.limit = ((std::mem::size_of::<u64>() * BOOT_GDT_MAX as usize) - 1) as u16;\n\n\n", "file_path": "src/linux/vcpu.rs", "rank": 43, "score": 30199.795068919368 }, { "content": "\n\n\t\t// disable performance monitor\n\n\t\tkvm_cpuid_entries[i].eax = 
0x00;\n\n\n\n\t\tself.vcpu.set_cpuid2(&kvm_cpuid)?;\n\n\n\n\t\tOk(())\n\n\t}\n\n\n\n\tfn setup_msrs(&self) -> Result<(), kvm_ioctls::Error> {\n\n\t\t//debug!(\"Setup MSR\");\n\n\n\n\t\tlet msr_list = KVM.get_msr_index_list()?;\n\n\n\n\t\tlet mut msr_entries = msr_list\n\n\t\t\t.as_slice()\n\n\t\t\t.iter()\n\n\t\t\t.map(|i| kvm_msr_entry {\n\n\t\t\t\tindex: *i,\n\n\t\t\t\tdata: 0,\n", "file_path": "src/linux/vcpu.rs", "rank": 44, "score": 30199.738414624462 }, { "content": "\t\t\t\t\t\t\t\tif let Some(pci_addr) = self.pci_addr {\n\n\t\t\t\t\t\t\t\t\tif pci_addr & 0x1ff800 == 0 {\n\n\t\t\t\t\t\t\t\t\t\tlet mut virtio_device = self.virtio_device.lock().unwrap();\n\n\t\t\t\t\t\t\t\t\t\tvirtio_device.handle_write(pci_addr & 0x3ff, addr);\n\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tPCI_CONFIG_ADDRESS_PORT => {\n\n\t\t\t\t\t\t\t\tself.pci_addr = Some(unsafe { *(addr.as_ptr() as *const u32) });\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tVIRTIO_PCI_STATUS => {\n\n\t\t\t\t\t\t\t\tlet mut virtio_device = self.virtio_device.lock().unwrap();\n\n\t\t\t\t\t\t\t\tvirtio_device.write_status(addr);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tVIRTIO_PCI_GUEST_FEATURES => {\n\n\t\t\t\t\t\t\t\tlet mut virtio_device = self.virtio_device.lock().unwrap();\n\n\t\t\t\t\t\t\t\tvirtio_device.write_requested_features(addr);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tVIRTIO_PCI_QUEUE_NOTIFY => {\n\n\t\t\t\t\t\t\t\tlet mut virtio_device = self.virtio_device.lock().unwrap();\n", "file_path": "src/linux/vcpu.rs", "rank": 45, "score": 30199.231003888286 }, { "content": "\t\t\t\t\t\t\tvirtio_device.read_requested_features(addr);\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tVIRTIO_PCI_CONFIG_OFF_MSIX_OFF..=VIRTIO_PCI_CONFIG_OFF_MSIX_OFF_MAX => {\n\n\t\t\t\t\t\t\tlet virtio_device = self.virtio_device.lock().unwrap();\n\n\t\t\t\t\t\t\tvirtio_device\n\n\t\t\t\t\t\t\t\t.read_mac_byte(addr, port - VIRTIO_PCI_CONFIG_OFF_MSIX_OFF);\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tVIRTIO_PCI_ISR => {\n\n\t\t\t\t\t\t\tlet 
mut virtio_device = self.virtio_device.lock().unwrap();\n\n\t\t\t\t\t\t\tvirtio_device.reset_interrupt()\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tVIRTIO_PCI_LINK_STATUS_MSIX_OFF => {\n\n\t\t\t\t\t\t\tlet virtio_device = self.virtio_device.lock().unwrap();\n\n\t\t\t\t\t\t\tvirtio_device.read_link_status(addr);\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t_ => {\n\n\t\t\t\t\t\t\tinfo!(\"Unhanded IO Exit\");\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t},\n\n\t\t\t\t\tVcpuExit::IoOut(port, addr) => {\n", "file_path": "src/linux/vcpu.rs", "rank": 46, "score": 30199.211550859058 }, { "content": "\t\tlet cr0 = Cr0Flags::PROTECTED_MODE_ENABLE\n\n\t\t\t| Cr0Flags::EXTENSION_TYPE\n\n\t\t\t| Cr0Flags::NUMERIC_ERROR\n\n\t\t\t| Cr0Flags::PAGING;\n\n\t\tsregs.cr0 = cr0.bits();\n\n\n\n\t\tsregs.cr3 = BOOT_PML4;\n\n\n\n\t\tlet cr4 = Cr4Flags::PHYSICAL_ADDRESS_EXTENSION;\n\n\t\tsregs.cr4 = cr4.bits();\n\n\n\n\t\tsregs.efer = EFER_LME | EFER_LMA | EFER_NXE;\n\n\n\n\t\tlet mut seg = kvm_segment {\n\n\t\t\tbase: 0,\n\n\t\t\tlimit: 0xffffffff,\n\n\t\t\tselector: 1 << 3,\n\n\t\t\tpresent: 1,\n\n\t\t\ttype_: 11,\n\n\t\t\tdpl: 0,\n", "file_path": "src/linux/vcpu.rs", "rank": 47, "score": 30197.317065111463 }, { "content": "\t\t// create own processor string (third part)\n\n\t\tkvm_cpuid_entries[i].eax = 0;\n\n\t\tkvm_cpuid_entries[i].ebx = 0;\n\n\t\tkvm_cpuid_entries[i].ecx = 0;\n\n\t\tkvm_cpuid_entries[i].edx = 0;\n\n\n\n\t\tlet i = kvm_cpuid_entries\n\n\t\t\t.iter()\n\n\t\t\t.position(|&r| r.function == 1)\n\n\t\t\t.unwrap();\n\n\n\n\t\t// CPUID to define basic cpu features\n\n\t\tkvm_cpuid_entries[i].ecx |= CPUID_EXT_HYPERVISOR; // propagate that we are running on a hypervisor\n\n\t\tkvm_cpuid_entries[i].ecx |= CPUID_TSC_DEADLINE; // enable TSC deadline feature\n\n\t\tkvm_cpuid_entries[i].edx |= CPUID_ENABLE_MSR; // enable msr support\n\n\n\n\t\tlet i = kvm_cpuid_entries\n\n\t\t\t.iter()\n\n\t\t\t.position(|&r| r.function == 0x0A)\n\n\t\t\t.unwrap();\n", "file_path": "src/linux/vcpu.rs", "rank": 48, "score": 
30197.317065111463 }, { "content": "// Constructor for a conventional segment GDT (or LDT) entry\n\npub fn create_gdt_entry(flags: u64, base: u64, limit: u64) -> u64 {\n\n\t((base & 0xff000000u64) << (56 - 24))\n\n\t\t| ((flags & 0x0000f0ffu64) << 40)\n\n\t\t| ((limit & 0x000f0000u64) << (48 - 16))\n\n\t\t| ((base & 0x00ffffffu64) << 16)\n\n\t\t| (limit & 0x0000ffffu64)\n\n}\n", "file_path": "src/arch/x86_64/mod.rs", "rank": 49, "score": 28789.477692658038 }, { "content": "\tapic_base: u64,\n\n\tioapic: Arc<Mutex<IoApic>>,\n\n}\n\n\n\nimpl UhyveCPU {\n\n\tpub fn new(\n\n\t\tid: u32,\n\n\t\tkernel_path: PathBuf,\n\n\t\targs: Vec<OsString>,\n\n\t\tvm_start: usize,\n\n\t\tioapic: Arc<Mutex<IoApic>>,\n\n\t) -> UhyveCPU {\n\n\t\tUhyveCPU {\n\n\t\t\tid,\n\n\t\t\tkernel_path,\n\n\t\t\targs,\n\n\t\t\tvcpu: xhypervisor::VirtualCpu::new().unwrap(),\n\n\t\t\tvm_start,\n\n\t\t\tapic_base: APIC_DEFAULT_BASE,\n\n\t\t\tioapic,\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 51, "score": 28638.881174827082 }, { "content": "\tstatic ref CAP_PROCBASED2: u64 = {\n\n\t\tlet cap: u64 = { read_vmx_cap(&xhypervisor::VMXCap::PROCBASED2).unwrap() };\n\n\t\tcap2ctrl(cap, CPU_BASED2_RDTSCP | CPU_BASED2_APIC_REG_VIRT)\n\n\t};\n\n\tstatic ref CAP_ENTRY: u64 = {\n\n\t\tlet cap: u64 = { read_vmx_cap(&xhypervisor::VMXCap::ENTRY).unwrap() };\n\n\t\tcap2ctrl(cap, VMENTRY_LOAD_EFER | VMENTRY_GUEST_IA32E)\n\n\t};\n\n\tstatic ref CAP_EXIT: u64 = {\n\n\t\tlet cap: u64 = { read_vmx_cap(&xhypervisor::VMXCap::EXIT).unwrap() };\n\n\t\tcap2ctrl(cap, 0)\n\n\t};\n\n}\n\n\n\npub struct UhyveCPU {\n\n\tid: u32,\n\n\tkernel_path: PathBuf,\n\n\targs: Vec<OsString>,\n\n\tvcpu: xhypervisor::VirtualCpu,\n\n\tvm_start: usize,\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 52, "score": 28633.108987075928 }, { "content": "\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\t_ => {\n\n\t\t\t\t\tpanic!(\"IO-APIC Emulation failed\");\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t};\n\n\n\n\t\tself.vcpu.write_register(&Register::RIP, rip + 
len)?;\n\n\n\n\t\tOk(())\n\n\t}\n\n\n\n\tpub fn get_vcpu(&self) -> &xhypervisor::VirtualCpu {\n\n\t\t&self.vcpu\n\n\t}\n\n}\n\n\n\nimpl VirtualCPU for UhyveCPU {\n\n\tfn init(&mut self, entry_point: u64) -> HypervisorResult<()> {\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 54, "score": 28631.20864984812 }, { "content": "\t\t\t}\n\n\t\t\t0x80000003 => {\n\n\t\t\t\t// create own processor string (second part)\n\n\t\t\t\tlet mut id_reg_values: [u32; 4] = [0; 4];\n\n\t\t\t\tlet id = b\"l hypervisor\\0\";\n\n\t\t\t\tunsafe {\n\n\t\t\t\t\tstd::ptr::copy_nonoverlapping(\n\n\t\t\t\t\t\tid.as_ptr(),\n\n\t\t\t\t\t\tid_reg_values.as_mut_ptr() as *mut u8,\n\n\t\t\t\t\t\tid.len(),\n\n\t\t\t\t\t);\n\n\t\t\t\t}\n\n\n\n\t\t\t\tself.vcpu\n\n\t\t\t\t\t.write_register(&Register::RAX, id_reg_values[0] as u64)?;\n\n\t\t\t\tself.vcpu\n\n\t\t\t\t\t.write_register(&Register::RBX, id_reg_values[1] as u64)?;\n\n\t\t\t\tself.vcpu\n\n\t\t\t\t\t.write_register(&Register::RCX, id_reg_values[2] as u64)?;\n\n\t\t\t\tself.vcpu\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 55, "score": 28629.55851992624 }, { "content": "\t\t\t0x80000002 => {\n\n\t\t\t\t// create own processor string (first part)\n\n\t\t\t\tlet mut id_reg_values: [u32; 4] = [0; 4];\n\n\t\t\t\tlet id = b\"uhyve - unikerne\";\n\n\t\t\t\tunsafe {\n\n\t\t\t\t\tstd::ptr::copy_nonoverlapping(\n\n\t\t\t\t\t\tid.as_ptr(),\n\n\t\t\t\t\t\tid_reg_values.as_mut_ptr() as *mut u8,\n\n\t\t\t\t\t\tid.len(),\n\n\t\t\t\t\t);\n\n\t\t\t\t}\n\n\n\n\t\t\t\tself.vcpu\n\n\t\t\t\t\t.write_register(&Register::RAX, id_reg_values[0] as u64)?;\n\n\t\t\t\tself.vcpu\n\n\t\t\t\t\t.write_register(&Register::RBX, id_reg_values[1] as u64)?;\n\n\t\t\t\tself.vcpu\n\n\t\t\t\t\t.write_register(&Register::RCX, id_reg_values[2] as u64)?;\n\n\t\t\t\tself.vcpu\n\n\t\t\t\t\t.write_register(&Register::RDX, id_reg_values[3] as u64)?;\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 56, "score": 28629.271318964857 }, { "content": "\tfn args(&self) -> 
&[OsString] {\n\n\t\tself.args.as_slice()\n\n\t}\n\n\n\n\tfn host_address(&self, addr: usize) -> usize {\n\n\t\taddr + self.vm_start\n\n\t}\n\n\n\n\tfn virt_to_phys(&self, addr: usize) -> usize {\n\n\t\t/// Number of Offset bits of a virtual address for a 4 KiB page, which are shifted away to get its Page Frame Number (PFN).\n\n\t\tpub const PAGE_BITS: usize = 12;\n\n\n\n\t\t/// Number of bits of the index in each table (PML4, PDPT, PDT, PGT).\n\n\t\tpub const PAGE_MAP_BITS: usize = 9;\n\n\n\n\t\tlet executable_disable_mask = !usize::try_from(PageTableFlags::NO_EXECUTE.bits()).unwrap();\n\n\t\tlet mut page_table = self.host_address(BOOT_PML4 as usize) as *const usize;\n\n\t\tlet mut page_bits = 39;\n\n\t\tlet mut entry: usize = 0;\n\n\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 57, "score": 28629.020875005706 }, { "content": "\t/// System Call Flag Mask (R/W) See Table 35-2.\n\n\tpub const IA32_FMASK: u32 = 0xc0000084;\n\n\n\n\t/// Map of BASE Address of FS (R/W) See Table 35-2.\n\n\tpub const IA32_FS_BASE: u32 = 0xc0000100;\n\n\n\n\t/// Map of BASE Address of GS (R/W) See Table 35-2.\n\n\tpub const IA32_GS_BASE: u32 = 0xc0000101;\n\n\n\n\t/// Swap Target of BASE Address of GS (R/W) See Table 35-2.\n\n\tpub const IA32_KERNEL_GSBASE: u32 = 0xc0000102;\n\n\n\n\t/// AUXILIARY TSC Signature. 
(R/W) See Table 35-2 and Section 17.13.2, IA32_TSC_AUX Register and RDTSCP Support.\n\n\tpub const IA32_TSC_AUX: u32 = 0xc0000103;\n\n}\n\n\n\nuse msr::*;\n\n\n\n/* desired control word constrained by hardware/hypervisor capabilities */\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 59, "score": 28628.275412172763 }, { "content": "\t\t\t\t\t.write_register(&Register::RDX, id_reg_values[3] as u64)?;\n\n\t\t\t}\n\n\t\t\t0x80000004 => {\n\n\t\t\t\tself.vcpu.write_register(&Register::RAX, 0)?;\n\n\t\t\t\tself.vcpu.write_register(&Register::RBX, 0)?;\n\n\t\t\t\tself.vcpu.write_register(&Register::RCX, 0)?;\n\n\t\t\t\tself.vcpu.write_register(&Register::RDX, 0)?;\n\n\t\t\t}\n\n\t\t\t_ => {\n\n\t\t\t\tlet extended_features = (rax == 7) && (rcx == 0);\n\n\t\t\t\tlet processor_info = rax == 1;\n\n\t\t\t\tlet result = unsafe { __cpuid_count(rax as u32, rcx as u32) };\n\n\n\n\t\t\t\tlet rax = result.eax as u64;\n\n\t\t\t\tlet mut rbx = result.ebx as u64;\n\n\t\t\t\tlet mut rcx = result.ecx as u64;\n\n\t\t\t\tlet rdx = result.edx as u64;\n\n\n\n\t\t\t\tif processor_info {\n\n\t\t\t\t\t// inform that the kernel is running within a hypervisor\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 62, "score": 28627.459354773106 }, { "content": "\tpub const IA32_X2APIC_LVT_LINT0: u32 = 0x835;\n\n\n\n\t/// If ( CPUID.01H:ECX.\\[bit 21\\] = 1 )\n\n\tpub const IA32_X2APIC_LVT_LINT1: u32 = 0x836;\n\n\n\n\t/// If ( CPUID.01H:ECX.\\[bit 21\\] = 1 )\n\n\tpub const IA32_X2APIC_LVT_ERROR: u32 = 0x837;\n\n\n\n\t/// If ( CPUID.80000001.EDX.\\[bit 20\\] or CPUID.80000001.EDX.\\[bit 29\\])\n\n\tpub const IA32_EFER: u32 = 0xc0000080;\n\n\n\n\t/// System Call Target Address (R/W) See Table 35-2.\n\n\tpub const IA32_STAR: u32 = 0xc0000081;\n\n\n\n\t/// IA-32e Mode System Call Target Address (R/W) See Table 35-2.\n\n\tpub const IA32_LSTAR: u32 = 0xc0000082;\n\n\n\n\t/// System Call Target Address the compatibility mode.\n\n\tpub const IA32_CSTAR: u32 = 0xc0000083;\n\n\n", "file_path": 
"src/macos/x86_64/vcpu.rs", "rank": 65, "score": 28627.264548082425 }, { "content": "use std::ffi::OsString;\n\nuse std::path::Path;\n\nuse std::path::PathBuf;\n\nuse std::sync::{Arc, Mutex};\n\nuse x86_64::registers::control::Cr0Flags;\n\nuse x86_64::registers::control::Cr4Flags;\n\nuse x86_64::structures::gdt::SegmentSelector;\n\nuse x86_64::structures::paging::PageTableFlags;\n\nuse x86_64::PrivilegeLevel;\n\nuse xhypervisor;\n\nuse xhypervisor::consts::vmcs::*;\n\nuse xhypervisor::consts::vmx_cap::{\n\n\tCPU_BASED2_APIC_REG_VIRT, CPU_BASED2_RDTSCP, CPU_BASED_MONITOR, CPU_BASED_MSR_BITMAPS,\n\n\tCPU_BASED_MWAIT, CPU_BASED_SECONDARY_CTLS, CPU_BASED_TPR_SHADOW, CPU_BASED_TSC_OFFSET,\n\n\tPIN_BASED_INTR, PIN_BASED_NMI, PIN_BASED_VIRTUAL_NMI, VMENTRY_GUEST_IA32E, VMENTRY_LOAD_EFER,\n\n};\n\nuse xhypervisor::consts::vmx_exit;\n\nuse xhypervisor::{read_vmx_cap, Register};\n\n\n\n/// Extracted from `x86::msr`.\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 67, "score": 28627.066225730723 }, { "content": "\n\n\t/// x2APIC End of Interrupt. If ( CPUID.01H:ECX.\\[bit 21\\] = 1 )\n\n\tpub const IA32_X2APIC_EOI: u32 = 0x80b;\n\n\n\n\t/// x2APIC Spurious Interrupt Vector register (R/W)\n\n\tpub const IA32_X2APIC_SIVR: u32 = 0x80f;\n\n\n\n\t/// x2APIC Interrupt Command register (R/W)\n\n\tpub const IA32_X2APIC_ICR: u32 = 0x830;\n\n\n\n\t/// x2APIC LVT Timer Interrupt register (R/W)\n\n\tpub const IA32_X2APIC_LVT_TIMER: u32 = 0x832;\n\n\n\n\t/// x2APIC LVT Thermal Sensor Interrupt register (R/W)\n\n\tpub const IA32_X2APIC_LVT_THERMAL: u32 = 0x833;\n\n\n\n\t/// x2APIC LVT Performance Monitor register (R/W)\n\n\tpub const IA32_X2APIC_LVT_PMI: u32 = 0x834;\n\n\n\n\t/// If ( CPUID.01H:ECX.\\[bit 21\\] = 1 )\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 68, "score": 28626.6466178168 }, { "content": "mod msr {\n\n\t/// See Section 17.13, Time-Stamp Counter.\n\n\tpub const TSC: u32 = 0x10;\n\n\n\n\t/// APIC Location and Status (R/W) See Table 35-2. 
See Section 10.4.4, Local APIC Status and Location.\n\n\tpub const IA32_APIC_BASE: u32 = 0x1b;\n\n\n\n\t/// CS register target for CPL 0 code (R/W) See Table 35-2. See Section 5.8.7, Performing Fast Calls to System Procedures with the SYSENTER and SYSEXIT Instructions.\n\n\tpub const IA32_SYSENTER_CS: u32 = 0x174;\n\n\n\n\t/// Stack pointer for CPL 0 stack (R/W) See Table 35-2. See Section 5.8.7, Performing Fast Calls to System Procedures with the SYSENTER and SYSEXIT Instructions.\n\n\tpub const IA32_SYSENTER_ESP: u32 = 0x175;\n\n\n\n\t/// CPL 0 code entry point (R/W) See Table 35-2. See Section 5.8.7, Performing Fast Calls to System Procedures with the SYSENTER and SYSEXIT Instructions.\n\n\tpub const IA32_SYSENTER_EIP: u32 = 0x176;\n\n\n\n\tpub const IA32_MISC_ENABLE: u32 = 0x1a0;\n\n\n\n\t/// x2APIC Task Priority register (R/W)\n\n\tpub const IA32_X2APIC_TPR: u32 = 0x808;\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 69, "score": 28626.52090195794 }, { "content": "\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RAX)? & 0xFFFFFFFF;\n\n\t\t\t\t\t\t\tlet sysexit = unsafe {\n\n\t\t\t\t\t\t\t\t&*(self.host_address(data_addr as usize) as *const SysExit)\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\treturn Ok(VcpuStopReason::Exit(self.exit(sysexit)));\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tUHYVE_PORT_OPEN => {\n\n\t\t\t\t\t\t\tlet data_addr: u64 =\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RAX)? & 0xFFFFFFFF;\n\n\t\t\t\t\t\t\tlet sysopen = unsafe {\n\n\t\t\t\t\t\t\t\t&mut *(self.host_address(data_addr as usize) as *mut SysOpen)\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\tself.open(sysopen);\n\n\t\t\t\t\t\t\tself.vcpu.write_register(&Register::RIP, rip + len)?;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tUHYVE_PORT_WRITE => {\n\n\t\t\t\t\t\t\tlet data_addr: u64 =\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RAX)? 
& 0xFFFFFFFF;\n\n\t\t\t\t\t\t\tlet syswrite = unsafe {\n\n\t\t\t\t\t\t\t\t&*(self.host_address(data_addr as usize) as *const SysWrite)\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 70, "score": 28626.00921882294 }, { "content": "\t\t\t\t\t\t\tself.vcpu.write_register(&Register::RIP, rip + len)?;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tUHYVE_PORT_LSEEK => {\n\n\t\t\t\t\t\t\tlet data_addr: u64 =\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RAX)? & 0xFFFFFFFF;\n\n\t\t\t\t\t\t\tlet syslseek = unsafe {\n\n\t\t\t\t\t\t\t\t&mut *(self.host_address(data_addr as usize) as *mut SysLseek)\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\tself.lseek(syslseek);\n\n\t\t\t\t\t\t\tself.vcpu.write_register(&Register::RIP, rip + len)?;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tUHYVE_PORT_CLOSE => {\n\n\t\t\t\t\t\t\tlet data_addr: u64 =\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RAX)? & 0xFFFFFFFF;\n\n\t\t\t\t\t\t\tlet sysclose = unsafe {\n\n\t\t\t\t\t\t\t\t&mut *(self.host_address(data_addr as usize) as *mut SysClose)\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\tself.close(sysclose);\n\n\t\t\t\t\t\t\tself.vcpu.write_register(&Register::RIP, rip + len)?;\n\n\t\t\t\t\t\t}\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 71, "score": 28625.580400413397 }, { "content": "\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\tself.write(syswrite).unwrap();\n\n\t\t\t\t\t\t\tself.vcpu.write_register(&Register::RIP, rip + len)?;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tUHYVE_PORT_READ => {\n\n\t\t\t\t\t\t\tlet data_addr: u64 =\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RAX)? & 0xFFFFFFFF;\n\n\t\t\t\t\t\t\tlet sysread = unsafe {\n\n\t\t\t\t\t\t\t\t&mut *(self.host_address(data_addr as usize) as *mut SysRead)\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\tself.read(sysread);\n\n\t\t\t\t\t\t\tself.vcpu.write_register(&Register::RIP, rip + len)?;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tUHYVE_PORT_UNLINK => {\n\n\t\t\t\t\t\t\tlet data_addr: u64 =\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RAX)? 
& 0xFFFFFFFF;\n\n\t\t\t\t\t\t\tlet sysunlink = unsafe {\n\n\t\t\t\t\t\t\t\t&mut *(self.host_address(data_addr as usize) as *mut SysUnlink)\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\tself.unlink(sysunlink);\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 72, "score": 28625.455357816674 }, { "content": "\t\t\t\t\t\tUHYVE_PORT_CMDSIZE => {\n\n\t\t\t\t\t\t\tlet data_addr: u64 =\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RAX)? & 0xFFFFFFFF;\n\n\t\t\t\t\t\t\tlet syssize = unsafe {\n\n\t\t\t\t\t\t\t\t&mut *(self.host_address(data_addr as usize) as *mut SysCmdsize)\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\tself.cmdsize(syssize);\n\n\t\t\t\t\t\t\tself.vcpu.write_register(&Register::RIP, rip + len)?;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tUHYVE_PORT_CMDVAL => {\n\n\t\t\t\t\t\t\tlet data_addr: u64 =\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RAX)? & 0xFFFFFFFF;\n\n\t\t\t\t\t\t\tlet syscmdval = unsafe {\n\n\t\t\t\t\t\t\t\t&*(self.host_address(data_addr as usize) as *const SysCmdval)\n\n\t\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\tself.cmdval(syscmdval);\n\n\t\t\t\t\t\t\tself.vcpu.write_register(&Register::RIP, rip + len)?;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tUHYVE_PORT_EXIT => {\n\n\t\t\t\t\t\t\tlet data_addr: u64 =\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 73, "score": 28625.424982638073 }, { "content": "\t\tfor _i in 0..4 {\n\n\t\t\tlet index = (addr >> page_bits) & ((1 << PAGE_MAP_BITS) - 1);\n\n\t\t\tentry = unsafe { *page_table.add(index) & executable_disable_mask };\n\n\n\n\t\t\t// bit 7 is set if this entry references a 1 GiB (PDPT) or 2 MiB (PDT) page.\n\n\t\t\tif entry & usize::try_from(PageTableFlags::HUGE_PAGE.bits()).unwrap() != 0 {\n\n\t\t\t\treturn (entry & ((!0usize) << page_bits)) | (addr & !((!0usize) << page_bits));\n\n\t\t\t} else {\n\n\t\t\t\tpage_table = self.host_address(entry & !((1 << PAGE_BITS) - 1)) as *const usize;\n\n\t\t\t\tpage_bits -= PAGE_MAP_BITS;\n\n\t\t\t}\n\n\t\t}\n\n\n\n\t\t(entry & ((!0usize) << PAGE_BITS)) | (addr & !((!0usize) << 
PAGE_BITS))\n\n\t}\n\n\n\n\tfn r#continue(&mut self) -> HypervisorResult<VcpuStopReason> {\n\n\t\tloop {\n\n\t\t\t/*if self.extint_pending == true {\n\n\t\t\t\tlet irq_info = self.vcpu.read_vmcs(VMCS_CTRL_VMENTRY_IRQ_INFO)?;\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 74, "score": 28625.399580958594 }, { "content": "#![allow(non_snake_case)]\n\n\n\nuse crate::consts::*;\n\nuse crate::macos::x86_64::ioapic::IoApic;\n\nuse crate::vm::HypervisorResult;\n\nuse crate::vm::SysClose;\n\nuse crate::vm::SysCmdsize;\n\nuse crate::vm::SysCmdval;\n\nuse crate::vm::SysExit;\n\nuse crate::vm::SysLseek;\n\nuse crate::vm::SysOpen;\n\nuse crate::vm::SysRead;\n\nuse crate::vm::SysUnlink;\n\nuse crate::vm::SysWrite;\n\nuse crate::vm::VcpuStopReason;\n\nuse crate::vm::VirtualCPU;\n\nuse burst::x86::{disassemble_64, InstructionOperation, OperandType};\n\nuse lazy_static::lazy_static;\n\nuse log::{debug, trace};\n\nuse std::arch::x86_64::__cpuid_count;\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 75, "score": 28625.27676636954 }, { "content": "\t\t\t\tself.vcpu\n\n\t\t\t\t\t.write_register(&Register::RDX, (self.apic_base >> 32) & 0xFFFFFFFF)?;\n\n\t\t\t}\n\n\t\t\trcx => {\n\n\t\t\t\tpanic!(\"Unable to read msr 0x{:x}!\", rcx)\n\n\t\t\t}\n\n\t\t}\n\n\n\n\t\tself.vcpu.write_register(&Register::RIP, rip + len)?;\n\n\n\n\t\tOk(())\n\n\t}\n\n\n\n\tfn emulate_wrmsr(&mut self, rip: u64) -> HypervisorResult<()> {\n\n\t\tlet len = self.vcpu.read_vmcs(VMCS_RO_VMEXIT_INSTR_LEN)?;\n\n\t\tlet rcx = self.vcpu.read_register(&Register::RCX)? & 0xFFFFFFFF;\n\n\n\n\t\tmatch rcx as u32 {\n\n\t\t\tIA32_EFER => {\n\n\t\t\t\tlet rax = self.vcpu.read_register(&Register::RAX)? & 0xFFFFFFFF;\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 76, "score": 28625.112297523476 }, { "content": "\tfn emulate_rdmsr(&mut self, rip: u64) -> HypervisorResult<()> {\n\n\t\tlet len = self.vcpu.read_vmcs(VMCS_RO_VMEXIT_INSTR_LEN)?;\n\n\t\tlet rcx = self.vcpu.read_register(&Register::RCX)? 
& 0xFFFFFFFF;\n\n\n\n\t\tmatch rcx as u32 {\n\n\t\t\tIA32_EFER => {\n\n\t\t\t\tlet efer = self.vcpu.read_vmcs(VMCS_GUEST_IA32_EFER)?;\n\n\t\t\t\tlet rax = efer & 0xFFFFFFFF;\n\n\t\t\t\tlet rdx = efer >> 32;\n\n\n\n\t\t\t\tself.vcpu.write_register(&Register::RAX, rax)?;\n\n\t\t\t\tself.vcpu.write_register(&Register::RDX, rdx)?;\n\n\t\t\t}\n\n\t\t\tIA32_MISC_ENABLE => {\n\n\t\t\t\tself.vcpu.write_register(&Register::RAX, 0)?;\n\n\t\t\t\tself.vcpu.write_register(&Register::RDX, 0)?;\n\n\t\t\t}\n\n\t\t\tIA32_APIC_BASE => {\n\n\t\t\t\tself.vcpu\n\n\t\t\t\t\t.write_register(&Register::RAX, self.apic_base & 0xFFFFFFFF)?;\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 77, "score": 28624.99268468566 }, { "content": "\t\t}\n\n\t}\n\n\n\n\tfn print_registers(&self) {\n\n\t\tprintln!(\"\\nDump state of CPU {}\", self.id);\n\n\t\tprintln!(\"VMCS:\");\n\n\t\tprintln!(\"-----\");\n\n\t\tprintln!(\n\n\t\t\t\"CR0: mask {:016x} shadow {:016x}\",\n\n\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_CR0_MASK).unwrap(),\n\n\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_CR0_SHADOW).unwrap()\n\n\t\t);\n\n\t\tprintln!(\n\n\t\t\t\"CR4: mask {:016x} shadow {:016x}\",\n\n\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_CR4_MASK).unwrap(),\n\n\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_CR4_SHADOW).unwrap()\n\n\t\t);\n\n\t\tprintln!(\n\n\t\t\t\"Pinbased: {:016x}\\n1st: {:016x}\\n2nd: {:016x}\",\n\n\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_PIN_BASED).unwrap(),\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 78, "score": 28624.96436415432 }, { "content": "\n\n\tfn setup_msr(&mut self) -> Result<(), xhypervisor::Error> {\n\n\t\tdebug!(\"Enable MSR registers\");\n\n\n\n\t\tself.vcpu.enable_native_msr(IA32_FS_BASE, true)?;\n\n\t\tself.vcpu.enable_native_msr(IA32_GS_BASE, true)?;\n\n\t\tself.vcpu.enable_native_msr(IA32_KERNEL_GSBASE, true)?;\n\n\t\tself.vcpu.enable_native_msr(IA32_SYSENTER_CS, true)?;\n\n\t\tself.vcpu.enable_native_msr(IA32_SYSENTER_EIP, true)?;\n\n\t\tself.vcpu.enable_native_msr(IA32_SYSENTER_ESP, 
true)?;\n\n\t\tself.vcpu.enable_native_msr(IA32_STAR, true)?;\n\n\t\tself.vcpu.enable_native_msr(IA32_LSTAR, true)?;\n\n\t\tself.vcpu.enable_native_msr(IA32_CSTAR, true)?;\n\n\t\tself.vcpu.enable_native_msr(IA32_FMASK, true)?;\n\n\t\tself.vcpu.enable_native_msr(TSC, true)?;\n\n\t\tself.vcpu.enable_native_msr(IA32_TSC_AUX, true)?;\n\n\n\n\t\tOk(())\n\n\t}\n\n\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 79, "score": 28624.89107270502 }, { "content": "\tfn setup_capabilities(&mut self) -> Result<(), xhypervisor::Error> {\n\n\t\tdebug!(\"Setup VMX capabilities\");\n\n\n\n\t\tself.vcpu.write_vmcs(VMCS_CTRL_PIN_BASED, *CAP_PINBASED)?;\n\n\t\tdebug!(\n\n\t\t\t\"Pin-Based VM-Execution Controls 0x{:x}\",\n\n\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_PIN_BASED)?\n\n\t\t);\n\n\t\tself.vcpu.write_vmcs(VMCS_CTRL_CPU_BASED, *CAP_PROCBASED)?;\n\n\t\tdebug!(\n\n\t\t\t\"Primary Processor-Based VM-Execution Controls 0x{:x}\",\n\n\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_CPU_BASED)?\n\n\t\t);\n\n\t\tself.vcpu\n\n\t\t\t.write_vmcs(VMCS_CTRL_CPU_BASED2, *CAP_PROCBASED2)?;\n\n\t\tdebug!(\n\n\t\t\t\"Secondary Processor-Based VM-Execution Controls 0x{:x}\",\n\n\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_CPU_BASED2)?\n\n\t\t);\n\n\t\tself.vcpu\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 80, "score": 28624.87628446528 }, { "content": "\t\t}\n\n\t}\n\n\n\n\tfn setup_system_gdt(&mut self) -> Result<(), xhypervisor::Error> {\n\n\t\tdebug!(\"Setup GDT\");\n\n\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_CS_LIMIT, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_CS_BASE, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_CS_AR, 0x209B)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_SS_LIMIT, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_SS_BASE, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_SS_AR, 0x4093)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_DS_LIMIT, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_DS_BASE, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_DS_AR, 0x4093)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_ES_LIMIT, 
0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_ES_BASE, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_ES_AR, 0x4093)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_FS_LIMIT, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_FS_BASE, 0)?;\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 81, "score": 28624.818260595224 }, { "content": "\t\tself.vcpu.write_vmcs(VMCS_GUEST_FS_AR, 0x4093)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_GS_LIMIT, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_GS_BASE, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_GS_AR, 0x4093)?;\n\n\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_GDTR_BASE, BOOT_GDT)?;\n\n\t\tself.vcpu.write_vmcs(\n\n\t\t\tVMCS_GUEST_GDTR_LIMIT,\n\n\t\t\t((std::mem::size_of::<u64>() * BOOT_GDT_MAX as usize) - 1) as u64,\n\n\t\t)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_IDTR_BASE, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_IDTR_LIMIT, 0xffff)?;\n\n\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_TR, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_TR_LIMIT, 0xffff)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_TR_AR, 0x8b)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_TR_BASE, 0)?;\n\n\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_LDTR, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_LDTR_LIMIT, 0xffff)?;\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 82, "score": 28624.752249887657 }, { "content": "\t\tself.vcpu.write_register(&Register::RIP, rip + len)?;\n\n\n\n\t\tOk(())\n\n\t}\n\n\n\n\tfn emulate_ioapic(&mut self, rip: u64, address: u64) -> HypervisorResult<()> {\n\n\t\tlet len = self.vcpu.read_vmcs(VMCS_RO_VMEXIT_INSTR_LEN)?;\n\n\t\tlet qualification = self.vcpu.read_vmcs(VMCS_RO_EXIT_QUALIFIC)?;\n\n\t\tlet read = (qualification & (1 << 0)) != 0;\n\n\t\tlet write = (qualification & (1 << 1)) != 0;\n\n\t\tlet code =\n\n\t\t\tunsafe { std::slice::from_raw_parts(self.host_address(rip as usize) as *const u8, 8) };\n\n\n\n\t\tif let Ok(instr) = disassemble_64(code, rip as usize, code.len()) {\n\n\t\t\tmatch instr.operation {\n\n\t\t\t\tInstructionOperation::MOV => {\n\n\t\t\t\t\tif 
write {\n\n\t\t\t\t\t\tlet val = match instr.operands[1].operand {\n\n\t\t\t\t\t\t\tOperandType::IMM => instr.operands[1].immediate as u64,\n\n\t\t\t\t\t\t\tOperandType::REG_EDI => {\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 83, "score": 28624.724836336205 }, { "content": "\t\t\t(gs_ar >> 4) & 0x1, (gs_ar >> 13) & 0x1, (gs_ar >> 15) & 0x1, (gs_ar >> 12) & 1);\n\n\t\tprintln!(\"tr {:04x} {:016x} {:08x} {:02x} {:x} {:x} {:x} {:x} {:x} {:x} {:x}\",\n\n\t\t\ttr, tr_base, tr_limit, (tr_ar) & 0xf, (tr_ar >> 7) & 0x1, (tr_ar >> 5) & 0x3, (tr_ar >> 14) & 0x1,\n\n\t\t\t(tr_ar >> 4) & 0x1, (tr_ar >> 13) & 0x1, (tr_ar >> 15) & 0x1, (tr_ar >> 12) & 1);\n\n\t\tprintln!(\"ldt {:04x} {:016x} {:08x} {:02x} {:x} {:x} {:x} {:x} {:x} {:x} {:x}\",\n\n\t\t\tldtr, ldtr_base, ldtr_limit, (ldtr_ar) & 0xf, (ldtr_ar >> 7) & 0x1, (ldtr_ar >> 5) & 0x3, (ldtr_ar >> 14) & 0x1,\n\n\t\t\t(ldtr_ar >> 4) & 0x1, (ldtr_ar >> 13) & 0x1, (ldtr_ar >> 15) & 0x1, (ldtr_ar >> 12) & 1);\n\n\n\n\t\tlet gdt_base = self.vcpu.read_vmcs(VMCS_GUEST_GDTR_BASE).unwrap();\n\n\t\tlet gdt_limit = self.vcpu.read_vmcs(VMCS_GUEST_GDTR_LIMIT).unwrap();\n\n\t\tprintln!(\"gdt {:016x} {:08x}\", gdt_base, gdt_limit);\n\n\t\tlet idt_base = self.vcpu.read_vmcs(VMCS_GUEST_IDTR_BASE).unwrap();\n\n\t\tlet idt_limit = self.vcpu.read_vmcs(VMCS_GUEST_IDTR_LIMIT).unwrap();\n\n\t\tprintln!(\"idt {:016x} {:08x}\", idt_base, idt_limit);\n\n\t\tprintln!(\n\n\t\t\t\"VMCS link pointer {:016x}\",\n\n\t\t\tself.vcpu.read_vmcs(VMCS_GUEST_LINK_POINTER).unwrap()\n\n\t\t);\n\n\t}\n\n}\n\n\n\nimpl Drop for UhyveCPU {\n\n\tfn drop(&mut self) {\n\n\t\tdebug!(\"Drop virtual CPU {}\", self.id);\n\n\t\tlet _ = self.vcpu.destroy();\n\n\t}\n\n}\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 84, "score": 28624.6860579711 }, { "content": "\t\tlet tr_limit = self.vcpu.read_vmcs(VMCS_GUEST_TR_LIMIT).unwrap();\n\n\t\tlet tr_base = self.vcpu.read_vmcs(VMCS_GUEST_TR_BASE).unwrap();\n\n\t\tlet tr_ar = 
self.vcpu.read_vmcs(VMCS_GUEST_TR_AR).unwrap();\n\n\t\tlet ldtr_limit = self.vcpu.read_vmcs(VMCS_GUEST_LDTR_LIMIT).unwrap();\n\n\t\tlet ldtr_base = self.vcpu.read_vmcs(VMCS_GUEST_LDTR_BASE).unwrap();\n\n\t\tlet ldtr_ar = self.vcpu.read_vmcs(VMCS_GUEST_LDTR_AR).unwrap();\n\n\n\n\t\t/*\n\n\t\t * Format of Access Rights\n\n\t\t * -----------------------\n\n\t\t * 3-0 : Segment type\n\n\t\t * 4 : S — Descriptor type (0 = system; 1 = code or data)\n\n\t\t * 6-5 : DPL — Descriptor privilege level\n\n\t\t * 7 : P — Segment present\n\n\t\t * 11-8: Reserved\n\n\t\t * 12 : AVL — Available for use by system software\n\n\t\t * 13 : L — 64-bit mode active (for CS only)\n\n\t\t * 14 : D/B — Default operation size (0 = 16-bit segment; 1 = 32-bit segment)\n\n\t\t * 15 : G — Granularity\n\n\t\t * 16 : Segment unusable (0 = usable; 1 = unusable)\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 85, "score": 28624.18602884254 }, { "content": "\t\tself.vcpu.write_register(&Register::RSI, 0)?;\n\n\t\tself.vcpu.write_register(&Register::RDI, BOOT_INFO_ADDR)?;\n\n\t\tself.vcpu.write_register(&Register::R8, 0)?;\n\n\t\tself.vcpu.write_register(&Register::R9, 0)?;\n\n\t\tself.vcpu.write_register(&Register::R10, 0)?;\n\n\t\tself.vcpu.write_register(&Register::R11, 0)?;\n\n\t\tself.vcpu.write_register(&Register::R12, 0)?;\n\n\t\tself.vcpu.write_register(&Register::R13, 0)?;\n\n\t\tself.vcpu.write_register(&Register::R14, 0)?;\n\n\t\tself.vcpu.write_register(&Register::R15, 0)?;\n\n\t\tself.setup_system_gdt()?;\n\n\t\tself.setup_system_64bit()?;\n\n\n\n\t\tOk(())\n\n\t}\n\n\n\n\tfn kernel_path(&self) -> &Path {\n\n\t\tself.kernel_path.as_path()\n\n\t}\n\n\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 87, "score": 28623.370700750405 }, { "content": "\t\tself.setup_capabilities()?;\n\n\t\tself.setup_msr()?;\n\n\n\n\t\tself.vcpu\n\n\t\t\t.write_vmcs(VMCS_CTRL_EXC_BITMAP, (1 << 3) | (1 << 1))?;\n\n\t\tself.vcpu.write_vmcs(VMCS_CTRL_TPR_THRESHOLD, 
0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_SYSENTER_EIP, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_SYSENTER_ESP, 0)?;\n\n\n\n\t\tdebug!(\"Setup general purpose registers\");\n\n\t\tself.vcpu.write_register(&Register::RIP, entry_point)?;\n\n\t\tself.vcpu.write_register(&Register::RFLAGS, 0x2)?;\n\n\t\t// create temporary stack to boot the kernel\n\n\t\tself.vcpu\n\n\t\t\t.write_register(&Register::RSP, 0x200000 - 0x1000)?;\n\n\t\tself.vcpu.write_register(&Register::RBP, 0)?;\n\n\t\tself.vcpu.write_register(&Register::RAX, 0)?;\n\n\t\tself.vcpu.write_register(&Register::RBX, 0)?;\n\n\t\tself.vcpu.write_register(&Register::RCX, 0)?;\n\n\t\tself.vcpu.write_register(&Register::RDX, 0)?;\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 88, "score": 28623.336563499906 }, { "content": "\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_CPU_BASED).unwrap(),\n\n\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_CPU_BASED2).unwrap()\n\n\t\t);\n\n\t\tprintln!(\n\n\t\t\t\"Entry: {:016x}\\nExit: {:016x}\",\n\n\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_VMENTRY_CONTROLS).unwrap(),\n\n\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_VMEXIT_CONTROLS).unwrap()\n\n\t\t);\n\n\n\n\t\tprintln!(\"\\nRegisters:\");\n\n\t\tprintln!(\"----------\");\n\n\n\n\t\tlet rip = self.vcpu.read_register(&Register::RIP).unwrap();\n\n\t\tlet rflags = self.vcpu.read_register(&Register::RFLAGS).unwrap();\n\n\t\tlet rsp = self.vcpu.read_register(&Register::RSP).unwrap();\n\n\t\tlet rbp = self.vcpu.read_register(&Register::RBP).unwrap();\n\n\t\tlet rax = self.vcpu.read_register(&Register::RAX).unwrap();\n\n\t\tlet rbx = self.vcpu.read_register(&Register::RBX).unwrap();\n\n\t\tlet rcx = self.vcpu.read_register(&Register::RCX).unwrap();\n\n\t\tlet rdx = self.vcpu.read_register(&Register::RDX).unwrap();\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 89, "score": 28623.289354296474 }, { "content": "\t\tself.vcpu\n\n\t\t\t.write_vmcs(VMCS_GUEST_IA32_EFER, EFER_LME | 
EFER_LMA)?;\n\n\n\n\t\tself.vcpu.write_vmcs(\n\n\t\t\tVMCS_CTRL_CR0_MASK,\n\n\t\t\t(Cr0Flags::CACHE_DISABLE | Cr0Flags::NOT_WRITE_THROUGH | cr0).bits(),\n\n\t\t)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_CTRL_CR0_SHADOW, cr0.bits())?;\n\n\t\tself.vcpu.write_vmcs(VMCS_CTRL_CR4_MASK, cr4.bits())?;\n\n\t\tself.vcpu.write_vmcs(VMCS_CTRL_CR4_SHADOW, cr4.bits())?;\n\n\n\n\t\tself.vcpu.write_register(&Register::CR0, cr0.bits())?;\n\n\t\tself.vcpu.write_register(&Register::CR4, cr4.bits())?;\n\n\t\tself.vcpu.write_register(&Register::CR3, BOOT_PML4)?;\n\n\t\tself.vcpu.write_register(&Register::DR7, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_SYSENTER_ESP, 0)?;\n\n\t\tself.vcpu.write_vmcs(VMCS_GUEST_SYSENTER_EIP, 0)?;\n\n\n\n\t\tOk(())\n\n\t}\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 91, "score": 28623.217623085424 }, { "content": "\t\tlet rsi = self.vcpu.read_register(&Register::RSI).unwrap();\n\n\t\tlet rdi = self.vcpu.read_register(&Register::RDI).unwrap();\n\n\t\tlet r8 = self.vcpu.read_register(&Register::R8).unwrap();\n\n\t\tlet r9 = self.vcpu.read_register(&Register::R9).unwrap();\n\n\t\tlet r10 = self.vcpu.read_register(&Register::R10).unwrap();\n\n\t\tlet r11 = self.vcpu.read_register(&Register::R11).unwrap();\n\n\t\tlet r12 = self.vcpu.read_register(&Register::R12).unwrap();\n\n\t\tlet r13 = self.vcpu.read_register(&Register::R13).unwrap();\n\n\t\tlet r14 = self.vcpu.read_register(&Register::R14).unwrap();\n\n\t\tlet r15 = self.vcpu.read_register(&Register::R15).unwrap();\n\n\n\n\t\tprint!(\n\n\t\t\t\"rip: {:016x} rsp: {:016x} flags: {:016x}\\n\\\n\n\t\t\trax: {:016x} rbx: {:016x} rcx: {:016x}\\n\\\n\n\t\t\trdx: {:016x} rsi: {:016x} rdi: {:016x}\\n\\\n\n\t\t\trbp: {:016x} r8: {:016x} r9: {:016x}\\n\\\n\n\t\t\tr10: {:016x} r11: {:016x} r12: {:016x}\\n\\\n\n\t\t\tr13: {:016x} r14: {:016x} r15: {:016x}\\n\",\n\n\t\t\trip,\n\n\t\t\trsp,\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 92, "score": 28623.172167740708 }, { "content": "\t\tlet ldtr = 
self.vcpu.read_register(&Register::LDTR).unwrap();\n\n\n\n\t\tlet cs_limit = self.vcpu.read_vmcs(VMCS_GUEST_CS_LIMIT).unwrap();\n\n\t\tlet cs_base = self.vcpu.read_vmcs(VMCS_GUEST_CS_BASE).unwrap();\n\n\t\tlet cs_ar = self.vcpu.read_vmcs(VMCS_GUEST_CS_AR).unwrap();\n\n\t\tlet ss_limit = self.vcpu.read_vmcs(VMCS_GUEST_SS_LIMIT).unwrap();\n\n\t\tlet ss_base = self.vcpu.read_vmcs(VMCS_GUEST_SS_BASE).unwrap();\n\n\t\tlet ss_ar = self.vcpu.read_vmcs(VMCS_GUEST_SS_AR).unwrap();\n\n\t\tlet ds_limit = self.vcpu.read_vmcs(VMCS_GUEST_DS_LIMIT).unwrap();\n\n\t\tlet ds_base = self.vcpu.read_vmcs(VMCS_GUEST_DS_BASE).unwrap();\n\n\t\tlet ds_ar = self.vcpu.read_vmcs(VMCS_GUEST_DS_AR).unwrap();\n\n\t\tlet es_limit = self.vcpu.read_vmcs(VMCS_GUEST_ES_LIMIT).unwrap();\n\n\t\tlet es_base = self.vcpu.read_vmcs(VMCS_GUEST_ES_BASE).unwrap();\n\n\t\tlet es_ar = self.vcpu.read_vmcs(VMCS_GUEST_ES_AR).unwrap();\n\n\t\tlet fs_limit = self.vcpu.read_vmcs(VMCS_GUEST_FS_LIMIT).unwrap();\n\n\t\tlet fs_base = self.vcpu.read_vmcs(VMCS_GUEST_FS_BASE).unwrap();\n\n\t\tlet fs_ar = self.vcpu.read_vmcs(VMCS_GUEST_FS_AR).unwrap();\n\n\t\tlet gs_limit = self.vcpu.read_vmcs(VMCS_GUEST_GS_LIMIT).unwrap();\n\n\t\tlet gs_base = self.vcpu.read_vmcs(VMCS_GUEST_GS_BASE).unwrap();\n\n\t\tlet gs_ar = self.vcpu.read_vmcs(VMCS_GUEST_GS_AR).unwrap();\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 93, "score": 28623.166684360996 }, { "content": "\t\t\t\t\trcx |= 1 << 31;\n\n\t\t\t\t}\n\n\n\n\t\t\t\tif extended_features {\n\n\t\t\t\t\t// disable SGX support\n\n\t\t\t\t\trbx &= !(1 << 2);\n\n\t\t\t\t}\n\n\n\n\t\t\t\tself.vcpu.write_register(&Register::RAX, rax)?;\n\n\t\t\t\tself.vcpu.write_register(&Register::RBX, rbx)?;\n\n\t\t\t\tself.vcpu.write_register(&Register::RCX, rcx)?;\n\n\t\t\t\tself.vcpu.write_register(&Register::RDX, rdx)?;\n\n\t\t\t}\n\n\t\t}\n\n\n\n\t\tself.vcpu.write_register(&Register::RIP, rip + len)?;\n\n\n\n\t\tOk(())\n\n\t}\n\n\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 94, "score": 
28623.16351686856 }, { "content": "\t\tlet cr3 = self.vcpu.read_register(&Register::CR3).unwrap();\n\n\t\tlet cr4 = self.vcpu.read_register(&Register::CR4).unwrap();\n\n\t\tlet efer = self.vcpu.read_vmcs(VMCS_GUEST_IA32_EFER).unwrap();\n\n\n\n\t\tprintln!(\n\n\t\t\t\"cr0: {:016x} cr2: {:016x} cr3: {:016x}\\ncr4: {:016x} efer: {:016x}\",\n\n\t\t\tcr0, cr2, cr3, cr4, efer\n\n\t\t);\n\n\n\n\t\tprintln!(\"\\nSegment registers:\");\n\n\t\tprintln!(\"------------------\");\n\n\t\tprintln!(\"register selector base limit type p dpl db s l g avl\");\n\n\n\n\t\tlet cs = self.vcpu.read_register(&Register::CS).unwrap();\n\n\t\tlet ds = self.vcpu.read_register(&Register::DS).unwrap();\n\n\t\tlet es = self.vcpu.read_register(&Register::ES).unwrap();\n\n\t\tlet ss = self.vcpu.read_register(&Register::SS).unwrap();\n\n\t\tlet fs = self.vcpu.read_register(&Register::FS).unwrap();\n\n\t\tlet gs = self.vcpu.read_register(&Register::GS).unwrap();\n\n\t\tlet tr = self.vcpu.read_register(&Register::TR).unwrap();\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 95, "score": 28623.1419893358 }, { "content": "\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RDI)? & 0xFFFFFFFF\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tOperandType::REG_ESI => {\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RSI)? & 0xFFFFFFFF\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tOperandType::REG_EBP => {\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RBP)? & 0xFFFFFFFF\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tOperandType::REG_EAX => {\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RAX)? & 0xFFFFFFFF\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tOperandType::REG_EBX => {\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RBX)? & 0xFFFFFFFF\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tOperandType::REG_ECX => {\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RCX)? & 0xFFFFFFFF\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tOperandType::REG_EDX => {\n\n\t\t\t\t\t\t\t\tself.vcpu.read_register(&Register::RDX)? 
& 0xFFFFFFFF\n\n\t\t\t\t\t\t\t}\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 96, "score": 28623.10229832985 }, { "content": "\t\t\t\tlet flags = self.vcpu.read_register(&Register::RFLAGS)?;\n\n\t\t\t\tlet ignore_irq = self.vcpu.read_vmcs(VMCS_GUEST_IGNORE_IRQ)?;\n\n\n\n\t\t\t\tif ignore_irq & 1 != 1\n\n\t\t\t\t\t&& irq_info & (1 << 31) != (1 << 31)\n\n\t\t\t\t\t&& flags & (1 << 9) == (1 << 9)\n\n\t\t\t\t{\n\n\t\t\t\t\t// deliver timer interrupt, we don't support other kind of interrupts\n\n\t\t\t\t\t// => see table 24-15 of the Intel Manual\n\n\t\t\t\t\tlet info = 0x20 | (0 << 8) | (1 << 31);\n\n\t\t\t\t\tself.vcpu.write_vmcs(VMCS_CTRL_VMENTRY_IRQ_INFO, info)?;\n\n\t\t\t\t\tself.extint_pending = false;\n\n\t\t\t\t}\n\n\t\t\t}*/\n\n\n\n\t\t\tself.vcpu.run()?;\n\n\n\n\t\t\tlet reason = self.vcpu.read_vmcs(VMCS_RO_EXIT_REASON)? & 0xffff;\n\n\t\t\tlet rip = self.vcpu.read_register(&Register::RIP)?;\n\n\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 97, "score": 28623.024656218135 }, { "content": "\t\t\t.write_vmcs(VMCS_CTRL_VMENTRY_CONTROLS, *CAP_ENTRY)?;\n\n\t\tdebug!(\n\n\t\t\t\"VM-Entry Controls 0x{:x}\",\n\n\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_VMENTRY_CONTROLS)?\n\n\t\t);\n\n\t\tself.vcpu.write_vmcs(VMCS_CTRL_VMEXIT_CONTROLS, *CAP_EXIT)?;\n\n\t\tdebug!(\n\n\t\t\t\"VM-Exit Controls 0x{:x}\",\n\n\t\t\tself.vcpu.read_vmcs(VMCS_CTRL_VMEXIT_CONTROLS)?\n\n\t\t);\n\n\n\n\t\tOk(())\n\n\t}\n\n\n\n\tfn emulate_cpuid(&mut self, rip: u64) -> HypervisorResult<()> {\n\n\t\tlet len = self.vcpu.read_vmcs(VMCS_RO_VMEXIT_INSTR_LEN)?;\n\n\t\tlet rax = self.vcpu.read_register(&Register::RAX)?;\n\n\t\tlet rcx = self.vcpu.read_register(&Register::RCX)?;\n\n\n\n\t\tmatch rax {\n", "file_path": "src/macos/x86_64/vcpu.rs", "rank": 99, "score": 28622.920858584326 } ]
Rust
gcode/src/words.rs
Michael-F-Bryan/gcode-rs
3cfd2fe1787fcd234bf135bbc7250aa1b5b67ca6
use crate::{ lexer::{Lexer, Token, TokenType}, Comment, Span, }; use core::fmt::{self, Display, Formatter}; #[derive(Debug, Copy, Clone, PartialEq)] #[cfg_attr( feature = "serde-1", derive(serde_derive::Serialize, serde_derive::Deserialize) )] #[repr(C)] pub struct Word { pub letter: char, pub value: f32, pub span: Span, } impl Word { pub fn new(letter: char, value: f32, span: Span) -> Self { Word { letter, value, span, } } } impl Display for Word { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "{}{}", self.letter, self.value) } } #[derive(Debug, Copy, Clone, PartialEq)] pub(crate) enum Atom<'input> { Word(Word), Comment(Comment<'input>), BrokenWord(Token<'input>), Unknown(Token<'input>), } impl<'input> Atom<'input> { pub(crate) fn span(&self) -> Span { match self { Atom::Word(word) => word.span, Atom::Comment(comment) => comment.span, Atom::Unknown(token) | Atom::BrokenWord(token) => token.span, } } } #[derive(Debug, Clone, PartialEq)] pub(crate) struct WordsOrComments<'input, I> { tokens: I, last_letter: Option<Token<'input>>, } impl<'input, I> WordsOrComments<'input, I> where I: Iterator<Item = Token<'input>>, { pub(crate) fn new(tokens: I) -> Self { WordsOrComments { tokens, last_letter: None, } } } impl<'input, I> Iterator for WordsOrComments<'input, I> where I: Iterator<Item = Token<'input>>, { type Item = Atom<'input>; fn next(&mut self) -> Option<Self::Item> { while let Some(token) = self.tokens.next() { let Token { kind, value, span } = token; match kind { TokenType::Unknown => return Some(Atom::Unknown(token)), TokenType::Comment => { return Some(Atom::Comment(Comment { value, span })) }, TokenType::Letter if self.last_letter.is_none() => { self.last_letter = Some(token); }, TokenType::Number if self.last_letter.is_some() => { let letter_token = self.last_letter.take().unwrap(); let span = letter_token.span.merge(span); debug_assert_eq!(letter_token.value.len(), 1); let letter = letter_token.value.chars().next().unwrap(); let value = 
value.parse().expect(""); return Some(Atom::Word(Word { letter, value, span, })); }, _ => return Some(Atom::BrokenWord(token)), } } self.last_letter.take().map(Atom::BrokenWord) } } impl<'input> From<&'input str> for WordsOrComments<'input, Lexer<'input>> { fn from(other: &'input str) -> WordsOrComments<'input, Lexer<'input>> { WordsOrComments::new(Lexer::new(other)) } } #[cfg(test)] mod tests { use super::*; use crate::lexer::Lexer; #[test] fn pass_comments_through() { let mut words = WordsOrComments::new(Lexer::new("(this is a comment) 3.14")); let got = words.next().unwrap(); let comment = "(this is a comment)"; let expected = Atom::Comment(Comment { value: comment, span: Span { start: 0, end: comment.len(), line: 0, }, }); assert_eq!(got, expected); } #[test] fn pass_garbage_through() { let text = "!@#$ *"; let mut words = WordsOrComments::new(Lexer::new(text)); let got = words.next().unwrap(); let expected = Atom::Unknown(Token { value: text, kind: TokenType::Unknown, span: Span { start: 0, end: text.len(), line: 0, }, }); assert_eq!(got, expected); } #[test] fn numbers_are_garbage_if_they_dont_have_a_letter_in_front() { let text = "3.14 ()"; let mut words = WordsOrComments::new(Lexer::new(text)); let got = words.next().unwrap(); let expected = Atom::BrokenWord(Token { value: "3.14", kind: TokenType::Number, span: Span { start: 0, end: 4, line: 0, }, }); assert_eq!(got, expected); } #[test] fn recognise_a_valid_word() { let text = "G90"; let mut words = WordsOrComments::new(Lexer::new(text)); let got = words.next().unwrap(); let expected = Atom::Word(Word { letter: 'G', value: 90.0, span: Span { start: 0, end: text.len(), line: 0, }, }); assert_eq!(got, expected); } }
use crate::{ lexer::{Lexer, Token, TokenType}, Comment, Span, }; use core::fmt::{self, Display, Formatter}; #[derive(Debug, Copy, Clone, PartialEq)] #[cfg_attr( feature = "serde-1", derive(serde_derive::Serialize, serde_derive::Deserialize) )] #[repr(C)] pub struct Word { pub letter: char, pub value: f32, pub span: Span, } impl Word { pub fn new(letter: char, value: f32, span: Span) -> Self { Word { letter, value, span, } } } impl Display for Word { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { write!(f, "{}{}", self.letter, self.value) } } #[derive(Debug, Copy, Clone, PartialEq)] pub(crate) enum Atom<'input> { Word(Word), Comment(Comment<'input>), BrokenWord(Token<'input>), Unknown(Token<'input>), } impl<'input> Atom<'input> { pub(crate) fn span(&self) -> Span { match self { Atom::Word(word) => word.span, Atom::Comment(comment) => comment.span, Atom::Unknown(token) | Atom::BrokenWord(token) => token.span, } } } #[derive(Debug, Clone, PartialEq)] pub(crate) struct WordsOrComments<'input, I> { tokens: I, last_letter: Option<Token<'input>>, } impl<'input, I> WordsOrComments<'input, I> where I: Iterator<Item = Token<'input>>, { pub(crate) fn new(tokens: I) -> Self { WordsOrComments { tokens, last_letter: None, } } } impl<'input, I> Iterator for WordsOrComments<'input, I> where I: Iterator<Item = Token<'input>>, { type Item = Atom<'input>;
() { let mut words = WordsOrComments::new(Lexer::new("(this is a comment) 3.14")); let got = words.next().unwrap(); let comment = "(this is a comment)"; let expected = Atom::Comment(Comment { value: comment, span: Span { start: 0, end: comment.len(), line: 0, }, }); assert_eq!(got, expected); } #[test] fn pass_garbage_through() { let text = "!@#$ *"; let mut words = WordsOrComments::new(Lexer::new(text)); let got = words.next().unwrap(); let expected = Atom::Unknown(Token { value: text, kind: TokenType::Unknown, span: Span { start: 0, end: text.len(), line: 0, }, }); assert_eq!(got, expected); } #[test] fn numbers_are_garbage_if_they_dont_have_a_letter_in_front() { let text = "3.14 ()"; let mut words = WordsOrComments::new(Lexer::new(text)); let got = words.next().unwrap(); let expected = Atom::BrokenWord(Token { value: "3.14", kind: TokenType::Number, span: Span { start: 0, end: 4, line: 0, }, }); assert_eq!(got, expected); } #[test] fn recognise_a_valid_word() { let text = "G90"; let mut words = WordsOrComments::new(Lexer::new(text)); let got = words.next().unwrap(); let expected = Atom::Word(Word { letter: 'G', value: 90.0, span: Span { start: 0, end: text.len(), line: 0, }, }); assert_eq!(got, expected); } }
fn next(&mut self) -> Option<Self::Item> { while let Some(token) = self.tokens.next() { let Token { kind, value, span } = token; match kind { TokenType::Unknown => return Some(Atom::Unknown(token)), TokenType::Comment => { return Some(Atom::Comment(Comment { value, span })) }, TokenType::Letter if self.last_letter.is_none() => { self.last_letter = Some(token); }, TokenType::Number if self.last_letter.is_some() => { let letter_token = self.last_letter.take().unwrap(); let span = letter_token.span.merge(span); debug_assert_eq!(letter_token.value.len(), 1); let letter = letter_token.value.chars().next().unwrap(); let value = value.parse().expect(""); return Some(Atom::Word(Word { letter, value, span, })); }, _ => return Some(Atom::BrokenWord(token)), } } self.last_letter.take().map(Atom::BrokenWord) } } impl<'input> From<&'input str> for WordsOrComments<'input, Lexer<'input>> { fn from(other: &'input str) -> WordsOrComments<'input, Lexer<'input>> { WordsOrComments::new(Lexer::new(other)) } } #[cfg(test)] mod tests { use super::*; use crate::lexer::Lexer; #[test] fn pass_comments_through
random
[ { "content": "/// Parse each [`GCode`] in some text, ignoring any errors that may occur or\n\n/// [`Comment`]s that are found.\n\n///\n\n/// This function is probably what you are looking for if you just want to read\n\n/// the [`GCode`] commands in a program. If more detailed information is needed,\n\n/// have a look at [`full_parse_with_callbacks()`].\n\npub fn parse<'input>(src: &'input str) -> impl Iterator<Item = GCode> + 'input {\n\n full_parse_with_callbacks(src, Nop).flat_map(|line| line.into_gcodes())\n\n}\n\n\n", "file_path": "gcode/src/parser.rs", "rank": 0, "score": 129934.91336416674 }, { "content": "/// Parse each [`Line`] in some text, using the provided [`Callbacks`] when a\n\n/// parse error occurs that we can recover from.\n\n///\n\n/// Unlike [`parse()`], this function will also give you access to any comments\n\n/// and line numbers that are found, plus the location of the entire [`Line`]\n\n/// in its source text.\n\npub fn full_parse_with_callbacks<'input, C: Callbacks + 'input>(\n\n src: &'input str,\n\n callbacks: C,\n\n) -> impl Iterator<Item = Line<'input>> + 'input {\n\n let tokens = Lexer::new(src);\n\n let atoms = WordsOrComments::new(tokens);\n\n Lines::new(atoms, callbacks)\n\n}\n\n\n\n/// A parser for parsing g-code programs.\n\n#[derive(Debug)]\n\npub struct Parser<'input, C, B = DefaultBuffers> {\n\n // Explicitly instantiate Lines so Parser's type parameters don't expose\n\n // internal details\n\n lines: Lines<'input, WordsOrComments<'input, Lexer<'input>>, C, B>,\n\n}\n\n\n\nimpl<'input, C, B> Parser<'input, C, B> {\n\n /// Create a new [`Parser`] from some source text and a set of\n\n /// [`Callbacks`].\n", "file_path": "gcode/src/parser.rs", "rank": 1, "score": 64761.61852542701 }, { "content": "function translateComment(gcode: wasm.Comment): Comment {\n\n const translated = {\n\n text: gcode.text,\n\n span: translateSpan(gcode.span),\n\n };\n\n\n\n gcode.free();\n\n return translated;\n", "file_path": "wasm/ts/index.ts", 
"rank": 2, "score": 45353.825498675586 }, { "content": "function translateSpan(span: wasm.Span): Span {\n\n const translated = {\n\n start: span.start,\n\n end: span.end,\n\n line: span.line,\n\n };\n\n return translated;\n", "file_path": "wasm/ts/index.ts", "rank": 3, "score": 45214.80804808096 }, { "content": "struct PanicOnError;\n\n\n\nimpl gcode::Callbacks for PanicOnError {\n\n fn unknown_content(&mut self, text: &str, span: Span) {\n\n panic!(\"Unknown content at {:?}: {}\", span, text);\n\n }\n\n\n\n fn gcode_buffer_overflowed(\n\n &mut self,\n\n _mnemonic: Mnemonic,\n\n _major_number: u32,\n\n _minor_number: u32,\n\n _arguments: &[Word],\n\n _span: Span,\n\n ) {\n\n panic!(\"Buffer overflow\");\n\n }\n\n\n\n fn unexpected_line_number(&mut self, line_number: f32, span: Span) {\n\n panic!(\"Unexpected line number at {:?}: {}\", span, line_number);\n", "file_path": "gcode/tests/smoke_test.rs", "rank": 4, "score": 44224.81969622376 }, { "content": "/// Callbacks used during the parsing process to indicate possible errors.\n\npub trait Callbacks {\n\n /// The parser encountered some text it wasn't able to make sense of.\n\n fn unknown_content(&mut self, _text: &str, _span: Span) {}\n\n\n\n /// The [`Buffers::Commands`] buffer had insufficient capacity when trying\n\n /// to add a [`GCode`].\n\n fn gcode_buffer_overflowed(\n\n &mut self,\n\n _mnemonic: Mnemonic,\n\n _major_number: u32,\n\n _minor_number: u32,\n\n _arguments: &[Word],\n\n _span: Span,\n\n ) {\n\n }\n\n\n\n /// The [`Buffers::Arguments`] buffer had insufficient capacity when trying\n\n /// to add a [`Word`].\n\n ///\n\n /// To aid in diagnostics, the caller is also given the [`GCode`]'s\n", "file_path": "gcode/src/callbacks.rs", "rank": 5, "score": 42649.58903681909 }, { "content": "#[test]\n\n#[ignore]\n\nfn expected_program_2_output() {\n\n // N10 T2 M3 S447 F80\n\n // N20 G0 X112 Y-2\n\n // ;N30 Z-5\n\n // N40 G41\n\n // N50 G1 X95 Y8 M8\n\n // ;N60 X32\n\n // ;N70 X5 Y15\n\n // ;N80 Y52\n\n // 
N90 G2 X15 Y62 I10 J0\n\n // N100 G1 X83\n\n // N110 G3 X95 Y50 I12 J0\n\n // N120 G1 Y-12\n\n // N130 G40\n\n // N140 G0 Z100 M9\n\n // ;N150 X150 Y150\n\n // N160 M30\n\n\n\n let src = include_str!(\"data/program_2.gcode\");\n\n\n", "file_path": "gcode/tests/smoke_test.rs", "rank": 6, "score": 42268.95974488934 }, { "content": "/// A set of type aliases defining the types to use when storing data.\n\npub trait Buffers<'input> {\n\n /// The [`Buffer`] used to store [`GCode`] arguments.\n\n type Arguments: Buffer<Word> + Default;\n\n /// The [`Buffer`] used to store [`GCode`]s.\n\n type Commands: Buffer<GCode<Self::Arguments>> + Default;\n\n /// The [`Buffer`] used to store [`Comment`]s.\n\n type Comments: Buffer<Comment<'input>> + Default;\n\n}\n\n\n", "file_path": "gcode/src/buffers.rs", "rank": 7, "score": 39997.94544057862 }, { "content": "/// Something which can store items sequentially in memory. This doesn't\n\n/// necessarily require dynamic memory allocation.\n\npub trait Buffer<T> {\n\n /// Try to add another item to this [`Buffer`], returning the item if there\n\n /// is no more room.\n\n fn try_push(&mut self, item: T) -> Result<(), CapacityError<T>>;\n\n\n\n /// The items currently stored in the [`Buffer`].\n\n fn as_slice(&self) -> &[T];\n\n}\n\n\n\nimpl<T, A: Array<Item = T>> Buffer<T> for ArrayVec<A> {\n\n fn try_push(&mut self, item: T) -> Result<(), CapacityError<T>> {\n\n ArrayVec::try_push(self, item).map_err(|e| CapacityError(e.element()))\n\n }\n\n\n\n fn as_slice(&self) -> &[T] { &self }\n\n}\n\n\n\n/// The smallest usable set of [`Buffers`].\n\n///\n\n/// ```rust\n", "file_path": "gcode/src/buffers.rs", "rank": 8, "score": 39994.049388880245 }, { "content": "struct DebugBuffer<'a, B, T> {\n\n buffer: &'a B,\n\n _item: PhantomData<&'a T>,\n\n}\n\n\n\nimpl<'a, T, B: Buffer<T>> DebugBuffer<'a, B, T> {\n\n fn new(buffer: &'a B) -> Self {\n\n DebugBuffer {\n\n buffer,\n\n _item: PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, B, T> Debug for 
DebugBuffer<'a, B, T>\n\nwhere\n\n B: Buffer<T>,\n\n T: Debug,\n\n{\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n let entries =\n\n self.buffer.as_slice().iter().map(|item| item as &dyn Debug);\n\n\n\n f.debug_list().entries(entries).finish()\n\n }\n\n}\n", "file_path": "gcode/src/buffers.rs", "rank": 9, "score": 38185.06896620443 }, { "content": "#[derive(Debug)]\n\nstruct Lines<'input, I, C, B>\n\nwhere\n\n I: Iterator<Item = Atom<'input>>,\n\n{\n\n atoms: Peekable<I>,\n\n callbacks: C,\n\n last_gcode_type: Option<Word>,\n\n _buffers: PhantomData<B>,\n\n}\n\n\n\nimpl<'input, I, C, B> Lines<'input, I, C, B>\n\nwhere\n\n I: Iterator<Item = Atom<'input>>,\n\n{\n\n fn new(atoms: I, callbacks: C) -> Self {\n\n Lines {\n\n atoms: atoms.peekable(),\n\n callbacks,\n\n last_gcode_type: None,\n\n _buffers: PhantomData,\n", "file_path": "gcode/src/parser.rs", "rank": 10, "score": 37118.572326654474 }, { "content": "#[allow(dead_code)]\n\nfn handle_percent(src: &str) -> String {\n\n let pieces: Vec<&str> = src.split('%').collect();\n\n\n\n match pieces.len() {\n\n 0 => unreachable!(),\n\n 1 => src.to_string(),\n\n 2 => pieces[0].to_string(),\n\n 3 => pieces[1].to_string(),\n\n _ => panic!(),\n\n }\n\n}\n\n\n", "file_path": "gcode/tests/smoke_test.rs", "rank": 11, "score": 35696.63991449925 }, { "content": "#[allow(dead_code)]\n\nfn sanitise_input(src: &str) -> String {\n\n let mut src = src.to_string();\n\n let callbacks = [handle_percent, ignore_message_lines];\n\n\n\n for cb in &callbacks {\n\n src = cb(&src);\n\n }\n\n\n\n src\n\n}\n\n\n", "file_path": "gcode/tests/smoke_test.rs", "rank": 12, "score": 35696.63991449925 }, { "content": "#[allow(dead_code)]\n\nfn ignore_message_lines(src: &str) -> String {\n\n // \"M117 Printing...\" uses string arguments, not the normal char-float word\n\n let blacklist = [\"M117\"];\n\n\n\n src.lines()\n\n .filter(|line| blacklist.iter().all(|word| !line.contains(word)))\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\")\n\n}\n", 
"file_path": "gcode/tests/smoke_test.rs", "rank": 13, "score": 34888.44539705661 }, { "content": "use crate::Span;\n\n\n\n/// A comment.\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\n#[cfg_attr(\n\n feature = \"serde-1\",\n\n derive(serde_derive::Serialize, serde_derive::Deserialize)\n\n)]\n\npub struct Comment<'input> {\n\n /// The comment itself.\n\n pub value: &'input str,\n\n /// Where the comment is located in the original string.\n\n pub span: Span,\n\n}\n", "file_path": "gcode/src/comment.rs", "rank": 14, "score": 31078.26271012672 }, { "content": "use core::{\n\n cmp,\n\n fmt::{self, Debug, Formatter},\n\n ops::Range,\n\n};\n\n\n\n/// A half-open range which indicates the location of something in a body of\n\n/// text.\n\n#[derive(Copy, Clone, Eq)]\n\n#[cfg_attr(\n\n feature = \"serde-1\",\n\n derive(serde_derive::Serialize, serde_derive::Deserialize)\n\n)]\n\n#[repr(C)]\n\npub struct Span {\n\n /// The byte index corresponding to the item's start.\n\n pub start: usize,\n\n /// The index one byte past the item's end.\n\n pub end: usize,\n\n /// The (zero-based) line number.\n", "file_path": "gcode/src/span.rs", "rank": 15, "score": 30885.448255639698 }, { "content": " pub line: usize,\n\n}\n\n\n\nimpl Span {\n\n /// A placeholder [`Span`] which will be ignored by [`Span::merge()`] and\n\n /// equality checks.\n\n pub const PLACEHOLDER: Span =\n\n Span::new(usize::max_value(), usize::max_value(), usize::max_value());\n\n\n\n /// Create a new [`Span`].\n\n pub const fn new(start: usize, end: usize, line: usize) -> Self {\n\n Span { start, end, line }\n\n }\n\n\n\n /// Get the string this [`Span`] corresponds to.\n\n ///\n\n /// Passing in a different string will probably lead to... 
strange...\n\n /// results.\n\n pub fn get_text<'input>(&self, src: &'input str) -> Option<&'input str> {\n\n src.get(self.start..self.end)\n", "file_path": "gcode/src/span.rs", "rank": 16, "score": 30880.5573398682 }, { "content": " fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n if self.is_placeholder() {\n\n write!(f, \"<placeholder>\")\n\n } else {\n\n let Span { start, end, line } = self;\n\n\n\n f.debug_struct(\"Span\")\n\n .field(\"start\", start)\n\n .field(\"end\", end)\n\n .field(\"line\", line)\n\n .finish()\n\n }\n\n }\n\n}\n\n\n\nimpl Default for Span {\n\n fn default() -> Span { Span::PLACEHOLDER }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "gcode/src/span.rs", "rank": 17, "score": 30880.553231417278 }, { "content": "\n\n self.start == start && self.end == end && self.line == line\n\n }\n\n}\n\n\n\nimpl PartialEq for Span {\n\n fn eq(&self, other: &Span) -> bool {\n\n let Span { start, end, line } = *other;\n\n\n\n self.is_placeholder()\n\n || other.is_placeholder()\n\n || (self.start == start && self.end == end && self.line == line)\n\n }\n\n}\n\n\n\nimpl From<Span> for Range<usize> {\n\n fn from(other: Span) -> Range<usize> { other.start..other.end }\n\n}\n\n\n\nimpl Debug for Span {\n", "file_path": "gcode/src/span.rs", "rank": 18, "score": 30873.844367770125 }, { "content": " }\n\n\n\n /// Merge two [`Span`]s, making sure [`Span::PLACEHOLDER`] spans go away.\n\n pub fn merge(self, other: Span) -> Span {\n\n if self.is_placeholder() {\n\n other\n\n } else if other.is_placeholder() {\n\n self\n\n } else {\n\n Span {\n\n start: cmp::min(self.start, other.start),\n\n end: cmp::max(self.end, other.end),\n\n line: cmp::min(self.line, other.line),\n\n }\n\n }\n\n }\n\n\n\n /// Is this a [`Span::PLACEHOLDER`]?\n\n pub fn is_placeholder(self) -> bool {\n\n let Span { start, end, line } = Span::PLACEHOLDER;\n", "file_path": "gcode/src/span.rs", "rank": 19, "score": 30873.413557023006 }, { "content": "mod tests {\n\n use super::*;\n\n\n\n 
#[test]\n\n fn a_span_is_equal_to_itself() {\n\n let span = Span::new(1, 2, 3);\n\n\n\n assert_eq!(span, span);\n\n }\n\n\n\n #[test]\n\n fn all_spans_are_equal_to_the_placeholder() {\n\n let inputs = vec![\n\n Span::default(),\n\n Span::PLACEHOLDER,\n\n Span::new(42, 0, 0),\n\n Span::new(0, 42, 0),\n\n Span::new(0, 0, 42),\n\n ];\n\n\n\n for input in inputs {\n\n assert_eq!(input, Span::PLACEHOLDER);\n\n }\n\n }\n\n}\n", "file_path": "gcode/src/span.rs", "rank": 20, "score": 30872.712085090174 }, { "content": "function getAll<TContainer, TItem>(line: TContainer, getter: (line: TContainer, index: number) => TItem | undefined): TItem[] {\n\n const items = [];\n\n let i = 0;\n\n\n\n while (true) {\n\n const item = getter(line, i);\n\n\n\n if (item) {\n\n items.push(item);\n\n } else {\n\n break;\n\n }\n\n\n\n i++;\n\n }\n\n\n\n return items;\n", "file_path": "wasm/ts/index.ts", "rank": 31, "score": 22930.05654502095 }, { "content": "export interface Callbacks {\n\n unknown_content?(text: string, span: Span): void;\n\n\n\n gcode_buffer_overflowed?(\n\n mnemonic: string,\n\n number: number,\n\n span: Span,\n\n ): void;\n\n\n\n gcode_argument_buffer_overflowed?(\n\n mnemonic: string,\n\n number: number,\n\n argument: wasm.Word,\n\n ): void;\n\n\n\n comment_buffer_overflow?(\n\n comment: string,\n\n span: Span,\n\n ): void;\n\n\n\n unexpected_line_number?(\n\n line_number: number,\n\n span: Span,\n\n ): void;\n\n\n\n argument_without_a_command?(\n\n letter: string,\n\n value: number,\n\n span: Span,\n\n ): void;\n\n\n\n number_without_a_letter?(\n\n value: string,\n\n span: Span,\n\n ): void;\n\n\n\n letter_without_a_number?(\n\n value: string,\n\n span: Span,\n\n ): void;\n", "file_path": "wasm/ts/index.ts", "rank": 32, "score": 22930.05654502095 }, { "content": "function translateArguments(gcode: wasm.GCode): Arguments {\n\n const map: Arguments = {};\n\n\n\n for (const word of getAll(gcode, (g, i) => g.get_argument(i))) {\n\n try {\n\n map[word.letter] = 
word.value;\n\n } finally {\n\n word.free();\n\n }\n\n }\n\n\n\n return map;\n", "file_path": "wasm/ts/index.ts", "rank": 33, "score": 22200.877601411485 }, { "content": "function translateLine(line: wasm.Line): Line {\n\n try {\n\n return {\n\n comments: getAll(line, (l, i) => l.get_comment(i)).map(translateComment),\n\n gcodes: getAll(line, (l, i) => l.get_gcode(i)).map(translateGCode),\n\n span: line.span,\n\n };\n\n } finally {\n\n line.free();\n\n }\n", "file_path": "wasm/ts/index.ts", "rank": 34, "score": 22200.877601411485 }, { "content": "function translateGCode(gcode: wasm.GCode): GCode {\n\n const translated = {\n\n mnemonic: gcode.mnemonic,\n\n number: gcode.number,\n\n arguments: translateArguments(gcode),\n\n span: translateSpan(gcode.span),\n\n };\n\n\n\n gcode.free();\n\n return translated;\n", "file_path": "wasm/ts/index.ts", "rank": 35, "score": 21516.645328378843 }, { "content": "# gcode-rs\n\n\n\n[![Crates.io version](https://img.shields.io/crates/v/gcode.svg)](https://crates.io/crates/gcode)\n\n[![Docs](https://docs.rs/gcode/badge.svg)](https://docs.rs/gcode/)\n\n[![Build Status](https://travis-ci.org/Michael-F-Bryan/gcode-rs.svg?branch=master)](https://travis-ci.org/Michael-F-Bryan/gcode-rs)\n\n\n\nA gcode parser designed for use in `#[no_std]` environments.\n\n\n\nFor an example of the `gcode` crate in use, see \n\n[@etrombly][etrombly]'s [`gcode-yew`][gc-y].\n\n\n\n## Useful Links\n\n\n\n- [The thread that kicked this idea off][thread]\n\n- [Rendered Documentation][docs]\n\n- [NIST GCode Interpreter Spec][nist]\n\n\n\n## License\n\n\n\nThis project is licensed under either of\n\n\n\n * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE_APACHE.md) or\n\n http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE_MIT.md) or\n\n http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\nIt is recommended to always use [cargo-crev][crev] to verify the\n\ntrustworthiness of each of your dependencies, including 
this one.\n\n\n\n### Contribution\n\n\n\nThe intent of this crate is to be free of soundness bugs. The developers will\n\ndo their best to avoid them, and welcome help in analyzing and fixing them.\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally\n\nsubmitted for inclusion in the work by you, as defined in the Apache-2.0\n\nlicense, shall be dual licensed as above, without any additional terms or\n\nconditions.\n\n\n\n[thread]:https://users.rust-lang.org/t/g-code-interpreter/10930\n\n[docs]: https://michael-f-bryan.github.io/gcode-rs/\n\n[p3]: https://github.com/Michael-F-Bryan/gcode-rs/blob/master/tests/data/program_3.gcode\n\n[nist]: http://ws680.nist.gov/publication/get_pdf.cfm?pub_id=823374\n\n[cargo-c]: https://github.com/lu-zero/cargo-c\n\n[etrombly]: https://github.com/etrombly\n\n[gc-y]: https://github.com/etrombly/gcode-yew\n\n[crev]: https://github.com/crev-dev/cargo-crev\n", "file_path": "README.md", "rank": 36, "score": 19703.561253271968 }, { "content": "# gcode-rs\n\n\n\n[![Crates.io version](https://img.shields.io/crates/v/gcode.svg)](https://crates.io/crates/gcode)\n\n[![Docs](https://docs.rs/gcode/badge.svg)](https://docs.rs/gcode/)\n\n[![Build Status](https://travis-ci.org/Michael-F-Bryan/gcode-rs.svg?branch=master)](https://travis-ci.org/Michael-F-Bryan/gcode-rs)\n\n\n\nA gcode parser designed for use in `#[no_std]` environments.\n\n\n\nFor an example of the `gcode` crate in use, see \n\n[@etrombly][etrombly]'s [`gcode-yew`][gc-y].\n\n\n\n## Useful Links\n\n\n\n- [The thread that kicked this idea off][thread]\n\n- [Rendered Documentation][docs]\n\n- [NIST GCode Interpreter Spec][nist]\n\n\n\n## License\n\n\n\nThis project is licensed under either of\n\n\n\n * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE_APACHE.md) or\n\n http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE_MIT.md) or\n\n http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\nIt is recommended to 
always use [cargo-crev][crev] to verify the\n\ntrustworthiness of each of your dependencies, including this one.\n\n\n\n### Contribution\n\n\n\nThe intent of this crate is to be free of soundness bugs. The developers will\n\ndo their best to avoid them, and welcome help in analyzing and fixing them.\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally\n\nsubmitted for inclusion in the work by you, as defined in the Apache-2.0\n\nlicense, shall be dual licensed as above, without any additional terms or\n\nconditions.\n\n\n\n[thread]:https://users.rust-lang.org/t/g-code-interpreter/10930\n\n[docs]: https://michael-f-bryan.github.io/gcode-rs/\n\n[p3]: https://github.com/Michael-F-Bryan/gcode-rs/blob/master/tests/data/program_3.gcode\n\n[nist]: http://ws680.nist.gov/publication/get_pdf.cfm?pub_id=823374\n\n[cargo-c]: https://github.com/lu-zero/cargo-c\n\n[etrombly]: https://github.com/etrombly\n\n[gc-y]: https://github.com/etrombly/gcode-yew\n\n[crev]: https://github.com/crev-dev/cargo-crev\n", "file_path": "gcode/README.md", "rank": 37, "score": 19163.16260827578 }, { "content": "# gcode-rs\n\n\n\n[![Crates.io version](https://img.shields.io/crates/v/gcode.svg)](https://crates.io/crates/gcode)\n\n[![Docs](https://docs.rs/gcode/badge.svg)](https://docs.rs/gcode/)\n\n[![Build Status](https://travis-ci.org/Michael-F-Bryan/gcode-rs.svg?branch=master)](https://travis-ci.org/Michael-F-Bryan/gcode-rs)\n\n\n\nA gcode parser designed for use in `#[no_std]` environments.\n\n\n\nFor an example of the `gcode` crate in use, see \n\n[@etrombly][etrombly]'s [`gcode-yew`][gc-y].\n\n\n\n## Useful Links\n\n\n\n- [The thread that kicked this idea off][thread]\n\n- [Rendered Documentation][docs]\n\n- [NIST GCode Interpreter Spec][nist]\n\n\n\n## License\n\n\n\nThis project is licensed under either of\n\n\n\n * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE_APACHE.md) or\n\n http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license 
([LICENSE-MIT](LICENSE_MIT.md) or\n\n http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\nIt is recommended to always use [cargo-crev][crev] to verify the\n\ntrustworthiness of each of your dependencies, including this one.\n\n\n\n### Contribution\n\n\n\nThe intent of this crate is to be free of soundness bugs. The developers will\n\ndo their best to avoid them, and welcome help in analyzing and fixing them.\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally\n\nsubmitted for inclusion in the work by you, as defined in the Apache-2.0\n\nlicense, shall be dual licensed as above, without any additional terms or\n\nconditions.\n\n\n\n[thread]:https://users.rust-lang.org/t/g-code-interpreter/10930\n\n[docs]: https://michael-f-bryan.github.io/gcode-rs/\n\n[p3]: https://github.com/Michael-F-Bryan/gcode-rs/blob/master/tests/data/program_3.gcode\n\n[nist]: http://ws680.nist.gov/publication/get_pdf.cfm?pub_id=823374\n\n[cargo-c]: https://github.com/lu-zero/cargo-c\n\n[etrombly]: https://github.com/etrombly\n\n[gc-y]: https://github.com/etrombly/gcode-yew\n\n[crev]: https://github.com/crev-dev/cargo-crev\n", "file_path": "wasm/README.md", "rank": 38, "score": 19163.16260827578 }, { "content": "Copyright (c) 2020 Michael Bryan <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any\n\nperson obtaining a copy of this software and associated\n\ndocumentation files (the \"Software\"), to deal in the\n\nSoftware without restriction, including without\n\nlimitation the rights to use, copy, modify, merge,\n\npublish, distribute, sublicense, and/or sell copies of\n\nthe Software, and to permit persons to whom the Software\n\nis furnished to do so, subject to the following\n\nconditions:\n\n\n\nThe above copyright notice and this permission notice\n\nshall be included in all copies or substantial portions\n\nof the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF\n\nANY KIND, EXPRESS OR IMPLIED, 
INCLUDING BUT NOT LIMITED\n\nTO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n\nPARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT\n\nSHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\n\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n\nOF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR\n\nIN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\nDEALINGS IN THE SOFTWARE.\n", "file_path": "LICENSE_MIT.md", "rank": 39, "score": 19160.883697228877 }, { "content": " the Derivative Works; and\n\n\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n\n distribution, then any Derivative Works that You distribute must\n\n include a readable copy of the attribution notices contained\n\n within such NOTICE file, excluding those notices that do not\n\n pertain to any part of the Derivative Works, in at least one\n\n of the following places: within a NOTICE text file distributed\n\n as part of the Derivative Works; within the Source form or\n\n documentation, if provided along with the Derivative Works; or,\n\n within a display generated by the Derivative Works, if and\n\n wherever such third-party notices normally appear. The contents\n\n of the NOTICE file are for informational purposes only and\n\n do not modify the License. You may add Your own attribution\n\n notices within Derivative Works that You distribute, alongside\n\n or as an addendum to the NOTICE text from the Work, provided\n\n that such additional attribution notices cannot be construed\n\n as modifying the License.\n\n\n\n You may add Your own copyright statement to Your modifications and\n\n may provide additional or different license terms and conditions\n\n for use, reproduction, or distribution of Your modifications, or\n\n for any such Derivative Works as a whole, provided Your use,\n\n reproduction, and distribution of the Work otherwise complies with\n\n the conditions stated in this License.\n\n\n\n5. Submission of Contributions. 
Unless You explicitly state otherwise,\n\n any Contribution intentionally submitted for inclusion in the Work\n\n by You to the Licensor shall be under the terms and conditions of\n\n this License, without any additional terms or conditions.\n\n Notwithstanding the above, nothing herein shall supersede or modify\n\n the terms of any separate license agreement you may have executed\n", "file_path": "LICENSE_APACHE.md", "rank": 40, "score": 19159.561594462695 }, { "content": " Work and such Derivative Works in Source or Object form.\n\n\n\n3. Grant of Patent License. Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n (except as stated in this section) patent license to make, have made,\n\n use, offer to sell, sell, import, and otherwise transfer the Work,\n\n where such license applies only to those patent claims licensable\n\n by such Contributor that are necessarily infringed by their\n\n Contribution(s) alone or by combination of their Contribution(s)\n\n with the Work to which such Contribution(s) was submitted. If You\n\n institute patent litigation against any entity (including a\n\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n\n or a Contribution incorporated within the Work constitutes direct\n\n or contributory patent infringement, then any patent licenses\n\n granted to You under this License for that Work shall terminate\n\n as of the date such litigation is filed.\n\n\n\n4. Redistribution. 
You may reproduce and distribute copies of the\n\n Work or Derivative Works thereof in any medium, with or without\n\n modifications, and in Source or Object form, provided that You\n\n meet the following conditions:\n\n\n\n (a) You must give any other recipients of the Work or\n\n Derivative Works a copy of this License; and\n\n\n\n (b) You must cause any modified files to carry prominent notices\n\n stating that You changed the files; and\n\n\n\n (c) You must retain, in the Source form of any Derivative Works\n\n that You distribute, all copyright, patent, trademark, and\n\n attribution notices from the Source form of the Work,\n\n excluding those notices that do not pertain to any part of\n", "file_path": "LICENSE_APACHE.md", "rank": 41, "score": 19158.49807794667 }, { "content": " Apache License\n\n Version 2.0, January 2004\n\n http://www.apache.org/licenses/\n\n\n\nTERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n\n\n1. Definitions.\n\n\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n\n and distribution as defined by Sections 1 through 9 of this document.\n\n\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n\n the copyright owner that is granting the License.\n\n\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n\n other entities that control, are controlled by, or are under common\n\n control with that entity. 
For the purposes of this definition,\n\n \"control\" means (i) the power, direct or indirect, to cause the\n\n direction or management of such entity, whether by contract or\n\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n\n exercising permissions granted by this License.\n\n\n\n \"Source\" form shall mean the preferred form for making modifications,\n\n including but not limited to software source code, documentation\n\n source, and configuration files.\n\n\n\n \"Object\" form shall mean any form resulting from mechanical\n\n transformation or translation of a Source form, including but\n\n not limited to compiled object code, generated documentation,\n\n and conversions to other media types.\n\n\n\n \"Work\" shall mean the work of authorship, whether in Source or\n\n Object form, made available under the License, as indicated by a\n\n copyright notice that is included in or attached to the work\n", "file_path": "LICENSE_APACHE.md", "rank": 42, "score": 19158.36746824151 }, { "content": " with Licensor regarding such Contributions.\n\n\n\n6. Trademarks. This License does not grant permission to use the trade\n\n names, trademarks, service marks, or product names of the Licensor,\n\n except as required for reasonable and customary use in describing the\n\n origin of the Work and reproducing the content of the NOTICE file.\n\n\n\n7. Disclaimer of Warranty. Unless required by applicable law or\n\n agreed to in writing, Licensor provides the Work (and each\n\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n\n implied, including, without limitation, any warranties or conditions\n\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n\n PARTICULAR PURPOSE. 
You are solely responsible for determining the\n\n appropriateness of using or redistributing the Work and assume any\n\n risks associated with Your exercise of permissions under this License.\n\n\n\n8. Limitation of Liability. In no event and under no legal theory,\n\n whether in tort (including negligence), contract, or otherwise,\n\n unless required by applicable law (such as deliberate and grossly\n\n negligent acts) or agreed to in writing, shall any Contributor be\n\n liable to You for damages, including any direct, indirect, special,\n\n incidental, or consequential damages of any character arising as a\n\n result of this License or out of the use or inability to use the\n\n Work (including but not limited to damages for loss of goodwill,\n\n work stoppage, computer failure or malfunction, or any and all\n\n other commercial damages or losses), even if such Contributor\n", "file_path": "LICENSE_APACHE.md", "rank": 43, "score": 19158.206294919648 }, { "content": " (an example is provided in the Appendix below).\n\n\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n\n form, that is based on (or derived from) the Work and for which the\n\n editorial revisions, annotations, elaborations, or other modifications\n\n represent, as a whole, an original work of authorship. For the purposes\n\n of this License, Derivative Works shall not include works that remain\n\n separable from, or merely link (or bind by name) to the interfaces of,\n\n the Work and Derivative Works thereof.\n\n\n\n \"Contribution\" shall mean any work of authorship, including\n\n the original version of the Work and any modifications or additions\n\n to that Work or Derivative Works thereof, that is intentionally\n\n submitted to Licensor for inclusion in the Work by the copyright owner\n\n or by an individual or Legal Entity authorized to submit on behalf of\n\n the copyright owner. 
For the purposes of this definition, \"submitted\"\n\n means any form of electronic, verbal, or written communication sent\n\n to the Licensor or its representatives, including but not limited to\n\n communication on electronic mailing lists, source code control systems,\n\n and issue tracking systems that are managed by, or on behalf of, the\n\n Licensor for the purpose of discussing and improving the Work, but\n\n excluding communication that is conspicuously marked or otherwise\n\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n\n on behalf of whom a Contribution has been received by Licensor and\n\n subsequently incorporated within the Work.\n\n\n\n2. Grant of Copyright License. Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n copyright license to reproduce, prepare Derivative Works of,\n\n publicly display, publicly perform, sublicense, and distribute the\n", "file_path": "LICENSE_APACHE.md", "rank": 44, "score": 19156.375002397253 }, { "content": " has been advised of the possibility of such damages.\n\n\n\n9. Accepting Warranty or Additional Liability. While redistributing\n\n the Work or Derivative Works thereof, You may choose to offer,\n\n and charge a fee for, acceptance of support, warranty, indemnity,\n\n or other liability obligations and/or rights consistent with this\n\n License. 
However, in accepting such obligations, You may act only\n\n on Your own behalf and on Your sole responsibility, not on behalf\n\n of any other Contributor, and only if You agree to indemnify,\n\n defend, and hold each Contributor harmless for any liability\n\n incurred by, or claims asserted against, such Contributor by reason\n\n of your accepting any such warranty or additional liability.\n\n\n\nEND OF TERMS AND CONDITIONS\n", "file_path": "LICENSE_APACHE.md", "rank": 45, "score": 19155.189355606963 }, { "content": "Copyright (c) 2020 Michael Bryan <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any\n\nperson obtaining a copy of this software and associated\n\ndocumentation files (the \"Software\"), to deal in the\n\nSoftware without restriction, including without\n\nlimitation the rights to use, copy, modify, merge,\n\npublish, distribute, sublicense, and/or sell copies of\n\nthe Software, and to permit persons to whom the Software\n\nis furnished to do so, subject to the following\n\nconditions:\n\n\n\nThe above copyright notice and this permission notice\n\nshall be included in all copies or substantial portions\n\nof the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF\n\nANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED\n\nTO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n\nPARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT\n\nSHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\n\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n\nOF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR\n\nIN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\nDEALINGS IN THE SOFTWARE.\n", "file_path": "wasm/LICENSE_MIT.md", "rank": 46, "score": 18649.347563424068 }, { "content": "Copyright (c) 2020 Michael Bryan <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any\n\nperson obtaining a copy of this software and associated\n\ndocumentation files (the \"Software\"), to deal in the\n\nSoftware without restriction, including without\n\nlimitation the rights to use, copy, modify, merge,\n\npublish, distribute, sublicense, and/or sell copies of\n\nthe Software, and to permit persons to whom the Software\n\nis furnished to do so, subject to the following\n\nconditions:\n\n\n\nThe above copyright notice and this permission notice\n\nshall be included in all copies or substantial portions\n\nof the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF\n\nANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED\n\nTO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A\n\nPARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT\n\nSHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY\n\nCLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION\n\nOF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR\n\nIN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n\nDEALINGS IN THE SOFTWARE.\n", "file_path": "gcode/LICENSE_MIT.md", "rank": 47, "score": 18649.347563424068 }, { "content": " the Derivative Works; and\n\n\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n\n distribution, then any Derivative Works that You distribute must\n\n include a readable copy of the attribution notices contained\n\n within such NOTICE file, excluding those notices that do not\n\n pertain to any part of the Derivative Works, in at least one\n\n of the following places: within a NOTICE text file distributed\n\n as part of the Derivative Works; within the Source form or\n\n documentation, if provided along with the Derivative Works; or,\n\n within a display generated by the Derivative Works, if and\n\n wherever such third-party notices normally appear. The contents\n\n of the NOTICE file are for informational purposes only and\n\n do not modify the License. You may add Your own attribution\n\n notices within Derivative Works that You distribute, alongside\n\n or as an addendum to the NOTICE text from the Work, provided\n\n that such additional attribution notices cannot be construed\n\n as modifying the License.\n\n\n\n You may add Your own copyright statement to Your modifications and\n\n may provide additional or different license terms and conditions\n\n for use, reproduction, or distribution of Your modifications, or\n\n for any such Derivative Works as a whole, provided Your use,\n\n reproduction, and distribution of the Work otherwise complies with\n\n the conditions stated in this License.\n\n\n\n5. Submission of Contributions. 
Unless You explicitly state otherwise,\n\n any Contribution intentionally submitted for inclusion in the Work\n\n by You to the Licensor shall be under the terms and conditions of\n\n this License, without any additional terms or conditions.\n\n Notwithstanding the above, nothing herein shall supersede or modify\n\n the terms of any separate license agreement you may have executed\n", "file_path": "wasm/LICENSE_APACHE.md", "rank": 48, "score": 18648.025460657886 }, { "content": " the Derivative Works; and\n\n\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n\n distribution, then any Derivative Works that You distribute must\n\n include a readable copy of the attribution notices contained\n\n within such NOTICE file, excluding those notices that do not\n\n pertain to any part of the Derivative Works, in at least one\n\n of the following places: within a NOTICE text file distributed\n\n as part of the Derivative Works; within the Source form or\n\n documentation, if provided along with the Derivative Works; or,\n\n within a display generated by the Derivative Works, if and\n\n wherever such third-party notices normally appear. The contents\n\n of the NOTICE file are for informational purposes only and\n\n do not modify the License. You may add Your own attribution\n\n notices within Derivative Works that You distribute, alongside\n\n or as an addendum to the NOTICE text from the Work, provided\n\n that such additional attribution notices cannot be construed\n\n as modifying the License.\n\n\n\n You may add Your own copyright statement to Your modifications and\n\n may provide additional or different license terms and conditions\n\n for use, reproduction, or distribution of Your modifications, or\n\n for any such Derivative Works as a whole, provided Your use,\n\n reproduction, and distribution of the Work otherwise complies with\n\n the conditions stated in this License.\n\n\n\n5. Submission of Contributions. 
Unless You explicitly state otherwise,\n\n any Contribution intentionally submitted for inclusion in the Work\n\n by You to the Licensor shall be under the terms and conditions of\n\n this License, without any additional terms or conditions.\n\n Notwithstanding the above, nothing herein shall supersede or modify\n\n the terms of any separate license agreement you may have executed\n", "file_path": "gcode/LICENSE_APACHE.md", "rank": 49, "score": 18648.025460657886 }, { "content": " Work and such Derivative Works in Source or Object form.\n\n\n\n3. Grant of Patent License. Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n (except as stated in this section) patent license to make, have made,\n\n use, offer to sell, sell, import, and otherwise transfer the Work,\n\n where such license applies only to those patent claims licensable\n\n by such Contributor that are necessarily infringed by their\n\n Contribution(s) alone or by combination of their Contribution(s)\n\n with the Work to which such Contribution(s) was submitted. If You\n\n institute patent litigation against any entity (including a\n\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n\n or a Contribution incorporated within the Work constitutes direct\n\n or contributory patent infringement, then any patent licenses\n\n granted to You under this License for that Work shall terminate\n\n as of the date such litigation is filed.\n\n\n\n4. Redistribution. 
You may reproduce and distribute copies of the\n\n Work or Derivative Works thereof in any medium, with or without\n\n modifications, and in Source or Object form, provided that You\n\n meet the following conditions:\n\n\n\n (a) You must give any other recipients of the Work or\n\n Derivative Works a copy of this License; and\n\n\n\n (b) You must cause any modified files to carry prominent notices\n\n stating that You changed the files; and\n\n\n\n (c) You must retain, in the Source form of any Derivative Works\n\n that You distribute, all copyright, patent, trademark, and\n\n attribution notices from the Source form of the Work,\n\n excluding those notices that do not pertain to any part of\n", "file_path": "gcode/LICENSE_APACHE.md", "rank": 50, "score": 18646.96194414186 }, { "content": " Work and such Derivative Works in Source or Object form.\n\n\n\n3. Grant of Patent License. Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n (except as stated in this section) patent license to make, have made,\n\n use, offer to sell, sell, import, and otherwise transfer the Work,\n\n where such license applies only to those patent claims licensable\n\n by such Contributor that are necessarily infringed by their\n\n Contribution(s) alone or by combination of their Contribution(s)\n\n with the Work to which such Contribution(s) was submitted. If You\n\n institute patent litigation against any entity (including a\n\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n\n or a Contribution incorporated within the Work constitutes direct\n\n or contributory patent infringement, then any patent licenses\n\n granted to You under this License for that Work shall terminate\n\n as of the date such litigation is filed.\n\n\n\n4. Redistribution. 
You may reproduce and distribute copies of the\n\n Work or Derivative Works thereof in any medium, with or without\n\n modifications, and in Source or Object form, provided that You\n\n meet the following conditions:\n\n\n\n (a) You must give any other recipients of the Work or\n\n Derivative Works a copy of this License; and\n\n\n\n (b) You must cause any modified files to carry prominent notices\n\n stating that You changed the files; and\n\n\n\n (c) You must retain, in the Source form of any Derivative Works\n\n that You distribute, all copyright, patent, trademark, and\n\n attribution notices from the Source form of the Work,\n\n excluding those notices that do not pertain to any part of\n", "file_path": "wasm/LICENSE_APACHE.md", "rank": 51, "score": 18646.96194414186 }, { "content": " Apache License\n\n Version 2.0, January 2004\n\n http://www.apache.org/licenses/\n\n\n\nTERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n\n\n1. Definitions.\n\n\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n\n and distribution as defined by Sections 1 through 9 of this document.\n\n\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n\n the copyright owner that is granting the License.\n\n\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n\n other entities that control, are controlled by, or are under common\n\n control with that entity. 
For the purposes of this definition,\n\n \"control\" means (i) the power, direct or indirect, to cause the\n\n direction or management of such entity, whether by contract or\n\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n\n exercising permissions granted by this License.\n\n\n\n \"Source\" form shall mean the preferred form for making modifications,\n\n including but not limited to software source code, documentation\n\n source, and configuration files.\n\n\n\n \"Object\" form shall mean any form resulting from mechanical\n\n transformation or translation of a Source form, including but\n\n not limited to compiled object code, generated documentation,\n\n and conversions to other media types.\n\n\n\n \"Work\" shall mean the work of authorship, whether in Source or\n\n Object form, made available under the License, as indicated by a\n\n copyright notice that is included in or attached to the work\n", "file_path": "gcode/LICENSE_APACHE.md", "rank": 52, "score": 18646.8313344367 }, { "content": " Apache License\n\n Version 2.0, January 2004\n\n http://www.apache.org/licenses/\n\n\n\nTERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n\n\n1. Definitions.\n\n\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n\n and distribution as defined by Sections 1 through 9 of this document.\n\n\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n\n the copyright owner that is granting the License.\n\n\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n\n other entities that control, are controlled by, or are under common\n\n control with that entity. 
For the purposes of this definition,\n\n \"control\" means (i) the power, direct or indirect, to cause the\n\n direction or management of such entity, whether by contract or\n\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n\n exercising permissions granted by this License.\n\n\n\n \"Source\" form shall mean the preferred form for making modifications,\n\n including but not limited to software source code, documentation\n\n source, and configuration files.\n\n\n\n \"Object\" form shall mean any form resulting from mechanical\n\n transformation or translation of a Source form, including but\n\n not limited to compiled object code, generated documentation,\n\n and conversions to other media types.\n\n\n\n \"Work\" shall mean the work of authorship, whether in Source or\n\n Object form, made available under the License, as indicated by a\n\n copyright notice that is included in or attached to the work\n", "file_path": "wasm/LICENSE_APACHE.md", "rank": 53, "score": 18646.8313344367 }, { "content": " with Licensor regarding such Contributions.\n\n\n\n6. Trademarks. This License does not grant permission to use the trade\n\n names, trademarks, service marks, or product names of the Licensor,\n\n except as required for reasonable and customary use in describing the\n\n origin of the Work and reproducing the content of the NOTICE file.\n\n\n\n7. Disclaimer of Warranty. Unless required by applicable law or\n\n agreed to in writing, Licensor provides the Work (and each\n\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n\n implied, including, without limitation, any warranties or conditions\n\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n\n PARTICULAR PURPOSE. 
You are solely responsible for determining the\n\n appropriateness of using or redistributing the Work and assume any\n\n risks associated with Your exercise of permissions under this License.\n\n\n\n8. Limitation of Liability. In no event and under no legal theory,\n\n whether in tort (including negligence), contract, or otherwise,\n\n unless required by applicable law (such as deliberate and grossly\n\n negligent acts) or agreed to in writing, shall any Contributor be\n\n liable to You for damages, including any direct, indirect, special,\n\n incidental, or consequential damages of any character arising as a\n\n result of this License or out of the use or inability to use the\n\n Work (including but not limited to damages for loss of goodwill,\n\n work stoppage, computer failure or malfunction, or any and all\n\n other commercial damages or losses), even if such Contributor\n", "file_path": "gcode/LICENSE_APACHE.md", "rank": 54, "score": 18646.67016111484 }, { "content": " with Licensor regarding such Contributions.\n\n\n\n6. Trademarks. This License does not grant permission to use the trade\n\n names, trademarks, service marks, or product names of the Licensor,\n\n except as required for reasonable and customary use in describing the\n\n origin of the Work and reproducing the content of the NOTICE file.\n\n\n\n7. Disclaimer of Warranty. Unless required by applicable law or\n\n agreed to in writing, Licensor provides the Work (and each\n\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n\n implied, including, without limitation, any warranties or conditions\n\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n\n PARTICULAR PURPOSE. You are solely responsible for determining the\n\n appropriateness of using or redistributing the Work and assume any\n\n risks associated with Your exercise of permissions under this License.\n\n\n\n8. Limitation of Liability. 
In no event and under no legal theory,\n\n whether in tort (including negligence), contract, or otherwise,\n\n unless required by applicable law (such as deliberate and grossly\n\n negligent acts) or agreed to in writing, shall any Contributor be\n\n liable to You for damages, including any direct, indirect, special,\n\n incidental, or consequential damages of any character arising as a\n\n result of this License or out of the use or inability to use the\n\n Work (including but not limited to damages for loss of goodwill,\n\n work stoppage, computer failure or malfunction, or any and all\n\n other commercial damages or losses), even if such Contributor\n", "file_path": "wasm/LICENSE_APACHE.md", "rank": 55, "score": 18646.67016111484 }, { "content": " (an example is provided in the Appendix below).\n\n\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n\n form, that is based on (or derived from) the Work and for which the\n\n editorial revisions, annotations, elaborations, or other modifications\n\n represent, as a whole, an original work of authorship. For the purposes\n\n of this License, Derivative Works shall not include works that remain\n\n separable from, or merely link (or bind by name) to the interfaces of,\n\n the Work and Derivative Works thereof.\n\n\n\n \"Contribution\" shall mean any work of authorship, including\n\n the original version of the Work and any modifications or additions\n\n to that Work or Derivative Works thereof, that is intentionally\n\n submitted to Licensor for inclusion in the Work by the copyright owner\n\n or by an individual or Legal Entity authorized to submit on behalf of\n\n the copyright owner. 
For the purposes of this definition, \"submitted\"\n\n means any form of electronic, verbal, or written communication sent\n\n to the Licensor or its representatives, including but not limited to\n\n communication on electronic mailing lists, source code control systems,\n\n and issue tracking systems that are managed by, or on behalf of, the\n\n Licensor for the purpose of discussing and improving the Work, but\n\n excluding communication that is conspicuously marked or otherwise\n\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n\n on behalf of whom a Contribution has been received by Licensor and\n\n subsequently incorporated within the Work.\n\n\n\n2. Grant of Copyright License. Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n copyright license to reproduce, prepare Derivative Works of,\n\n publicly display, publicly perform, sublicense, and distribute the\n", "file_path": "wasm/LICENSE_APACHE.md", "rank": 56, "score": 18644.838868592444 }, { "content": " (an example is provided in the Appendix below).\n\n\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n\n form, that is based on (or derived from) the Work and for which the\n\n editorial revisions, annotations, elaborations, or other modifications\n\n represent, as a whole, an original work of authorship. 
For the purposes\n\n of this License, Derivative Works shall not include works that remain\n\n separable from, or merely link (or bind by name) to the interfaces of,\n\n the Work and Derivative Works thereof.\n\n\n\n \"Contribution\" shall mean any work of authorship, including\n\n the original version of the Work and any modifications or additions\n\n to that Work or Derivative Works thereof, that is intentionally\n\n submitted to Licensor for inclusion in the Work by the copyright owner\n\n or by an individual or Legal Entity authorized to submit on behalf of\n\n the copyright owner. For the purposes of this definition, \"submitted\"\n\n means any form of electronic, verbal, or written communication sent\n\n to the Licensor or its representatives, including but not limited to\n\n communication on electronic mailing lists, source code control systems,\n\n and issue tracking systems that are managed by, or on behalf of, the\n\n Licensor for the purpose of discussing and improving the Work, but\n\n excluding communication that is conspicuously marked or otherwise\n\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n\n on behalf of whom a Contribution has been received by Licensor and\n\n subsequently incorporated within the Work.\n\n\n\n2. Grant of Copyright License. 
Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n copyright license to reproduce, prepare Derivative Works of,\n\n publicly display, publicly perform, sublicense, and distribute the\n", "file_path": "gcode/LICENSE_APACHE.md", "rank": 57, "score": 18644.838868592444 }, { "content": "import * as wasm from \"@michael-f-bryan/gcode-wasm\";\n\n\n\nexport type Line = {\n\n gcodes: GCode[],\n\n comments: Comment[],\n\n span: Span,\n\n};\n\n\n\nexport type Comment = {\n\n text: string,\n\n span: Span,\n\n};\n\n\n\ntype Arguments = { [key: string]: number };\n\n\n\nexport type GCode = {\n\n mnemonic: string,\n\n number: number,\n\n arguments: Arguments,\n\n span: Span,\n\n};\n\n\n\nexport type Span = {\n\n start: number,\n\n end: number,\n\n line: number,\n\n}\n\n\n\nexport interface Callbacks {\n\n unknown_content?(text: string, span: Span): void;\n\n\n\n gcode_buffer_overflowed?(\n\n mnemonic: string,\n\n number: number,\n\n span: Span,\n\n ): void;\n\n\n\n gcode_argument_buffer_overflowed?(\n\n mnemonic: string,\n\n number: number,\n\n argument: wasm.Word,\n\n ): void;\n\n\n\n comment_buffer_overflow?(\n\n comment: string,\n\n span: Span,\n\n ): void;\n\n\n\n unexpected_line_number?(\n\n line_number: number,\n\n span: Span,\n\n ): void;\n\n\n\n argument_without_a_command?(\n\n letter: string,\n\n value: number,\n\n span: Span,\n\n ): void;\n\n\n\n number_without_a_letter?(\n\n value: string,\n\n span: Span,\n\n ): void;\n\n\n\n letter_without_a_number?(\n\n value: string,\n\n span: Span,\n\n ): void;\n\n}\n\n\n\nexport function* parseLines(text: string, callbacks?: Callbacks): Iterable<Line> {\n\n const parser = new wasm.Parser(text, callbacks);\n\n\n\n try {\n\n while (true) {\n\n const line = parser.next_line();\n\n\n\n if (line) {\n\n yield translateLine(line);\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n } finally {\n\n parser.free();\n\n 
}\n\n}\n\n\n\nexport function* parse(text: string, callbacks?: Callbacks): Iterable<GCode> {\n\n for (const line of parseLines(text, callbacks)) {\n\n for (const gcode of line.gcodes) {\n\n yield gcode;\n\n }\n\n }\n\n}\n\n\n\nfunction translateLine(line: wasm.Line): Line {\n\n try {\n\n return {\n\n comments: getAll(line, (l, i) => l.get_comment(i)).map(translateComment),\n\n gcodes: getAll(line, (l, i) => l.get_gcode(i)).map(translateGCode),\n\n span: line.span,\n\n };\n\n } finally {\n\n line.free();\n\n }\n\n}\n\n\n\nfunction translateGCode(gcode: wasm.GCode): GCode {\n\n const translated = {\n\n mnemonic: gcode.mnemonic,\n\n number: gcode.number,\n\n arguments: translateArguments(gcode),\n\n span: translateSpan(gcode.span),\n\n };\n\n\n\n gcode.free();\n\n return translated;\n\n}\n\n\n\nfunction translateArguments(gcode: wasm.GCode): Arguments {\n\n const map: Arguments = {};\n\n\n\n for (const word of getAll(gcode, (g, i) => g.get_argument(i))) {\n\n try {\n\n map[word.letter] = word.value;\n\n } finally {\n\n word.free();\n\n }\n\n }\n\n\n\n return map;\n\n}\n\n\n\nfunction translateComment(gcode: wasm.Comment): Comment {\n\n const translated = {\n\n text: gcode.text,\n\n span: translateSpan(gcode.span),\n\n };\n\n\n\n gcode.free();\n\n return translated;\n\n}\n\n\n\nfunction translateSpan(span: wasm.Span): Span {\n\n const translated = {\n\n start: span.start,\n\n end: span.end,\n\n line: span.line,\n\n };\n\n return translated;\n\n}\n\n\n\nfunction getAll<TContainer, TItem>(line: TContainer, getter: (line: TContainer, index: number) => TItem | undefined): TItem[] {\n\n const items = [];\n\n let i = 0;\n\n\n\n while (true) {\n\n const item = getter(line, i);\n\n\n\n if (item) {\n\n items.push(item);\n\n } else {\n\n break;\n\n }\n\n\n\n i++;\n\n }\n\n\n\n return items;\n\n}\n", "file_path": "wasm/ts/index.ts", "rank": 58, "score": 18643.653221802153 }, { "content": " has been advised of the possibility of such damages.\n\n\n\n9. 
Accepting Warranty or Additional Liability. While redistributing\n\n the Work or Derivative Works thereof, You may choose to offer,\n\n and charge a fee for, acceptance of support, warranty, indemnity,\n\n or other liability obligations and/or rights consistent with this\n\n License. However, in accepting such obligations, You may act only\n\n on Your own behalf and on Your sole responsibility, not on behalf\n\n of any other Contributor, and only if You agree to indemnify,\n\n defend, and hold each Contributor harmless for any liability\n\n incurred by, or claims asserted against, such Contributor by reason\n\n of your accepting any such warranty or additional liability.\n\n\n\nEND OF TERMS AND CONDITIONS\n", "file_path": "gcode/LICENSE_APACHE.md", "rank": 59, "score": 18643.653221802153 }, { "content": " has been advised of the possibility of such damages.\n\n\n\n9. Accepting Warranty or Additional Liability. While redistributing\n\n the Work or Derivative Works thereof, You may choose to offer,\n\n and charge a fee for, acceptance of support, warranty, indemnity,\n\n or other liability obligations and/or rights consistent with this\n\n License. 
However, in accepting such obligations, You may act only\n\n on Your own behalf and on Your sole responsibility, not on behalf\n\n of any other Contributor, and only if You agree to indemnify,\n\n defend, and hold each Contributor harmless for any liability\n\n incurred by, or claims asserted against, such Contributor by reason\n\n of your accepting any such warranty or additional liability.\n\n\n\nEND OF TERMS AND CONDITIONS\n", "file_path": "wasm/LICENSE_APACHE.md", "rank": 60, "score": 18643.653221802153 }, { "content": "module.exports = {\n\n transform: { '^.+\\\\.ts?$': 'ts-jest' },\n\n testEnvironment: 'node',\n\n testRegex: '.*\\\\.(test|spec)?\\\\.(ts|tsx)$',\n\n moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node']\n", "file_path": "wasm/jest.config.js", "rank": 61, "score": 18643.653221802153 }, { "content": "import { parse, GCode } from \"./index\";\n\n\n\ndescribe(\"gcode parsing\", () => {\n\n it(\"can parse G90\", () => {\n\n const src = \"G90\";\n\n const expected: GCode[] = [\n\n {\n\n mnemonic: \"G\",\n\n number: 90,\n\n arguments: {},\n\n span: {\n\n start: 0,\n\n end: src.length,\n\n line: 0,\n\n }\n\n },\n\n ];\n\n\n\n const got = Array.from(parse(src));\n\n\n\n expect(got).toEqual(expected);\n\n });\n\n\n\n it(\"can parse more complex items\", () => {\n\n const src = \"G01 (the x-coordinate) X50 Y (comment between Y and number) -10.0\";\n\n const expected: GCode[] = [\n\n {\n\n mnemonic: \"G\",\n\n number: 1,\n\n arguments: {\n\n X: 50,\n\n Y: -10\n\n },\n\n span: {\n\n start: 0,\n\n end: src.length,\n\n line: 0,\n\n }\n\n },\n\n ];\n\n\n\n const got = Array.from(parse(src));\n\n\n\n expect(got).toEqual(expected);\n\n });\n", "file_path": "wasm/ts/index.test.ts", "rank": 62, "score": 18158.72743717102 }, { "content": "use crate::Span;\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub(crate) enum TokenType {\n\n Letter,\n\n Number,\n\n Comment,\n\n Unknown,\n\n}\n\n\n\nimpl From<char> for TokenType {\n\n fn from(c: char) -> 
TokenType {\n\n if c.is_ascii_alphabetic() {\n\n TokenType::Letter\n\n } else if c.is_ascii_digit() || c == '.' || c == '-' || c == '+' {\n\n TokenType::Number\n\n } else if c == '(' || c == ';' || c == ')' {\n\n TokenType::Comment\n\n } else {\n\n TokenType::Unknown\n", "file_path": "gcode/src/lexer.rs", "rank": 63, "score": 35.43416828570626 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub(crate) struct Token<'input> {\n\n pub(crate) kind: TokenType,\n\n pub(crate) value: &'input str,\n\n pub(crate) span: Span,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub(crate) struct Lexer<'input> {\n\n current_position: usize,\n\n current_line: usize,\n\n src: &'input str,\n\n}\n\n\n\nimpl<'input> Lexer<'input> {\n\n pub(crate) fn new(src: &'input str) -> Self {\n", "file_path": "gcode/src/lexer.rs", "rank": 64, "score": 27.43339039375643 }, { "content": " Span(other)\n\n }\n\n}\n\n\n\n#[wasm_bindgen]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Word(gcode::Word);\n\n\n\n#[wasm_bindgen]\n\nimpl Word {\n\n #[wasm_bindgen(getter)]\n\n pub fn letter(&self) -> char { self.0.letter }\n\n\n\n #[wasm_bindgen(getter)]\n\n pub fn value(&self) -> f32 { self.0.value }\n\n\n\n #[wasm_bindgen(getter)]\n\n pub fn span(&self) -> Span { Span(self.0.span) }\n\n}\n\n\n", "file_path": "wasm/rust/simple_wrappers.rs", "rank": 65, "score": 26.625655103313523 }, { "content": " &mut self,\n\n letter: char,\n\n value: f32,\n\n span: Span,\n\n ) {\n\n (*self).argument_without_a_command(letter, value, span);\n\n }\n\n\n\n fn number_without_a_letter(&mut self, value: &str, span: Span) {\n\n (*self).number_without_a_letter(value, span);\n\n }\n\n\n\n fn letter_without_a_number(&mut self, value: &str, span: Span) {\n\n (*self).letter_without_a_number(value, span);\n\n }\n\n}\n\n\n\n/// A set of callbacks that ignore any errors that occur.\n\n#[derive(Debug, Copy, Clone, PartialEq, Default)]\n\npub struct Nop;\n\n\n\nimpl Callbacks for Nop {}\n", "file_path": 
"gcode/src/callbacks.rs", "rank": 66, "score": 26.599268963792625 }, { "content": " })\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn tokenize_letter(&mut self) -> Option<Token<'input>> {\n\n let c = self.rest().chars().next()?;\n\n let start = self.current_position;\n\n\n\n if c.is_ascii_alphabetic() {\n\n self.current_position += 1;\n\n Some(Token {\n\n kind: TokenType::Letter,\n\n value: &self.src[start..=start],\n\n span: Span {\n\n start,\n\n end: start + 1,\n\n line: self.current_line,\n\n },\n", "file_path": "gcode/src/lexer.rs", "rank": 67, "score": 23.187745484794434 }, { "content": "use crate::{\n\n buffers::{self, Buffer, Buffers, CapacityError, DefaultBuffers},\n\n Comment, GCode, Span, Word,\n\n};\n\nuse core::fmt::{self, Debug, Formatter};\n\n\n\n/// A single line, possibly containing some [`Comment`]s or [`GCode`]s.\n\n#[derive(Clone, PartialEq)]\n\n#[cfg_attr(\n\n feature = \"serde-1\",\n\n derive(serde_derive::Serialize, serde_derive::Deserialize)\n\n)]\n\npub struct Line<'input, B: Buffers<'input> = DefaultBuffers> {\n\n gcodes: B::Commands,\n\n comments: B::Comments,\n\n line_number: Option<Word>,\n\n span: Span,\n\n}\n\n\n\nimpl<'input, B> Debug for Line<'input, B>\n", "file_path": "gcode/src/line.rs", "rank": 68, "score": 23.180940546210007 }, { "content": " &mut self,\n\n _letter: char,\n\n _value: f32,\n\n _span: Span,\n\n ) {\n\n }\n\n\n\n /// A [`Word`]'s number was encountered without an accompanying letter.\n\n fn number_without_a_letter(&mut self, _value: &str, _span: Span) {}\n\n\n\n /// A [`Word`]'s letter was encountered without an accompanying number.\n\n fn letter_without_a_number(&mut self, _value: &str, _span: Span) {}\n\n}\n\n\n\nimpl<'a, C: Callbacks + ?Sized> Callbacks for &'a mut C {\n\n fn unknown_content(&mut self, text: &str, span: Span) {\n\n (*self).unknown_content(text, span);\n\n }\n\n\n\n fn gcode_buffer_overflowed(\n", "file_path": "gcode/src/callbacks.rs", "rank": 69, "score": 22.929400547962274 }, { "content": "\n\n 
#[derive(Debug, Copy, Clone, PartialEq)]\n\n enum BigBuffers {}\n\n\n\n impl<'input> Buffers<'input> for BigBuffers {\n\n type Arguments = ArrayVec<[Word; 16]>;\n\n type Commands = ArrayVec<[GCode<Self::Arguments>; 16]>;\n\n type Comments = ArrayVec<[Comment<'input>; 16]>;\n\n }\n\n\n\n fn parse(\n\n src: &str,\n\n ) -> Lines<'_, impl Iterator<Item = Atom<'_>>, Nop, BigBuffers> {\n\n let tokens = Lexer::new(src);\n\n let atoms = WordsOrComments::new(tokens);\n\n Lines::new(atoms, Nop)\n\n }\n\n\n\n #[test]\n\n fn we_can_parse_a_comment() {\n", "file_path": "gcode/src/parser.rs", "rank": 70, "score": 22.693010090159074 }, { "content": "//! Internals for the `@michael-f-bryan/gcode` package. Not intended for public\n\n//! use.\n\n\n\nmod callbacks;\n\nmod parser;\n\nmod simple_wrappers;\n\n\n\npub use callbacks::JavaScriptCallbacks;\n\npub use parser::Parser;\n\npub use simple_wrappers::{Comment, GCode, Line, Span, Word};\n\n\n\nuse gcode::Mnemonic;\n\n\n\npub(crate) fn mnemonic_letter(m: Mnemonic) -> char {\n\n match m {\n\n Mnemonic::General => 'G',\n\n Mnemonic::Miscellaneous => 'M',\n\n Mnemonic::ProgramNumber => 'O',\n\n Mnemonic::ToolChange => 'T',\n\n }\n\n}", "file_path": "wasm/rust/lib.rs", "rank": 71, "score": 21.608506646800148 }, { "content": " pub fn new(src: &'input str, callbacks: C) -> Self {\n\n let tokens = Lexer::new(src);\n\n let atoms = WordsOrComments::new(tokens);\n\n let lines = Lines::new(atoms, callbacks);\n\n Parser { lines }\n\n }\n\n}\n\n\n\nimpl<'input, B> From<&'input str> for Parser<'input, Nop, B> {\n\n fn from(src: &'input str) -> Self { Parser::new(src, Nop) }\n\n}\n\n\n\nimpl<'input, C: Callbacks, B: Buffers<'input>> Iterator\n\n for Parser<'input, C, B>\n\n{\n\n type Item = Line<'input, B>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> { self.lines.next() }\n\n}\n\n\n", "file_path": "gcode/src/parser.rs", "rank": 72, "score": 21.55350904879243 }, { "content": " fn peek(&self) -> Option<TokenType> {\n\n 
self.rest().chars().next().map(TokenType::from)\n\n }\n\n}\n\n\n\nimpl<'input> From<&'input str> for Lexer<'input> {\n\n fn from(other: &'input str) -> Lexer<'input> { Lexer::new(other) }\n\n}\n\n\n\nimpl<'input> Iterator for Lexer<'input> {\n\n type Item = Token<'input>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n const MSG: &str =\n\n \"This should be unreachable, we've already done a bounds check\";\n\n self.skip_whitespace();\n\n\n\n let start = self.current_position;\n\n let line = self.current_line;\n\n\n", "file_path": "gcode/src/lexer.rs", "rank": 73, "score": 21.425241525857846 }, { "content": " while let Some(kind) = self.peek() {\n\n if kind != TokenType::Unknown && self.current_position != start {\n\n // we've finished processing some garbage\n\n let end = self.current_position;\n\n return Some(Token {\n\n kind: TokenType::Unknown,\n\n value: &self.src[start..end],\n\n span: Span::new(start, end, line),\n\n });\n\n }\n\n\n\n match kind {\n\n TokenType::Comment => {\n\n return Some(self.tokenize_comment().expect(MSG))\n\n },\n\n TokenType::Letter => {\n\n return Some(self.tokenize_letter().expect(MSG))\n\n },\n\n TokenType::Number => {\n\n return Some(self.tokenize_number().expect(MSG))\n", "file_path": "gcode/src/lexer.rs", "rank": 74, "score": 21.333326229473034 }, { "content": " fn as_slice(&self) -> &[T] { &self }\n\n }\n\n}\n\n\n\n/// An error returned when [`Buffer::try_push()`] fails.\n\n///\n\n/// When a [`Buffer`] can't add an item, it will use [`CapacityError`] to pass\n\n/// the original item back to the caller.\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct CapacityError<T>(pub T);\n\n\n\nimpl<T: Debug> Display for CapacityError<T> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n write!(f, \"insufficient capacity\")\n\n }\n\n}\n\n\n\nwith_std! 
{\n\n impl<T: Debug> std::error::Error for CapacityError<T> {}\n\n}\n", "file_path": "gcode/src/buffers.rs", "rank": 75, "score": 21.26944249931261 }, { "content": "use crate::{\n\n buffers::{Buffer, CapacityError, DefaultArguments},\n\n Span, Word,\n\n};\n\nuse core::fmt::{self, Debug, Display, Formatter};\n\n\n\n/// The general category for a [`GCode`].\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\n#[cfg_attr(\n\n feature = \"serde-1\",\n\n derive(serde_derive::Serialize, serde_derive::Deserialize)\n\n)]\n\n#[repr(C)]\n\npub enum Mnemonic {\n\n /// Preparatory commands, often telling the controller what kind of motion\n\n /// or offset is desired.\n\n General,\n\n /// Auxilliary commands.\n\n Miscellaneous,\n\n /// Used to give the current program a unique \"name\".\n", "file_path": "gcode/src/gcode.rs", "rank": 76, "score": 21.147960316944598 }, { "content": " },\n\n }\n\n }\n\n\n\n fn handle_broken_word(&mut self, token: Token<'_>) {\n\n if token.kind == TokenType::Letter {\n\n self.callbacks\n\n .letter_without_a_number(token.value, token.span);\n\n } else {\n\n self.callbacks\n\n .number_without_a_letter(token.value, token.span);\n\n }\n\n }\n\n\n\n fn on_arg_push_error(&mut self, gcode: &GCode<B::Arguments>, arg: Word) {\n\n self.callbacks.gcode_argument_buffer_overflowed(\n\n gcode.mnemonic(),\n\n gcode.major_number(),\n\n gcode.minor_number(),\n\n arg,\n", "file_path": "gcode/src/parser.rs", "rank": 77, "score": 20.840640605191982 }, { "content": " &self.src[self.current_position..]\n\n }\n\n }\n\n\n\n fn skip_whitespace(&mut self) { let _ = self.chomp(char::is_whitespace); }\n\n\n\n fn tokenize_comment(&mut self) -> Option<Token<'input>> {\n\n let start = self.current_position;\n\n let line = self.current_line;\n\n\n\n if self.rest().starts_with(';') {\n\n // the comment is every character from ';' to '\\n' or EOF\n\n let comment = self.chomp(|c| c != '\\n').unwrap_or(\"\");\n\n let end = self.current_position;\n\n\n\n Some(Token {\n\n kind: 
TokenType::Comment,\n\n value: comment,\n\n span: Span { start, end, line },\n\n })\n", "file_path": "gcode/src/lexer.rs", "rank": 78, "score": 20.616828348772536 }, { "content": " /// A [`Buffers`] implementation which uses [`std::vec::Vec`] for storing items.\n\n ///\n\n /// In terms of memory usage, this has the potential to use a lot less overall\n\n /// than something like [`SmallFixedBuffers`] because we've traded deterministic\n\n /// memory usage for only allocating memory when it is required.\n\n #[derive(Debug, Copy, Clone, PartialEq)]\n\n pub enum VecBuffers {}\n\n\n\n impl<'input> Buffers<'input> for VecBuffers {\n\n type Arguments = DefaultArguments;\n\n type Commands = Vec<GCode<Self::Arguments>>;\n\n type Comments = Vec<Comment<'input>>;\n\n }\n\n\n\n impl<T> Buffer<T> for Vec<T> {\n\n fn try_push(&mut self, item: T) -> Result<(), CapacityError<T>> {\n\n self.push(item);\n\n Ok(())\n\n }\n\n\n", "file_path": "gcode/src/buffers.rs", "rank": 79, "score": 20.2316139934304 }, { "content": " if arg.letter.to_ascii_lowercase() == letter {\n\n return Some(arg.value);\n\n }\n\n }\n\n\n\n None\n\n }\n\n}\n\n\n\nimpl<A: Buffer<Word>> Extend<Word> for GCode<A> {\n\n fn extend<I: IntoIterator<Item = Word>>(&mut self, words: I) {\n\n for word in words {\n\n if self.push_argument(word).is_err() {\n\n // we can't add any more arguments\n\n return;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "gcode/src/gcode.rs", "rank": 80, "score": 19.973971793637833 }, { "content": "\n\n#[wasm_bindgen]\n\nimpl Comment {\n\n #[wasm_bindgen(getter)]\n\n pub fn text(&self) -> JsValue { JsValue::from_str(&self.text) }\n\n}\n\n\n\nimpl<'a> From<gcode::Comment<'a>> for Comment {\n\n fn from(other: gcode::Comment<'a>) -> Self {\n\n Comment {\n\n text: other.value.to_string(),\n\n span: Span(other.span),\n\n }\n\n }\n\n}", "file_path": "wasm/rust/simple_wrappers.rs", "rank": 81, "score": 19.854375142796023 }, { "content": " }\n\n\n\n match self.atoms.next().expect(\"unreachable\") {\n\n 
Atom::Unknown(token) => {\n\n self.callbacks.unknown_content(token.value, token.span)\n\n },\n\n Atom::Comment(comment) => {\n\n if let Err(e) = line.push_comment(comment) {\n\n self.on_comment_push_error(e.0);\n\n }\n\n },\n\n // line numbers are annoying, so handle them separately\n\n Atom::Word(word) if word.letter.to_ascii_lowercase() == 'n' => {\n\n self.handle_line_number(\n\n word,\n\n &mut line,\n\n temp_gcode.is_some(),\n\n );\n\n },\n\n Atom::Word(word) => {\n", "file_path": "gcode/src/parser.rs", "rank": 82, "score": 19.800429030096858 }, { "content": " match self.last_gcode_type {\n\n Some(ty) => {\n\n let mut new_gcode = GCode::new_with_argument_buffer(\n\n Mnemonic::for_letter(ty.letter).unwrap(),\n\n ty.value,\n\n ty.span,\n\n B::Arguments::default(),\n\n );\n\n if let Err(e) = new_gcode.push_argument(word) {\n\n self.on_arg_push_error(&new_gcode, e.0);\n\n }\n\n *temp_gcode = Some(new_gcode);\n\n },\n\n // oh well, you can't say we didn't try...\n\n None => {\n\n self.callbacks.argument_without_a_command(\n\n word.letter,\n\n word.value,\n\n word.span,\n\n );\n", "file_path": "gcode/src/parser.rs", "rank": 83, "score": 19.619408470585224 }, { "content": "#[derive(Debug)]\n\npub struct GCode(gcode::GCode);\n\n\n\n#[wasm_bindgen]\n\nimpl GCode {\n\n #[wasm_bindgen(getter)]\n\n pub fn mnemonic(&self) -> char { crate::mnemonic_letter(self.0.mnemonic()) }\n\n\n\n #[wasm_bindgen(getter)]\n\n pub fn number(&self) -> f32 {\n\n self.0.major_number() as f32 + (self.0.minor_number() as f32) / 10.0\n\n }\n\n\n\n #[wasm_bindgen(getter)]\n\n pub fn span(&self) -> Span {\n\n self.0.span().into()\n\n }\n\n\n\n pub fn num_arguments(&self) -> usize {\n\n self.0.arguments().len()\n", "file_path": "wasm/rust/simple_wrappers.rs", "rank": 84, "score": 19.54672807973178 }, { "content": " fn comment_buffer_overflow(&mut self, comment: gcode::Comment) {\n\n JavaScriptCallbacks::comment_buffer_overflow(\n\n self,\n\n comment.value,\n\n comment.span.into(),\n\n );\n\n 
}\n\n\n\n fn unexpected_line_number(&mut self, line_number: f32, span: gcode::Span) {\n\n JavaScriptCallbacks::unexpected_line_number(\n\n self,\n\n line_number,\n\n span.into(),\n\n );\n\n }\n\n\n\n fn argument_without_a_command(\n\n &mut self,\n\n letter: char,\n\n value: f32,\n", "file_path": "wasm/rust/callbacks.rs", "rank": 85, "score": 19.43208894193832 }, { "content": "use wasm_bindgen::prelude::{wasm_bindgen, JsValue};\n\n\n\n#[wasm_bindgen]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Span(gcode::Span);\n\n\n\n#[wasm_bindgen]\n\nimpl Span {\n\n #[wasm_bindgen(getter)]\n\n pub fn start(&self) -> usize { self.0.start }\n\n\n\n #[wasm_bindgen(getter)]\n\n pub fn end(&self) -> usize { self.0.end }\n\n\n\n #[wasm_bindgen(getter)]\n\n pub fn line(&self) -> usize { self.0.line }\n\n}\n\n\n\nimpl From<gcode::Span> for Span {\n\n fn from(other: gcode::Span) -> Span {\n", "file_path": "wasm/rust/simple_wrappers.rs", "rank": 86, "score": 19.274283750748832 }, { "content": " }\n\n\n\n fn argument_without_a_command(\n\n &mut self,\n\n letter: char,\n\n value: f32,\n\n span: Span,\n\n ) {\n\n panic!(\n\n \"Argument without a command at {:?}: {}{}\",\n\n span, letter, value\n\n );\n\n }\n\n\n\n fn number_without_a_letter(&mut self, value: &str, span: Span) {\n\n panic!(\"Number without a letter at {:?}: {}\", span, value);\n\n }\n\n\n\n fn letter_without_a_number(&mut self, value: &str, span: Span) {\n\n panic!(\"Letter without a number at {:?}: {}\", span, value);\n\n }\n\n}\n\n\n", "file_path": "gcode/tests/smoke_test.rs", "rank": 87, "score": 19.118495023858696 }, { "content": " }\n\n self\n\n }\n\n\n\n /// Get the value for a particular argument.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// # use gcode::{GCode, Mnemonic, Span, Word};\n\n /// let gcode = GCode::new(Mnemonic::General, 1.0, Span::PLACEHOLDER)\n\n /// .with_argument(Word::new('X', 30.0, Span::PLACEHOLDER))\n\n /// .with_argument(Word::new('Y', -3.14, Span::PLACEHOLDER));\n\n ///\n\n /// 
assert_eq!(gcode.value_for('Y'), Some(-3.14));\n\n /// ```\n\n pub fn value_for(&self, letter: char) -> Option<f32> {\n\n let letter = letter.to_ascii_lowercase();\n\n\n\n for arg in self.arguments() {\n", "file_path": "gcode/src/gcode.rs", "rank": 88, "score": 18.79037341834489 }, { "content": "use crate::{\n\n buffers::{Buffers, DefaultBuffers},\n\n lexer::{Lexer, Token, TokenType},\n\n words::{Atom, Word, WordsOrComments},\n\n Callbacks, Comment, GCode, Line, Mnemonic, Nop,\n\n};\n\nuse core::{iter::Peekable, marker::PhantomData};\n\n\n\n/// Parse each [`GCode`] in some text, ignoring any errors that may occur or\n\n/// [`Comment`]s that are found.\n\n///\n\n/// This function is probably what you are looking for if you just want to read\n\n/// the [`GCode`] commands in a program. If more detailed information is needed,\n\n/// have a look at [`full_parse_with_callbacks()`].\n", "file_path": "gcode/src/parser.rs", "rank": 89, "score": 18.556870241386974 }, { "content": " }\n\n}\n\n\n\nimpl<A: Buffer<Word>> GCode<A> {\n\n /// Create a new [`GCode`] which uses a custom [`Buffer`].\n\n pub fn new_with_argument_buffer(\n\n mnemonic: Mnemonic,\n\n number: f32,\n\n span: Span,\n\n arguments: A,\n\n ) -> Self {\n\n GCode {\n\n mnemonic,\n\n number,\n\n span,\n\n arguments,\n\n }\n\n }\n\n\n\n /// The overall category this [`GCode`] belongs to.\n", "file_path": "gcode/src/gcode.rs", "rank": 90, "score": 18.170594955981898 }, { "content": "#[cfg_attr(\n\n feature = \"serde-1\",\n\n derive(serde_derive::Serialize, serde_derive::Deserialize)\n\n)]\n\npub struct GCode<A = DefaultArguments> {\n\n mnemonic: Mnemonic,\n\n number: f32,\n\n arguments: A,\n\n span: Span,\n\n}\n\n\n\nimpl GCode {\n\n /// Create a new [`GCode`] which uses the [`DefaultArguments`] buffer.\n\n pub fn new(mnemonic: Mnemonic, number: f32, span: Span) -> Self {\n\n GCode {\n\n mnemonic,\n\n number,\n\n span,\n\n arguments: DefaultArguments::default(),\n\n }\n", "file_path": "gcode/src/gcode.rs", "rank": 
91, "score": 18.120326898164038 }, { "content": "#[macro_use]\n\nmod macros;\n\n\n\npub mod buffers;\n\nmod callbacks;\n\nmod comment;\n\nmod gcode;\n\nmod lexer;\n\nmod line;\n\nmod parser;\n\nmod span;\n\nmod words;\n\n\n\npub use crate::{\n\n callbacks::{Callbacks, Nop},\n\n comment::Comment,\n\n gcode::{GCode, Mnemonic},\n\n line::Line,\n\n parser::{full_parse_with_callbacks, parse, Parser},\n\n span::Span,\n\n words::Word,\n\n};\n", "file_path": "gcode/src/lib.rs", "rank": 92, "score": 18.110096716003905 }, { "content": "}\n\n\n\nimpl<'input, I, C, B> Iterator for Lines<'input, I, C, B>\n\nwhere\n\n I: Iterator<Item = Atom<'input>> + 'input,\n\n C: Callbacks,\n\n B: Buffers<'input>,\n\n{\n\n type Item = Line<'input, B>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let mut line = Line::default();\n\n // we need a scratch space for the gcode we're in the middle of\n\n // constructing\n\n let mut temp_gcode = None;\n\n\n\n while let Some(next_line) = self.next_line_number() {\n\n if !line.is_empty() && next_line != line.span().line {\n\n // we've started the next line\n\n break;\n", "file_path": "gcode/src/parser.rs", "rank": 93, "score": 17.585706268089993 }, { "content": "use crate::{Span, Word};\n\nuse gcode::{Callbacks, Mnemonic};\n\nuse wasm_bindgen::prelude::wasm_bindgen;\n\n\n\n#[wasm_bindgen]\n\nextern \"C\" {\n\n pub type JavaScriptCallbacks;\n\n\n\n #[wasm_bindgen(method)]\n\n fn unknown_content(this: &JavaScriptCallbacks, text: &str, span: Span);\n\n\n\n #[wasm_bindgen(method)]\n\n fn gcode_buffer_overflowed(\n\n this: &JavaScriptCallbacks,\n\n mnemonic: char,\n\n number: f32,\n\n span: Span,\n\n );\n\n\n\n #[wasm_bindgen(method)]\n", "file_path": "wasm/rust/callbacks.rs", "rank": 94, "score": 17.543572753882565 }, { "content": " comment: Comment<'input>,\n\n ) -> Result<(), CapacityError<Comment<'input>>> {\n\n let span = self.span.merge(comment.span);\n\n self.comments.try_push(comment)?;\n\n self.span = span;\n\n Ok(())\n\n }\n\n\n\n /// 
Does the [`Line`] contain anything at all?\n\n pub fn is_empty(&self) -> bool {\n\n self.gcodes.as_slice().is_empty()\n\n && self.comments.as_slice().is_empty()\n\n && self.line_number().is_none()\n\n }\n\n\n\n /// Try to get the line number, if there was one.\n\n pub fn line_number(&self) -> Option<Word> { self.line_number }\n\n\n\n /// Set the [`Line::line_number()`].\n\n pub fn set_line_number<W: Into<Option<Word>>>(&mut self, line_number: W) {\n", "file_path": "gcode/src/line.rs", "rank": 95, "score": 17.30060654028854 }, { "content": " kind: TokenType::Unknown,\n\n span: Span::new(0, 7, 0),\n\n };\n\n\n\n let got = lexer.next().unwrap();\n\n\n\n assert_eq!(got, expected);\n\n assert_eq!(lexer.current_position, 7);\n\n let next = lexer.next().unwrap();\n\n assert_eq!(next.value, \"x\");\n\n }\n\n\n\n #[test]\n\n fn tokenize_a_letter() {\n\n let mut lexer = Lexer::new(\"asd\\nf\");\n\n\n\n let got = lexer.next().unwrap();\n\n\n\n assert_eq!(got.value, \"a\");\n\n assert_eq!(got.kind, TokenType::Letter);\n", "file_path": "gcode/src/lexer.rs", "rank": 96, "score": 17.23993170117668 }, { "content": " assert_eq!(lexer.current_position, newline);\n\n }\n\n\n\n #[test]\n\n fn tokenize_a_parens_comment() {\n\n let mut lexer = Lexer::new(\"( this is a comment) but this is not\");\n\n let comment = \"( this is a comment)\";\n\n\n\n let got = lexer.next().unwrap();\n\n\n\n assert_eq!(got.value, comment);\n\n assert_eq!(got.kind, TokenType::Comment);\n\n assert_eq!(\n\n got.span,\n\n Span {\n\n start: 0,\n\n end: comment.len(),\n\n line: 0\n\n }\n\n );\n", "file_path": "gcode/src/lexer.rs", "rank": 97, "score": 17.084805645657372 }, { "content": " }\n\n\n\n pub fn get_argument(&self, index: usize) -> Option<Word> {\n\n self.0.arguments().get(index).copied().map(|w| Word::from(w))\n\n }\n\n}\n\n\n\nimpl From<gcode::GCode> for GCode {\n\n fn from(other: gcode::GCode) -> GCode {\n\n GCode(other)\n\n }\n\n}\n\n\n\n#[wasm_bindgen]\n\n#[derive(Debug)]\n\npub struct Comment {\n\n 
text: String,\n\n #[wasm_bindgen(readonly)]\n\n pub span: Span,\n\n}\n", "file_path": "wasm/rust/simple_wrappers.rs", "rank": 98, "score": 16.605215407281854 }, { "content": " 't' => Some(Mnemonic::ToolChange),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl Display for Mnemonic {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Mnemonic::General => write!(f, \"G\"),\n\n Mnemonic::Miscellaneous => write!(f, \"M\"),\n\n Mnemonic::ProgramNumber => write!(f, \"O\"),\n\n Mnemonic::ToolChange => write!(f, \"T\"),\n\n }\n\n }\n\n}\n\n\n\n/// The in-memory representation of a single command in the G-code language\n\n/// (e.g. `\"G01 X50.0 Y-20.0\"`).\n\n#[derive(Clone)]\n", "file_path": "gcode/src/gcode.rs", "rank": 99, "score": 16.594031037173835 } ]
Rust
src/lib.rs
JIghtuse/rs-release
e96f2441c02ed1d54ee939856fca87c9bc2b7459
#![deny(missing_docs)] use std::collections::HashMap; use std::convert::From; use std::error::Error; use std::fmt; use std::fs::File; use std::io::{BufReader, BufRead}; use std::path::Path; use std::borrow::Cow; const PATHS: [&'static str; 2] = ["/etc/os-release", "/usr/lib/os-release"]; const QUOTES: [&'static str; 2] = ["\"", "'"]; const COMMON_KEYS: [&'static str; 16] = ["ANSI_COLOR", "BUG_REPORT_URL", "BUILD_ID", "CPE_NAME", "HOME_URL", "ID", "ID_LIKE", "NAME", "PRETTY_NAME", "PRIVACY_POLICY_URL", "SUPPORT_URL", "VARIANT", "VARIANT_ID", "VERSION", "VERSION_CODENAME", "VERSION_ID"]; #[derive(Debug)] pub enum OsReleaseError { Io(std::io::Error), NoFile, ParseError, } impl PartialEq for OsReleaseError { fn eq(&self, other: &OsReleaseError) -> bool { match (self, other) { (&OsReleaseError::Io(_), &OsReleaseError::Io(_)) | (&OsReleaseError::NoFile, &OsReleaseError::NoFile) | (&OsReleaseError::ParseError, &OsReleaseError::ParseError) => true, _ => false, } } } impl fmt::Display for OsReleaseError { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match *self { OsReleaseError::Io(ref inner) => inner.fmt(fmt), OsReleaseError::NoFile => write!(fmt, "{}", self.description()), OsReleaseError::ParseError => write!(fmt, "{}", self.description()), } } } impl Error for OsReleaseError { fn description(&self) -> &str { match *self { OsReleaseError::Io(ref err) => err.description(), OsReleaseError::NoFile => "Failed to find os-release file", OsReleaseError::ParseError => "File is malformed", } } fn cause(&self) -> Option<&Error> { match *self { OsReleaseError::Io(ref err) => Some(err), OsReleaseError::NoFile | OsReleaseError::ParseError => None, } } } impl From<std::io::Error> for OsReleaseError { fn from(err: std::io::Error) -> OsReleaseError { OsReleaseError::Io(err) } } pub type Result<T> = std::result::Result<T, OsReleaseError>; fn trim_quotes(s: &str) -> &str { if QUOTES.iter().any(|q| s.starts_with(q) && s.ends_with(q)) { &s[1..s.len() - 1] } else { s } } fn 
extract_variable_and_value(s: &str) -> Result<(Cow<'static, str>, String)> { if let Some(equal) = s.chars().position(|c| c == '=') { let var = &s[..equal]; let var = var.trim(); let val = &s[equal + 1..]; let val = trim_quotes(val.trim()).to_string(); if let Some(key) = COMMON_KEYS.iter().find(|&k| *k == var) { Ok((Cow::Borrowed(key), val)) } else { Ok((Cow::Owned(var.to_string()), val)) } } else { Err(OsReleaseError::ParseError) } } pub fn parse_os_release<P: AsRef<Path>>(path: P) -> Result<HashMap<Cow<'static, str>, String>> { let mut os_release = HashMap::new(); let file = try!(File::open(path)); let reader = BufReader::new(file); for line in reader.lines() { let line = try!(line); let line = line.trim(); if line.starts_with('#') || line.is_empty() { continue; } let var_val = try!(extract_variable_and_value(line)); os_release.insert(var_val.0, var_val.1); } Ok(os_release) } pub fn parse_os_release_str(data: &str) -> Result<HashMap<Cow<'static, str>, String>> { let mut os_release = HashMap::new(); for line in data.split('\n') { let line = line.trim(); if line.starts_with('#') || line.is_empty() { continue; } let var_val = try!(extract_variable_and_value(line)); os_release.insert(var_val.0, var_val.1); } Ok(os_release) } pub fn get_os_release() -> Result<HashMap<Cow<'static, str>, String>> { for file in &PATHS { if let Ok(os_release) = parse_os_release(file) { return Ok(os_release); } } Err(OsReleaseError::NoFile) }
#![deny(missing_docs)] use std::collections::HashMap; use std::convert::From; use std::error::Error; use std::fmt; use std::fs::File; use std::io::{BufReader, BufRead}; use std::path::Path; use std::borrow::Cow; const PATHS: [&'static str; 2] = ["/etc/os-release", "/usr/lib/os-release"]; const QUOTES: [&'static str; 2] = ["\"", "'"]; const COMMON_KEYS: [&'static str; 16] = ["ANSI_COLOR", "BUG_REPORT_URL", "BUILD_ID", "CPE_NAME", "HOME_URL", "ID", "ID_LIKE", "NAME", "PRETTY_NAME", "PRIVACY_POLICY_URL", "SUPPORT_URL", "VARIANT", "VARIANT_ID", "VERSION", "VERSION_CODENAME", "VERSION_ID"]; #[derive(Debug)] pub enum OsReleaseError { Io(std::io::Error), NoFile, ParseError, } impl PartialEq for OsReleaseError { fn eq(&self, other: &OsReleaseError) -> bool { match (self, other) { (&OsReleaseError::Io(_), &OsReleaseError::Io(_)) | (&OsReleaseError::NoFile, &OsReleaseError::NoFile) | (&OsReleaseError::ParseError, &OsReleaseError::ParseError) => true, _ => false, } } } impl fmt::Display for OsReleaseError { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match *self { OsReleaseError::Io(ref inner) => inner.fmt(fmt), OsReleaseError::NoFile => write!(fmt, "{}", self.description()), OsReleaseError::ParseError => write!(fmt, "{}", self.description()), } } } impl Error for OsReleaseError { fn description(&self) -> &str { match *self { OsReleaseError::Io(ref err) => err.description(), OsReleaseError::NoFile => "Failed to find os-release file", OsReleaseError::ParseError => "File is malformed", } } fn cause(&self) -> Option<&Error> { match *self { OsReleaseError::Io(ref err) => Some(err), OsReleaseError::NoFile | OsReleaseError::ParseError => None, } } } impl From<std::io::Error> for OsReleaseError { fn from(err: std::io::Error) -> OsReleaseError { OsReleaseError::Io(err) } } pub type Result<T> = std::result::Result<T, OsReleaseError>;
fn extract_variable_and_value(s: &str) -> Result<(Cow<'static, str>, String)> { if let Some(equal) = s.chars().position(|c| c == '=') { let var = &s[..equal]; let var = var.trim(); let val = &s[equal + 1..]; let val = trim_quotes(val.trim()).to_string(); if let Some(key) = COMMON_KEYS.iter().find(|&k| *k == var) { Ok((Cow::Borrowed(key), val)) } else { Ok((Cow::Owned(var.to_string()), val)) } } else { Err(OsReleaseError::ParseError) } } pub fn parse_os_release<P: AsRef<Path>>(path: P) -> Result<HashMap<Cow<'static, str>, String>> { let mut os_release = HashMap::new(); let file = try!(File::open(path)); let reader = BufReader::new(file); for line in reader.lines() { let line = try!(line); let line = line.trim(); if line.starts_with('#') || line.is_empty() { continue; } let var_val = try!(extract_variable_and_value(line)); os_release.insert(var_val.0, var_val.1); } Ok(os_release) } pub fn parse_os_release_str(data: &str) -> Result<HashMap<Cow<'static, str>, String>> { let mut os_release = HashMap::new(); for line in data.split('\n') { let line = line.trim(); if line.starts_with('#') || line.is_empty() { continue; } let var_val = try!(extract_variable_and_value(line)); os_release.insert(var_val.0, var_val.1); } Ok(os_release) } pub fn get_os_release() -> Result<HashMap<Cow<'static, str>, String>> { for file in &PATHS { if let Ok(os_release) = parse_os_release(file) { return Ok(os_release); } } Err(OsReleaseError::NoFile) }
fn trim_quotes(s: &str) -> &str { if QUOTES.iter().any(|q| s.starts_with(q) && s.ends_with(q)) { &s[1..s.len() - 1] } else { s } }
function_block-full_function
[ { "content": "#[derive(Debug)]\n\nenum Error {\n\n UnknownOs,\n\n ReadError,\n\n}\n\n\n", "file_path": "examples/who_eats_my_hard_drive.rs", "rank": 5, "score": 70185.70182749387 }, { "content": "fn get_os_id() -> Result<String, Error> {\n\n match rs_release::get_os_release() {\n\n Err(_) => Err(Error::ReadError),\n\n Ok(mut os_release) => os_release.remove(\"ID\").ok_or(Error::UnknownOs),\n\n }\n\n}\n\n\n", "file_path": "examples/who_eats_my_hard_drive.rs", "rank": 6, "score": 60117.5735267822 }, { "content": "#[test]\n\nfn fails_on_parse_errors() {\n\n for file in &[\"tests/data/os-release-malformed-no-equal\"] {\n\n assert_eq!(Err(OsReleaseError::ParseError), parse_os_release(file));\n\n }\n\n}\n\n\n", "file_path": "tests/test_os_release.rs", "rank": 7, "score": 56109.868540700314 }, { "content": "#[test]\n\nfn fails_on_io_errors() {\n\n for file in &[\"\", \"/etc/non_existing_file\", \"/etc/shadow\"] {\n\n match parse_os_release(file) {\n\n Err(OsReleaseError::Io(_)) => {}\n\n err => panic!(\"Expected OsReleaseError::Io, but instead got {:?}\", err),\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/test_os_release.rs", "rank": 8, "score": 56109.86854070031 }, { "content": "#[test]\n\nfn trims_quotes() {\n\n let path = \"tests/data/os-release-quotes-two-env\";\n\n let os_release = parse_os_release(path);\n\n assert!(os_release.is_ok());\n\n let os_release = os_release.unwrap();\n\n assert_eq!(2, os_release.len());\n\n assert_eq!(\"Fedora 24 (Workstation Edition)\", os_release[\"PRETTY_NAME\"]);\n\n assert_eq!(\"cpe:/o:fedoraproject:fedora:24\", os_release[\"CPE_NAME\"]);\n\n}\n\n\n", "file_path": "tests/test_os_release.rs", "rank": 9, "score": 41704.76867300563 }, { "content": "#[test]\n\nfn parses_from_str() {\n\n let data = r\"\n\n\n\n # comment\n\n\n\n QUOTED_NAME = 'Fedora 24 (Workstation Edition)'\n\n\n\n PRETTY_NAME = Fedora 24 (Workstation Edition)\n\n\n\nCPE_NAME= cpe:/o:fedoraproject:fedora:24 \";\n\n let os_release = parse_os_release_str(data);\n\n 
assert!(os_release.is_ok());\n\n let os_release = os_release.unwrap();\n\n assert_eq!(3, os_release.len());\n\n assert_eq!(\"Fedora 24 (Workstation Edition)\", os_release[\"PRETTY_NAME\"]);\n\n assert_eq!(\"Fedora 24 (Workstation Edition)\", os_release[\"QUOTED_NAME\"]);\n\n assert_eq!(\"cpe:/o:fedoraproject:fedora:24\", os_release[\"CPE_NAME\"]);\n\n\n\n let os_release_malformed = parse_os_release_str(\"SOMETHING\");\n\n assert_eq!(Err(OsReleaseError::ParseError), os_release_malformed);\n\n\n\n let os_release_empty = parse_os_release_str(\"\");\n\n assert!(os_release_empty.is_ok());\n\n let os_release_empty = os_release_empty.unwrap();\n\n assert_eq!(0, os_release_empty.len());\n\n}\n", "file_path": "tests/test_os_release.rs", "rank": 10, "score": 40913.7665756873 }, { "content": "fn main() {\n\n let mut args = env::args();\n\n\n\n let os_release = if let Some(os_release_path) = args.nth(1) {\n\n parse_os_release(os_release_path)\n\n } else {\n\n get_os_release()\n\n };\n\n\n\n match os_release {\n\n Ok(os_release) => {\n\n println!(\"Parsed os-release:\");\n\n for (k, v) in os_release {\n\n println!(\"{}={}\", k, v);\n\n }\n\n }\n\n Err(e) => println!(\"ERROR: {:?}\", e),\n\n }\n\n\n\n // You could also parse data from a string\n\n if let Ok(os_release) = parse_os_release_str(\"NAME = Fedora\") {\n\n println!(\"Parsed os-release from &str:\");\n\n for (k, v) in os_release {\n\n println!(\"{}={}\", k, v);\n\n }\n\n }\n\n}\n", "file_path": "examples/get_os_release.rs", "rank": 11, "score": 25580.978350129484 }, { "content": "fn main() {\n\n match get_os_id() {\n\n Ok(id) => {\n\n match id.as_str() {\n\n \"fedora\" => show_fedora_packages(),\n\n \"debian\" => show_debian_packages(),\n\n _ => println!(\"ERROR: {:?}\", Error::UnknownOs),\n\n }\n\n }\n\n Err(e) => println!(\"ERROR: {:?}\", e),\n\n }\n\n}\n", "file_path": "examples/who_eats_my_hard_drive.rs", "rank": 12, "score": 25580.978350129484 }, { "content": "#[test]\n\nfn parses_ok() {\n\n let path = 
\"tests/data/os-release-one-env\";\n\n let os_release = parse_os_release(path);\n\n assert!(os_release.is_ok());\n\n let os_release = os_release.unwrap();\n\n assert_eq!(1, os_release.len());\n\n assert_eq!(\"Fedora\", os_release[\"NAME\"]);\n\n}\n\n\n", "file_path": "tests/test_os_release.rs", "rank": 13, "score": 24994.186724800165 }, { "content": "#[test]\n\nfn ignores_comments() {\n\n let path = \"tests/data/os-release-comment\";\n\n let os_release = parse_os_release(path);\n\n assert!(os_release.is_ok());\n\n let os_release = os_release.unwrap();\n\n assert_eq!(0, os_release.len());\n\n}\n\n\n", "file_path": "tests/test_os_release.rs", "rank": 14, "score": 24994.186724800165 }, { "content": "#[test]\n\nfn trims_whitespace() {\n\n let path = \"tests/data/os-release-whitespace\";\n\n let os_release = parse_os_release(path);\n\n assert!(os_release.is_ok());\n\n let os_release = os_release.unwrap();\n\n assert_eq!(2, os_release.len());\n\n assert_eq!(\"Fedora 24 (Workstation Edition)\", os_release[\"PRETTY_NAME\"]);\n\n assert_eq!(\"cpe:/o:fedoraproject:fedora:24\", os_release[\"CPE_NAME\"]);\n\n}\n\n\n", "file_path": "tests/test_os_release.rs", "rank": 15, "score": 24994.186724800165 }, { "content": "// https://blog.tinned-software.net/show-installed-yum-packages-by-size/\n\nfn show_fedora_packages() {\n\n let mut command = Command::new(\"rpm\");\n\n\n\n command.arg(\"--query\");\n\n command.arg(\"--all\");\n\n command.arg(\"--queryformat\");\n\n command.arg(\"%10{size} - %-25{name} \\t %{version}\\n\");\n\n\n\n if let Err(e) = command.spawn() {\n\n println!(\"ERROR running rpm: {:?}\", e);\n\n }\n\n}\n\n\n", "file_path": "examples/who_eats_my_hard_drive.rs", "rank": 16, "score": 24456.51396227102 }, { "content": "// http://www.commandlinefu.com/commands/view/3842/list-your-largest-installed-packages-on-debianubuntu\n\nfn show_debian_packages() {\n\n let mut command = Command::new(\"dpkg-query\");\n\n\n\n command.arg(\"--show\");\n\n 
command.arg(\"--showformat\");\n\n command.arg(\"${Installed-Size}\\t${Package}\\n\");\n\n\n\n if let Err(e) = command.spawn() {\n\n println!(\"ERROR running dpkg-query: {:?}\", e);\n\n }\n\n}\n\n\n", "file_path": "examples/who_eats_my_hard_drive.rs", "rank": 17, "score": 24456.51396227102 }, { "content": "extern crate rs_release;\n\n\n\nuse rs_release::{OsReleaseError, parse_os_release, parse_os_release_str};\n\n\n\n#[test]\n", "file_path": "tests/test_os_release.rs", "rank": 22, "score": 4.201284184643783 }, { "content": "extern crate rs_release;\n\n\n\nuse rs_release::{get_os_release, parse_os_release, parse_os_release_str};\n\nuse std::env;\n\n\n", "file_path": "examples/get_os_release.rs", "rank": 23, "score": 3.2287125696874437 }, { "content": "### v0.1.7 (2017-08-04)\n\n\n\n* Implement std::error::Error for OsReleaseError\n", "file_path": "CHANGELOG.md", "rank": 25, "score": 2.4686142395539354 }, { "content": "extern crate rs_release;\n\n\n\nuse std::process::Command;\n\n\n\n#[derive(Debug)]\n", "file_path": "examples/who_eats_my_hard_drive.rs", "rank": 26, "score": 1.714393990861381 } ]
Rust
crates/tm4c129x/src/emac0/vlantg.rs
m-labs/ti2svd
30145706b658136c35c90290701de3f02a4b8ef2
#[doc = "Reader of register VLANTG"] pub type R = crate::R<u32, super::VLANTG>; #[doc = "Writer for register VLANTG"] pub type W = crate::W<u32, super::VLANTG>; #[doc = "Register VLANTG `reset()`'s with value 0"] impl crate::ResetValue for super::VLANTG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `VL`"] pub type VL_R = crate::R<u16, u16>; #[doc = "Write proxy for field `VL`"] pub struct VL_W<'a> { w: &'a mut W, } impl<'a> VL_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !0xffff) | ((value as u32) & 0xffff); self.w } } #[doc = "Reader of field `ETV`"] pub type ETV_R = crate::R<bool, bool>; #[doc = "Write proxy for field `ETV`"] pub struct ETV_W<'a> { w: &'a mut W, } impl<'a> ETV_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "Reader of field `VTIM`"] pub type VTIM_R = crate::R<bool, bool>; #[doc = "Write proxy for field `VTIM`"] pub struct VTIM_W<'a> { w: &'a mut W, } impl<'a> VTIM_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17); self.w } } #[doc = "Reader of field `ESVL`"] pub type ESVL_R = crate::R<bool, bool>; #[doc = "Write proxy for field `ESVL`"] pub struct ESVL_W<'a> { w: &'a mut W, } impl<'a> 
ESVL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18); self.w } } #[doc = "Reader of field `VTHM`"] pub type VTHM_R = crate::R<bool, bool>; #[doc = "Write proxy for field `VTHM`"] pub struct VTHM_W<'a> { w: &'a mut W, } impl<'a> VTHM_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19); self.w } } impl R { #[doc = "Bits 0:15 - VLAN Tag Identifier for Receive Frames"] #[inline(always)] pub fn vl(&self) -> VL_R { VL_R::new((self.bits & 0xffff) as u16) } #[doc = "Bit 16 - Enable 12-Bit VLAN Tag Comparison"] #[inline(always)] pub fn etv(&self) -> ETV_R { ETV_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 17 - VLAN Tag Inverse Match Enable"] #[inline(always)] pub fn vtim(&self) -> VTIM_R { VTIM_R::new(((self.bits >> 17) & 0x01) != 0) } #[doc = "Bit 18 - Enable S-VLAN"] #[inline(always)] pub fn esvl(&self) -> ESVL_R { ESVL_R::new(((self.bits >> 18) & 0x01) != 0) } #[doc = "Bit 19 - VLAN Tag Hash Table Match Enable"] #[inline(always)] pub fn vthm(&self) -> VTHM_R { VTHM_R::new(((self.bits >> 19) & 0x01) != 0) } } impl W { #[doc = "Bits 0:15 - VLAN Tag Identifier for Receive Frames"] #[inline(always)] pub fn vl(&mut self) -> VL_W { VL_W { w: self } } #[doc = "Bit 16 - Enable 12-Bit VLAN Tag Comparison"] #[inline(always)] pub fn etv(&mut self) -> ETV_W { ETV_W { w: self } } #[doc = 
"Bit 17 - VLAN Tag Inverse Match Enable"] #[inline(always)] pub fn vtim(&mut self) -> VTIM_W { VTIM_W { w: self } } #[doc = "Bit 18 - Enable S-VLAN"] #[inline(always)] pub fn esvl(&mut self) -> ESVL_W { ESVL_W { w: self } } #[doc = "Bit 19 - VLAN Tag Hash Table Match Enable"] #[inline(always)] pub fn vthm(&mut self) -> VTHM_W { VTHM_W { w: self } } }
#[doc = "Reader of register VLANTG"] pub type R = crate::R<u32, super::VLANTG>; #[doc = "Writer for register VLANTG"] pub type W = crate::W<u32, super::VLANTG>; #[doc = "Register VLANTG `reset()`'s with value 0"] impl crate::ResetValue for super::VLANTG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `VL`"] pub type VL_R = crate::R<u16, u16>; #[doc = "Write proxy for field `VL`"] pub struct VL_W<'a> { w: &'a mut W, } impl<'a> VL_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u16) -> &'a mut W { self.w.bits = (self.w.bits & !0xffff) | ((value as u32) & 0xffff); self.w } } #[doc = "Reader of field `ETV`"] pub type ETV_R = crate::R<bool, bool>; #[doc = "Write proxy for field `ETV`"] pub struct ETV_W<'a> { w: &'a mut W, } impl<'a> ETV_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 16)) | (((value as u32) & 0x01) << 16); self.w } } #[doc = "Reader of field `VTIM`"] pub type VTIM_R = crate::R<bool, bool>; #[doc = "Write proxy for field `VTIM`"] pub struct VTIM_W<'a> { w: &'a mut W, } impl<'a> VTIM_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17); self.w } } #[doc = "Reader of field `ESVL`"] pub type ESVL_R = crate::R<bool, bool>; #[doc = "Write proxy for field `ESVL`"] pub struct ESVL_W<'a> { w: &'a mut W, } impl<'a> 
ESVL_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(alway
elf) -> ETV_R { ETV_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 17 - VLAN Tag Inverse Match Enable"] #[inline(always)] pub fn vtim(&self) -> VTIM_R { VTIM_R::new(((self.bits >> 17) & 0x01) != 0) } #[doc = "Bit 18 - Enable S-VLAN"] #[inline(always)] pub fn esvl(&self) -> ESVL_R { ESVL_R::new(((self.bits >> 18) & 0x01) != 0) } #[doc = "Bit 19 - VLAN Tag Hash Table Match Enable"] #[inline(always)] pub fn vthm(&self) -> VTHM_R { VTHM_R::new(((self.bits >> 19) & 0x01) != 0) } } impl W { #[doc = "Bits 0:15 - VLAN Tag Identifier for Receive Frames"] #[inline(always)] pub fn vl(&mut self) -> VL_W { VL_W { w: self } } #[doc = "Bit 16 - Enable 12-Bit VLAN Tag Comparison"] #[inline(always)] pub fn etv(&mut self) -> ETV_W { ETV_W { w: self } } #[doc = "Bit 17 - VLAN Tag Inverse Match Enable"] #[inline(always)] pub fn vtim(&mut self) -> VTIM_W { VTIM_W { w: self } } #[doc = "Bit 18 - Enable S-VLAN"] #[inline(always)] pub fn esvl(&mut self) -> ESVL_W { ESVL_W { w: self } } #[doc = "Bit 19 - VLAN Tag Hash Table Match Enable"] #[inline(always)] pub fn vthm(&mut self) -> VTHM_W { VTHM_W { w: self } } }
s)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18); self.w } } #[doc = "Reader of field `VTHM`"] pub type VTHM_R = crate::R<bool, bool>; #[doc = "Write proxy for field `VTHM`"] pub struct VTHM_W<'a> { w: &'a mut W, } impl<'a> VTHM_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19); self.w } } impl R { #[doc = "Bits 0:15 - VLAN Tag Identifier for Receive Frames"] #[inline(always)] pub fn vl(&self) -> VL_R { VL_R::new((self.bits & 0xffff) as u16) } #[doc = "Bit 16 - Enable 12-Bit VLAN Tag Comparison"] #[inline(always)] pub fn etv(&s
random
[ { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "crates/tm4c129x/src/generic.rs", "rank": 0, "score": 159295.56070954952 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "crates/tm4c123x/src/generic.rs", "rank": 1, "score": 159295.56070954952 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n 
File::create(out.join(\"device.x\")).unwrap().write_all(include_bytes!(\"device.x\")).unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "crates/tm4c123x/build.rs", "rank": 2, "score": 61987.665827901175 }, { "content": "fn main() {\n\n if env::var_os(\"CARGO_FEATURE_RT\").is_some() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"device.x\")).unwrap().write_all(include_bytes!(\"device.x\")).unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=device.x\");\n\n }\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "crates/tm4c129x/build.rs", "rank": 3, "score": 61987.665827901175 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "crates/tm4c123x/src/generic.rs", "rank": 4, "score": 56875.064836524 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "crates/tm4c129x/src/generic.rs", "rank": 5, "score": 56875.064836524 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "crates/tm4c123x/src/generic.rs", "rank": 6, "score": 56863.05328766474 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": 
"crates/tm4c129x/src/generic.rs", "rank": 7, "score": 56863.05328766474 }, { "content": "#[doc = \"Reader of register BIT\"]\n\npub type R = crate::R<u32, super::BIT>;\n\n#[doc = \"Writer for register BIT\"]\n\npub type W = crate::W<u32, super::BIT>;\n\n#[doc = \"Register BIT `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::BIT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `BRP`\"]\n\npub type BRP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `BRP`\"]\n\npub struct BRP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BRP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/can0/bit_.rs", "rank": 9, "score": 55627.83727210846 }, { "content": "#[doc = \"Reader of register BIT\"]\n\npub type R = crate::R<u32, super::BIT>;\n\n#[doc = \"Writer for register BIT\"]\n\npub type W = crate::W<u32, super::BIT>;\n\n#[doc = \"Register BIT `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::BIT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `BRP`\"]\n\npub type BRP_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `BRP`\"]\n\npub struct BRP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BRP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/can0/bit_.rs", "rank": 10, "score": 55627.83727210846 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x3f) | ((value as u32) & 0x3f);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SJW`\"]\n\npub type SJW_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SJW`\"]\n\npub struct SJW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SJW_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = 
(self.w.bits & !(0x03 << 6)) | (((value as u32) & 0x03) << 6);\n\n self.w\n\n }\n\n}\n", "file_path": "crates/tm4c123x/src/can0/bit_.rs", "rank": 14, "score": 55611.56714703634 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x3f) | ((value as u32) & 0x3f);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SJW`\"]\n\npub type SJW_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SJW`\"]\n\npub struct SJW_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SJW_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 6)) | (((value as u32) & 0x03) << 6);\n\n self.w\n\n }\n\n}\n", "file_path": "crates/tm4c129x/src/can0/bit_.rs", "rank": 15, "score": 55611.56714703634 }, { "content": "#[doc = \"Reader of field `TSEG1`\"]\n\npub type TSEG1_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TSEG1`\"]\n\npub struct TSEG1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TSEG1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TSEG2`\"]\n\npub type TSEG2_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TSEG2`\"]\n\npub struct TSEG2_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "crates/tm4c129x/src/can0/bit_.rs", "rank": 16, "score": 55610.73886001092 }, { "content": "#[doc = \"Reader of field `TSEG1`\"]\n\npub type TSEG1_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TSEG1`\"]\n\npub struct TSEG1_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TSEG1_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x0f << 8)) | (((value as u32) & 0x0f) << 
8);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TSEG2`\"]\n\npub type TSEG2_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TSEG2`\"]\n\npub struct TSEG2_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "crates/tm4c123x/src/can0/bit_.rs", "rank": 17, "score": 55610.73886001092 }, { "content": "impl<'a> TSEG2_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x07 << 12)) | (((value as u32) & 0x07) << 12);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:5 - Baud Rate Prescaler\"]\n\n #[inline(always)]\n\n pub fn brp(&self) -> BRP_R {\n\n BRP_R::new((self.bits & 0x3f) as u8)\n\n }\n\n #[doc = \"Bits 6:7 - (Re)Synchronization Jump Width\"]\n\n #[inline(always)]\n\n pub fn sjw(&self) -> SJW_R {\n\n SJW_R::new(((self.bits >> 6) & 0x03) as u8)\n\n }\n\n #[doc = \"Bits 8:11 - Time Segment Before Sample Point\"]\n", "file_path": "crates/tm4c123x/src/can0/bit_.rs", "rank": 21, "score": 55595.56726492818 }, { "content": "impl<'a> TSEG2_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x07 << 12)) | (((value as u32) & 0x07) << 12);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:5 - Baud Rate Prescaler\"]\n\n #[inline(always)]\n\n pub fn brp(&self) -> BRP_R {\n\n BRP_R::new((self.bits & 0x3f) as u8)\n\n }\n\n #[doc = \"Bits 6:7 - (Re)Synchronization Jump Width\"]\n\n #[inline(always)]\n\n pub fn sjw(&self) -> SJW_R {\n\n SJW_R::new(((self.bits >> 6) & 0x03) as u8)\n\n }\n\n #[doc = \"Bits 8:11 - Time Segment Before Sample Point\"]\n", "file_path": "crates/tm4c129x/src/can0/bit_.rs", "rank": 22, "score": 55595.56726492818 }, { "content": "#[doc = \"Reader of register VALUE\"]\n\npub type R = crate::R<u32, super::VALUE>;\n\nimpl R {}\n", "file_path": "crates/tm4c123x/src/watchdog0/value.rs", "rank": 24, "score": 
55589.710525917406 }, { "content": "#[doc = \"Reader of register VALUE\"]\n\npub type R = crate::R<u32, super::VALUE>;\n\nimpl R {}\n", "file_path": "crates/tm4c129x/src/watchdog0/value.rs", "rank": 25, "score": 55589.710525917406 }, { "content": " #[inline(always)]\n\n pub fn tseg1(&self) -> TSEG1_R {\n\n TSEG1_R::new(((self.bits >> 8) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 12:14 - Time Segment after Sample Point\"]\n\n #[inline(always)]\n\n pub fn tseg2(&self) -> TSEG2_R {\n\n TSEG2_R::new(((self.bits >> 12) & 0x07) as u8)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:5 - Baud Rate Prescaler\"]\n\n #[inline(always)]\n\n pub fn brp(&mut self) -> BRP_W {\n\n BRP_W { w: self }\n\n }\n\n #[doc = \"Bits 6:7 - (Re)Synchronization Jump Width\"]\n\n #[inline(always)]\n\n pub fn sjw(&mut self) -> SJW_W {\n\n SJW_W { w: self }\n", "file_path": "crates/tm4c129x/src/can0/bit_.rs", "rank": 27, "score": 55576.74840749567 }, { "content": " #[inline(always)]\n\n pub fn tseg1(&self) -> TSEG1_R {\n\n TSEG1_R::new(((self.bits >> 8) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bits 12:14 - Time Segment after Sample Point\"]\n\n #[inline(always)]\n\n pub fn tseg2(&self) -> TSEG2_R {\n\n TSEG2_R::new(((self.bits >> 12) & 0x07) as u8)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:5 - Baud Rate Prescaler\"]\n\n #[inline(always)]\n\n pub fn brp(&mut self) -> BRP_W {\n\n BRP_W { w: self }\n\n }\n\n #[doc = \"Bits 6:7 - (Re)Synchronization Jump Width\"]\n\n #[inline(always)]\n\n pub fn sjw(&mut self) -> SJW_W {\n\n SJW_W { w: self }\n", "file_path": "crates/tm4c123x/src/can0/bit_.rs", "rank": 28, "score": 55576.74840749567 }, { "content": " }\n\n #[doc = \"Bits 8:11 - Time Segment Before Sample Point\"]\n\n #[inline(always)]\n\n pub fn tseg1(&mut self) -> TSEG1_W {\n\n TSEG1_W { w: self }\n\n }\n\n #[doc = \"Bits 12:14 - Time Segment after Sample Point\"]\n\n #[inline(always)]\n\n pub fn tseg2(&mut self) -> TSEG2_W {\n\n TSEG2_W { w: self }\n\n }\n\n}\n", "file_path": 
"crates/tm4c129x/src/can0/bit_.rs", "rank": 29, "score": 55567.4237793735 }, { "content": " }\n\n #[doc = \"Bits 8:11 - Time Segment Before Sample Point\"]\n\n #[inline(always)]\n\n pub fn tseg1(&mut self) -> TSEG1_W {\n\n TSEG1_W { w: self }\n\n }\n\n #[doc = \"Bits 12:14 - Time Segment after Sample Point\"]\n\n #[inline(always)]\n\n pub fn tseg2(&mut self) -> TSEG2_W {\n\n TSEG2_W { w: self }\n\n }\n\n}\n", "file_path": "crates/tm4c123x/src/can0/bit_.rs", "rank": 30, "score": 55567.4237793735 }, { "content": "#[doc = \"Reader of register IBRD\"]\n\npub type R = crate::R<u32, super::IBRD>;\n\n#[doc = \"Writer for register IBRD\"]\n\npub type W = crate::W<u32, super::IBRD>;\n\n#[doc = \"Register IBRD `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IBRD {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DIVINT`\"]\n\npub type DIVINT_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `DIVINT`\"]\n\npub struct DIVINT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DIVINT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/uart0/ibrd.rs", "rank": 31, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register IF2DA1\"]\n\npub type R = crate::R<u32, super::IF2DA1>;\n\n#[doc = \"Writer for register IF2DA1\"]\n\npub type W = crate::W<u32, super::IF2DA1>;\n\n#[doc = \"Register IF2DA1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IF2DA1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA`\"]\n\npub type DATA_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `DATA`\"]\n\npub struct DATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/can0/if2da1.rs", "rank": 32, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of 
register _3_LOAD\"]\n\npub type R = crate::R<u32, super::_3_LOAD>;\n\n#[doc = \"Writer for register _3_LOAD\"]\n\npub type W = crate::W<u32, super::_3_LOAD>;\n\n#[doc = \"Register _3_LOAD `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_3_LOAD {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `LOAD`\"]\n\npub type LOAD_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `LOAD`\"]\n\npub struct LOAD_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LOAD_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_3_load.rs", "rank": 33, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _0_MINFLTPER\"]\n\npub type R = crate::R<u32, super::_0_MINFLTPER>;\n\n#[doc = \"Writer for register _0_MINFLTPER\"]\n\npub type W = crate::W<u32, super::_0_MINFLTPER>;\n\n#[doc = \"Register _0_MINFLTPER `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_0_MINFLTPER {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MINFLTPER`\"]\n\npub type MINFLTPER_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `MINFLTPER`\"]\n\npub struct MINFLTPER_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MINFLTPER_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_0_minfltper.rs", "rank": 34, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register RTCT\"]\n\npub type R = crate::R<u32, super::RTCT>;\n\n#[doc = \"Writer for register RTCT\"]\n\npub type W = crate::W<u32, super::RTCT>;\n\n#[doc = \"Register RTCT `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RTCT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TRIM`\"]\n\npub type TRIM_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field 
`TRIM`\"]\n\npub struct TRIM_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TRIM_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/hib/rtct.rs", "rank": 35, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register _2_CMPA\"]\n\npub type R = crate::R<u32, super::_2_CMPA>;\n\n#[doc = \"Writer for register _2_CMPA\"]\n\npub type W = crate::W<u32, super::_2_CMPA>;\n\n#[doc = \"Register _2_CMPA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_2_CMPA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMPA`\"]\n\npub type COMPA_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMPA`\"]\n\npub struct COMPA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMPA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_2_cmpa.rs", "rank": 36, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register DR\"]\n\npub type R = crate::R<u32, super::DR>;\n\n#[doc = \"Writer for register DR\"]\n\npub type W = crate::W<u32, super::DR>;\n\n#[doc = \"Register DR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA`\"]\n\npub type DATA_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `DATA`\"]\n\npub struct DATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/ssi0/dr.rs", "rank": 37, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register RPSTD0\"]\n\npub type R = crate::R<u32, super::RPSTD0>;\n\n#[doc = \"Writer for register RPSTD0\"]\n\npub type W = crate::W<u32, super::RPSTD0>;\n\n#[doc = \"Register RPSTD0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RPSTD0 {\n\n type Type = u32;\n\n 
#[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `POSTCNT`\"]\n\npub type POSTCNT_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `POSTCNT`\"]\n\npub struct POSTCNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> POSTCNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/epi0/rpstd0.rs", "rank": 38, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register RSCLKCFG\"]\n\npub type R = crate::R<u32, super::RSCLKCFG>;\n\n#[doc = \"Writer for register RSCLKCFG\"]\n\npub type W = crate::W<u32, super::RSCLKCFG>;\n\n#[doc = \"Register RSCLKCFG `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RSCLKCFG {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `PSYSDIV`\"]\n\npub type PSYSDIV_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `PSYSDIV`\"]\n\npub struct PSYSDIV_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> PSYSDIV_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/sysctl/rsclkcfg.rs", "rank": 39, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register DCCMP3\"]\n\npub type R = crate::R<u32, super::DCCMP3>;\n\n#[doc = \"Writer for register DCCMP3\"]\n\npub type W = crate::W<u32, super::DCCMP3>;\n\n#[doc = \"Register DCCMP3 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DCCMP3 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP0`\"]\n\npub type COMP0_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMP0`\"]\n\npub struct COMP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/adc0/dccmp3.rs", "rank": 40, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register PLLFREQ0\"]\n\npub type R 
= crate::R<u32, super::PLLFREQ0>;\n\n#[doc = \"Writer for register PLLFREQ0\"]\n\npub type W = crate::W<u32, super::PLLFREQ0>;\n\n#[doc = \"Register PLLFREQ0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PLLFREQ0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MINT`\"]\n\npub type MINT_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `MINT`\"]\n\npub struct MINT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MINT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/sysctl/pllfreq0.rs", "rank": 41, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register RTCSS\"]\n\npub type R = crate::R<u32, super::RTCSS>;\n\n#[doc = \"Writer for register RTCSS\"]\n\npub type W = crate::W<u32, super::RTCSS>;\n\n#[doc = \"Register RTCSS `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RTCSS {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RTCSSC`\"]\n\npub type RTCSSC_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RTCSSC`\"]\n\npub struct RTCSSC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RTCSSC_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/hib/rtcss.rs", "rank": 42, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register DCCMP6\"]\n\npub type R = crate::R<u32, super::DCCMP6>;\n\n#[doc = \"Writer for register DCCMP6\"]\n\npub type W = crate::W<u32, super::DCCMP6>;\n\n#[doc = \"Register DCCMP6 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DCCMP6 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP0`\"]\n\npub type COMP0_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMP0`\"]\n\npub struct COMP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> 
COMP0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/adc0/dccmp6.rs", "rank": 43, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _2_DBFALL\"]\n\npub type R = crate::R<u32, super::_2_DBFALL>;\n\n#[doc = \"Writer for register _2_DBFALL\"]\n\npub type W = crate::W<u32, super::_2_DBFALL>;\n\n#[doc = \"Register _2_DBFALL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_2_DBFALL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `FALLDELAY`\"]\n\npub type FALLDELAY_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `FALLDELAY`\"]\n\npub struct FALLDELAY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> FALLDELAY_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_2_dbfall.rs", "rank": 44, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register DCCMP1\"]\n\npub type R = crate::R<u32, super::DCCMP1>;\n\n#[doc = \"Writer for register DCCMP1\"]\n\npub type W = crate::W<u32, super::DCCMP1>;\n\n#[doc = \"Register DCCMP1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DCCMP1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP0`\"]\n\npub type COMP0_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMP0`\"]\n\npub struct COMP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/adc0/dccmp1.rs", "rank": 45, "score": 84.35151285985305 }, { "content": "#[doc = \"Reader of register _0_CMPB\"]\n\npub type R = crate::R<u32, super::_0_CMPB>;\n\n#[doc = \"Writer for register _0_CMPB\"]\n\npub type W = crate::W<u32, super::_0_CMPB>;\n\n#[doc = \"Register _0_CMPB `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_0_CMPB {\n\n type Type = u32;\n\n 
#[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMPB`\"]\n\npub type COMPB_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMPB`\"]\n\npub struct COMPB_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMPB_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/pwm0/_0_cmpb.rs", "rank": 46, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register IF2ARB2\"]\n\npub type R = crate::R<u32, super::IF2ARB2>;\n\n#[doc = \"Writer for register IF2ARB2\"]\n\npub type W = crate::W<u32, super::IF2ARB2>;\n\n#[doc = \"Register IF2ARB2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IF2ARB2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ID`\"]\n\npub type ID_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `ID`\"]\n\npub struct ID_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ID_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/can0/if2arb2.rs", "rank": 47, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register _0_CMPA\"]\n\npub type R = crate::R<u32, super::_0_CMPA>;\n\n#[doc = \"Writer for register _0_CMPA\"]\n\npub type W = crate::W<u32, super::_0_CMPA>;\n\n#[doc = \"Register _0_CMPA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_0_CMPA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMPA`\"]\n\npub type COMPA_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMPA`\"]\n\npub struct COMPA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMPA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/pwm0/_0_cmpa.rs", "rank": 48, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register ADDR3H\"]\n\npub type R = crate::R<u32, 
super::ADDR3H>;\n\n#[doc = \"Writer for register ADDR3H\"]\n\npub type W = crate::W<u32, super::ADDR3H>;\n\n#[doc = \"Register ADDR3H `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::ADDR3H {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADDRHI`\"]\n\npub type ADDRHI_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `ADDRHI`\"]\n\npub struct ADDRHI_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADDRHI_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/emac0/addr3h.rs", "rank": 49, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register IF1ARB2\"]\n\npub type R = crate::R<u32, super::IF1ARB2>;\n\n#[doc = \"Writer for register IF1ARB2\"]\n\npub type W = crate::W<u32, super::IF1ARB2>;\n\n#[doc = \"Register IF1ARB2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IF1ARB2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ID`\"]\n\npub type ID_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `ID`\"]\n\npub struct ID_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ID_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/can0/if1arb2.rs", "rank": 50, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register DMATXCNT\"]\n\npub type R = crate::R<u32, super::DMATXCNT>;\n\n#[doc = \"Writer for register DMATXCNT\"]\n\npub type W = crate::W<u32, super::DMATXCNT>;\n\n#[doc = \"Register DMATXCNT `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DMATXCNT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TXCNT`\"]\n\npub type TXCNT_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `TXCNT`\"]\n\npub struct TXCNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXCNT_W<'a> {\n\n 
#[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/epi0/dmatxcnt.rs", "rank": 51, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register DCCMP7\"]\n\npub type R = crate::R<u32, super::DCCMP7>;\n\n#[doc = \"Writer for register DCCMP7\"]\n\npub type W = crate::W<u32, super::DCCMP7>;\n\n#[doc = \"Register DCCMP7 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DCCMP7 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP0`\"]\n\npub type COMP0_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMP0`\"]\n\npub struct COMP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/adc0/dccmp7.rs", "rank": 52, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _0_LOAD\"]\n\npub type R = crate::R<u32, super::_0_LOAD>;\n\n#[doc = \"Writer for register _0_LOAD\"]\n\npub type W = crate::W<u32, super::_0_LOAD>;\n\n#[doc = \"Register _0_LOAD `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_0_LOAD {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `LOAD`\"]\n\npub type LOAD_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `LOAD`\"]\n\npub struct LOAD_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LOAD_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_0_load.rs", "rank": 53, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register BAUD\"]\n\npub type R = crate::R<u32, super::BAUD>;\n\n#[doc = \"Writer for register BAUD\"]\n\npub type W = crate::W<u32, super::BAUD>;\n\n#[doc = \"Register BAUD `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::BAUD {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n 
}\n\n}\n\n#[doc = \"Reader of field `COUNT0`\"]\n\npub type COUNT0_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COUNT0`\"]\n\npub struct COUNT0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COUNT0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/epi0/baud.rs", "rank": 54, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _2_MINFLTPER\"]\n\npub type R = crate::R<u32, super::_2_MINFLTPER>;\n\n#[doc = \"Writer for register _2_MINFLTPER\"]\n\npub type W = crate::W<u32, super::_2_MINFLTPER>;\n\n#[doc = \"Register _2_MINFLTPER `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_2_MINFLTPER {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MFP`\"]\n\npub type MFP_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `MFP`\"]\n\npub struct MFP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MFP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_2_minfltper.rs", "rank": 55, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register IF1ARB2\"]\n\npub type R = crate::R<u32, super::IF1ARB2>;\n\n#[doc = \"Writer for register IF1ARB2\"]\n\npub type W = crate::W<u32, super::IF1ARB2>;\n\n#[doc = \"Register IF1ARB2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IF1ARB2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ID`\"]\n\npub type ID_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `ID`\"]\n\npub struct ID_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ID_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/can0/if1arb2.rs", "rank": 56, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register RTCT\"]\n\npub type R = crate::R<u32, super::RTCT>;\n\n#[doc = \"Writer for register RTCT\"]\n\npub 
type W = crate::W<u32, super::RTCT>;\n\n#[doc = \"Register RTCT `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RTCT {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TRIM`\"]\n\npub type TRIM_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `TRIM`\"]\n\npub struct TRIM_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TRIM_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/hib/rtct.rs", "rank": 57, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register _0_DBFALL\"]\n\npub type R = crate::R<u32, super::_0_DBFALL>;\n\n#[doc = \"Writer for register _0_DBFALL\"]\n\npub type W = crate::W<u32, super::_0_DBFALL>;\n\n#[doc = \"Register _0_DBFALL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_0_DBFALL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DELAY`\"]\n\npub type DELAY_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `DELAY`\"]\n\npub struct DELAY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DELAY_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_0_dbfall.rs", "rank": 58, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _3_DBFALL\"]\n\npub type R = crate::R<u32, super::_3_DBFALL>;\n\n#[doc = \"Writer for register _3_DBFALL\"]\n\npub type W = crate::W<u32, super::_3_DBFALL>;\n\n#[doc = \"Register _3_DBFALL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_3_DBFALL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `FALLDELAY`\"]\n\npub type FALLDELAY_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `FALLDELAY`\"]\n\npub struct FALLDELAY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> FALLDELAY_W<'a> {\n\n #[doc = r\"Writes raw bits to 
the field\"]\n", "file_path": "crates/tm4c129x/src/pwm0/_3_dbfall.rs", "rank": 59, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _3_DBRISE\"]\n\npub type R = crate::R<u32, super::_3_DBRISE>;\n\n#[doc = \"Writer for register _3_DBRISE\"]\n\npub type W = crate::W<u32, super::_3_DBRISE>;\n\n#[doc = \"Register _3_DBRISE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_3_DBRISE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RISEDELAY`\"]\n\npub type RISEDELAY_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RISEDELAY`\"]\n\npub struct RISEDELAY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RISEDELAY_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_3_dbrise.rs", "rank": 60, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register _3_DBRISE\"]\n\npub type R = crate::R<u32, super::_3_DBRISE>;\n\n#[doc = \"Writer for register _3_DBRISE\"]\n\npub type W = crate::W<u32, super::_3_DBRISE>;\n\n#[doc = \"Register _3_DBRISE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_3_DBRISE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RISEDELAY`\"]\n\npub type RISEDELAY_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RISEDELAY`\"]\n\npub struct RISEDELAY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RISEDELAY_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/pwm0/_3_dbrise.rs", "rank": 61, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register _1_MINFLTPER\"]\n\npub type R = crate::R<u32, super::_1_MINFLTPER>;\n\n#[doc = \"Writer for register _1_MINFLTPER\"]\n\npub type W = crate::W<u32, super::_1_MINFLTPER>;\n\n#[doc = \"Register _1_MINFLTPER `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_1_MINFLTPER {\n\n 
type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MFP`\"]\n\npub type MFP_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `MFP`\"]\n\npub struct MFP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MFP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_1_minfltper.rs", "rank": 62, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register IF2MSK2\"]\n\npub type R = crate::R<u32, super::IF2MSK2>;\n\n#[doc = \"Writer for register IF2MSK2\"]\n\npub type W = crate::W<u32, super::IF2MSK2>;\n\n#[doc = \"Register IF2MSK2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IF2MSK2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `IDMSK`\"]\n\npub type IDMSK_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `IDMSK`\"]\n\npub struct IDMSK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IDMSK_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/can0/if2msk2.rs", "rank": 63, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _2_CMPA\"]\n\npub type R = crate::R<u32, super::_2_CMPA>;\n\n#[doc = \"Writer for register _2_CMPA\"]\n\npub type W = crate::W<u32, super::_2_CMPA>;\n\n#[doc = \"Register _2_CMPA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_2_CMPA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMPA`\"]\n\npub type COMPA_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMPA`\"]\n\npub struct COMPA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMPA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/pwm0/_2_cmpa.rs", "rank": 64, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register DCCMP0\"]\n\npub type R = 
crate::R<u32, super::DCCMP0>;\n\n#[doc = \"Writer for register DCCMP0\"]\n\npub type W = crate::W<u32, super::DCCMP0>;\n\n#[doc = \"Register DCCMP0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DCCMP0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP0`\"]\n\npub type COMP0_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMP0`\"]\n\npub struct COMP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/adc0/dccmp0.rs", "rank": 65, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register DCCMP2\"]\n\npub type R = crate::R<u32, super::DCCMP2>;\n\n#[doc = \"Writer for register DCCMP2\"]\n\npub type W = crate::W<u32, super::DCCMP2>;\n\n#[doc = \"Register DCCMP2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DCCMP2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP0`\"]\n\npub type COMP0_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMP0`\"]\n\npub struct COMP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/adc0/dccmp2.rs", "rank": 66, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register IF2DB1\"]\n\npub type R = crate::R<u32, super::IF2DB1>;\n\n#[doc = \"Writer for register IF2DB1\"]\n\npub type W = crate::W<u32, super::IF2DB1>;\n\n#[doc = \"Register IF2DB1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IF2DB1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA`\"]\n\npub type DATA_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `DATA`\"]\n\npub struct DATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA_W<'a> {\n\n 
#[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/can0/if2db1.rs", "rank": 67, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register DR\"]\n\npub type R = crate::R<u32, super::DR>;\n\n#[doc = \"Writer for register DR\"]\n\npub type W = crate::W<u32, super::DR>;\n\n#[doc = \"Register DR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA`\"]\n\npub type DATA_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `DATA`\"]\n\npub struct DATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/ssi0/dr.rs", "rank": 68, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register RPSTD1\"]\n\npub type R = crate::R<u32, super::RPSTD1>;\n\n#[doc = \"Writer for register RPSTD1\"]\n\npub type W = crate::W<u32, super::RPSTD1>;\n\n#[doc = \"Register RPSTD1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::RPSTD1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `POSTCNT`\"]\n\npub type POSTCNT_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `POSTCNT`\"]\n\npub struct POSTCNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> POSTCNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/epi0/rpstd1.rs", "rank": 69, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register IF2DA2\"]\n\npub type R = crate::R<u32, super::IF2DA2>;\n\n#[doc = \"Writer for register IF2DA2\"]\n\npub type W = crate::W<u32, super::IF2DA2>;\n\n#[doc = \"Register IF2DA2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IF2DA2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader 
of field `DATA`\"]\n\npub type DATA_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `DATA`\"]\n\npub struct DATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/can0/if2da2.rs", "rank": 70, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _1_DBFALL\"]\n\npub type R = crate::R<u32, super::_1_DBFALL>;\n\n#[doc = \"Writer for register _1_DBFALL\"]\n\npub type W = crate::W<u32, super::_1_DBFALL>;\n\n#[doc = \"Register _1_DBFALL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_1_DBFALL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `FALLDELAY`\"]\n\npub type FALLDELAY_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `FALLDELAY`\"]\n\npub struct FALLDELAY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> FALLDELAY_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/pwm0/_1_dbfall.rs", "rank": 71, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register DCCMP5\"]\n\npub type R = crate::R<u32, super::DCCMP5>;\n\n#[doc = \"Writer for register DCCMP5\"]\n\npub type W = crate::W<u32, super::DCCMP5>;\n\n#[doc = \"Register DCCMP5 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DCCMP5 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP0`\"]\n\npub type COMP0_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMP0`\"]\n\npub struct COMP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/adc0/dccmp5.rs", "rank": 72, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register EEBLOCK\"]\n\npub type R = crate::R<u32, super::EEBLOCK>;\n\n#[doc = \"Writer for register EEBLOCK\"]\n\npub type W = 
crate::W<u32, super::EEBLOCK>;\n\n#[doc = \"Register EEBLOCK `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::EEBLOCK {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `BLOCK`\"]\n\npub type BLOCK_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `BLOCK`\"]\n\npub struct BLOCK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BLOCK_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/eeprom/eeblock.rs", "rank": 73, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register DCCMP4\"]\n\npub type R = crate::R<u32, super::DCCMP4>;\n\n#[doc = \"Writer for register DCCMP4\"]\n\npub type W = crate::W<u32, super::DCCMP4>;\n\n#[doc = \"Register DCCMP4 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DCCMP4 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP0`\"]\n\npub type COMP0_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMP0`\"]\n\npub struct COMP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/adc0/dccmp4.rs", "rank": 74, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _1_DBRISE\"]\n\npub type R = crate::R<u32, super::_1_DBRISE>;\n\n#[doc = \"Writer for register _1_DBRISE\"]\n\npub type W = crate::W<u32, super::_1_DBRISE>;\n\n#[doc = \"Register _1_DBRISE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_1_DBRISE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RISEDELAY`\"]\n\npub type RISEDELAY_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RISEDELAY`\"]\n\npub struct RISEDELAY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RISEDELAY_W<'a> {\n\n #[doc = r\"Writes raw bits to the 
field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_1_dbrise.rs", "rank": 75, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register DCCMP4\"]\n\npub type R = crate::R<u32, super::DCCMP4>;\n\n#[doc = \"Writer for register DCCMP4\"]\n\npub type W = crate::W<u32, super::DCCMP4>;\n\n#[doc = \"Register DCCMP4 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DCCMP4 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP0`\"]\n\npub type COMP0_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMP0`\"]\n\npub struct COMP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/adc0/dccmp4.rs", "rank": 76, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register IF2DB2\"]\n\npub type R = crate::R<u32, super::IF2DB2>;\n\n#[doc = \"Writer for register IF2DB2\"]\n\npub type W = crate::W<u32, super::IF2DB2>;\n\n#[doc = \"Register IF2DB2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IF2DB2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA`\"]\n\npub type DATA_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `DATA`\"]\n\npub struct DATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/can0/if2db2.rs", "rank": 77, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register DCCMP2\"]\n\npub type R = crate::R<u32, super::DCCMP2>;\n\n#[doc = \"Writer for register DCCMP2\"]\n\npub type W = crate::W<u32, super::DCCMP2>;\n\n#[doc = \"Register DCCMP2 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DCCMP2 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field 
`COMP0`\"]\n\npub type COMP0_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMP0`\"]\n\npub struct COMP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/adc0/dccmp2.rs", "rank": 78, "score": 84.35151285985305 }, { "content": "#[doc = \"Reader of register _1_CMPA\"]\n\npub type R = crate::R<u32, super::_1_CMPA>;\n\n#[doc = \"Writer for register _1_CMPA\"]\n\npub type W = crate::W<u32, super::_1_CMPA>;\n\n#[doc = \"Register _1_CMPA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_1_CMPA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMPA`\"]\n\npub type COMPA_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMPA`\"]\n\npub struct COMPA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMPA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/pwm0/_1_cmpa.rs", "rank": 79, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _1_CMPB\"]\n\npub type R = crate::R<u32, super::_1_CMPB>;\n\n#[doc = \"Writer for register _1_CMPB\"]\n\npub type W = crate::W<u32, super::_1_CMPB>;\n\n#[doc = \"Register _1_CMPB `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_1_CMPB {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMPB`\"]\n\npub type COMPB_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMPB`\"]\n\npub struct COMPB_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMPB_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_1_cmpb.rs", "rank": 80, "score": 84.35151285985305 }, { "content": "#[doc = \"Reader of register EEBLOCK\"]\n\npub type R = crate::R<u32, super::EEBLOCK>;\n\n#[doc = \"Writer for register EEBLOCK\"]\n\npub type W = crate::W<u32, 
super::EEBLOCK>;\n\n#[doc = \"Register EEBLOCK `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::EEBLOCK {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `BLOCK`\"]\n\npub type BLOCK_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `BLOCK`\"]\n\npub struct BLOCK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> BLOCK_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/eeprom/eeblock.rs", "rank": 81, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register DCCMP0\"]\n\npub type R = crate::R<u32, super::DCCMP0>;\n\n#[doc = \"Writer for register DCCMP0\"]\n\npub type W = crate::W<u32, super::DCCMP0>;\n\n#[doc = \"Register DCCMP0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DCCMP0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP0`\"]\n\npub type COMP0_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMP0`\"]\n\npub struct COMP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/adc0/dccmp0.rs", "rank": 82, "score": 84.35151285985305 }, { "content": "#[doc = \"Reader of register _0_DBFALL\"]\n\npub type R = crate::R<u32, super::_0_DBFALL>;\n\n#[doc = \"Writer for register _0_DBFALL\"]\n\npub type W = crate::W<u32, super::_0_DBFALL>;\n\n#[doc = \"Register _0_DBFALL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_0_DBFALL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DELAY`\"]\n\npub type DELAY_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `DELAY`\"]\n\npub struct DELAY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DELAY_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": 
"crates/tm4c129x/src/pwm0/_0_dbfall.rs", "rank": 83, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _1_CMPA\"]\n\npub type R = crate::R<u32, super::_1_CMPA>;\n\n#[doc = \"Writer for register _1_CMPA\"]\n\npub type W = crate::W<u32, super::_1_CMPA>;\n\n#[doc = \"Register _1_CMPA `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_1_CMPA {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMPA`\"]\n\npub type COMPA_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMPA`\"]\n\npub struct COMPA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMPA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_1_cmpa.rs", "rank": 84, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register _2_LOAD\"]\n\npub type R = crate::R<u32, super::_2_LOAD>;\n\n#[doc = \"Writer for register _2_LOAD\"]\n\npub type W = crate::W<u32, super::_2_LOAD>;\n\n#[doc = \"Register _2_LOAD `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_2_LOAD {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `LOAD`\"]\n\npub type LOAD_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `LOAD`\"]\n\npub struct LOAD_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LOAD_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/pwm0/_2_load.rs", "rank": 85, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register IF1MSK1\"]\n\npub type R = crate::R<u32, super::IF1MSK1>;\n\n#[doc = \"Writer for register IF1MSK1\"]\n\npub type W = crate::W<u32, super::IF1MSK1>;\n\n#[doc = \"Register IF1MSK1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IF1MSK1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field 
`IDMSK`\"]\n\npub type IDMSK_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `IDMSK`\"]\n\npub struct IDMSK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IDMSK_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/can0/if1msk1.rs", "rank": 86, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register _2_LOAD\"]\n\npub type R = crate::R<u32, super::_2_LOAD>;\n\n#[doc = \"Writer for register _2_LOAD\"]\n\npub type W = crate::W<u32, super::_2_LOAD>;\n\n#[doc = \"Register _2_LOAD `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_2_LOAD {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `LOAD`\"]\n\npub type LOAD_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `LOAD`\"]\n\npub struct LOAD_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LOAD_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_2_load.rs", "rank": 87, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _1_DBRISE\"]\n\npub type R = crate::R<u32, super::_1_DBRISE>;\n\n#[doc = \"Writer for register _1_DBRISE\"]\n\npub type W = crate::W<u32, super::_1_DBRISE>;\n\n#[doc = \"Register _1_DBRISE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_1_DBRISE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RISEDELAY`\"]\n\npub type RISEDELAY_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RISEDELAY`\"]\n\npub struct RISEDELAY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RISEDELAY_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/pwm0/_1_dbrise.rs", "rank": 88, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register DCCMP5\"]\n\npub type R = crate::R<u32, super::DCCMP5>;\n\n#[doc = \"Writer for register DCCMP5\"]\n\npub type W = 
crate::W<u32, super::DCCMP5>;\n\n#[doc = \"Register DCCMP5 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DCCMP5 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMP0`\"]\n\npub type COMP0_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMP0`\"]\n\npub struct COMP0_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMP0_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/adc0/dccmp5.rs", "rank": 89, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _2_CMPB\"]\n\npub type R = crate::R<u32, super::_2_CMPB>;\n\n#[doc = \"Writer for register _2_CMPB\"]\n\npub type W = crate::W<u32, super::_2_CMPB>;\n\n#[doc = \"Register _2_CMPB `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_2_CMPB {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `COMPB`\"]\n\npub type COMPB_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `COMPB`\"]\n\npub struct COMPB_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> COMPB_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/pwm0/_2_cmpb.rs", "rank": 90, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register IF2MSK1\"]\n\npub type R = crate::R<u32, super::IF2MSK1>;\n\n#[doc = \"Writer for register IF2MSK1\"]\n\npub type W = crate::W<u32, super::IF2MSK1>;\n\n#[doc = \"Register IF2MSK1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IF2MSK1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `IDMSK`\"]\n\npub type IDMSK_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `IDMSK`\"]\n\npub struct IDMSK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> IDMSK_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": 
"crates/tm4c129x/src/can0/if2msk1.rs", "rank": 91, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register _1_MINFLTPER\"]\n\npub type R = crate::R<u32, super::_1_MINFLTPER>;\n\n#[doc = \"Writer for register _1_MINFLTPER\"]\n\npub type W = crate::W<u32, super::_1_MINFLTPER>;\n\n#[doc = \"Register _1_MINFLTPER `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_1_MINFLTPER {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MFP`\"]\n\npub type MFP_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `MFP`\"]\n\npub struct MFP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MFP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/pwm0/_1_minfltper.rs", "rank": 92, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _2_DBRISE\"]\n\npub type R = crate::R<u32, super::_2_DBRISE>;\n\n#[doc = \"Writer for register _2_DBRISE\"]\n\npub type W = crate::W<u32, super::_2_DBRISE>;\n\n#[doc = \"Register _2_DBRISE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_2_DBRISE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `RISEDELAY`\"]\n\npub type RISEDELAY_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `RISEDELAY`\"]\n\npub struct RISEDELAY_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> RISEDELAY_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_2_dbrise.rs", "rank": 93, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register IF2ARB1\"]\n\npub type R = crate::R<u32, super::IF2ARB1>;\n\n#[doc = \"Writer for register IF2ARB1\"]\n\npub type W = crate::W<u32, super::IF2ARB1>;\n\n#[doc = \"Register IF2ARB1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IF2ARB1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> 
Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ID`\"]\n\npub type ID_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `ID`\"]\n\npub struct ID_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ID_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/can0/if2arb1.rs", "rank": 94, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register IF2DB1\"]\n\npub type R = crate::R<u32, super::IF2DB1>;\n\n#[doc = \"Writer for register IF2DB1\"]\n\npub type W = crate::W<u32, super::IF2DB1>;\n\n#[doc = \"Register IF2DB1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IF2DB1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA`\"]\n\npub type DATA_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `DATA`\"]\n\npub struct DATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/can0/if2db1.rs", "rank": 95, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _1_LOAD\"]\n\npub type R = crate::R<u32, super::_1_LOAD>;\n\n#[doc = \"Writer for register _1_LOAD\"]\n\npub type W = crate::W<u32, super::_1_LOAD>;\n\n#[doc = \"Register _1_LOAD `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_1_LOAD {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `LOAD`\"]\n\npub type LOAD_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `LOAD`\"]\n\npub struct LOAD_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> LOAD_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/pwm0/_1_load.rs", "rank": 96, "score": 84.35151285985307 }, { "content": "#[doc = \"Reader of register IF1DA1\"]\n\npub type R = crate::R<u32, super::IF1DA1>;\n\n#[doc = \"Writer for register IF1DA1\"]\n\npub type W = 
crate::W<u32, super::IF1DA1>;\n\n#[doc = \"Register IF1DA1 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::IF1DA1 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DATA`\"]\n\npub type DATA_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `DATA`\"]\n\npub struct DATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c123x/src/can0/if1da1.rs", "rank": 97, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register PLLFREQ0\"]\n\npub type R = crate::R<u32, super::PLLFREQ0>;\n\n#[doc = \"Writer for register PLLFREQ0\"]\n\npub type W = crate::W<u32, super::PLLFREQ0>;\n\n#[doc = \"Register PLLFREQ0 `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::PLLFREQ0 {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MINT`\"]\n\npub type MINT_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `MINT`\"]\n\npub struct MINT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MINT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "crates/tm4c129x/src/sysctl/pllfreq0.rs", "rank": 98, "score": 84.35151285985306 }, { "content": "#[doc = \"Reader of register _3_MINFLTPER\"]\n\npub type R = crate::R<u32, super::_3_MINFLTPER>;\n\n#[doc = \"Writer for register _3_MINFLTPER\"]\n\npub type W = crate::W<u32, super::_3_MINFLTPER>;\n\n#[doc = \"Register _3_MINFLTPER `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::_3_MINFLTPER {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `MFP`\"]\n\npub type MFP_R = crate::R<u16, u16>;\n\n#[doc = \"Write proxy for field `MFP`\"]\n\npub struct MFP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> MFP_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", 
"file_path": "crates/tm4c129x/src/pwm0/_3_minfltper.rs", "rank": 99, "score": 84.35151285985306 } ]
Rust
crates/core/src/graph/mod.rs
rustatian/rock
664825fe85b3649de669d5e498f64267c7676e79
#![warn(missing_debug_implementations)] #![allow(dead_code)] use crate::profile::line::Line; use crate::profile::Profile; use crate::profile::{self}; use std::{collections::HashMap, vec}; use std::{ hash::{Hash, Hasher}, path::PathBuf, }; #[cfg(target_os = "windows")] const SEPARATOR: &str = "\\"; #[cfg(target_os = "linux")] const SEPARATOR: &str = "/"; type EdgeMap = HashMap<Node, Edge>; type TagMap = HashMap<String, Tag>; type Nodes = Vec<Node>; type NodeMap = HashMap<NodeInfo, Node>; type NodeSet = HashMap<NodeInfo, bool>; #[derive(Clone, Debug)] struct Graph<'a> { nodes: Vec<&'a Node>, } impl<'a> Graph<'a> { pub fn new() -> Self { Graph { nodes: vec![] } } fn init_graph<T: Fn(&[i64]) -> i64, U: Fn(i64, String) -> String>( &self, prof: Profile, o: Options<T, U>, ) -> Self { Graph { nodes: vec![] } } fn create_nodes<T: Fn(&[i64]) -> i64, U: Fn(i64, String) -> String>( prof: &Profile, o: Options<T, U>, ) -> Option<(Nodes, HashMap<u64, Nodes>)> { let mut locations: HashMap<u64, Nodes> = HashMap::new(); let nm = NodeMap::new(); for l in prof.location.iter() { let lines: &Vec<Line> = &l.line; let mut nodes: Vec<Node> = vec![Node::default(); lines.len()]; for ln in 0..lines.len() { nodes.insert(ln, Node::default()); } locations.insert(l.id, nodes); } Some(( nm.iter().map(|x| x.1.clone()).collect::<Vec<Node>>(), locations, )) } fn find_or_insert_node(nm: &mut NodeMap, info: NodeInfo, kept: NodeSet) -> Option<Node> { None } fn find_or_insert_line<T: Fn(&[i64]) -> i64, U: Fn(i64, String) -> String>( nm: &NodeMap, l: &profile::location::Location, line: profile::line::Line, o: &Options<T, U>, ) -> Option<Node> { let mut objfile = String::new(); if let Some(m) = &l.mapping { if !m.filename.is_empty() { objfile = m.filename.clone(); } } let mut node_info = Graph::node_info(l, line, objfile, o); None } fn node_info<T: Fn(&[i64]) -> i64, U: Fn(i64, String) -> String>( l: &profile::location::Location, line: profile::line::Line, objfile: String, o: &Options<T, U>, ) -> NodeInfo 
{ if line.function == profile::function::Function::default() { return NodeInfo { address: l.address, objfile, ..Default::default() }; } let mut ni = NodeInfo { address: l.address, lineno: line.line, name: line.function.name, ..Default::default() }; if !line.function.filename.is_empty() { let mut buf = PathBuf::from(line.function.filename); buf.clear(); ni.file = buf.to_str().unwrap().to_string(); } if o.orig_fn_names { ni.orig_name = line.function.system_name; } if o.obj_names || (ni.name.is_empty() && ni.orig_name.is_empty()) { ni.objfile = objfile; ni.start_line = line.function.start_line; } ni } } #[derive(Debug)] struct Options<T, U> where T: Fn(&[i64]) -> i64, U: Fn(i64, String) -> String, { sample_value: T, sample_mean_divisor: T, format_tag: U, obj_names: bool, orig_fn_names: bool, call_tree: bool, drop_negative: bool, kept_nodes: HashMap<NodeInfo, bool>, } #[derive(Clone, Debug, Eq, Default)] struct Node { info: NodeInfo, function: Box<Node>, flat: i64, flat_div: i64, cum: i64, cum_div: i64, r#in: HashMap<Node, Edge>, out: HashMap<Node, Edge>, label_tags: HashMap<String, Tag>, numeric_tags: HashMap<String, HashMap<String, Tag>>, } impl Hash for Node { fn hash<H: Hasher>(&self, state: &mut H) { self.info.hash(state); self.function.hash(state); self.flat.hash(state); self.flat_div.hash(state); self.cum.hash(state); self.cum_div.hash(state); } } impl PartialEq for Node { fn eq(&self, other: &Self) -> bool { self.flat == other.flat && self.info == other.info && self.function == other.function && self.flat_div == other.flat_div && self.cum == other.cum && self.cum_div == other.cum_div && self.r#in == other.r#in && self.out == other.out && self.label_tags == other.label_tags && self.numeric_tags == other.numeric_tags } } impl Node { pub fn new() -> Self { Node::default() } pub fn flat_value(&self) -> i64 { if self.flat_div == 0 { return self.flat; } self.flat / self.flat_div } pub fn cum_value(&self) -> i64 { if self.cum_div == 0 { return self.cum; } self.cum / 
self.cum_div } pub fn add_to_edge(&mut self, to: &mut Node, v: i64, residual: bool, inline: bool) { self.add_to_edge_div(to, 0, v, residual, inline); } pub fn add_to_edge_div( &mut self, to: &mut Node, dv: i64, v: i64, residual: bool, inline: bool, ) { if let Some(node1) = self.r#in.get(to) { if let Some(node2) = self.out.get(self) { if node1 != node2 { panic!("asymmetric edges {:?} {:?}", self, to); } } } if let Some(e) = self.r#in.get_mut(to) { e.weight_div += dv; e.weight += v; if residual { e.residual = true; } if !inline { e.inline = false; } return; } let info = Edge { src: self.clone(), dest: to.clone(), weight_div: dv, weight: v, residual, inline, }; self.out.insert(to.clone(), info.clone()); to.r#in.insert(self.clone(), info); } } #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Default)] struct NodeInfo { name: String, orig_name: String, address: u64, file: String, start_line: i64, lineno: i64, objfile: String, } impl NodeInfo { pub fn printable_name(&self) -> String { self.name_components().join(" ") } pub fn name_components(&self) -> Vec<String> { let mut name = vec![]; if self.address != 0 { name.push(format!("{:x}", self.address)); } if !self.name.is_empty() { name.push(self.name.to_string()); } if self.lineno != 0 { name.push(format!("{}:{}", self.file, self.lineno)); } if !self.file.is_empty() { name.push(self.file.to_string()); } if !self.name.is_empty() { name.push(self.name.to_string()); } if !self.objfile.is_empty() { name.push(format!("[{}]", get_basename(&self.objfile, SEPARATOR))); } if name.is_empty() { name.push("<unknown>".to_string()); } name } } fn get_basename<'a>(path: &'a str, pat: &'a str) -> String { let mut parts = path.rsplit(pat); match parts.next() { None => "".into(), Some(path) => path.into(), } } #[derive(Clone, Debug, Hash, Eq, PartialEq)] struct Edge { src: Node, dest: Node, weight: i64, weight_div: i64, residual: bool, inline: bool, } impl Edge { pub fn weight_value(&self) -> i64 { if self.weight_div == 0 { 
return self.weight; } self.weight / self.weight_div } } #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Default)] struct Tag { name: String, unit: String, value: i64, flat: i64, flat_div: i64, cum: i64, cum_div: i64, } impl Tag { pub fn cum_value(&self) -> i64 { if self.cum_div == 0 { return self.cum; } self.cum / self.cum_div } pub fn flat_value(&self) -> i64 { if self.flat_div == 0 { return self.flat; } self.flat / self.flat_div } } #[cfg(test)] mod tests { use crate::graph::{get_basename, SEPARATOR}; #[test] fn test_get_basename() { assert_eq!(get_basename("/usr/data", SEPARATOR), "data"); assert_eq!(get_basename("/", SEPARATOR), ""); assert_eq!(get_basename("/root", SEPARATOR), "root"); } }
#![warn(missing_debug_implementations)] #![allow(dead_code)] use crate::profile::line::Line; use crate::profile::Profile; use crate::profile::{self}; use std::{collections::HashMap, vec}; use std::{ hash::{Hash, Hasher}, path::PathBuf, }; #[cfg(target_os = "windows")] const SEPARATOR: &str = "\\"; #[cfg(target_os = "linux")] const SEPARATOR: &str = "/"; type EdgeMap = HashMap<Node, Edge>; type TagMap = HashMap<String, Tag>; type Nodes = Vec<Node>; type NodeMap = HashMap<NodeInfo, Node>; type NodeSet = HashMap<NodeInfo, bool>; #[derive(Clone, Debug)] struct Graph<'a> { nodes: Vec<&'a Node>, } impl<'a> Graph<'a> { pub fn new() -> Self { Graph { nodes: vec![] } }
fn create_nodes<T: Fn(&[i64]) -> i64, U: Fn(i64, String) -> String>( prof: &Profile, o: Options<T, U>, ) -> Option<(Nodes, HashMap<u64, Nodes>)> { let mut locations: HashMap<u64, Nodes> = HashMap::new(); let nm = NodeMap::new(); for l in prof.location.iter() { let lines: &Vec<Line> = &l.line; let mut nodes: Vec<Node> = vec![Node::default(); lines.len()]; for ln in 0..lines.len() { nodes.insert(ln, Node::default()); } locations.insert(l.id, nodes); } Some(( nm.iter().map(|x| x.1.clone()).collect::<Vec<Node>>(), locations, )) } fn find_or_insert_node(nm: &mut NodeMap, info: NodeInfo, kept: NodeSet) -> Option<Node> { None } fn find_or_insert_line<T: Fn(&[i64]) -> i64, U: Fn(i64, String) -> String>( nm: &NodeMap, l: &profile::location::Location, line: profile::line::Line, o: &Options<T, U>, ) -> Option<Node> { let mut objfile = String::new(); if let Some(m) = &l.mapping { if !m.filename.is_empty() { objfile = m.filename.clone(); } } let mut node_info = Graph::node_info(l, line, objfile, o); None } fn node_info<T: Fn(&[i64]) -> i64, U: Fn(i64, String) -> String>( l: &profile::location::Location, line: profile::line::Line, objfile: String, o: &Options<T, U>, ) -> NodeInfo { if line.function == profile::function::Function::default() { return NodeInfo { address: l.address, objfile, ..Default::default() }; } let mut ni = NodeInfo { address: l.address, lineno: line.line, name: line.function.name, ..Default::default() }; if !line.function.filename.is_empty() { let mut buf = PathBuf::from(line.function.filename); buf.clear(); ni.file = buf.to_str().unwrap().to_string(); } if o.orig_fn_names { ni.orig_name = line.function.system_name; } if o.obj_names || (ni.name.is_empty() && ni.orig_name.is_empty()) { ni.objfile = objfile; ni.start_line = line.function.start_line; } ni } } #[derive(Debug)] struct Options<T, U> where T: Fn(&[i64]) -> i64, U: Fn(i64, String) -> String, { sample_value: T, sample_mean_divisor: T, format_tag: U, obj_names: bool, orig_fn_names: bool, call_tree: 
bool, drop_negative: bool, kept_nodes: HashMap<NodeInfo, bool>, } #[derive(Clone, Debug, Eq, Default)] struct Node { info: NodeInfo, function: Box<Node>, flat: i64, flat_div: i64, cum: i64, cum_div: i64, r#in: HashMap<Node, Edge>, out: HashMap<Node, Edge>, label_tags: HashMap<String, Tag>, numeric_tags: HashMap<String, HashMap<String, Tag>>, } impl Hash for Node { fn hash<H: Hasher>(&self, state: &mut H) { self.info.hash(state); self.function.hash(state); self.flat.hash(state); self.flat_div.hash(state); self.cum.hash(state); self.cum_div.hash(state); } } impl PartialEq for Node { fn eq(&self, other: &Self) -> bool { self.flat == other.flat && self.info == other.info && self.function == other.function && self.flat_div == other.flat_div && self.cum == other.cum && self.cum_div == other.cum_div && self.r#in == other.r#in && self.out == other.out && self.label_tags == other.label_tags && self.numeric_tags == other.numeric_tags } } impl Node { pub fn new() -> Self { Node::default() } pub fn flat_value(&self) -> i64 { if self.flat_div == 0 { return self.flat; } self.flat / self.flat_div } pub fn cum_value(&self) -> i64 { if self.cum_div == 0 { return self.cum; } self.cum / self.cum_div } pub fn add_to_edge(&mut self, to: &mut Node, v: i64, residual: bool, inline: bool) { self.add_to_edge_div(to, 0, v, residual, inline); } pub fn add_to_edge_div( &mut self, to: &mut Node, dv: i64, v: i64, residual: bool, inline: bool, ) { if let Some(node1) = self.r#in.get(to) { if let Some(node2) = self.out.get(self) { if node1 != node2 { panic!("asymmetric edges {:?} {:?}", self, to); } } } if let Some(e) = self.r#in.get_mut(to) { e.weight_div += dv; e.weight += v; if residual { e.residual = true; } if !inline { e.inline = false; } return; } let info = Edge { src: self.clone(), dest: to.clone(), weight_div: dv, weight: v, residual, inline, }; self.out.insert(to.clone(), info.clone()); to.r#in.insert(self.clone(), info); } } #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, 
Default)] struct NodeInfo { name: String, orig_name: String, address: u64, file: String, start_line: i64, lineno: i64, objfile: String, } impl NodeInfo { pub fn printable_name(&self) -> String { self.name_components().join(" ") } pub fn name_components(&self) -> Vec<String> { let mut name = vec![]; if self.address != 0 { name.push(format!("{:x}", self.address)); } if !self.name.is_empty() { name.push(self.name.to_string()); } if self.lineno != 0 { name.push(format!("{}:{}", self.file, self.lineno)); } if !self.file.is_empty() { name.push(self.file.to_string()); } if !self.name.is_empty() { name.push(self.name.to_string()); } if !self.objfile.is_empty() { name.push(format!("[{}]", get_basename(&self.objfile, SEPARATOR))); } if name.is_empty() { name.push("<unknown>".to_string()); } name } } fn get_basename<'a>(path: &'a str, pat: &'a str) -> String { let mut parts = path.rsplit(pat); match parts.next() { None => "".into(), Some(path) => path.into(), } } #[derive(Clone, Debug, Hash, Eq, PartialEq)] struct Edge { src: Node, dest: Node, weight: i64, weight_div: i64, residual: bool, inline: bool, } impl Edge { pub fn weight_value(&self) -> i64 { if self.weight_div == 0 { return self.weight; } self.weight / self.weight_div } } #[derive(Clone, Debug, Hash, PartialEq, Eq, PartialOrd, Ord, Default)] struct Tag { name: String, unit: String, value: i64, flat: i64, flat_div: i64, cum: i64, cum_div: i64, } impl Tag { pub fn cum_value(&self) -> i64 { if self.cum_div == 0 { return self.cum; } self.cum / self.cum_div } pub fn flat_value(&self) -> i64 { if self.flat_div == 0 { return self.flat; } self.flat / self.flat_div } } #[cfg(test)] mod tests { use crate::graph::{get_basename, SEPARATOR}; #[test] fn test_get_basename() { assert_eq!(get_basename("/usr/data", SEPARATOR), "data"); assert_eq!(get_basename("/", SEPARATOR), ""); assert_eq!(get_basename("/root", SEPARATOR), "root"); } }
fn init_graph<T: Fn(&[i64]) -> i64, U: Fn(i64, String) -> String>( &self, prof: Profile, o: Options<T, U>, ) -> Self { Graph { nodes: vec![] } }
function_block-full_function
[ { "content": "#[inline]\n\n#[allow(unused_assignments)]\n\npub fn decode_field(buf: &mut Buffer, data: &mut Vec<u8>) -> Result<Vec<u8>, RockError> {\n\n let result = decode_varint(data);\n\n match result {\n\n Ok(varint) => {\n\n // decode\n\n // 90 -> 1011010\n\n // after right shift -> 1011, this is field number in proto\n\n // then we're doing AND operation and getting 7 bits\n\n buf.field = varint.shr(3);\n\n buf.r#type = WireTypes::from(varint & 7);\n\n buf.u64 = 0;\n\n\n\n let mut buf_data = vec![];\n\n\n\n // this is returned type\n\n match buf.r#type {\n\n //0\n\n WireTypes::WireVarint => match decode_varint(data) {\n\n Ok(varint) => {\n\n buf.u64 = varint as u64;\n", "file_path": "crates/core/src/profile/buffer.rs", "rank": 11, "score": 89298.5739070261 }, { "content": "#[inline(always)]\n\npub fn decode_varint(data: &mut Vec<u8>) -> Result<usize, RockError> {\n\n let mut u: usize = 0;\n\n let mut i: usize = 0;\n\n\n\n loop {\n\n // Message should be no more than 10 bytes\n\n if i >= 10 || i >= data.len() {\n\n return Err(RockError::DecodeFieldFailed {\n\n reason: \"bad varint\".to_string(),\n\n });\n\n }\n\n\n\n // get 7 bits except MSB\n\n // here is would be a number w/o the sign bit\n\n // 0x7F --> 127. So, if the number in the self.data[i]\n\n // is eq to 127 there is probably MSB would be set to 1, and if it is\n\n // there is would be a second 7 bits of information\n\n // than we shift like this:\n\n // 1010 1100 0000 0010\n\n // →010 1100 000 0010\n", "file_path": "crates/core/src/profile/buffer.rs", "rank": 12, "score": 87517.08102255416 }, { "content": "#[inline]\n\npub fn decode_message(buf: &mut Buffer, data: &mut Vec<u8>, profile: &mut Profile) {\n\n if buf.r#type != WireTypes::WireBytes {\n\n panic!(\"WireTypes not Equal WireBytes\");\n\n }\n\n\n\n while !data.is_empty() {\n\n // here we decode data, the algorithm is following:\n\n // 1. We pass whole data and buffer to the decode_field function\n\n // 2. 
As the result we get main data (which drained to the buffer size) and buffer with that drained data filled with other fields\n\n // 3. We also calculate field, type and u64 fields to pass it to Profile::decode_profile function\n\n let mut res = decode_field(buf, data);\n\n match res {\n\n Ok(ref mut buf_data) => {\n\n Profile::decode_profile_field(profile, buf, buf_data);\n\n }\n\n Err(err) => {\n\n panic!(err);\n\n }\n\n }\n\n }\n\n}\n\n\n\n// decode_field is used to decode fields from incoming data\n\n// buf -> buffer with data to allocate\n\n// data -> unparsed data\n", "file_path": "crates/core/src/profile/buffer.rs", "rank": 13, "score": 79337.61988709305 }, { "content": "pub fn main() -> iced::Result {\n\n Todos::run(Settings::default())\n\n}\n\n\n", "file_path": "crates/gui/src/main.rs", "rank": 14, "score": 79225.61293944182 }, { "content": "#[inline]\n\npub fn decode_fixed32(p: &[u8]) -> u32 {\n\n (p[0] | p[1].shl(8) | p[2].shl(16) | p[3].shl(24)) as u32\n\n}\n\n\n", "file_path": "crates/core/src/profile/buffer.rs", "rank": 15, "score": 73072.14084364631 }, { "content": "#[inline]\n\npub fn decode_fixed64(p: &[u8]) -> u64 {\n\n ((p[0])\n\n | (p[1].shl(8))\n\n | (p[2].shl(16))\n\n | (p[3].shl(24))\n\n | (p[4].shl(32))\n\n | (p[5].shl(40))\n\n | (p[6].shl(48))\n\n | (p[7].shl(56))) as u64\n\n}\n\n\n\n/// Decode WireType -- 5, Fixed32\n", "file_path": "crates/core/src/profile/buffer.rs", "rank": 16, "score": 73072.14084364631 }, { "content": "#[inline]\n\npub fn decode_string(v: &[u8]) -> String {\n\n std::str::from_utf8(v).unwrap().to_string()\n\n}\n\n\n\n#[cfg(test)]\n\nmod profile_test {\n\n use std::collections::HashMap;\n\n use std::io::Read;\n\n\n\n use crate::profile::buffer::Decoder;\n\n\n\n #[test]\n\n fn parse() {\n\n // key - path to pb\n\n // value - path to related golden file\n\n let mut test_data = HashMap::<String, String>::new();\n\n test_data.insert(\n\n String::from(\"tests/HEAP.pb.gz\"),\n\n String::from(\"tests/HEAP_GOLDEN.string\"),\n\n 
);\n", "file_path": "crates/core/src/profile/buffer.rs", "rank": 17, "score": 73072.14084364631 }, { "content": "pub fn profile_bench_encoded(c: &mut Criterion) {\n\n let r_file_res = std::fs::File::open(\"tests/encoded\");\n\n match r_file_res {\n\n Ok(mut file) => {\n\n let mut buffer = vec![];\n\n let _ = file.read_to_end(&mut buffer);\n\n c.bench_function(\"profile_bench_encoded\", |b| {\n\n b.iter(|| profile::buffer::Buffer::decode(black_box(buffer.as_mut())))\n\n });\n\n }\n\n Err(err) => panic!(err),\n\n }\n\n}\n\n\n", "file_path": "crates/core/benches/profile_decode.rs", "rank": 18, "score": 71539.69003645174 }, { "content": "pub fn profile_bench_heap(c: &mut Criterion) {\n\n let r_file_res = std::fs::File::open(\"tests/HEAP.pb.gz\");\n\n match r_file_res {\n\n Ok(mut file) => {\n\n let mut buffer = vec![];\n\n let _ = file.read_to_end(&mut buffer);\n\n c.bench_function(\"profile_bench_heap\", |b| {\n\n b.iter(|| profile::buffer::Buffer::decode(black_box(buffer.as_mut())))\n\n });\n\n }\n\n Err(err) => panic!(err),\n\n }\n\n}\n\n\n", "file_path": "crates/core/benches/profile_decode.rs", "rank": 19, "score": 71539.69003645174 }, { "content": "pub fn profile_bench_cpu(c: &mut Criterion) {\n\n let r_file_res = std::fs::File::open(\"tests/CPU.pb.gz\");\n\n match r_file_res {\n\n Ok(mut file) => {\n\n let mut buffer = vec![];\n\n let _ = file.read_to_end(&mut buffer);\n\n c.bench_function(\"profile_bench_cpu\", |b| {\n\n b.iter(|| profile::buffer::Buffer::decode(black_box(buffer.as_mut())))\n\n });\n\n }\n\n Err(err) => panic!(err),\n\n }\n\n}\n\n\n", "file_path": "crates/core/benches/profile_decode.rs", "rank": 20, "score": 71539.69003645174 }, { "content": "fn empty_message<'a>(message: &str) -> Element<'a, Message> {\n\n Container::new(\n\n Text::new(message)\n\n .width(Length::Fill)\n\n .size(25)\n\n .horizontal_alignment(HorizontalAlignment::Center)\n\n .color([0.7, 0.7, 0.7]),\n\n )\n\n .width(Length::Fill)\n\n .height(Length::Units(200))\n\n 
.center_y()\n\n .into()\n\n}\n\n\n\n// Fonts\n\nconst ICONS: Font = Font::External {\n\n name: \"Icons\",\n\n bytes: include_bytes!(\"../fonts/icons.ttf\"),\n\n};\n\n\n", "file_path": "crates/gui/src/main.rs", "rank": 21, "score": 69708.186255367 }, { "content": "pub fn profile_bench_big_1min_13025_lines(c: &mut Criterion) {\n\n let r_file_res = std::fs::File::open(\"tests/RR_CPU.pb.gz\");\n\n match r_file_res {\n\n Ok(mut file) => {\n\n let mut buffer = vec![];\n\n let _ = file.read_to_end(&mut buffer);\n\n c.bench_function(\"profile_bench_big_1min_13025_lines\", |b| {\n\n b.iter(|| profile::buffer::Buffer::decode(black_box(buffer.as_mut())))\n\n });\n\n }\n\n Err(err) => panic!(err),\n\n }\n\n}\n\n\n\ncriterion_group! {\n\n name = benches;\n\n config = Criterion::default().sample_size(100).nresamples(5000).measurement_time(Duration::from_secs(60)).warm_up_time(Duration::from_secs(1));\n\n targets = profile_bench_cpu, profile_bench_heap, profile_bench_encoded\n\n}\n\n\n\ncriterion_group! {\n\n name = slow_bench;\n\n config = Criterion::default().sample_size(10).nresamples(5000).measurement_time(Duration::from_secs(60)).warm_up_time(Duration::from_secs(1));\n\n targets = profile_bench_big_1min_13025_lines\n\n}\n\n\n\ncriterion_main!(benches, slow_bench);\n", "file_path": "crates/core/benches/profile_decode.rs", "rank": 22, "score": 68762.86891956796 }, { "content": "type NumLabelUnitsWithIgnored = (HashMap<String, String>, HashMap<String, Vec<String>>);\n\n\n\n/// Text representation of a profile. 
For debugging and testing purposes.\n\nimpl ToString for Profile {\n\n fn to_string(&self) -> String {\n\n // pre-allocate space for vector\n\n let mut ss: Vec<String> = Vec::with_capacity(\n\n self.comments.len() + self.sample.len() + self.mapping.len() + self.location.len(),\n\n );\n\n\n\n // COMMENT SECTION START =================================\n\n for c in self.comments.iter() {\n\n ss.push(format!(\"Comment: {}\", c))\n\n }\n\n\n\n match self.period_type {\n\n // it is possible, that there is no pt\n\n None => {}\n\n Some(ref pt) => ss.push(format!(\"PeriodType: {} {}\", pt.r#type, pt.unit)),\n\n }\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 24, "score": 58561.87749216954 }, { "content": "#[derive(Debug, Default)]\n\nstruct State {\n\n scroll: scrollable::State,\n\n input: text_input::State,\n\n input_value: String,\n\n filter: Filter,\n\n tasks: Vec<Task>,\n\n controls: Controls,\n\n dirty: bool,\n\n saving: bool,\n\n}\n\n\n", "file_path": "crates/gui/src/main.rs", "rank": 25, "score": 49761.032136374066 }, { "content": "#[derive(Debug, Clone, Serialize, Deserialize)]\n\nstruct Task {\n\n description: String,\n\n completed: bool,\n\n\n\n #[serde(skip)]\n\n state: TaskState,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum TaskState {\n\n Idle {\n\n edit_button: button::State,\n\n },\n\n Editing {\n\n text_input: text_input::State,\n\n delete_button: button::State,\n\n },\n\n}\n\n\n\nimpl Default for TaskState {\n", "file_path": "crates/gui/src/main.rs", "rank": 26, "score": 49760.87854403892 }, { "content": "#[derive(Debug, Clone, Serialize, Deserialize)]\n\nstruct SavedState {\n\n input_value: String,\n\n filter: Filter,\n\n tasks: Vec<Task>,\n\n}\n\n\n", "file_path": "crates/gui/src/main.rs", "rank": 27, "score": 48643.24861463814 }, { "content": "struct Options<T>\n\nwhere\n\n T: Fn(&[i64]) -> i64,\n\n{\n\n output_format: isize,\n\n\n\n cum_sort: bool,\n\n call_tree: bool,\n\n drop_negative: bool,\n\n compact_labels: bool,\n\n ratio: 
f64,\n\n title: String,\n\n profile_labels: Vec<String>,\n\n active_filters: Vec<String>,\n\n num_label_units: HashMap<String, String>,\n\n\n\n node_count: isize,\n\n node_fraction: f64,\n\n edge_fraction: f64,\n\n\n", "file_path": "crates/core/src/report/mod.rs", "rank": 28, "score": 44839.04019114029 }, { "content": "struct Report<T>\n\nwhere\n\n T: Fn(i64) -> String,\n\n{\n\n prof: Profile,\n\n total: i64,\n\n format_value: T,\n\n}\n\n\n", "file_path": "crates/core/src/report/mod.rs", "rank": 29, "score": 44839.04019114029 }, { "content": "// ProfileDecoder is a main trait to decode the profile\n\npub trait Decoder {\n\n fn decode(data: &mut Vec<u8>) -> Result<Profile, RockError>;\n\n}\n\n\n\n// Constants that identify the encoding of a value on the wire.\n\n#[repr(u8)]\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum WireTypes {\n\n WireVarint = 0,\n\n WireFixed64 = 1,\n\n WireBytes = 2,\n\n WireFixed32 = 5,\n\n}\n\n\n\nimpl From<usize> for WireTypes {\n\n fn from(var: usize) -> Self {\n\n match var {\n\n 0 => self::WireTypes::WireVarint,\n\n 1 => self::WireTypes::WireFixed64,\n\n 2 => self::WireTypes::WireBytes,\n", "file_path": "crates/core/src/profile/buffer.rs", "rank": 30, "score": 44601.044434178795 }, { "content": "fn edit_icon() -> Text {\n\n icon('\\u{F303}')\n\n}\n\n\n", "file_path": "crates/gui/src/main.rs", "rank": 31, "score": 44102.30952483472 }, { "content": "fn delete_icon() -> Text {\n\n icon('\\u{F1F8}')\n\n}\n\n\n\n// Persistence\n", "file_path": "crates/gui/src/main.rs", "rank": 32, "score": 44102.30952483472 }, { "content": "pub trait Decoder<T> {\n\n fn decode(buf: &mut Buffer, data: &mut Vec<u8>) -> T;\n\n}\n\n\n\n// TODO ADD OPTIONAL TO THE STRUCT FIELDS\n\n// TODO BUG, getString(p.stringTable, &p.dropFramesX, err) p.dropFramesX and similar logic. 
p.dropFramesX should became 0 !!!!\n\n// Profile is an in-memory representation of profile.proto\n\n\n\n#[derive(Clone, Default, Debug, Eq, PartialEq)]\n\npub struct Profile {\n\n // A description of the samples associated with each Sample.value.\n\n // For a cpu profile this might be:\n\n // [[\"cpu\",\"nanoseconds\"]] or [[\"wall\",\"seconds\"]] or [[\"syscall\",\"count\"]]\n\n // For a heap profile, this might be:\n\n // [[\"allocations\",\"count\"], [\"space\",\"bytes\"]],\n\n // If one of the values represents the number of events represented\n\n // by the sample, by convention it should be at index 0 and use\n\n // sample_type.unit == \"count\".\n\n sample_type: Vec<value_type::ValueType>,\n\n // The set of samples recorded in this profile.\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 33, "score": 41520.57703417933 }, { "content": "fn icon(unicode: char) -> Text {\n\n Text::new(&unicode.to_string())\n\n .font(ICONS)\n\n .width(Length::Units(20))\n\n .horizontal_alignment(HorizontalAlignment::Center)\n\n .size(20)\n\n}\n\n\n", "file_path": "crates/gui/src/main.rs", "rank": 34, "score": 39403.596520741696 }, { "content": "fn loading_message<'a>() -> Element<'a, Message> {\n\n Container::new(\n\n Text::new(\"Loading...\")\n\n .horizontal_alignment(HorizontalAlignment::Center)\n\n .size(50),\n\n )\n\n .width(Length::Fill)\n\n .height(Length::Fill)\n\n .center_y()\n\n .into()\n\n}\n\n\n", "file_path": "crates/gui/src/main.rs", "rank": 35, "score": 38043.834498421886 }, { "content": "use crate::profile::buffer::{decode_field, Buffer};\n\nuse crate::profile::Decoder;\n\n\n\n#[derive(Default, Debug, Clone, Eq, PartialEq)]\n\n/// ValueType describes the semantics and measurement units of a value\n\npub struct ValueType {\n\n // Type and uint do not present in proto file\n\n // Used only for parsing\n\n // cpu, wall, inuse_space, etc\n\n pub r#type: String,\n\n // seconds, nanoseconds, bytes, etc\n\n pub unit: String,\n\n\n\n // index in the string 
table\n\n pub type_index: i64,\n\n // index in the string table\n\n pub unit_index: i64,\n\n}\n\n\n\nimpl Decoder<ValueType> for ValueType {\n", "file_path": "crates/core/src/profile/value_type.rs", "rank": 50, "score": 26834.714274637485 }, { "content": " fn decode(buf: &mut Buffer, data: &mut Vec<u8>) -> ValueType {\n\n let mut vt = ValueType::default();\n\n while !data.is_empty() {\n\n match decode_field(buf, data) {\n\n Ok(_) => {\n\n match buf.field {\n\n //1\n\n 1 => {\n\n vt.type_index = buf.u64 as i64;\n\n }\n\n //2\n\n 2 => {\n\n vt.unit_index = buf.u64 as i64;\n\n }\n\n _ => {\n\n panic!(\"Unknown value_type type\");\n\n }\n\n }\n\n }\n\n Err(err) => {\n\n panic!(err);\n\n }\n\n }\n\n }\n\n vt\n\n }\n\n}\n", "file_path": "crates/core/src/profile/value_type.rs", "rank": 51, "score": 26828.846714978772 }, { "content": "\n\n Ok((num_label_units, units_ignored))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::profile::sample::Sample;\n\n use crate::profile::Profile;\n\n use std::collections::HashMap;\n\n use std::hash::Hash;\n\n\n\n macro_rules! tag_vals_init (\n\n { $($key:expr => $value:expr),+ } => {\n\n {\n\n let mut tag_vals = ::std::vec::Vec::new();\n\n let mut m = ::std::collections::HashMap::new();\n\n $(\n\n m.entry($key.to_string())\n\n .and_modify(|v:&mut Vec<i64>| v.push($value as i64))\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 52, "score": 13.896361283543762 }, { "content": " .or_insert(vec![$value as i64]);\n\n )+\n\n tag_vals.push(m);\n\n tag_vals\n\n }\n\n };\n\n );\n\n\n\n macro_rules! 
tag_units_init (\n\n { $($key:expr => $value:expr),+ } => {\n\n {\n\n let mut tag_vals = ::std::vec::Vec::new();\n\n let mut m = ::std::collections::HashMap::new();\n\n $(\n\n m.entry($key.to_string())\n\n .and_modify(|v:&mut Vec<String>| v.push($value.to_string()))\n\n .or_insert(vec![$value.to_string()]);\n\n )+\n\n tag_vals.push(m);\n\n tag_vals\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 53, "score": 12.425059131233342 }, { "content": " fn default() -> Self {\n\n TaskState::Idle {\n\n edit_button: button::State::new(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum TaskMessage {\n\n Completed(bool),\n\n Edit,\n\n DescriptionEdited(String),\n\n FinishEdition,\n\n Delete,\n\n}\n\n\n\nimpl Task {\n\n fn new(description: String) -> Self {\n\n Task {\n\n description,\n", "file_path": "crates/gui/src/main.rs", "rank": 54, "score": 12.269390807044916 }, { "content": "use crate::profile::buffer::{decode_string, decode_varint, Buffer, WireTypes};\n\nuse crate::profile::errors::RockError;\n\nuse chrono::NaiveDateTime;\n\nuse std::borrow::Borrow;\n\nuse std::collections::hash_map::Entry;\n\nuse std::collections::HashMap;\n\n\n\npub mod buffer;\n\nmod errors;\n\npub(crate) mod function;\n\nmod label;\n\npub(crate) mod line;\n\npub(crate) mod location;\n\nmod mapping;\n\nmod sample;\n\nmod value_type;\n\n\n\nconst NSEC_IN_SECOND: i64 = 1_000_000_000;\n\n\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 55, "score": 11.645721299376689 }, { "content": " 5 => self::WireTypes::WireFixed32,\n\n _ => panic!(\"unknown WireType\"),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub struct Buffer {\n\n pub field: usize,\n\n pub r#type: WireTypes,\n\n pub u64: u64,\n\n}\n\n\n\nimpl Default for Buffer {\n\n fn default() -> Self {\n\n Buffer {\n\n field: 0,\n\n r#type: WireTypes::WireVarint,\n\n u64: 0,\n\n }\n", "file_path": "crates/core/src/profile/buffer.rs", "rank": 56, "score": 11.283752109938069 }, { "content": "use 
std::fmt;\n\nuse std::fmt::{Debug, Formatter};\n\n\n\n#[derive(Debug)]\n\npub enum RockError {\n\n ProfileUncompressFailed {\n\n reason: String,\n\n },\n\n DecodeFieldFailed {\n\n reason: String,\n\n },\n\n ValidationFailed {\n\n reason: String,\n\n },\n\n #[allow(dead_code)]\n\n Unknown {\n\n reason: String,\n\n },\n\n}\n\n\n", "file_path": "crates/core/src/profile/errors.rs", "rank": 57, "score": 10.980965328382972 }, { "content": "use crate::profile::buffer::{decode_field, decode_varint, Buffer, WireTypes};\n\nuse crate::profile::{label, location, Decoder};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Default, Debug, Clone, Eq, PartialEq)]\n\n// Each Sample records values encountered in some program\n\n// context. The program context is typically a stack trace, perhaps\n\n// augmented with auxiliary information like the thread-id, some\n\n// indicator of a higher level request being handled etc.\n\npub struct Sample {\n\n // The ids recorded here correspond to a Profile.location.id.\n\n // The leaf is at location_id[0].\n\n pub location: Vec<location::Location>,\n\n // The type and unit of each value is defined by the corresponding\n\n // entry in Profile.sample_type. 
All samples must have the same\n\n // number of values, the same as the length of Profile.sample_type.\n\n // When aggregating multiple samples into a single sample, the\n\n // result has a list of values that is the elemntwise sum of the\n\n // lists of the originals.\n\n pub value: Vec<i64>,\n", "file_path": "crates/core/src/profile/sample.rs", "rank": 58, "score": 10.825515017906325 }, { "content": " }\n\n\n\n async fn load() -> Result<SavedState, LoadError> {\n\n use async_std::prelude::*;\n\n\n\n let mut contents = String::new();\n\n\n\n let mut file = async_std::fs::File::open(Self::path())\n\n .await\n\n .map_err(|_| LoadError::File)?;\n\n\n\n file.read_to_string(&mut contents)\n\n .await\n\n .map_err(|_| LoadError::File)?;\n\n\n\n serde_json::from_str(&contents).map_err(|_| LoadError::Format)\n\n }\n\n\n\n async fn save(self) -> Result<(), SaveError> {\n\n use async_std::prelude::*;\n", "file_path": "crates/gui/src/main.rs", "rank": 59, "score": 9.961094063926039 }, { "content": "#![warn(missing_debug_implementations, rust_2018_idioms)]\n\n\n\npub mod graph;\n\npub mod profile;\n\npub mod report;\n\npub mod driver;\n", "file_path": "crates/core/src/lib.rs", "rank": 60, "score": 9.41667227923492 }, { "content": "use crate::profile::buffer::{decode_field, Buffer};\n\nuse crate::profile::Decoder;\n\nuse std::default::Default;\n\n\n\n#[derive(Default, Debug, Clone, Eq, PartialEq)]\n\npub struct Function {\n\n // Unique nonzero id for the function.\n\n pub id: u64,\n\n // Name of the function, in human-readable form if available.\n\n pub name: String,\n\n // Name of the function, as identified by the system.\n\n // For instance, it can be a C++ mangled name.\n\n pub system_name: String,\n\n // Source file containing the function.\n\n pub filename: String,\n\n // Line number in source file.\n\n pub start_line: i64,\n\n\n\n // HELPERS\n\n // Index into string table\n", "file_path": "crates/core/src/profile/function.rs", "rank": 61, "score": 8.919839882872305 }, { 
"content": "\n\n // This is a simple way to save at most once every couple seconds\n\n async_std::task::sleep(std::time::Duration::from_secs(2)).await;\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nmod style {\n\n use iced::{button, Background, Color, Vector};\n\n\n\n pub enum Button {\n\n Filter { selected: bool },\n\n Icon,\n\n Destructive,\n\n }\n\n\n\n impl button::StyleSheet for Button {\n\n fn active(&self) -> button::Style {\n\n match self {\n", "file_path": "crates/gui/src/main.rs", "rank": 62, "score": 8.609899005862648 }, { "content": " tag_units: tag_units_init!(\"key1\" => \"\"),\n\n want_units: want_units_init!(\"key1\" => \"key1\"),\n\n want_ignored_units: HashMap::new(),\n\n });\n\n tests.push(TagFilterTests {\n\n desc: String::from(\"Key bytes, unit not specified\"),\n\n tag_vals: tag_vals_init! {\"bytes\" => 8},\n\n tag_units: Vec::new(),\n\n want_units: want_units_init!(\"bytes\" => \"bytes\"),\n\n want_ignored_units: HashMap::new(),\n\n });\n\n tests.push(TagFilterTests {\n\n desc: String::from(\"One sample, one key with one value, unit not specified\"),\n\n tag_vals: tag_vals_init! 
{\"kilobytes\" => 8},\n\n tag_units: Vec::new(),\n\n want_units: want_units_init!(\"kilobytes\" => \"kilobytes\"),\n\n want_ignored_units: HashMap::new(),\n\n });\n\n tests.push(TagFilterTests {\n\n desc: String::from(\"Key request, unit not specified\"),\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 63, "score": 8.388910957981869 }, { "content": " $(\n\n m.entry($key.to_string())\n\n .and_modify(|v:&mut Vec<String>| v.push($value.to_string()))\n\n .or_insert(vec![$value.to_string()]);\n\n )+\n\n m\n\n }\n\n };\n\n );\n\n\n\n #[test]\n\n fn test_num_label_units() {\n\n #[derive(Debug)]\n\n struct TagFilterTests {\n\n desc: String,\n\n tag_vals: Vec<HashMap<String, Vec<i64>>>,\n\n tag_units: Vec<HashMap<String, Vec<String>>>,\n\n want_units: HashMap<String, String>,\n\n want_ignored_units: HashMap<String, Vec<String>>,\n\n }\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 64, "score": 8.350081025167988 }, { "content": "use crate::profile::buffer::{decode_field, Buffer};\n\nuse crate::profile::Decoder;\n\nuse std::default::Default;\n\n\n\n// TMP\n\n// mapping corresponds to Profile.Mapping\n\n#[derive(Default, Debug, Clone, Eq, PartialEq)]\n\npub struct Mapping {\n\n // Unique nonzero id for the mapping.\n\n pub id: u64,\n\n // Address at which the binary (or DLL) is loaded into memory.\n\n pub memory_start: u64,\n\n // The limit of the address range occupied by this mapping.\n\n pub memory_limit: u64,\n\n // Offset in the binary that corresponds to the first mapped address.\n\n pub memory_offset: u64,\n\n // Index into string table\n\n // The object this entry is loaded from. 
This can be a filename on\n\n // disk for the main binary and shared libraries, or virtual\n\n // abstractions like \"[vdso]\".\n", "file_path": "crates/core/src/profile/mapping.rs", "rank": 65, "score": 8.268162520079212 }, { "content": " pub filename: String,\n\n // Index into string table\n\n // A string that uniquely identifies a particular program version\n\n // with high probability. E.g., for binaries generated by GNU tools,\n\n // it could be the contents of the .note.gnu.build-id field.\n\n pub build_id: String,\n\n\n\n pub has_function: bool,\n\n pub has_filenames: bool,\n\n pub has_line_numbers: bool,\n\n pub has_inline_frames: bool,\n\n\n\n // Index into string table\n\n pub filename_index: i64,\n\n // Index into string table\n\n pub build_id_index: i64,\n\n}\n\n\n\nimpl Decoder<Mapping> for Mapping {\n\n fn decode(buf: &mut Buffer, data: &mut Vec<u8>) -> Mapping {\n", "file_path": "crates/core/src/profile/mapping.rs", "rank": 66, "score": 8.210103655923396 }, { "content": " // label includes additional context for this sample. It can include\n\n // things like a thread id, allocation size, etc\n\n pub label: HashMap<String, Vec<String>>,\n\n // key is label.key_index(in string table), value is associated str_index\n\n // entry in Profile.sample_type\n\n pub num_label: HashMap<String, Vec<i64>>,\n\n // label and numbers in string table, key_index is a key\n\n pub num_unit_label: HashMap<String, Vec<String>>, // label and unit measurement, key_index also is a key\n\n\n\n // These types are not present in the proto file\n\n pub location_index: Vec<u64>,\n\n pub label_index: Vec<label::Label>,\n\n}\n\n\n\nimpl Decoder<Sample> for Sample {\n\n #[inline]\n\n fn decode(buf: &mut Buffer, data: &mut Vec<u8>) -> Sample {\n\n let mut s = Sample::default();\n\n while !data.is_empty() {\n\n match decode_field(buf, data) {\n", "file_path": "crates/core/src/profile/sample.rs", "rank": 67, "score": 8.104963996332305 }, { "content": " tag_vals: tag_vals_init! 
{\"request\" => 8},\n\n tag_units: Vec::new(),\n\n want_units: want_units_init!(\"request\" => \"bytes\"),\n\n want_ignored_units: HashMap::new(),\n\n });\n\n tests.push(TagFilterTests {\n\n desc: String::from(\"Key alignment, unit not specified\"),\n\n tag_vals: tag_vals_init! {\"alignment\" => 8},\n\n tag_units: Vec::new(),\n\n want_units: want_units_init!(\"alignment\" => \"bytes\"),\n\n want_ignored_units: HashMap::new(),\n\n });\n\n tests.push(TagFilterTests {\n\n desc: String::from(\"One sample, one key with multiple values and two different units\"),\n\n tag_vals: tag_vals_init! {\"key1\" => 8, \"key1\" => 8},\n\n tag_units: tag_units_init!(\"key1\" => \"bytes\", \"key1\" => \"kilobytes\"),\n\n want_units: want_units_init!(\"key1\" => \"bytes\"),\n\n want_ignored_units: want_ignored_units_init!(\"key1\" => \"kilobytes\"),\n\n });\n\n tests.push(TagFilterTests {\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 68, "score": 8.052384977493595 }, { "content": "use crate::profile::buffer::{decode_field, Buffer};\n\nuse crate::profile::{function, Decoder};\n\n\n\n#[derive(Default, Debug, Clone, Eq, PartialEq)]\n\npub struct Line {\n\n // Line number in source code.\n\n pub line: i64,\n\n // The id of the corresponding profile.Function for this line.\n\n pub function_index: u64,\n\n // HELPERS\n\n pub function: function::Function,\n\n}\n\n\n\nimpl Decoder<Line> for Line {\n\n fn decode(buf: &mut Buffer, data: &mut Vec<u8>) -> Line {\n\n let mut line = Line::default();\n\n while !data.is_empty() {\n\n match decode_field(buf, data) {\n\n Ok(_) => {\n\n match buf.field {\n", "file_path": "crates/core/src/profile/line.rs", "rank": 69, "score": 7.999066949922799 }, { "content": " }\n\n }\n\n }\n\n Err(err) => {\n\n panic!(err);\n\n }\n\n }\n\n }\n\n mapping\n\n }\n\n}\n\n\n\nimpl ToString for Mapping {\n\n fn to_string(&self) -> String {\n\n let mut bits = String::new();\n\n\n\n if self.has_function {\n\n bits.push_str(\"[FN]\");\n\n }\n\n\n", "file_path": 
"crates/core/src/profile/mapping.rs", "rank": 70, "score": 7.847716757194611 }, { "content": " Ok(())\n\n }\n\n\n\n // NumLabelUnits returns a map of numeric label keys to the units\n\n // associated with those keys and a map of those keys to any units\n\n // that were encountered but not used.\n\n // Unit for a given key is the first encountered unit for that key. If multiple\n\n // units are encountered for values paired with a particular key, then the first\n\n // unit encountered is used and all other units are returned in sorted order\n\n // in map of ignored units.\n\n // If no units are encountered for a particular key, the unit is then inferred\n\n // based on the key.\n\n pub fn num_label_units(&self) -> Result<NumLabelUnitsWithIgnored, RockError> {\n\n let mut num_label_units: HashMap<String, String> = HashMap::new();\n\n let mut ignored_units: HashMap<String, HashMap<String, bool>> = HashMap::new();\n\n let mut encountered_keys: HashMap<String, bool> = HashMap::new();\n\n\n\n // Determine units based on numeric tags for each sample.\n\n for (_, s) in self.sample.iter().enumerate() {\n\n for (k, _) in s.num_label.iter() {\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 71, "score": 7.829596768818397 }, { "content": " st.r#type = self.string_table[st.type_index as usize].to_string();\n\n }\n\n\n\n for s in self.sample.iter_mut() {\n\n let mut labels: HashMap<String, Vec<String>> = HashMap::new();\n\n let mut num_labels: HashMap<String, Vec<i64>> = HashMap::new();\n\n let mut num_units: HashMap<String, Vec<String>> = HashMap::new();\n\n\n\n for label_index in s.label_index.iter() {\n\n // key can't be empty\n\n let key = self.string_table[label_index.key_index as usize].to_string();\n\n\n\n if label_index.str_index != 0 {\n\n let key_value = self.string_table[label_index.str_index as usize].to_string();\n\n // using or_insert_with because: The function will always be called and potentially allocate an object acting as the default.\n\n labels\n\n 
.entry(key)\n\n .and_modify(|e| e.push(key_value.clone()))\n\n .or_insert_with(|| vec![key_value]);\n\n } else if label_index.num_index != 0 {\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 72, "score": 7.8281939429365694 }, { "content": "use crate::profile::buffer::{decode_field, Buffer};\n\nuse crate::profile::Decoder;\n\n\n\n#[derive(Default, Debug, Clone, Eq, PartialEq)]\n\npub struct Label {\n\n pub key_index: i64, // Index into string table\n\n\n\n // one of the two following values must be set\n\n pub str_index: i64,\n\n // Index into string table\n\n pub num_index: i64,\n\n\n\n // Should only be present when num is present.\n\n // Specifies the units of num.\n\n // Use arbitrary string (for example, \"requests\") as a custom count unit.\n\n // If no unit is specified, consumer may apply heuristic to deduce the unit.\n\n // Consumers may also interpret units like \"bytes\" and \"kilobytes\" as memory\n\n // units and units like \"seconds\" and \"nanoseconds\" as time units,\n\n // and apply appropriate unit conversions to these.\n\n pub num_unit_index: i64,\n", "file_path": "crates/core/src/profile/label.rs", "rank": 73, "score": 7.782248827773433 }, { "content": "use std::io::{BufReader, Read};\n\nuse std::ops::{Shl, Shr};\n\n\n\nuse flate2::read::GzDecoder;\n\n\n\nuse crate::profile::errors::RockError;\n\nuse crate::profile::Profile;\n\nuse std::convert::From;\n\nuse std::string::ToString;\n\n\n\n// ProfileDecoder is a main trait to decode the profile\n", "file_path": "crates/core/src/profile/buffer.rs", "rank": 74, "score": 7.641026333381776 }, { "content": "\n\nimpl ToString for Sample {\n\n #[inline]\n\n fn to_string(&self) -> String {\n\n let mut ss: Vec<String> = vec![];\n\n let mut sv = String::new();\n\n\n\n for val in self.value.iter() {\n\n sv.push_str(format!(\" {:10}\", val).as_ref());\n\n }\n\n\n\n sv.push_str(\": \");\n\n\n\n for loc in self.location.iter() {\n\n sv.push_str(format!(\"{} \", loc.id).as_ref());\n\n }\n\n 
sv.drain((sv.len() - 1)..);\n\n\n\n ss.push(sv);\n\n let label_header = String::from(\" \");\n", "file_path": "crates/core/src/profile/sample.rs", "rank": 75, "score": 7.453175727894011 }, { "content": "use crate::profile::buffer::{decode_field, Buffer};\n\nuse crate::profile::mapping::Mapping;\n\nuse crate::profile::{function, line, Decoder};\n\n\n\n#[derive(Default, Debug, Clone, Eq, PartialEq)]\n\n// Describes function and line table debug information.\n\npub struct Location {\n\n // Unique nonzero id for the location. A profile could use\n\n // instruction addresses or any integer sequence as ids.\n\n pub id: u64,\n\n // The id of the corresponding profile.Mapping for this location.\n\n // It can be unset if the mapping is unknown or not applicable for\n\n // this profile type.\n\n pub mapping_index: u64,\n\n // The instruction address for this location, if available. It\n\n // should be within [Mapping.memory_start...Mapping.memory_limit]\n\n // for the corresponding mapping. A non-leaf address may be in the\n\n // middle of a call instruction. 
It is up to display tools to find\n\n // the beginning of the instruction if necessary.\n\n pub address: u64,\n", "file_path": "crates/core/src/profile/location.rs", "rank": 76, "score": 7.390129637104087 }, { "content": "use core::profile;\n\nuse core::profile::buffer::Decoder;\n\nuse criterion::{black_box, criterion_group, criterion_main, Criterion};\n\nuse std::io::Read;\n\nuse std::time::Duration;\n\n\n", "file_path": "crates/core/benches/profile_decode.rs", "rank": 77, "score": 7.2654973366383295 }, { "content": " }\n\n RockError::DecodeFieldFailed { reason } => {\n\n std::io::Error::new(std::io::ErrorKind::Other, reason)\n\n }\n\n RockError::ValidationFailed { reason } => {\n\n std::io::Error::new(std::io::ErrorKind::Other, reason)\n\n }\n\n RockError::Unknown { reason } => std::io::Error::new(std::io::ErrorKind::Other, reason),\n\n }\n\n }\n\n}\n", "file_path": "crates/core/src/profile/errors.rs", "rank": 78, "score": 7.0429768802072745 }, { "content": "use crate::profile::Profile;\n\nuse std::collections::HashMap;\n\n\n", "file_path": "crates/core/src/report/mod.rs", "rank": 79, "score": 6.954139378291073 }, { "content": " .push(filter_button(\n\n completed_button,\n\n \"Completed\",\n\n Filter::Completed,\n\n current_filter,\n\n )),\n\n )\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum Filter {\n\n All,\n\n Active,\n\n Completed,\n\n}\n\n\n\nimpl Default for Filter {\n\n fn default() -> Self {\n\n Filter::All\n", "file_path": "crates/gui/src/main.rs", "rank": 80, "score": 6.73475113606548 }, { "content": " }\n\n}\n\n\n\nimpl ToString for Location {\n\n #[inline]\n\n fn to_string(&self) -> String {\n\n let mut ss: Vec<String> = vec![];\n\n let mut loc_str = format!(\"{:6}: {:#x} \", self.id, self.address);\n\n\n\n match self.mapping {\n\n None => {}\n\n Some(ref mapping) => {\n\n loc_str.push_str(format!(\"M={} \", mapping.id).as_ref());\n\n }\n\n }\n\n\n\n if self.is_folder {\n\n loc_str.push_str(\"[F] 
\");\n\n }\n\n if self.line.is_empty() {\n", "file_path": "crates/core/src/profile/location.rs", "rank": 81, "score": 6.611263578905399 }, { "content": "\n\n let mut tests = vec![];\n\n\n\n tests.push(TagFilterTests {\n\n desc: String::from(\"One sample, multiple keys, different specified units\"),\n\n tag_vals: tag_vals_init! {\"key1\" => 131_072, \"key2\" => 128},\n\n tag_units: tag_units_init!(\"key1\" => \"bytes\", \"key2\" => \"kilobytes\"),\n\n want_units: want_units_init!(\"key1\" => \"bytes\", \"key2\" => \"kilobytes\"),\n\n want_ignored_units: HashMap::new(),\n\n });\n\n tests.push(TagFilterTests {\n\n desc: String::from(\"One sample, one key with one value, unit specified\"),\n\n tag_vals: tag_vals_init! {\"key1\" => 8},\n\n tag_units: tag_units_init!(\"key1\" => \"bytes\"),\n\n want_units: want_units_init!(\"key1\" => \"bytes\"),\n\n want_ignored_units: HashMap::new(),\n\n });\n\n tests.push(TagFilterTests {\n\n desc: String::from(\"One sample, one key with one value, empty unit specified\"),\n\n tag_vals: tag_vals_init! 
{\"key1\" => 8},\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 82, "score": 6.55280884313945 }, { "content": " pub fn decode_profile_field(&mut self, buf: &mut Buffer, data: &mut Vec<u8>) {\n\n match buf.field {\n\n // repeated ValueType sample_type = 1\n\n 1 => {\n\n self.sample_type\n\n .push(value_type::ValueType::decode(buf, data));\n\n }\n\n // repeated Sample sample = 2\n\n 2 => {\n\n let a = sample::Sample::decode(buf, data);\n\n self.sample.push(a);\n\n }\n\n // repeated Mapping mapping = 3\n\n 3 => {\n\n self.mapping.push(mapping::Mapping::decode(buf, data));\n\n }\n\n // repeated Location location = 4\n\n 4 => {\n\n self.location.push(location::Location::decode(buf, data));\n\n }\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 83, "score": 6.517489747427271 }, { "content": " // clone string w/o getting ownership, because we also use push in the for cycle below\n\n ss.push(loc_str.clone());\n\n }\n\n\n\n for (li, _) in self.line.iter().enumerate() {\n\n let mut ln_str = String::from(\"??\");\n\n\n\n let func = self.line[li].function.clone();\n\n // TODO better to use option\n\n if func != function::Function::default() {\n\n ln_str.clear();\n\n ln_str.push_str(&format!(\n\n \"{} {}:{} s={}\",\n\n func.name, func.filename, self.line[li].line, func.start_line\n\n ));\n\n\n\n if func.name != func.system_name {\n\n ln_str.push_str(&format!(\"({})\", func.system_name));\n\n }\n\n }\n", "file_path": "crates/core/src/profile/location.rs", "rank": 84, "score": 6.25654528141966 }, { "content": " match units {\n\n None => {\n\n label_string.push_str(&format!(\n\n \"{}:[{}]\",\n\n k,\n\n v.iter().map(|v| { v.to_string() }).collect::<String>()\n\n ));\n\n }\n\n Some(units) => {\n\n if units.len() == v.len() {\n\n let mut values = vec![];\n\n for _ in 0..v.len() {\n\n values.push(String::new());\n\n }\n\n //alignment:[3 kilobytes 4 kilobytes] bytes:[3 4] key1:[1 2] key2:[3 4] requests:[1 1 3 seconds 4 5 s]\n\n for (i, vv) in v.iter().enumerate() 
{\n\n values[i] = format!(\"{} {}\", vv, units[i]);\n\n }\n\n\n\n label_string.push_str(&format!(\n", "file_path": "crates/core/src/profile/sample.rs", "rank": 85, "score": 6.255817031050507 }, { "content": "# Rock \n\n\n\n![CI](https://github.com/spiral/rock/workflows/CI/badge.svg)\n\n\n\nParser for the golang `pprof` profile format with mimalloc (on Linux) allocator. Data passed to the `Rock` can be in the same `zip/pb.gz` archive\n\nas produces pprof (by default stored on Linux in `$HOME/pprof/...`)\n\n\n\nThis library can be used as intergration with http server (for example) to continuously parse profiles.\n\n\n\nTo do that, use:\n\n\n\n```rust\n\nBuffer::decode(&mut Vec<u8>) -> Result<Profile, RockError>\n\n```\n\n\n\n`Profile` will contain fully parsed pprof profile.\n", "file_path": "README.md", "rank": 86, "score": 6.196828264938937 }, { "content": " if label_index.num_unit_index != 0 {\n\n let unit =\n\n self.string_table[label_index.num_unit_index as usize].to_string();\n\n\n\n let num_len = num_labels.get(&key).unwrap_or(&Vec::<i64>::new()).len();\n\n let units_len = num_units.get(&key).unwrap_or(&Vec::<String>::new()).len();\n\n\n\n if num_len > units_len {\n\n match num_units.entry(key.clone()) {\n\n Entry::Occupied(mut e) => {\n\n e.get_mut().resize(num_len, String::new());\n\n }\n\n Entry::Vacant(e) => {\n\n let mut v: Vec<String> = Vec::new();\n\n for _ in 0..num_len - units_len {\n\n v.push(String::new());\n\n }\n\n e.insert(v);\n\n }\n\n }\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 87, "score": 6.142326141186912 }, { "content": " // Multiple line indicates this location has inlined functions,\n\n // where the last entry represents the caller into which the\n\n // preceding entries were inlined.\n\n //\n\n // E.g., if memcpy() is inlined into printf:\n\n // line[0].function_name == \"memcpy\"\n\n // line[1].function_name == \"printf\"\n\n pub line: Vec<line::Line>,\n\n // Provides an indication that multiple symbols map to this 
location's\n\n // address, for example due to identical code folding by the linker. In that\n\n // case the line information above represents one of the multiple\n\n // symbols. This field must be recomputed when the symbolization state of the\n\n // profile changes.\n\n pub is_folder: bool,\n\n //HELPER\n\n pub mapping: Option<Mapping>,\n\n}\n\n\n\nimpl Decoder<Location> for Location {\n\n fn decode(buf: &mut Buffer, data: &mut Vec<u8>) -> Location {\n", "file_path": "crates/core/src/profile/location.rs", "rank": 88, "score": 5.958128053799232 }, { "content": " }\n\n}\n\n\n\nimpl Decoder for Buffer {\n\n fn decode(data: &mut Vec<u8>) -> Result<Profile, RockError> {\n\n // check is there data gzipped\n\n // https://tools.ietf.org/html/rfc1952#page-5\n\n if data.len() > 2 && data[0] == 0x1f && data[1] == 0x8b {\n\n let mut uncompressed = vec![];\n\n let mut gz_decoder = GzDecoder::new(BufReader::new(data.as_slice()));\n\n let res = gz_decoder.read_to_end(&mut uncompressed);\n\n return match res {\n\n Ok(_) => {\n\n let mut b = Buffer {\n\n field: 0,\n\n // 2 Length-delimited -> string, bytes, embedded messages, packed repeated fields\n\n r#type: WireTypes::WireBytes,\n\n u64: 0,\n\n };\n\n\n", "file_path": "crates/core/src/profile/buffer.rs", "rank": 89, "score": 5.747909948930259 }, { "content": " }\n\n };\n\n );\n\n\n\n macro_rules! want_units_init (\n\n { $($key:expr => $value:expr),+ } => {\n\n {\n\n let mut m = ::std::collections::HashMap::new();\n\n $(\n\n m.insert($key.to_string(), $value.to_string());\n\n )+\n\n m\n\n }\n\n };\n\n );\n\n\n\n macro_rules! 
want_ignored_units_init (\n\n { $($key:expr => $value:expr),+ } => {\n\n {\n\n let mut m = ::std::collections::HashMap::new();\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 90, "score": 5.698986892164144 }, { "content": " }\n\n\n\n // SAMPLES SECTION START =================================\n\n ss.push(\"Samples:\".to_string());\n\n\n\n let mut samples = String::new();\n\n for s in self.sample_type.iter() {\n\n let dflt = if s.r#type == self.default_sample_type {\n\n String::from(\"[dflt]\")\n\n } else {\n\n String::new()\n\n };\n\n if samples.is_empty() {\n\n samples = format!(\"{}/{}{} \", s.r#type, s.unit, dflt);\n\n continue;\n\n }\n\n samples = format!(\"{}{}/{}{} \", samples, s.r#type, s.unit, dflt);\n\n }\n\n\n\n samples.drain((samples.len() - 1)..);\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 91, "score": 5.576337353808201 }, { "content": " };\n\n }\n\n Some(_) => {}\n\n }\n\n }\n\n\n\n // Copy ignored units into more readable format\n\n let mut units_ignored: HashMap<String, Vec<String>> = HashMap::new();\n\n\n\n for (key, values) in ignored_units.iter() {\n\n let mut units: Vec<String> = vec![String::new(); values.len()];\n\n\n\n for (i, value) in values.iter().enumerate() {\n\n let (unit, _) = value;\n\n units.insert(i as usize, String::from(unit));\n\n }\n\n\n\n units.sort();\n\n units_ignored.insert(String::from(key), units);\n\n }\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 92, "score": 5.519010624700867 }, { "content": "impl fmt::Display for RockError {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n match self {\n\n RockError::ValidationFailed { reason } => {\n\n write!(f, \"Profile validation failed, reason: {}\", reason)\n\n }\n\n RockError::Unknown { reason } => write!(f, \"Unknown error, reason: {}\", reason),\n\n RockError::ProfileUncompressFailed { reason } => {\n\n write!(f, \"Failed to read compressed data. 
Error: {}\", reason)\n\n }\n\n _ => panic!(\"Unknown type of error\"),\n\n }\n\n }\n\n}\n\n\n\nimpl From<RockError> for std::io::Error {\n\n fn from(r: RockError) -> Self {\n\n match r {\n\n RockError::ProfileUncompressFailed { reason } => {\n\n std::io::Error::new(std::io::ErrorKind::Other, reason)\n", "file_path": "crates/core/src/profile/errors.rs", "rank": 93, "score": 5.445363830777267 }, { "content": " sample: Vec<sample::Sample>,\n\n // Mapping from address ranges to the image/binary/library mapped\n\n // into that address range. mapping[0] will be the main binary.\n\n mapping: Vec<mapping::Mapping>,\n\n // Useful program location\n\n pub location: Vec<location::Location>,\n\n // Functions referenced by locations\n\n function: Vec<function::Function>,\n\n // A common table for strings referenced by various messages.\n\n // string_table[0] must always be \"\".\n\n string_table: Vec<String>,\n\n // frames with Function.function_name fully matching the following\n\n // regexp will be dropped from the samples, along with their successors.\n\n drop_frames: String,\n\n // Index into string table.\n\n // frames with Function.function_name fully matching the following\n\n // regexp will be kept, even if it matches drop_functions.\n\n keep_frames: String, // Index into string table.\n\n\n\n // The following fields are informational, do not affect\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 94, "score": 5.286094288378456 }, { "content": " Ok(vec![])\n\n }\n\n Err(err) => Err(err),\n\n },\n\n //1\n\n WireTypes::WireFixed64 => {\n\n if data.len() < 8 {\n\n return Err(RockError::DecodeFieldFailed {\n\n reason: \"data len less than 8 bytes\".to_string(),\n\n });\n\n }\n\n buf.u64 = decode_fixed64(&data[..8]);\n\n // drain first 8 elements\n\n data.drain(..8);\n\n Ok(vec![])\n\n }\n\n //2\n\n WireTypes::WireBytes => {\n\n match decode_varint(data) {\n\n Ok(varint) => {\n", "file_path": "crates/core/src/profile/buffer.rs", "rank": 95, "score": 
5.24632140853646 }, { "content": " if varint > data.len() {\n\n return Err(RockError::DecodeFieldFailed {\n\n reason: \"too much data\".to_string(),\n\n });\n\n }\n\n // buf.data = Rc::new(RefCell::new(data.borrow_mut()[..varint].into()));\n\n buf_data = data[..varint].into();\n\n // draint vec, start index removing decoded data\n\n data.drain(..varint);\n\n Ok(buf_data)\n\n }\n\n Err(err) => Err(err),\n\n }\n\n }\n\n\n\n //5\n\n WireTypes::WireFixed32 => {\n\n if data.len() < 4 {\n\n return Err(RockError::DecodeFieldFailed {\n\n reason: \"data len less than 8 bytes\".to_string(),\n", "file_path": "crates/core/src/profile/buffer.rs", "rank": 96, "score": 5.228162559127384 }, { "content": " self.string_table[self.default_sample_type_index as usize].to_string();\n\n }\n\n\n\n #[inline]\n\n pub fn validate(&self) -> Result<(), RockError> {\n\n if self.sample_type.is_empty() && self.sample.is_empty() {\n\n panic!(\"missing sample type information\");\n\n }\n\n\n\n for s in self.sample.iter() {\n\n if *s == sample::Sample::default() {\n\n panic!(\"profile has default (uninitialized) sample\")\n\n }\n\n if s.value.len() != self.sample_type.len() {\n\n panic!(\n\n \"mismatch: sample has {} values vs. 
{} types\",\n\n s.value.len(),\n\n self.sample_type.len()\n\n );\n\n }\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 97, "score": 5.161275662998979 }, { "content": "\n\n for test in tests {\n\n let mut p = Profile::default();\n\n for (i, num_label) in test.tag_vals.iter().enumerate() {\n\n let mut s = Sample {\n\n num_label: num_label.clone(),\n\n ..Default::default()\n\n };\n\n\n\n if test.tag_units.is_empty() {\n\n s.num_unit_label = HashMap::new();\n\n } else {\n\n s.num_unit_label = test.tag_units[i].clone();\n\n }\n\n\n\n p.sample.push(s);\n\n }\n\n\n\n let (units, ignore_units) = p.num_label_units().unwrap();\n\n\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 98, "score": 4.871086082469029 }, { "content": " // interpretation of results.\n\n // Time of collection (UTC) represented as nanoseconds past the epoch.\n\n time_nanos: i64,\n\n // Duration of the profile, if a duration makes sense.\n\n duration_nanos: i64,\n\n // The kind of events between sampled ocurrences.\n\n // e.g [ \"cpu\",\"cycles\" ] or [ \"heap\",\"bytes\" ]\n\n period_type: Option<value_type::ValueType>,\n\n // The number of events between sampled occurrences.\n\n period: i64,\n\n // Freeform text associated to the profile.\n\n comments: Vec<String>,\n\n // Indices into string table.\n\n // Index into the string table of the type of the preferred sample\n\n // value. If unset, clients should default to the last sample value.\n\n default_sample_type: String,\n\n\n\n // Index into string table.\n\n comment_index: Vec<i64>,\n\n // Index into string table.\n\n drop_frames_index: i64,\n\n // Index into string table.\n\n keep_frames_index: i64,\n\n\n\n // Index into string table.\n\n default_sample_type_index: i64,\n\n}\n\n\n", "file_path": "crates/core/src/profile/mod.rs", "rank": 99, "score": 4.829809356951727 } ]
Rust
src/args.rs
anthraxx/dfrs
ea645d6a9d36510005a08dc6729884e1171cc068
#![allow(clippy::use_self)] use structopt::clap::{AppSettings, Shell}; use structopt::StructOpt; use std::io::stdout; use anyhow::Result; use lazy_static::lazy_static; use std::path::PathBuf; use strum::VariantNames; use strum_macros::{EnumString, EnumVariantNames, ToString}; #[derive(Debug, StructOpt)] #[structopt(about="Display file system space usage using graphs and colors.", global_settings = &[AppSettings::ColoredHelp, AppSettings::DeriveDisplayOrder])] pub struct Args { #[structopt(short = "a", group = "display_group", parse(from_occurrences))] pub display: u8, #[structopt(long, group = "display_group")] pub more: bool, #[structopt(long, group = "display_group")] pub all: bool, #[structopt(long, group = "color_group", possible_values=&ColorOpt::VARIANTS)] pub color: Option<ColorOpt>, #[structopt(short = "c", group = "color_group")] pub color_always: bool, #[structopt(short, long)] pub inodes: bool, #[structopt(short = "h", long = "human-readable", group = "number_format")] pub base2: bool, #[structopt(short = "H", long = "si", group = "number_format")] pub base10: bool, #[structopt(long)] pub total: bool, #[structopt(short, long)] pub local: bool, #[structopt(long)] pub no_aliases: bool, #[structopt(long, parse(from_os_str), default_value = "/proc/self/mounts")] pub mounts: PathBuf, #[structopt(short)] pub verbose: bool, #[structopt(parse(from_os_str))] pub paths: Vec<PathBuf>, #[structopt(long, use_delimiter = true, possible_values = &ColumnType::VARIANTS, default_value = &COLUMNS_OPT_DEFAULT_VALUE)] pub columns: Vec<ColumnType>, #[structopt(subcommand)] pub subcommand: Option<SubCommand>, } #[derive(Debug, StructOpt)] pub enum SubCommand { #[structopt(name = "completions")] Completions(Completions), } #[derive(Debug, StructOpt, ToString, EnumString, EnumVariantNames)] #[strum(serialize_all = "lowercase")] pub enum ColorOpt { Auto, Always, Never, } #[derive(Debug, StructOpt, EnumString)] #[strum(serialize_all = "lowercase")] pub enum DisplayFilter { 
Minimal, More, All, } impl DisplayFilter { pub const fn from_u8(n: u8) -> Self { match n { 0 => Self::Minimal, 1 => Self::More, _ => Self::All, } } pub fn get_mnt_fsname_filter(&self) -> Vec<&'static str> { match self { Self::Minimal => vec!["/dev*", "storage"], Self::More => vec!["dev", "run", "tmpfs", "/dev*", "storage"], Self::All => vec!["*"], } } } #[derive(Debug, StructOpt)] pub enum NumberFormat { Base10, Base2, } impl NumberFormat { pub const fn get_powers_of(&self) -> f64 { match self { Self::Base10 => 1000_f64, Self::Base2 => 1024_f64, } } } #[derive(Debug, StructOpt, ToString, EnumString, EnumVariantNames)] #[strum(serialize_all = "snake_case")] pub enum ColumnType { Filesystem, Type, Bar, Used, UsedPercentage, Available, AvailablePercentage, Capacity, MountedOn, } impl ColumnType { pub const fn label(&self, inodes_mode: bool) -> &str { match self { Self::Filesystem => "Filesystem", Self::Type => "Type", Self::Bar => "", Self::Used => "Used", Self::UsedPercentage => "Used%", Self::Available => "Avail", Self::AvailablePercentage => "Avail%", Self::Capacity => { if inodes_mode { "Inodes" } else { "Size" } } Self::MountedOn => "Mounted on", } } } lazy_static! { static ref COLUMNS_OPT_DEFAULT_VALUE: String = vec![ ColumnType::Filesystem, ColumnType::Type, ColumnType::Bar, ColumnType::UsedPercentage, ColumnType::Available, ColumnType::Used, ColumnType::Capacity, ColumnType::MountedOn ] .iter() .map(|e| e.to_string()) .collect::<Vec<String>>() .join(","); } #[derive(Debug, StructOpt)] pub struct Completions { #[structopt(possible_values=&Shell::variants())] pub shell: Shell, } pub fn gen_completions(args: &Completions) -> Result<()> { Args::clap().gen_completions_to("dfrs", args.shell, &mut stdout()); Ok(()) }
#![allow(clippy::use_self)] use structopt::clap::{AppSettings, Shell}; use structopt::StructOpt; use std::io::stdout; use anyhow::Result; use lazy_static::lazy_static; use std::path::PathBuf; use strum::VariantNames; use strum_macros::{EnumString, EnumVariantNames, ToString}; #[derive(Debug, StructOpt)] #[structopt(about="Display file system space usage using graphs and colors.", global_settings = &[AppSettings::ColoredHelp, AppSettings::DeriveDisplayOrder])] pub struct Args { #[structopt(short = "a", group = "display_group", parse(from_occurrences))] pub display: u8, #[structopt(long, group = "display_group")] pub more: bool, #[structopt(long, group = "display_group")] pub all: bool, #[structopt(long, group = "color_group", possible_values=&ColorOpt::VARIANTS)] pub color: Option<ColorOpt>, #[structopt(short = "c", group = "color_group")] pub color_always: bool, #[structopt(short, long)] pub inodes: bool, #[structopt(short = "h", long = "human-readable", group = "number_format")] pub base2: bool, #[structopt(short = "H", long = "si", group = "number_format")] pub base10: bool, #[structopt(long)] pub total: bool, #[structopt(short, long)] pub local: bool, #[structopt(long)] pub no_aliases: bool, #[structopt(long, parse(from_os_str), default_value = "/proc/self/mounts")] pub mounts: PathBuf, #[structopt(short)] pub verbose: bool, #[structopt(parse(from_os_str))] pub paths: Vec<PathBuf>, #[structopt(long, use_delimiter = true, possible_values = &ColumnType::VARIANTS, default_value = &COLUMNS_OPT_DEFAULT_VALUE)] pub columns: Vec<ColumnType>, #[structopt(subcommand)] pub subcommand: Option<SubCommand>, } #[derive(Debug, StructOpt)] pub enum SubCommand { #[structopt(name = "completions")] Completions(Completions), } #[derive(Debug, StructOpt, ToString, EnumString, EnumVariantNames)] #[strum(serialize_all = "lowercase")] pub enum ColorOpt { Auto, Always, Never, } #[derive(Debug, StructOpt, EnumString)] #[strum(serialize_all = "lowercase")] pub enum DisplayFilter { 
Minimal, More, All, } impl DisplayFilter { pub const fn from_u8(n: u8) -> Self { match n { 0 => Self::Minimal, 1 => Self::More, _ => Self::All, } } pub fn get_mnt_fsname_filter(&self) -> Vec<&'static str> { match self { Self::Minimal => vec!["/dev*", "storage"], Self::More => vec!["dev", "run", "tmpfs", "/dev*", "storage"], Self::All => vec!["*"], } } } #[derive(Debug, StructOpt)] pub enum NumberFormat { Base10, Base2, } impl NumberFormat { pub const fn get_powers_of(&self) -> f64 { m
ble_values=&Shell::variants())] pub shell: Shell, } pub fn gen_completions(args: &Completions) -> Result<()> { Args::clap().gen_completions_to("dfrs", args.shell, &mut stdout()); Ok(()) }
atch self { Self::Base10 => 1000_f64, Self::Base2 => 1024_f64, } } } #[derive(Debug, StructOpt, ToString, EnumString, EnumVariantNames)] #[strum(serialize_all = "snake_case")] pub enum ColumnType { Filesystem, Type, Bar, Used, UsedPercentage, Available, AvailablePercentage, Capacity, MountedOn, } impl ColumnType { pub const fn label(&self, inodes_mode: bool) -> &str { match self { Self::Filesystem => "Filesystem", Self::Type => "Type", Self::Bar => "", Self::Used => "Used", Self::UsedPercentage => "Used%", Self::Available => "Avail", Self::AvailablePercentage => "Avail%", Self::Capacity => { if inodes_mode { "Inodes" } else { "Size" } } Self::MountedOn => "Mounted on", } } } lazy_static! { static ref COLUMNS_OPT_DEFAULT_VALUE: String = vec![ ColumnType::Filesystem, ColumnType::Type, ColumnType::Bar, ColumnType::UsedPercentage, ColumnType::Available, ColumnType::Used, ColumnType::Capacity, ColumnType::MountedOn ] .iter() .map(|e| e.to_string()) .collect::<Vec<String>>() .join(","); } #[derive(Debug, StructOpt)] pub struct Completions { #[structopt(possi
random
[ { "content": "pub fn parse_mounts(f: File) -> Result<Vec<Mount>> {\n\n BufReader::new(f)\n\n .lines()\n\n .map(|line| {\n\n parse_mount_line(&line?)\n\n .context(\"Failed to parse mount line\")\n\n .map_err(Error::from)\n\n })\n\n .collect::<Result<Vec<_>>>()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn parse_mounts() {\n\n let file = r#\"sysfs /sys sysfs rw,nosuid,nodev,noexec,relatime 0 0\n\nproc /proc proc rw,nosuid,nodev,noexec,relatime,hidepid=2 0 0\n\nudev /dev devtmpfs rw,nosuid,relatime,size=2009144k,nr_inodes=502286,mode=755 0 0\n", "file_path": "src/mount.rs", "rank": 0, "score": 140442.2174440783 }, { "content": "#[inline]\n\npub fn mnt_matches_filter(mnt: &Mount, filter: &str) -> bool {\n\n filter.strip_suffix('*').map_or_else(\n\n || mnt.mnt_fsname == filter,\n\n |start| mnt.mnt_fsname.starts_with(start),\n\n )\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 1, "score": 137241.43366722608 }, { "content": "#[inline]\n\npub fn calculate_path_match_score(path: &Path, mnt: &Mount) -> usize {\n\n if path.starts_with(&mnt.mnt_dir) {\n\n mnt.mnt_dir.len()\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 2, "score": 125651.55807442563 }, { "content": "#[inline]\n\npub fn get_best_mount_match<'a>(path: &Path, mnts: &'a [Mount]) -> Option<&'a Mount> {\n\n let scores = mnts\n\n .iter()\n\n .map(|mnt| (calculate_path_match_score(path, mnt), mnt));\n\n let best = scores.max_by_key(|x| x.0)?;\n\n Some(best.1)\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 3, "score": 124362.25342601852 }, { "content": "#[inline]\n\npub fn calc_total(mnts: &[Mount]) -> Mount {\n\n let mut total = Mount::named(\"total\".to_string());\n\n\n\n total.free = mnts.iter().map(|mnt| mnt.free).sum();\n\n total.used = mnts.iter().map(|mnt| mnt.used).sum();\n\n total.capacity = mnts.iter().map(|mnt| mnt.capacity).sum();\n\n\n\n total\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 4, "score": 111176.46466451784 }, { "content": 
"fn parse_mount_line(line: &str) -> Result<Mount> {\n\n let mut mnt_a = line.split_whitespace();\n\n Ok(Mount::new(\n\n mnt_a\n\n .next()\n\n .ok_or_else(|| anyhow!(\"Missing value fsname\"))?\n\n .into(),\n\n mnt_a\n\n .next()\n\n .ok_or_else(|| anyhow!(\"Missing value dir\"))?\n\n .into(),\n\n mnt_a\n\n .next()\n\n .ok_or_else(|| anyhow!(\"Missing value type\"))?\n\n .into(),\n\n mnt_a\n\n .next()\n\n .ok_or_else(|| anyhow!(\"Missing value opts\"))?\n\n .into(),\n\n mnt_a\n\n .next()\n\n .ok_or_else(|| anyhow!(\"Missing value freq\"))?\n\n .parse::<i32>()?,\n\n mnt_a\n\n .next()\n\n .ok_or_else(|| anyhow!(\"Missing value passno\"))?\n\n .parse::<i32>()?,\n\n ))\n\n}\n\n\n", "file_path": "src/mount.rs", "rank": 6, "score": 92223.21221182594 }, { "content": "pub fn format_count(num: f64, delimiter: f64) -> String {\n\n let units = [\"B\", \"k\", \"M\", \"G\", \"T\", \"P\", \"E\", \"Z\", \"Y\"];\n\n if num < 1_f64 {\n\n return format!(\"{}\", num);\n\n }\n\n let exponent = cmp::min(num.log(delimiter).floor() as i32, (units.len() - 1) as i32);\n\n let pretty_bytes = format!(\"{:.*}\", 1, num / delimiter.powi(exponent));\n\n let unit = units[exponent as usize];\n\n format!(\"{}{}\", pretty_bytes, unit)\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 7, "score": 87851.69009806769 }, { "content": "fn run(args: Args) -> Result<()> {\n\n if let Some(color) = args.color {\n\n debug!(\"Bypass tty detection for colors: {:?}\", color);\n\n match color {\n\n ColorOpt::Auto => {}\n\n ColorOpt::Always => {\n\n colored::control::set_override(true);\n\n }\n\n ColorOpt::Never => {\n\n colored::control::set_override(false);\n\n }\n\n }\n\n }\n\n\n\n if args.color_always {\n\n debug!(\"Bypass tty detection for colors: always\");\n\n colored::control::set_override(true);\n\n }\n\n\n\n match args.subcommand {\n", "file_path": "src/main.rs", "rank": 8, "score": 86819.61135853609 }, { "content": "#[inline]\n\nfn column_width<F>(mnt: &[Mount], f: F, heading: &str) -> usize\n\nwhere\n\n 
F: Fn(&Mount) -> usize,\n\n{\n\n mnt.iter()\n\n .map(f)\n\n .chain(std::iter::once(heading.len()))\n\n .max()\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 83227.14842981033 }, { "content": "#[inline]\n\npub fn cmp_by_capacity_and_dir_name(a: &Mount, b: &Mount) -> cmp::Ordering {\n\n u64::min(1, a.capacity)\n\n .cmp(&u64::min(1, b.capacity))\n\n .reverse()\n\n .then(a.mnt_dir.cmp(&b.mnt_dir))\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 10, "score": 81039.79139538214 }, { "content": "pub fn lvm_alias(device: &str) -> Option<String> {\n\n if !device.starts_with(\"/dev/mapper/\") {\n\n return None;\n\n }\n\n let device = &device[\"/dev/mapper/\".len()..].replace(\"--\", \"$$\");\n\n if !device.contains('-') {\n\n return None;\n\n }\n\n let mut it = device.splitn(2, '-');\n\n let vg = it.next().unwrap_or(\"\");\n\n let lv = it.next().unwrap_or(\"\");\n\n Some(format!(\"/dev/{}/{}\", vg, lv).replace(\"$$\", \"-\"))\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 11, "score": 78722.95355630518 }, { "content": "fn display_mounts(\n\n mnts: &[Mount],\n\n theme: &Theme,\n\n delimiter: &NumberFormat,\n\n inodes_mode: bool,\n\n no_aliases: bool,\n\n) {\n\n let color_heading = theme.color_heading.unwrap_or(Color::White);\n\n\n\n let fsname_func = if no_aliases {\n\n Mount::fsname\n\n } else {\n\n Mount::fsname_aliased\n\n };\n\n\n\n let fsname_width = column_width(\n\n mnts,\n\n |m| fsname_func(m).len(),\n\n ColumnType::Filesystem.label(inodes_mode),\n\n );\n", "file_path": "src/main.rs", "rank": 12, "score": 75397.26447987405 }, { "content": "#[inline]\n\npub fn try_print(args: fmt::Arguments) -> io::Result<()> {\n\n stdout().write_fmt(args)\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! try_print {\n\n ($($arg:tt)*) => ($crate::try_print(format_args!($($arg)*)));\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
try_println {\n\n ($fmt:expr) => (try_print!(concat!($fmt, \"\\n\")));\n\n ($fmt:expr, $($arg:tt)*) => (try_print!(concat!($fmt, \"\\n\"), $($arg)*));\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::path::PathBuf;\n\n\n", "file_path": "src/util.rs", "rank": 13, "score": 62647.544121324245 }, { "content": "fn get_mounts(\n\n mounts_to_show: &DisplayFilter,\n\n show_inodes: bool,\n\n paths: &[PathBuf],\n\n mounts: &Path,\n\n local_only: bool,\n\n) -> Result<Vec<Mount>> {\n\n let f = File::open(mounts)?;\n\n\n\n let mut mnts = parse_mounts(f)?;\n\n mnts.retain(|mount| {\n\n mounts_to_show\n\n .get_mnt_fsname_filter()\n\n .iter()\n\n .any(|fsname| util::mnt_matches_filter(mount, fsname))\n\n });\n\n if local_only {\n\n mnts.retain(Mount::is_local);\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 14, "score": 54649.72377349377 }, { "content": "#[inline]\n\npub fn format_percentage(percentage: Option<f32>) -> String {\n\n match percentage {\n\n Some(percentage) => format!(\n\n \"{:>5.1}{}\",\n\n (percentage * 10.0).round() / 10.0,\n\n \"%\".color(Color::White)\n\n ),\n\n None => format!(\"{:>6}\", \"-\"),\n\n }\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 15, "score": 50913.553803715404 }, { "content": "pub fn bar(width: usize, percentage: Option<f32>, theme: &Theme) -> String {\n\n let fill_len_total = (percentage.unwrap_or(0.0) as f32 / 100.0 * width as f32).ceil() as usize;\n\n let fill_len_low = std::cmp::min(\n\n fill_len_total,\n\n (width as f32 * theme.threshold_usage_medium / 100.0).ceil() as usize,\n\n );\n\n let fill_len_medium = std::cmp::min(\n\n fill_len_total,\n\n (width as f32 * theme.threshold_usage_high / 100.0).ceil() as usize,\n\n ) - fill_len_low;\n\n let fill_len_high = fill_len_total - fill_len_low - fill_len_medium;\n\n\n\n let color_empty = match percentage {\n\n Some(_) => theme.color_usage_low,\n\n None => theme.color_usage_void,\n\n }\n\n .unwrap_or(Color::Green);\n\n\n\n let fill_low = theme\n\n 
.char_bar_filled\n", "file_path": "src/util.rs", "rank": 16, "score": 42313.30889980866 }, { "content": "fn main() {\n\n let args = Args::from_args();\n\n\n\n let logging = if args.verbose { \"debug\" } else { \"info\" };\n\n\n\n env_logger::init_from_env(Env::default().default_filter_or(logging));\n\n\n\n if let Err(err) = run(args) {\n\n eprintln!(\"Error: {}\", err);\n\n for cause in err.chain().skip(1) {\n\n eprintln!(\"Because: {}\", cause);\n\n }\n\n std::process::exit(1);\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 17, "score": 36630.73633116248 }, { "content": "\n\n pub fn usage_color(&self, theme: &Theme) -> Color {\n\n match &self.used_percentage() {\n\n Some(p) if p >= &theme.threshold_usage_high => &theme.color_usage_high,\n\n Some(p) if p >= &theme.threshold_usage_medium => &theme.color_usage_medium,\n\n Some(_) => &theme.color_usage_low,\n\n _ => &theme.color_usage_void,\n\n }\n\n .unwrap_or(Color::White)\n\n }\n\n\n\n #[inline]\n\n pub fn is_local(&self) -> bool {\n\n !self.is_remote()\n\n }\n\n\n\n pub fn is_remote(&self) -> bool {\n\n [\n\n \"afs\",\n\n \"cifs\",\n", "file_path": "src/mount.rs", "rank": 27, "score": 21969.79191859624 }, { "content": "use crate::errors::*;\n\n\n\nuse crate::args::NumberFormat;\n\nuse crate::theme::Theme;\n\nuse crate::util::{format_count, lvm_alias};\n\n\n\nuse colored::Color;\n\nuse std::fs::File;\n\nuse std::io::BufRead;\n\nuse std::io::BufReader;\n\n\n\n#[derive(Clone)]\n\npub struct Mount {\n\n pub mnt_fsname: String,\n\n pub mnt_dir: String,\n\n pub mnt_type: String,\n\n pub mnt_opts: String,\n\n pub mnt_freq: i32,\n\n pub mnt_passno: i32,\n\n pub capacity: u64,\n", "file_path": "src/mount.rs", "rank": 28, "score": 21965.53140039474 }, { "content": " pub free: u64,\n\n pub used: u64,\n\n pub statfs: Option<nix::sys::statfs::Statfs>,\n\n}\n\n\n\nimpl Mount {\n\n pub fn fsname(&self) -> String {\n\n self.mnt_fsname.clone()\n\n }\n\n\n\n pub fn fsname_aliased(&self) -> String {\n\n let lvm = 
lvm_alias(&self.mnt_fsname);\n\n lvm.unwrap_or_else(|| self.mnt_fsname.clone())\n\n }\n\n\n\n pub fn used_percentage(&self) -> Option<f32> {\n\n match self.capacity {\n\n 0 => None,\n\n _ => Some(100.0 - self.free as f32 * 100.0 / self.capacity as f32),\n\n }\n", "file_path": "src/mount.rs", "rank": 29, "score": 21963.51406532633 }, { "content": " }\n\n\n\n pub fn free_percentage(&self) -> Option<f32> {\n\n match self.free {\n\n 0 => None,\n\n _ => Some(self.free as f32 * 100.0 / self.capacity as f32),\n\n }\n\n }\n\n\n\n pub fn capacity_formatted(&self, delimiter: &NumberFormat) -> String {\n\n format_count(self.capacity as f64, delimiter.get_powers_of())\n\n }\n\n\n\n pub fn free_formatted(&self, delimiter: &NumberFormat) -> String {\n\n format_count(self.free as f64, delimiter.get_powers_of())\n\n }\n\n\n\n pub fn used_formatted(&self, delimiter: &NumberFormat) -> String {\n\n format_count(self.used as f64, delimiter.get_powers_of())\n\n }\n", "file_path": "src/mount.rs", "rank": 30, "score": 21962.630664310323 }, { "content": "devpts /dev/pts devpts rw,nosuid,noexec,relatime,gid=5,mode=620,ptmxmode=000 0 0\n\ntmpfs /run tmpfs rw,nosuid,noexec,relatime,size=402800k,mode=755 0 0\n\n/dev/mapper/vg0-root / ext4 rw,relatime,errors=remount-ro 0 0\n\ntmpfs /run/lock tmpfs rw,nosuid,nodev,noexec,relatime,size=5120k 0 0\n\npstore /sys/fs/pstore pstore rw,relatime 0 0\n\nconfigfs /sys/kernel/config configfs rw,relatime 0 0\n\ntmpfs /run/shm tmpfs rw,nosuid,nodev,noexec,relatime,size=805580k 0 0\n\n/dev/mapper/vg0-boot /boot ext4 rw,relatime 0 0\n\n/dev/mapper/vg0-tmp /tmp ext4 rw,relatime 0 0\n\nnone /cgroup2 cgroup2 rw,relatime 0 0\n\n\"#;\n\n let mounts = file\n\n .lines()\n\n .map(|line| {\n\n parse_mount_line(&line)\n\n .context(\"Failed to parse mount line\")\n\n .map_err(Error::from)\n\n })\n\n .collect::<Result<Vec<_>>>()\n\n .unwrap();\n", "file_path": "src/mount.rs", "rank": 31, "score": 21961.0012181913 }, { "content": " \"coda\",\n\n \"ftpfs\",\n\n 
\"fuse.sshfs\",\n\n \"mfs\",\n\n \"ncpfs\",\n\n \"nfs\",\n\n \"nfs4\",\n\n \"smbfs\",\n\n \"sshfs\",\n\n ]\n\n .contains(&self.mnt_type.as_str())\n\n }\n\n\n\n pub fn named(name: String) -> Self {\n\n Self::new(name, \"-\".to_string(), \"-\".to_string(), \"\".to_string(), 0, 0)\n\n }\n\n\n\n const fn new(\n\n mnt_fsname: String,\n\n mnt_dir: String,\n", "file_path": "src/mount.rs", "rank": 32, "score": 21960.174014893724 }, { "content": " assert_eq!(mounts.len(), 13);\n\n\n\n // nix::sys::statfs::Statfs doesn't have PartialEq\n\n let mnt = &mounts[0];\n\n assert_eq!(mnt.mnt_fsname.as_str(), \"sysfs\");\n\n assert_eq!(mnt.mnt_dir.as_str(), \"/sys\");\n\n assert_eq!(mnt.mnt_type.as_str(), \"sysfs\");\n\n assert_eq!(mnt.mnt_opts.as_str(), \"rw,nosuid,nodev,noexec,relatime\");\n\n assert_eq!(mnt.mnt_freq, 0);\n\n assert_eq!(mnt.mnt_passno, 0);\n\n assert_eq!(mnt.capacity, 0);\n\n assert_eq!(mnt.free, 0);\n\n assert_eq!(mnt.used, 0);\n\n assert!(mnt.statfs.is_none());\n\n }\n\n\n\n #[test]\n\n fn is_remote() {\n\n let mut mnt = Mount::named(\"foo\".into());\n\n mnt.mnt_type = String::from(\"nfs\");\n", "file_path": "src/mount.rs", "rank": 33, "score": 21959.43118447321 }, { "content": " assert!(mnt.is_remote());\n\n }\n\n\n\n #[test]\n\n fn is_local() {\n\n let mut mnt = Mount::named(\"foo\".into());\n\n mnt.mnt_type = String::from(\"btrfs\");\n\n assert!(mnt.is_local());\n\n }\n\n}\n", "file_path": "src/mount.rs", "rank": 34, "score": 21959.02111539555 }, { "content": " mnt_type: String,\n\n mnt_opts: String,\n\n mnt_freq: i32,\n\n mnt_passno: i32,\n\n ) -> Self {\n\n Self {\n\n mnt_fsname,\n\n mnt_dir,\n\n mnt_type,\n\n mnt_opts,\n\n mnt_freq,\n\n mnt_passno,\n\n capacity: 0,\n\n free: 0,\n\n used: 0,\n\n statfs: None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/mount.rs", "rank": 35, "score": 21958.318808792 }, { "content": " Some(SubCommand::Completions(completions)) => args::gen_completions(&completions)?,\n\n _ => {\n\n let mut theme = Theme::new();\n\n 
theme.columns = args.columns;\n\n\n\n let delimiter = if args.base10 {\n\n NumberFormat::Base10\n\n } else {\n\n NumberFormat::Base2\n\n };\n\n let mounts_to_show = if args.all {\n\n DisplayFilter::All\n\n } else if args.more {\n\n DisplayFilter::More\n\n } else {\n\n DisplayFilter::from_u8(args.display)\n\n };\n\n\n\n let mut mnts = get_mounts(\n\n &mounts_to_show,\n", "file_path": "src/main.rs", "rank": 36, "score": 19.307246547075525 }, { "content": " args.inodes,\n\n &args.paths,\n\n &args.mounts,\n\n args.local,\n\n )?;\n\n if args.total {\n\n mnts.push(util::calc_total(&mnts));\n\n }\n\n display_mounts(&mnts, &theme, &delimiter, args.inodes, args.no_aliases);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 37, "score": 17.290840335472975 }, { "content": "use crate::args::ColumnType;\n\nuse colored::*;\n\n\n\npub struct Theme {\n\n pub char_bar_filled: char,\n\n pub char_bar_empty: char,\n\n pub char_bar_open: String,\n\n pub char_bar_close: String,\n\n pub threshold_usage_medium: f32,\n\n pub threshold_usage_high: f32,\n\n pub color_heading: Option<Color>,\n\n pub color_usage_low: Option<Color>,\n\n pub color_usage_medium: Option<Color>,\n\n pub color_usage_high: Option<Color>,\n\n pub color_usage_void: Option<Color>,\n\n pub bar_width: usize,\n\n pub columns: Vec<ColumnType>,\n\n}\n\n\n\nimpl Theme {\n", "file_path": "src/theme.rs", "rank": 38, "score": 16.38950430267835 }, { "content": " pub fn new() -> Self {\n\n Self {\n\n char_bar_filled: named_char::HEAVY_BOX,\n\n char_bar_empty: named_char::HEAVY_DOUBLE_DASH,\n\n char_bar_open: \"\".to_string(),\n\n char_bar_close: \"\".to_string(),\n\n threshold_usage_medium: 50.0,\n\n threshold_usage_high: 75.0,\n\n color_heading: Some(Color::Blue),\n\n color_usage_low: Some(Color::Green),\n\n color_usage_medium: Some(Color::Yellow),\n\n color_usage_high: Some(Color::Red),\n\n color_usage_void: Some(Color::Blue),\n\n bar_width: 20,\n\n columns: vec![\n\n ColumnType::Filesystem,\n\n 
ColumnType::Type,\n\n ColumnType::Bar,\n\n ColumnType::UsedPercentage,\n\n ColumnType::Available,\n", "file_path": "src/theme.rs", "rank": 39, "score": 13.811136114671818 }, { "content": "\n\n let used_percentage = format_percentage(mnt.used_percentage()).color(usage_color);\n\n let available_percentage = format_percentage(mnt.free_percentage()).color(usage_color);\n\n\n\n line.clear();\n\n for column in &theme.columns {\n\n match column {\n\n ColumnType::Filesystem => {\n\n line.push_str(\n\n format!(\"{:<width$} \", fsname_func(mnt), width = fsname_width).as_str(),\n\n );\n\n }\n\n ColumnType::Type => {\n\n line.push_str(\n\n format!(\"{:<width$} \", mnt.mnt_type, width = type_width).as_str(),\n\n );\n\n }\n\n ColumnType::Bar => {\n\n line.push_str(\n\n format!(\n", "file_path": "src/main.rs", "rank": 40, "score": 13.28589091188795 }, { "content": "#![deny(clippy::nursery, clippy::cargo)]\n\nuse args::*;\n\nmod args;\n\n\n\nmod errors;\n\nuse errors::*;\n\n\n\nmod theme;\n\nuse theme::Theme;\n\n\n\nmod mount;\n\nuse mount::*;\n\n\n\nmod util;\n\nuse util::bar;\n\nuse util::try_print;\n\n\n\nuse std::fs::File;\n\nuse std::path::Path;\n\nuse std::path::PathBuf;\n", "file_path": "src/main.rs", "rank": 41, "score": 12.073434549946732 }, { "content": " \"{:<width$} \",\n\n bar(theme.bar_width, mnt.used_percentage(), theme),\n\n width = theme.bar_width\n\n )\n\n .as_str(),\n\n );\n\n }\n\n ColumnType::Used => {\n\n line.push_str(\n\n format!(\n\n \"{:>width$} \",\n\n mnt.used_formatted(delimiter).color(usage_color),\n\n width = used_width\n\n )\n\n .as_str(),\n\n );\n\n }\n\n ColumnType::UsedPercentage => {\n\n line.push_str(format!(\"{} \", used_percentage).as_str());\n\n }\n", "file_path": "src/main.rs", "rank": 42, "score": 11.89391473640539 }, { "content": " ColumnType::Available => {\n\n line.push_str(print_heading_right_func(column, available_width).as_str());\n\n }\n\n ColumnType::AvailablePercentage => {\n\n line.push_str(print_heading_right_func(column, 
6).as_str());\n\n }\n\n ColumnType::Capacity => {\n\n line.push_str(print_heading_right_func(column, capacity_width).as_str());\n\n }\n\n ColumnType::MountedOn => {\n\n line.push_str(print_heading_left_func(column, mounted_width).as_str());\n\n }\n\n }\n\n }\n\n if try_println!(\"{}\", line.trim_end()).is_err() {\n\n return;\n\n }\n\n\n\n for mnt in mnts {\n\n let usage_color = mnt.usage_color(theme);\n", "file_path": "src/main.rs", "rank": 43, "score": 11.604157474238939 }, { "content": " ColumnType::Used,\n\n ColumnType::Capacity,\n\n ColumnType::MountedOn,\n\n ],\n\n }\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n\npub mod named_char {\n\n pub const SPACE: char = ' ';\n\n pub const EQUAL: char = '=';\n\n pub const HASHTAG: char = '#';\n\n pub const ASTERISK: char = '*';\n\n pub const LIGHT_BOX: char = '■';\n\n pub const HEAVY_BOX: char = '▇';\n\n pub const PERIOD: char = '.';\n\n pub const DASH: char = '-';\n\n pub const LONG_DASH: char = '—';\n\n pub const LIGHT_HORIZONTAL: char = '─';\n", "file_path": "src/theme.rs", "rank": 44, "score": 11.531601999929013 }, { "content": "# dfrs\n\n\n\n[![Build Status](https://img.shields.io/github/workflow/status/anthraxx/dfrs/CI)](https://github.com/anthraxx/dfrs/actions) [![Latest release](https://img.shields.io/github/v/release/anthraxx/dfrs)](https://github.com/anthraxx/dfrs/releases) [![crates.io version](https://img.shields.io/crates/v/dfrs.svg)](https://crates.io/crates/dfrs) [![License](https://img.shields.io/github/license/anthraxx/dfrs)](https://github.com/anthraxx/dfrs/blob/main/LICENSE)\n\n\n\nDisplay file system space usage using graphs and colors\n\n\n\n![](contrib/screenshot.png)\n\n\n\n*dfrs* displays the amount of disk space available on the file system\n\ncontaining each file name argument. 
If no file name is given, the space\n\navailable on all currently mounted file systems is shown.\n\n\n\n*dfrs*(1) is a tool similar to *df*(1) except that it is able to show a graph\n\nalong with the data and is able to use colors.\n\n\n\nWithout any argument, size is displayed in human-readable format.\n\n\n\n## Installation\n\n\n\n<a href=\"https://repology.org/project/dfrs/versions\"><img align=\"right\" src=\"https://repology.org/badge/vertical-allrepos/dfrs.svg\" alt=\"Packaging status\"></a>\n\n\n\n cargo install dfrs\n\n\n\n### Arch Linux\n\n\n\n pacman -S dfrs\n\n\n\n### Debian sid/bullseye\n\n\n\n apt install dfrs\n\n\n\n### Alpine\n\n\n\n apk add dfrs\n\n\n\n## License\n\n\n\nMIT\n", "file_path": "README.md", "rank": 45, "score": 11.459969767126221 }, { "content": " ColumnType::Available => {\n\n line.push_str(\n\n format!(\n\n \"{:>width$} \",\n\n mnt.free_formatted(delimiter).color(usage_color),\n\n width = available_width\n\n )\n\n .as_str(),\n\n );\n\n }\n\n ColumnType::AvailablePercentage => {\n\n line.push_str(format!(\"{} \", available_percentage).as_str());\n\n }\n\n ColumnType::Capacity => {\n\n line.push_str(\n\n format!(\n\n \"{:>width$} \",\n\n mnt.capacity_formatted(delimiter).color(usage_color),\n\n width = capacity_width\n\n )\n", "file_path": "src/main.rs", "rank": 46, "score": 10.410635207341727 }, { "content": " }\n\n\n\n if !paths.is_empty() {\n\n let mut out = Vec::new();\n\n for path in paths {\n\n let path = match path.canonicalize() {\n\n Ok(path) => path,\n\n Err(err) => {\n\n eprintln!(\"dfrs: {}: {}\", path.display(), err);\n\n continue;\n\n }\n\n };\n\n\n\n if let Some(mnt) = util::get_best_mount_match(&path, &mnts) {\n\n out.push(mnt.clone());\n\n }\n\n }\n\n return Ok(out);\n\n }\n\n\n\n mnts.sort_by(util::cmp_by_capacity_and_dir_name);\n\n Ok(mnts)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 47, "score": 10.378951061523942 }, { "content": "use crate::mount::Mount;\n\nuse crate::theme::Theme;\n\n\n\nuse colored::*;\n\nuse 
std::cmp;\n\nuse std::fmt;\n\nuse std::io::{self, stdout, Write};\n\nuse std::path::Path;\n\n\n", "file_path": "src/util.rs", "rank": 48, "score": 10.210055628302435 }, { "content": " let mounted_width = column_width(\n\n mnts,\n\n |m| m.mnt_dir.len(),\n\n ColumnType::MountedOn.label(inodes_mode),\n\n );\n\n\n\n let print_heading_left_func = |column: &ColumnType, width: usize| -> String {\n\n format!(\n\n \"{:<width$} \",\n\n column.label(inodes_mode).color(color_heading),\n\n width = width,\n\n )\n\n };\n\n\n\n let print_heading_right_func = |column: &ColumnType, width: usize| -> String {\n\n format!(\n\n \"{:>width$} \",\n\n column.label(inodes_mode).color(color_heading),\n\n width = width,\n\n )\n", "file_path": "src/main.rs", "rank": 49, "score": 10.135701968084192 }, { "content": " };\n\n\n\n let mut line = String::new();\n\n for column in &theme.columns {\n\n match column {\n\n ColumnType::Filesystem => {\n\n line.push_str(print_heading_left_func(column, fsname_width).as_str());\n\n }\n\n ColumnType::Type => {\n\n line.push_str(print_heading_left_func(column, type_width).as_str());\n\n }\n\n ColumnType::Bar => {\n\n line.push_str(print_heading_left_func(column, theme.bar_width).as_str());\n\n }\n\n ColumnType::Used => {\n\n line.push_str(print_heading_right_func(column, used_width).as_str());\n\n }\n\n ColumnType::UsedPercentage => {\n\n line.push_str(print_heading_right_func(column, 6).as_str());\n\n }\n", "file_path": "src/main.rs", "rank": 50, "score": 9.567352785431567 }, { "content": "\n\nuse nix::sys::statfs;\n\n\n\nuse env_logger::Env;\n\n\n\nuse crate::mount::Mount;\n\nuse crate::util::format_percentage;\n\nuse anyhow::Result;\n\nuse colored::*;\n\nuse std::io::{stdout, Write};\n\nuse structopt::StructOpt;\n\n\n\n#[inline]\n", "file_path": "src/main.rs", "rank": 51, "score": 9.046619643213836 }, { "content": " let s = lvm_alias(\"/dev/mapper/crypto-----foo\");\n\n assert_eq!(s, Some(\"/dev/crypto--/foo\".to_string()));\n\n }\n\n\n\n #[test]\n\n fn 
get_best_mount_match_simple() {\n\n let mut mnt1 = Mount::named(\"foo\".into());\n\n mnt1.mnt_dir = \"/a\".to_string();\n\n let mut mnt2 = Mount::named(\"bar\".into());\n\n mnt2.mnt_dir = \"/a/b\".to_string();\n\n let mut mnt3 = Mount::named(\"fizz\".into());\n\n mnt3.mnt_dir = \"/a/b/c\".to_string();\n\n let mut mnt4 = Mount::named(\"buzz\".into());\n\n mnt4.mnt_dir = \"/a/b/c/d\".to_string();\n\n\n\n let mnts = &[mnt1, mnt2, mnt3, mnt4];\n\n let matched = get_best_mount_match(&PathBuf::from(\"/a/b/c\"), mnts).unwrap();\n\n assert_eq!(matched.mnt_dir, \"/a/b/c\");\n\n }\n\n\n", "file_path": "src/util.rs", "rank": 52, "score": 8.1843564140356 }, { "content": " let type_width = column_width(\n\n mnts,\n\n |m| m.mnt_type.len(),\n\n ColumnType::Type.label(inodes_mode),\n\n );\n\n let available_width = column_width(\n\n mnts,\n\n |m| m.free_formatted(delimiter).len(),\n\n ColumnType::Available.label(inodes_mode),\n\n );\n\n let used_width = column_width(\n\n mnts,\n\n |m| m.used_formatted(delimiter).len(),\n\n ColumnType::Used.label(inodes_mode),\n\n );\n\n let capacity_width = column_width(\n\n mnts,\n\n |m| m.capacity_formatted(delimiter).len(),\n\n ColumnType::Capacity.label(inodes_mode),\n\n );\n", "file_path": "src/main.rs", "rank": 53, "score": 8.077281290249879 }, { "content": " mnt1.capacity = 123 + 456;\n\n let mut mnt2 = Mount::named(\"bar\".into());\n\n mnt2.free = 678;\n\n mnt2.used = 9123;\n\n mnt2.capacity = 678 + 9123;\n\n let mut mnt3 = Mount::named(\"fizz\".into());\n\n mnt3.free = 4567;\n\n mnt3.used = 0;\n\n mnt3.capacity = 4567;\n\n let mut mnt4 = Mount::named(\"buzz\".into());\n\n mnt4.free = 0;\n\n mnt4.used = 890123;\n\n mnt4.capacity = 890123;\n\n\n\n let total = calc_total(&[mnt1, mnt2, mnt3, mnt4]);\n\n assert_eq!(total.mnt_fsname, \"total\");\n\n assert_eq!(total.free, 5368);\n\n assert_eq!(total.used, 899702);\n\n assert_eq!(total.capacity, 5368 + 899702);\n\n }\n\n}\n", "file_path": "src/util.rs", "rank": 54, "score": 7.784141145146973 }, { 
"content": " .as_str(),\n\n );\n\n }\n\n ColumnType::MountedOn => {\n\n line.push_str(\n\n format!(\"{:<width$} \", mnt.mnt_dir, width = mounted_width).as_str(),\n\n );\n\n }\n\n }\n\n }\n\n if try_println!(\"{}\", line.trim_end()).is_err() {\n\n return;\n\n }\n\n }\n\n if stdout().flush().is_err() {}\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 55, "score": 7.400667644543017 }, { "content": " #[test]\n\n fn calculate_path_match_score_simple() {\n\n let mut mnt1 = Mount::named(\"foo\".into());\n\n mnt1.mnt_dir = \"/a/s/d\".to_string();\n\n let score = calculate_path_match_score(&PathBuf::from(\"/a/s/d/f\"), &mnt1);\n\n assert_eq!(score, 6);\n\n }\n\n\n\n #[test]\n\n fn cmp_by_capacity_and_dir_name_equal() {\n\n let mnt1 = Mount::named(\"foo\".into());\n\n let mnt2 = Mount::named(\"bar\".into());\n\n\n\n let ord = cmp_by_capacity_and_dir_name(&mnt1, &mnt2);\n\n assert_eq!(ord, cmp::Ordering::Equal);\n\n }\n\n\n\n #[test]\n\n fn cmp_by_capacity_and_dir_name_greater_capacity_greater_name() {\n\n let mut mnt1 = Mount::named(\"foo\".into());\n", "file_path": "src/util.rs", "rank": 56, "score": 6.576083201360884 }, { "content": " .to_string()\n\n .repeat(fill_len_low)\n\n .color(theme.color_usage_low.unwrap_or(Color::Green));\n\n let fill_medium = theme\n\n .char_bar_filled\n\n .to_string()\n\n .repeat(fill_len_medium)\n\n .color(theme.color_usage_medium.unwrap_or(Color::Yellow));\n\n let fill_high = theme\n\n .char_bar_filled\n\n .to_string()\n\n .repeat(fill_len_high)\n\n .color(theme.color_usage_high.unwrap_or(Color::Red));\n\n let empty = theme\n\n .char_bar_empty\n\n .to_string()\n\n .repeat(width - fill_len_total)\n\n .color(color_empty);\n\n\n\n format!(\n\n \"{}{}{}{}{}{}\",\n\n theme.char_bar_open, fill_low, fill_medium, fill_high, empty, theme.char_bar_close\n\n )\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 57, "score": 6.409307867046457 }, { "content": "pub use anyhow::{anyhow, Context, Error, Result};\n\npub use log::{debug, error, info, warn};\n", 
"file_path": "src/errors.rs", "rank": 58, "score": 5.551356448419245 }, { "content": " }\n\n\n\n #[test]\n\n fn cmp_by_capacity_and_dir_name_greater_capacity_equal_name() {\n\n let mut mnt1 = Mount::named(\"foo\".into());\n\n mnt1.capacity = 123;\n\n mnt1.mnt_dir = \"/a\".to_string();\n\n let mut mnt2 = Mount::named(\"bar\".into());\n\n mnt2.capacity = 64;\n\n mnt2.mnt_dir = \"/a\".to_string();\n\n\n\n let ord = cmp_by_capacity_and_dir_name(&mnt1, &mnt2);\n\n assert_eq!(ord, cmp::Ordering::Equal);\n\n }\n\n\n\n #[test]\n\n fn calc_total_simple() {\n\n let mut mnt1 = Mount::named(\"foo\".into());\n\n mnt1.free = 123;\n\n mnt1.used = 456;\n", "file_path": "src/util.rs", "rank": 59, "score": 4.881357382136164 }, { "content": " for mnt in &mut mnts {\n\n mnt.statfs = statfs::statfs(&mnt.mnt_dir[..]).ok();\n\n\n\n let (capacity, free) = match mnt.statfs {\n\n Some(stat) => {\n\n if show_inodes {\n\n (stat.files() as u64, stat.files_free() as u64)\n\n } else {\n\n (\n\n stat.blocks() as u64 * (stat.block_size() as u64),\n\n stat.blocks_available() as u64 * (stat.block_size() as u64),\n\n )\n\n }\n\n }\n\n None => (0, 0),\n\n };\n\n\n\n mnt.capacity = capacity;\n\n mnt.free = free;\n\n mnt.used = capacity - free;\n", "file_path": "src/main.rs", "rank": 60, "score": 4.23134096882817 }, { "content": " pub const HEAVY_HORIZONTAL: char = '━';\n\n pub const LIGHT_DOUBLE_DASH: char = '╌';\n\n pub const HEAVY_DOUBLE_DASH: char = '╍';\n\n pub const ELLIPSIS: char = '…';\n\n pub const SQUARE_BRACKET_OPEN: char = '[';\n\n pub const SQUARE_BRACKET_CLOSE: char = ']';\n\n pub const LIGHT_VERTICAL: char = '│';\n\n pub const LIGHT_VERTICAL_OPEN: char = '├';\n\n pub const LIGHT_VERTICAL_CLOSE: char = '┤';\n\n}\n", "file_path": "src/theme.rs", "rank": 61, "score": 3.3001361442945445 }, { "content": " let s = format_count(2535301200456458802993406410752.0, 1024.0);\n\n assert_eq!(s, \"2097152.0Y\");\n\n }\n\n\n\n #[test]\n\n fn format_percentage_zero() {\n\n let s = 
format_percentage(Option::Some(0f32));\n\n assert_eq!(s, format!(\" 0.0{}\", \"%\".color(Color::White)));\n\n }\n\n\n\n #[test]\n\n fn format_percentage_fraction() {\n\n let s = format_percentage(Option::Some(0.3333333333333333f32));\n\n assert_eq!(s, format!(\" 0.3{}\", \"%\".color(Color::White)));\n\n }\n\n\n\n #[test]\n\n fn format_percentage_hundred() {\n\n let s = format_percentage(Option::Some(100f32));\n\n assert_eq!(s, format!(\"100.0{}\", \"%\".color(Color::White)));\n", "file_path": "src/util.rs", "rank": 62, "score": 3.0360045430418907 }, { "content": " #[test]\n\n fn lvm_alias_two_dashes() {\n\n let s = lvm_alias(\"/dev/mapper/crypto--foo\");\n\n assert_eq!(s, None);\n\n }\n\n\n\n #[test]\n\n fn lvm_alias_three_dashes() {\n\n let s = lvm_alias(\"/dev/mapper/crypto---foo\");\n\n assert_eq!(s, Some(\"/dev/crypto-/foo\".to_string()));\n\n }\n\n\n\n #[test]\n\n fn lvm_alias_four_dashes() {\n\n let s = lvm_alias(\"/dev/mapper/crypto----foo\");\n\n assert_eq!(s, None);\n\n }\n\n\n\n #[test]\n\n fn lvm_alias_five_dashes() {\n", "file_path": "src/util.rs", "rank": 63, "score": 2.8656245643787273 }, { "content": " }\n\n\n\n #[test]\n\n fn format_percentage_none() {\n\n let s = format_percentage(Option::None);\n\n assert_eq!(s, \" -\");\n\n }\n\n\n\n #[test]\n\n fn lvm_alias_none() {\n\n let s = lvm_alias(\"/dev/mapper/crypto\");\n\n assert_eq!(s, None);\n\n }\n\n\n\n #[test]\n\n fn lvm_alias_simple() {\n\n let s = lvm_alias(\"/dev/mapper/crypto-foo\");\n\n assert_eq!(s, Some(\"/dev/crypto/foo\".to_string()));\n\n }\n\n\n", "file_path": "src/util.rs", "rank": 64, "score": 2.8071602386133874 }, { "content": " assert_eq!(ord, cmp::Ordering::Greater);\n\n }\n\n\n\n #[test]\n\n fn cmp_by_capacity_and_dir_name_greater_capacity_smaller_name() {\n\n let mut mnt1 = Mount::named(\"foo\".into());\n\n mnt1.capacity = 123;\n\n mnt1.mnt_dir = \"/a\".to_string();\n\n let mut mnt2 = Mount::named(\"bar\".into());\n\n mnt2.capacity = 64;\n\n mnt2.mnt_dir = 
\"/b\".to_string();\n\n\n\n let ord = cmp_by_capacity_and_dir_name(&mnt1, &mnt2);\n\n assert_eq!(ord, cmp::Ordering::Less);\n\n }\n\n\n\n #[test]\n\n fn cmp_by_capacity_and_dir_name_smaller_capacity_smaller_name() {\n\n let mut mnt1 = Mount::named(\"foo\".into());\n\n mnt1.capacity = 64;\n", "file_path": "src/util.rs", "rank": 65, "score": 2.107898960462229 }, { "content": " mnt1.capacity = 123;\n\n mnt1.mnt_dir = \"/b\".to_string();\n\n let mut mnt2 = Mount::named(\"bar\".into());\n\n mnt2.capacity = 64;\n\n mnt2.mnt_dir = \"/a\".to_string();\n\n\n\n let ord = cmp_by_capacity_and_dir_name(&mnt1, &mnt2);\n\n assert_eq!(ord, cmp::Ordering::Greater);\n\n }\n\n\n\n #[test]\n\n fn cmp_by_capacity_and_dir_name_smaller_capacity_greater_name() {\n\n let mut mnt1 = Mount::named(\"foo\".into());\n\n mnt1.capacity = 64;\n\n mnt1.mnt_dir = \"/b\".to_string();\n\n let mut mnt2 = Mount::named(\"bar\".into());\n\n mnt2.capacity = 123;\n\n mnt2.mnt_dir = \"/a\".to_string();\n\n\n\n let ord = cmp_by_capacity_and_dir_name(&mnt1, &mnt2);\n", "file_path": "src/util.rs", "rank": 66, "score": 2.0852797074373406 }, { "content": " mnt1.mnt_dir = \"/a\".to_string();\n\n let mut mnt2 = Mount::named(\"bar\".into());\n\n mnt2.capacity = 123;\n\n mnt2.mnt_dir = \"/b\".to_string();\n\n\n\n let ord = cmp_by_capacity_and_dir_name(&mnt1, &mnt2);\n\n assert_eq!(ord, cmp::Ordering::Less);\n\n }\n\n\n\n #[test]\n\n fn cmp_by_capacity_and_dir_name_equal_capacity_greater_name() {\n\n let mut mnt1 = Mount::named(\"foo\".into());\n\n mnt1.capacity = 123;\n\n mnt1.mnt_dir = \"/b\".to_string();\n\n let mut mnt2 = Mount::named(\"bar\".into());\n\n mnt2.capacity = 123;\n\n mnt2.mnt_dir = \"/a\".to_string();\n\n\n\n let ord = cmp_by_capacity_and_dir_name(&mnt1, &mnt2);\n\n assert_eq!(ord, cmp::Ordering::Greater);\n", "file_path": "src/util.rs", "rank": 67, "score": 2.041466927191033 } ]
Rust
rust/k210-shared/src/soc/sysctl/pll_compute.rs
egcd32/k210-sdk-stuff
ab95e18ba81e011f721237472b1f5434506fd7fb
use core::convert::TryInto; use libm::F64Ext; /** PLL configuration */ #[derive(Debug, PartialEq, Eq)] pub struct Params { pub clkr: u8, pub clkf: u8, pub clkod: u8, pub bwadj: u8, } /* constants for PLL frequency computation */ const VCO_MIN: f64 = 3.5e+08; const VCO_MAX: f64 = 1.75e+09; const REF_MIN: f64 = 1.36719e+07; const REF_MAX: f64 = 1.75e+09; const NR_MIN: i32 = 1; const NR_MAX: i32 = 16; const NF_MIN: i32 = 1; const NF_MAX: i32 = 64; const NO_MIN: i32 = 1; const NO_MAX: i32 = 16; const NB_MIN: i32 = 1; const NB_MAX: i32 = 64; const MAX_VCO: bool = true; const REF_RNG: bool = true; /* * Calculate PLL registers' value by finding closest matching parameters * NOTE: this uses floating point math ... this is horrible for something so critical :-( * TODO: implement this without fp ops */ pub fn compute_params(freq_in: u32, freq_out: u32) -> Option<Params> { let fin: f64 = freq_in.into(); let fout: f64 = freq_out.into(); let val: f64 = fout / fin; let terr: f64 = 0.5 / ((NF_MAX / 2) as f64); let mut merr: f64 = terr; let mut x_nrx: i32 = 0; let mut x_no: i32 = 0; let mut found: Option<Params> = None; for nfi in (val as i32)..NF_MAX { let nr: i32 = ((nfi as f64) / val).round() as i32; if nr == 0 { continue; } if REF_RNG && (nr < NR_MIN) { continue; } if fin / (nr as f64) > REF_MAX { continue; } let mut nrx: i32 = nr; let mut nf: i32 = nfi; let mut nfx: i64 = nfi.into(); let nval: f64 = (nfx as f64) / (nr as f64); if nf == 0 { nf = 1; } let err: f64 = 1.0 - nval / val; if (err.abs() < merr * (1.0 + 1e-6)) || (err.abs() < 1e-16) { let mut not: i32 = (VCO_MAX / fout).floor() as i32; let mut no: i32 = if not > NO_MAX { NO_MAX } else { not }; while no > NO_MIN { if (REF_RNG) && ((nr / no) < NR_MIN) { no -= 1; continue; } if (nr % no) == 0 { break; } no -= 1; } if (nr % no) != 0 { continue; } let mut nor: i32 = (if not > NO_MAX { NO_MAX } else { not }) / no; let mut nore: i32 = NF_MAX / nf; if nor > nore { nor = nore; } let noe: i32 = (VCO_MIN / fout).ceil() as i32; 
if !MAX_VCO { nore = (noe - 1) / no + 1; nor = nore; not = 0; /* force next if to fail */ } if (((no * nor) < (not >> 1)) || ((no * nor) < noe)) && ((no * nor) < (NF_MAX / nf)) { no = NF_MAX / nf; if no > NO_MAX { no = NO_MAX; } if no > not { no = not; } nfx *= no as i64; nf *= no; if (no > 1) && !found.is_none() { continue; } /* wait for larger nf in later iterations */ } else { nrx /= no; nfx *= nor as i64; nf *= nor; no *= nor; if no > NO_MAX { continue; } if (nor > 1) && !found.is_none() { continue; } /* wait for larger nf in later iterations */ } let mut nb: i32 = nfx as i32; if nb < NB_MIN { nb = NB_MIN; } if nb > NB_MAX { continue; } let fvco: f64 = fin / (nrx as f64) * (nfx as f64); if fvco < VCO_MIN { continue; } if fvco > VCO_MAX { continue; } if nf < NF_MIN { continue; } if REF_RNG && (fin / (nrx as f64) < REF_MIN) { continue; } if REF_RNG && (nrx > NR_MAX) { continue; } if found.is_some() { if !((err.abs() < merr * (1.0 - 1e-6)) || (MAX_VCO && (no > x_no))) { continue; } if nrx > x_nrx { continue; } } found = Some(Params { clkr: (nrx - 1).try_into().unwrap(), clkf: (nfx - 1).try_into().unwrap(), clkod: (no - 1).try_into().unwrap(), bwadj: (nb - 1).try_into().unwrap(), }); merr = err.abs(); x_no = no; x_nrx = nrx; } } if merr >= terr * (1.0 - 1e-6) { None } else { found } } #[cfg(test)] mod tests { use super::*; #[test] fn test_ompute_params() { /* check against output of C implementation */ assert_eq!(compute_params(26_000_000, 1_500_000_000), Some(Params { clkr: 0, clkf: 57, clkod: 0, bwadj: 57 })); assert_eq!(compute_params(26_000_000, 1_000_000_000), Some(Params { clkr: 0, clkf: 37, clkod: 0, bwadj: 37 })); assert_eq!(compute_params(26_000_000, 800_000_000), Some(Params { clkr: 0, clkf: 61, clkod: 1, bwadj: 61 })); assert_eq!(compute_params(26_000_000, 700_000_000), Some(Params { clkr: 0, clkf: 53, clkod: 1, bwadj: 53 })); assert_eq!(compute_params(26_000_000, 300_000_000), Some(Params { clkr: 0, clkf: 45, clkod: 3, bwadj: 45 })); 
assert_eq!(compute_params(26_000_000, 45_158_400), Some(Params { clkr: 0, clkf: 25, clkod: 14, bwadj: 25 })); } }
use core::convert::TryInto; use libm::F64Ext; /** PLL configuration */ #[derive(Debug, PartialEq, Eq)] pub struct Params { pub clkr: u8, pub clkf: u8, pub clkod: u8, pub bwadj: u8, } /* constants for PLL frequency computation */ const VCO_MIN: f64 = 3.5e+08; const VCO_MAX: f64 = 1.75e+09; const REF_MIN: f64 = 1.36719e+07; const REF_MAX: f64 = 1.75e+09; const NR_MIN: i32 = 1; const NR_MAX: i32 = 16; const NF_MIN: i32 = 1; const NF_MAX: i32 = 64; const NO_MIN: i32 = 1; const NO_MAX: i32 = 16; const NB_MIN: i32 = 1; const NB_MAX: i32 = 64; const MAX_VCO: bool = true; const REF_RNG: bool = true; /* * Calculate PLL registers' value by finding closest matching parameters * NOTE: this uses floating point math ... this is horrible for something so critical :-( * TODO: implement this without fp ops */ pub fn compute_params(freq_in: u32, freq_out: u32) -> Option<Params> { let fin: f64 = freq_in.into(); let fout: f64 = freq_out.into(); let val: f64 = fout / fin; let terr: f64 = 0.5 / ((NF_MAX / 2) as f64); let mut merr: f64 = terr; let mut x_nrx: i32 = 0; let mut x_no: i32 = 0; let mut found: Option<Params> = None; for nfi in (val as i32)..NF_MAX { let nr: i32 = ((nfi as f64) / val).round() as i32; if nr == 0 { continue; } if REF_RNG && (nr < NR_MIN) { continue; } if fin / (nr as f64) > REF_MAX { continue; } let mut nrx: i32 = nr; let mut nf: i32 = nfi; let mut nfx: i64 = nfi.into(); let nval: f64 = (nfx as f64) / (nr as f64); if nf == 0 { nf = 1; } let err: f64 = 1.0 - nval / val; if (err.abs() < merr * (1.0 + 1e-6)) || (err.abs() < 1e-16) { let mut not: i32 = (VCO_MAX / fout).floor() as i32; let mut no: i32 = if not > NO_MAX { NO_MAX } else { not }; while no > NO_MIN { if (REF_RNG) && ((nr / no) < NR_MIN) { no -= 1; continue; } if (nr % no) == 0 { break; } no -= 1; } if (nr % no) != 0 { continue; } let mut nor: i32 = (if not > NO_MAX { NO_MAX } else { not }) / no; let mut nore: i32 = NF_MAX / nf; if nor > nore { nor = nore; } let noe: i32 = (VCO_MIN / fout).ceil() as i32; 
if !MAX_VCO { nore = (noe - 1) / no + 1; nor = nore; not = 0; /* force next if to fail */ } if (((no * nor) < (not >> 1)) || ((no * nor) < noe)) && ((no * nor) < (NF_MAX / nf)) { no = NF_MAX / nf; if no > NO_MAX { no = NO_MAX; } if no > not { no = not; } nfx *= no as i64; nf *= no; if (no > 1) && !found.is_none() { continue; } /* wait for larger nf in later iterations */ } else { nrx /= no; nfx *= nor as i64; nf *= nor; no *= nor; if no > NO_MAX { continue; } if (nor > 1) && !found.is_none() { continue; } /* wait for larger nf in later iterations */ } let mut nb: i32 = nfx as i32; if nb < NB_MIN { nb = NB_MIN; } if nb > NB_MAX { continue; } let fvco: f64 = fin / (nrx as f64) * (nfx as f64); if fvco < VCO_MIN { continue; } if fvco > VCO_MAX { continue; } if nf < NF_MIN { continue; } if REF_RNG && (fin / (nrx as f64) < REF_MIN) { continue; } if REF_RNG && (nrx > NR_MAX) { continue; } if found.is_some() { if !((err.abs() < merr * (1.0 - 1e-6)) || (MAX_VCO && (no > x_no))) { continue; } if nrx > x_nrx { continue; } } found = Some(Params { clkr: (nrx - 1).try_into().unwrap(), clkf: (nfx - 1).try_into().unwrap(), clkod: (no - 1).try_into().unwrap(), bwadj: (nb - 1).try_into().unwrap(), }); merr = err.abs(); x_no = no; x_nrx = nrx; } } if merr >= terr * (1.0 - 1e-6) { None } else { found } } #[cfg(test)] mod tests { use super::*; #[test]
}
fn test_ompute_params() { /* check against output of C implementation */ assert_eq!(compute_params(26_000_000, 1_500_000_000), Some(Params { clkr: 0, clkf: 57, clkod: 0, bwadj: 57 })); assert_eq!(compute_params(26_000_000, 1_000_000_000), Some(Params { clkr: 0, clkf: 37, clkod: 0, bwadj: 37 })); assert_eq!(compute_params(26_000_000, 800_000_000), Some(Params { clkr: 0, clkf: 61, clkod: 1, bwadj: 61 })); assert_eq!(compute_params(26_000_000, 700_000_000), Some(Params { clkr: 0, clkf: 53, clkod: 1, bwadj: 53 })); assert_eq!(compute_params(26_000_000, 300_000_000), Some(Params { clkr: 0, clkf: 45, clkod: 3, bwadj: 45 })); assert_eq!(compute_params(26_000_000, 45_158_400), Some(Params { clkr: 0, clkf: 25, clkod: 14, bwadj: 25 })); }
function_block-full_function
[ { "content": "pub fn set_bit(inval: u32, bit: u8, state: bool) -> u32 {\n\n if state {\n\n inval | (1 << u32::from(bit))\n\n } else {\n\n inval & !(1 << u32::from(bit))\n\n }\n\n}\n\n\n", "file_path": "rust/k210-shared/src/soc/utils.rs", "rank": 1, "score": 305117.4159386919 }, { "content": "pub fn get_bit(inval: u32, bit: u8) -> bool {\n\n (inval & (1 << u32::from(bit))) != 0\n\n}\n", "file_path": "rust/k210-shared/src/soc/utils.rs", "rank": 2, "score": 302737.7121477399 }, { "content": "pub fn recv_nb(s: &mut [u8]) -> usize {\n\n let irecv = unsafe { &mut UART1_INSTANCE_RECV };\n\n let head = irecv.head.load(Ordering::SeqCst);\n\n let mut tail = irecv.tail.load(Ordering::SeqCst);\n\n if head == tail { // Early-out without tail.store if ring buffer empty\n\n return 0;\n\n }\n\n let mut ptr = 0;\n\n while ptr < s.len() && tail != head {\n\n s[ptr] = irecv.buf[tail];\n\n tail += 1;\n\n if tail == UART_BUFSIZE {\n\n tail = 0;\n\n }\n\n ptr += 1;\n\n }\n\n irecv.tail.store(tail, Ordering::SeqCst);\n\n ptr\n\n}\n\n\n\n/** Receive data from UART (blocks for at least one byte if the buffer can hold one, returns number\n\n * of bytes received) */\n", "file_path": "rust/buffered-uart/src/lib.rs", "rank": 3, "score": 300743.3791217424 }, { "content": "pub fn write_num_u32<W>(w: &mut W, mut val: u32) -> Result<(), W::Error>\n\nwhere\n\n W: Write,\n\n{\n\n let mut buf = [0u8; 10];\n\n let mut curr = buf.len();\n\n for byte in buf.iter_mut().rev() {\n\n *byte = b'0' + (val % 10) as u8;\n\n val = val / 10;\n\n curr -= 1;\n\n if val == 0 {\n\n break;\n\n }\n\n }\n\n w.write_all(&buf[curr..])\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "util/esp8266at/src/util.rs", "rank": 4, "score": 299469.46824911475 }, { "content": "pub fn set_pin(pin: u8, value: bool) {\n\n unsafe {\n\n let ptr = pac::GPIOHS::ptr();\n\n (*ptr)\n\n .output_val\n\n .modify(|r, w| w.bits(set_bit(r.bits(), pin, value)));\n\n }\n\n}\n\n\n\n/** Get input value for a GPIOHS pin 
*/\n", "file_path": "rust/k210-shared/src/soc/gpiohs.rs", "rank": 5, "score": 294275.8050514783 }, { "content": "pub fn pll_get_freq(pll: pll) -> u32 {\n\n let freq_in;\n\n let nr;\n\n let nf;\n\n let od;\n\n\n\n match pll {\n\n pll::PLL0 => {\n\n freq_in = clock_source_get_freq(clock_source::IN0);\n\n unsafe {\n\n let val = (*pac::SYSCTL::ptr()).pll0.read();\n\n nr = val.clkr().bits() + 1;\n\n nf = val.clkf().bits() + 1;\n\n od = val.clkod().bits() + 1;\n\n }\n\n }\n\n pll::PLL1 => {\n\n freq_in = clock_source_get_freq(clock_source::IN0);\n\n unsafe {\n\n let val = (*pac::SYSCTL::ptr()).pll1.read();\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 6, "score": 285145.71426260064 }, { "content": "pub fn pll_set_freq(pll: pll, freq: u32) -> Result<u32,()> {\n\n assert!(freq != 0);\n\n let ptr = pac::SYSCTL::ptr();\n\n use pll::*;\n\n\n\n /* 1. Change CPU CLK to XTAL */\n\n if pll == PLL0 {\n\n clock_set_clock_select(clock_select::ACLK, 0 /* clock_source::IN0 */);\n\n }\n\n\n\n /* 2. Disable PLL output */\n\n unsafe {\n\n match pll {\n\n PLL0 => (*ptr).pll0.modify(|_,w| w.out_en().clear_bit()),\n\n PLL1 => (*ptr).pll1.modify(|_,w| w.out_en().clear_bit()),\n\n PLL2 => (*ptr).pll2.modify(|_,w| w.out_en().clear_bit()),\n\n };\n\n }\n\n\n\n /* 3. Turn off PLL */\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 7, "score": 279413.881764031 }, { "content": "pub fn mod_euc(a: i32, b: i32) -> i32 {\n\n let r = a % b;\n\n if r < 0 {\n\n r + b\n\n } else {\n\n r\n\n }\n\n}\n\n\n\n/** Minimum of two f32 values. 
*/\n", "file_path": "rust/voxel/src/main.rs", "rank": 8, "score": 266542.1802233051 }, { "content": "pub fn get_pin(pin: u8) -> bool {\n\n unsafe {\n\n let ptr = pac::GPIOHS::ptr();\n\n get_bit((*ptr).input_val.read().bits(), pin)\n\n }\n\n}\n", "file_path": "rust/k210-shared/src/soc/gpiohs.rs", "rank": 9, "score": 260875.5505234409 }, { "content": "pub fn recv(s: &mut [u8]) -> usize {\n\n if s.len() == 0 {\n\n return 0;\n\n }\n\n loop {\n\n let n = recv_nb(s);\n\n if n != 0 {\n\n return n;\n\n }\n\n unsafe { asm::wfi(); }\n\n }\n\n}\n\n\n\n/** Initialize interrupts and buffered UART handling */\n", "file_path": "rust/buffered-uart/src/lib.rs", "rank": 10, "score": 258614.26185677323 }, { "content": "fn pll_is_lock(pll: pll) -> bool {\n\n let ptr = pac::SYSCTL::ptr();\n\n let pll_lock = unsafe { (*ptr).pll_lock.read() };\n\n match pll {\n\n pll::PLL0 => pll_lock.pll_lock0().bits() == 3,\n\n pll::PLL1 => (pll_lock.pll_lock1().bits() & 1) == 1,\n\n pll::PLL2 => (pll_lock.pll_lock2().bits() & 1) == 1,\n\n }\n\n}\n\n\n\n/** Clear PLL slip, this is done repeatedly until lock is achieved */\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 11, "score": 231155.4791549613 }, { "content": "fn pll_clear_slip(pll: pll) -> bool {\n\n let ptr = pac::SYSCTL::ptr();\n\n unsafe {\n\n (*ptr).pll_lock.modify(|_,w|\n\n match pll {\n\n pll::PLL0 => w.pll_slip_clear0().set_bit(),\n\n pll::PLL1 => w.pll_slip_clear1().set_bit(),\n\n pll::PLL2 => w.pll_slip_clear2().set_bit(),\n\n }\n\n );\n\n }\n\n pll_is_lock(pll)\n\n}\n\n\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 12, "score": 229467.0310730278 }, { "content": "pub fn write_qstr<W>(w: &mut W, s: &[u8]) -> Result<(), W::Error>\n\nwhere\n\n W: Write,\n\n{\n\n w.write_all(b\"\\\"\")?;\n\n for ch in s {\n\n w.write_all(match ch {\n\n b'\\\"' => &[b'\\\\', b'\"'],\n\n b'\\\\' => &[b'\\\\', b'\\\\'],\n\n _ => slice::from_ref(ch),\n\n })?;\n\n }\n\n w.write_all(b\"\\\"\")?;\n\n Ok(())\n\n}\n\n\n\n/** Write 
decimal unsigned number */\n", "file_path": "util/esp8266at/src/util.rs", "rank": 13, "score": 224442.162425404 }, { "content": "fn pll_source_set_freq(pll: pll, source: clock_source, freq: u32) -> Result<u32,()> {\n\n use pll::*;\n\n /* PLL0 and 1 can only source from IN0 */\n\n if (pll == PLL0 || pll == PLL1) && source != clock_source::IN0 {\n\n return Err(());\n\n }\n\n let freq_in = clock_source_get_freq(source);\n\n if freq_in == 0 {\n\n return Err(());\n\n }\n\n if let Some(found) = pll_compute::compute_params(freq_in, freq) {\n\n let ptr = pac::SYSCTL::ptr();\n\n unsafe {\n\n match pll {\n\n PLL0 => {\n\n (*ptr).pll0.modify(|_,w|\n\n w.clkr().bits(found.clkr)\n\n .clkf().bits(found.clkf)\n\n .clkod().bits(found.clkod)\n\n .bwadj().bits(found.bwadj)\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 14, "score": 213218.8851401645 }, { "content": "pub fn send(s: &[u8]) {\n\n let uart = pac::UART1::ptr();\n\n for &c in s {\n\n unsafe {\n\n while ((*uart).lsr.read().bits() & (1 << 5)) != 0 {}\n\n (*uart).rbr_dll_thr.write(|w| w.bits(c.into()));\n\n }\n\n }\n\n}\n\n\n\n/** Receive data from UART (non-blocking, returns number of bytes received) */\n", "file_path": "rust/buffered-uart/src/lib.rs", "rank": 15, "score": 210393.88784993818 }, { "content": "fn set_register<IF: I2C>(i2c: &IF, reg: reg, val: u8) -> Result<(), ()> {\n\n i2c.send_data(&[reg as u8, val])\n\n}\n\n\n\nimpl<IF: I2C> Accelerometer<IF> {\n\n /** Initialize chip */\n\n pub fn init(i2c: IF) -> Result<Self, ()> {\n\n let correct_id = 0x13;\n\n if let Ok(part_id) = read_register(&i2c, reg::PARTID) {\n\n if part_id != correct_id {\n\n //writeln!(stdout, \"MSA device not found (ID should be {:02x} but is {:02x})\", correct_id, part_id).unwrap();\n\n return Err(());\n\n }\n\n } else {\n\n //writeln!(stdout, \"Could not read MSA device ID\").unwrap();\n\n return Err(());\n\n }\n\n\n\n // set (and check) the power mode to 0x1A: normal power mode + 500Hz bandwidth\n\n let desired_mode = 
0x1A;\n", "file_path": "rust/k210-shared/src/board/msa300.rs", "rank": 16, "score": 209229.90196689352 }, { "content": "fn sample_cirle(x: i32, y: i32, cx: i32, cy: i32, r: i32, rr: i32) -> bool {\n\n // Early-out based on bounding box\n\n (x - cx).abs() <= r && (y - cy).abs() <= r &&\n\n ((x - cx) * (x - cx) + (y - cy) * (y - cy)) <= rr\n\n}\n\n\n", "file_path": "rust/accelerometer/src/main.rs", "rank": 17, "score": 206931.31251096344 }, { "content": "pub fn from(ch: u8) -> char {\n\n FROM[usize::from(ch)]\n\n}\n\n\n", "file_path": "rust/k210-console/src/cp437.rs", "rank": 18, "score": 201354.14915482 }, { "content": "pub fn set_spi0_dvp_data(status: bool) {\n\n unsafe {\n\n (*pac::SYSCTL::ptr())\n\n .misc\n\n .modify(|_, w| w.spi_dvp_data_enable().bit(status));\n\n }\n\n}\n\n\n\n/** Map PLL2 cksel value to clock source */\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 19, "score": 200406.20820874098 }, { "content": "pub fn parse(response: &[u8]) -> ParseResult {\n\n match parse_response(response) {\n\n Ok((residue, resp)) => ParseResult::Ok(response.offset(residue), resp),\n\n Err(nom::Err::Incomplete(_)) => ParseResult::Incomplete,\n\n Err(_) => ParseResult::Err,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test() {\n\n assert_eq!(\n\n parse_response(b\"AT\\r\\n\"),\n\n Ok((&b\"\"[..], Response::Echo(b\"AT\")))\n\n );\n\n assert_eq!(parse_response(b\"\\r\\n\"), Ok((&b\"\"[..], Response::Empty)));\n\n assert_eq!(parse_response(b\"> \"), Ok((&b\"\"[..], Response::RecvPrompt)));\n\n assert_eq!(\n\n parse_response(b\"OK\\r\\n\"),\n\n Ok((&b\"\"[..], Response::Gen(GenResponse::OK)))\n\n );\n\n }\n\n}\n", "file_path": "util/esp8266at/src/response.rs", "rank": 20, "score": 199289.5537759483 }, { "content": "fn show_uart_info(debug: &mut dyn core::fmt::Write, uart: *const pac::uart1::RegisterBlock) {\n\n let cpr = unsafe { (*uart).cpr.read() }.bits();\n\n let ucv = unsafe { (*uart).ucv.read() }.bits();\n\n let 
ctr = unsafe { (*uart).ctr.read() }.bits();\n\n writeln!(debug, \"UART1: Designware UART version {}.{}.{}\",\n\n char::from((ucv >> 24) as u8),\n\n char::from(((ucv >> 16) & 0xff) as u8),\n\n char::from(((ucv >> 8) & 0xff) as u8),\n\n ).unwrap();\n\n write!(debug, \"UART1: Features\").unwrap();\n\n write!(debug, \" APB {} bits\", match cpr & 3 {\n\n 0 => 8,\n\n 1 => 16,\n\n 2 => 32,\n\n _ => 0,\n\n }).unwrap();\n\n let flags = [\n\n (4, \"AFCE\"),\n\n (5, \"THRE\"),\n\n (6, \"SIR\"),\n", "file_path": "rust/weather/src/main.rs", "rank": 21, "score": 197684.66977257462 }, { "content": "/// Get clock divider\n\npub fn clock_get_threshold(which: threshold) -> u32 {\n\n unsafe {\n\n // TODO: this should return a multiplier directly, not a peripheral specific value\n\n let ptr = pac::SYSCTL::ptr();\n\n match which {\n\n /* 2 bit wide */\n\n threshold::ACLK => (*ptr).clk_sel0.read().aclk_divider_sel().bits().into(),\n\n\n\n /* 3 bit wide */\n\n threshold::APB0 => (*ptr).clk_sel0.read().apb0_clk_sel().bits().into(),\n\n threshold::APB1 => (*ptr).clk_sel0.read().apb1_clk_sel().bits().into(),\n\n threshold::APB2 => (*ptr).clk_sel0.read().apb2_clk_sel().bits().into(),\n\n\n\n /* 4 bit wide */\n\n threshold::SRAM0 => (*ptr).clk_th0.read().sram0_gclk().bits().into(),\n\n threshold::SRAM1 => (*ptr).clk_th0.read().sram1_gclk().bits().into(),\n\n threshold::AI => (*ptr).clk_th0.read().ai_gclk().bits().into(),\n\n threshold::DVP => (*ptr).clk_th0.read().dvp_gclk().bits().into(),\n\n threshold::ROM => (*ptr).clk_th0.read().rom_gclk().bits().into(),\n\n\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 22, "score": 195377.96628001254 }, { "content": "pub fn clock_get_freq(clock: clock) -> u32 {\n\n // TODO: all of these are source / threshold, where source can depend on clock_select: generalize this\n\n // to some kind of clock tree\n\n // TODO: clock_source_get_freq(ACLK) calls back into here, don't do this\n\n match clock {\n\n clock::IN0 => 
clock_source_get_freq(clock_source::IN0),\n\n clock::PLL0 => clock_source_get_freq(clock_source::PLL0),\n\n clock::PLL1 => clock_source_get_freq(clock_source::PLL1),\n\n clock::PLL2 => clock_source_get_freq(clock_source::PLL2),\n\n clock::CPU | clock::DMA | clock::FFT | clock::ACLK | clock::HCLK => match clock_get_clock_select(clock_select::ACLK) {\n\n 0 => clock_source_get_freq(clock_source::IN0),\n\n 1 => {\n\n clock_source_get_freq(clock_source::PLL0)\n\n / (2 << clock_get_threshold(threshold::ACLK))\n\n }\n\n _ => panic!(\"invalid cpu clock select\"),\n\n },\n\n clock::SRAM0 => clock_source_get_freq(clock_source::ACLK) / (clock_get_threshold(threshold::SRAM0) + 1),\n\n clock::SRAM1 => clock_source_get_freq(clock_source::ACLK) / (clock_get_threshold(threshold::SRAM1) + 1),\n\n clock::ROM => clock_source_get_freq(clock_source::ACLK) / (clock_get_threshold(threshold::ROM) + 1),\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 23, "score": 193488.39040754177 }, { "content": "pub fn clock_get_clock_select(which: clock_select) -> u8 {\n\n unsafe {\n\n let ptr = pac::SYSCTL::ptr();\n\n // Seems that PLL2 is the only one that has a non-boolean clock select\n\n // TODO: return a clock_source directly when we know the meanings of these bits\n\n // meaning seems to be usually:\n\n // 0 IN0\n\n // 1 PLL0\n\n // (2 PLL1)\n\n // it's likely different for _BYPASS, which, I suspect, wires the PLL output to the\n\n // input (IN0 for PLL0 and PLL1, selectable for PLL2)\n\n match which {\n\n clock_select::PLL0_BYPASS => (*ptr).pll0.read().bypass().bit().into(),\n\n clock_select::PLL1_BYPASS => (*ptr).pll1.read().bypass().bit().into(),\n\n clock_select::PLL2_BYPASS => (*ptr).pll2.read().bypass().bit().into(),\n\n clock_select::PLL2 => (*ptr).pll2.read().ckin_sel().bits().into(),\n\n clock_select::ACLK => (*ptr).clk_sel0.read().aclk_sel().bit().into(),\n\n clock_select::SPI3 => (*ptr).clk_sel0.read().spi3_clk_sel().bit().into(),\n\n clock_select::TIMER0 => 
(*ptr).clk_sel0.read().timer0_clk_sel().bit().into(),\n\n clock_select::TIMER1 => (*ptr).clk_sel0.read().timer1_clk_sel().bit().into(),\n\n clock_select::TIMER2 => (*ptr).clk_sel0.read().timer2_clk_sel().bit().into(),\n\n clock_select::SPI3_SAMPLE => (*ptr).clk_sel1.read().spi3_sample_clk_sel().bit().into(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 24, "score": 191615.69345185568 }, { "content": "pub fn clock_source_get_freq(source: clock_source) -> u32 {\n\n match source {\n\n clock_source::IN0 => SYSCTRL_CLOCK_FREQ_IN0,\n\n clock_source::PLL0 => pll_get_freq(pll::PLL0),\n\n clock_source::PLL1 => pll_get_freq(pll::PLL1),\n\n clock_source::PLL2 => pll_get_freq(pll::PLL2),\n\n clock_source::ACLK => clock_get_freq(clock::ACLK),\n\n }\n\n}\n\n\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 25, "score": 189867.489569759 }, { "content": "/// Set clock divider\n\npub fn clock_set_threshold(which: threshold, threshold: u32) {\n\n // TODO: this should take a multiplier directly, not a peripheral specific value\n\n unsafe {\n\n let ptr = pac::SYSCTL::ptr();\n\n match which {\n\n /* 2 bit wide */\n\n threshold::ACLK => (*ptr).clk_sel0.modify(|_, w| w.aclk_divider_sel().bits(threshold as u8)),\n\n\n\n /* 3 bit wide */\n\n threshold::APB0 => (*ptr).clk_sel0.modify(|_, w| w.apb0_clk_sel().bits(threshold as u8)),\n\n threshold::APB1 => (*ptr).clk_sel0.modify(|_, w| w.apb1_clk_sel().bits(threshold as u8)),\n\n threshold::APB2 => (*ptr).clk_sel0.modify(|_, w| w.apb2_clk_sel().bits(threshold as u8)),\n\n\n\n /* 4 bit wide */\n\n threshold::SRAM0 => (*ptr).clk_th0.modify(|_, w| w.sram0_gclk().bits(threshold as u8)),\n\n threshold::SRAM1 => (*ptr).clk_th0.modify(|_, w| w.sram1_gclk().bits(threshold as u8)),\n\n threshold::AI => (*ptr).clk_th0.modify(|_, w| w.ai_gclk().bits(threshold as u8)),\n\n threshold::DVP => (*ptr).clk_th0.modify(|_, w| w.dvp_gclk().bits(threshold as u8)),\n\n threshold::ROM => (*ptr).clk_th0.modify(|_, 
w| w.rom_gclk().bits(threshold as u8)),\n\n\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 26, "score": 187806.72390139638 }, { "content": "pub fn render_image<L, I>(lcd: &mut L, mut image: I)\n\nwhere\n\n L: LCDHL,\n\n I: FnMut(u16, u16) -> u16,\n\n{\n\n // Theoretically this initialization could be avoided by directly initializing from an\n\n // iterator, however, rust doesn't have built-in functionality for this. There's a crate\n\n // (array_init) but it doesn't work for large arrays.\n\n let mut idata: ScreenImage = [0; DISP_PIXELS / 2];\n\n let yx = (0..DISP_HEIGHT)\n\n .flat_map(|y| core::iter::repeat(y).zip(0..DISP_WIDTH / 2));\n\n idata.iter_mut().zip(yx).for_each(|(v, (y, x))| {\n\n *v = (u32::from(image(x * 2 + 0, y)) << 16) | (u32::from(image(x * 2 + 1, y)));\n\n });\n\n\n\n // It would be possible to make draw_picture take an iterator directly\n\n // instead of rendering to an array first, however, this means that the\n\n // computation has to keep up with the SPI clock speed or there will be\n\n // glitches -- also it means that DMA cannot be used -- whereas a sufficiently\n\n // advanced DMA engine is indistinguishable from a GPU, the one in K210\n\n // isn't that.\n\n lcd.draw_picture(0, 0, DISP_WIDTH, DISP_HEIGHT, &idata);\n\n}\n", "file_path": "rust/k210-shared/src/board/lcd_render.rs", "rank": 27, "score": 187713.24314713918 }, { "content": "fn mandelbrot(cx: f32, cy: f32, iterations: u32) -> u32 {\n\n let mut z: (f32, f32) = (0.0, 0.0);\n\n let mut i: u32 = 0;\n\n while (z.0*z.0 + z.1*z.1) < 2.0*2.0 && i < iterations {\n\n z = (z.0 * z.0 - z.1 * z.1 + cx, 2.0 * z.0 * z.1 + cy);\n\n i += 1;\n\n }\n\n i\n\n}\n\n\n", "file_path": "rust/mandelbrot/src/main.rs", "rank": 28, "score": 185348.2374493368 }, { "content": "pub fn clock_set_clock_select(which: clock_select, select: u8) {\n\n unsafe {\n\n let ptr = pac::SYSCTL::ptr();\n\n // Seems that PLL2 is the only one that takes a non-boolean clock select\n\n // TODO: take a clock_source 
directly when we know the meanings of these bits\n\n match which {\n\n clock_select::PLL0_BYPASS => (*ptr).pll0.modify(|_, w| w.bypass().bit(select != 0)),\n\n clock_select::PLL1_BYPASS => (*ptr).pll1.modify(|_, w| w.bypass().bit(select != 0)),\n\n clock_select::PLL2_BYPASS => (*ptr).pll2.modify(|_, w| w.bypass().bit(select != 0)),\n\n clock_select::PLL2 => (*ptr).pll2.modify(|_, w| w.ckin_sel().bits(select)),\n\n clock_select::ACLK => (*ptr).clk_sel0.modify(|_, w| w.aclk_sel().bit(select != 0)),\n\n clock_select::SPI3 => (*ptr).clk_sel0.modify(|_, w| w.spi3_clk_sel().bit(select != 0)),\n\n clock_select::TIMER0 => (*ptr).clk_sel0.modify(|_, w| w.timer0_clk_sel().bit(select != 0)),\n\n clock_select::TIMER1 => (*ptr).clk_sel0.modify(|_, w| w.timer1_clk_sel().bit(select != 0)),\n\n clock_select::TIMER2 => (*ptr).clk_sel0.modify(|_, w| w.timer2_clk_sel().bit(select != 0)),\n\n clock_select::SPI3_SAMPLE => (*ptr).clk_sel1.modify(|_, w| w.spi3_sample_clk_sel().bit(select != 0)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 29, "score": 184151.127401105 }, { "content": "pub fn set_direction(pin: u8, direction: gpio::direction) {\n\n unsafe {\n\n let ptr = pac::GPIOHS::ptr();\n\n (*ptr)\n\n .output_en\n\n .modify(|r, w| w.bits(set_bit(r.bits(), pin, direction == gpio::direction::OUTPUT)));\n\n (*ptr)\n\n .input_en\n\n .modify(|r, w| w.bits(set_bit(r.bits(), pin, direction == gpio::direction::INPUT)));\n\n }\n\n}\n\n\n\n/** Set output value for a GPIOHS pin */\n", "file_path": "rust/k210-shared/src/soc/gpiohs.rs", "rank": 30, "score": 181184.8575457458 }, { "content": "fn reset_ctl(reset: reset, rst_value: bool) {\n\n unsafe {\n\n let ptr = pac::SYSCTL::ptr();\n\n match reset {\n\n reset::SOC => (*ptr).soft_reset.modify(|_, w| w.soft_reset().bit(rst_value)),\n\n reset::ROM => (*ptr).peri_reset.modify(|_, w| w.rom_reset().bit(rst_value)),\n\n reset::DMA => (*ptr).peri_reset.modify(|_, w| w.dma_reset().bit(rst_value)),\n\n reset::AI => 
(*ptr).peri_reset.modify(|_, w| w.ai_reset().bit(rst_value)),\n\n reset::DVP => (*ptr).peri_reset.modify(|_, w| w.dvp_reset().bit(rst_value)),\n\n reset::FFT => (*ptr).peri_reset.modify(|_, w| w.fft_reset().bit(rst_value)),\n\n reset::GPIO => (*ptr).peri_reset.modify(|_, w| w.gpio_reset().bit(rst_value)),\n\n reset::SPI0 => (*ptr).peri_reset.modify(|_, w| w.spi0_reset().bit(rst_value)),\n\n reset::SPI1 => (*ptr).peri_reset.modify(|_, w| w.spi1_reset().bit(rst_value)),\n\n reset::SPI2 => (*ptr).peri_reset.modify(|_, w| w.spi2_reset().bit(rst_value)),\n\n reset::SPI3 => (*ptr).peri_reset.modify(|_, w| w.spi3_reset().bit(rst_value)),\n\n reset::I2S0 => (*ptr).peri_reset.modify(|_, w| w.i2s0_reset().bit(rst_value)),\n\n reset::I2S1 => (*ptr).peri_reset.modify(|_, w| w.i2s1_reset().bit(rst_value)),\n\n reset::I2S2 => (*ptr).peri_reset.modify(|_, w| w.i2s2_reset().bit(rst_value)),\n\n reset::I2C0 => (*ptr).peri_reset.modify(|_, w| w.i2c0_reset().bit(rst_value)),\n\n reset::I2C1 => (*ptr).peri_reset.modify(|_, w| w.i2c1_reset().bit(rst_value)),\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 31, "score": 172457.58673452726 }, { "content": "fn hexdump<T: core::fmt::Write>(stdout: &mut T, buffer: &[u8], base: usize) {\n\n for (i, chunk) in buffer.chunks_exact(16).enumerate() {\n\n writeln!(stdout, \"{:08x}: {:02x} {:02x} {:02x} {:02x} {:02x} {:02x} {:02x} {:02x} {:02x} {:02x} {:02x} {:02x} {:02x} {:02x} {:02x} {:02x} {}{}{}{}{}{}{}{}{}{}{}{}{}{}{}{}\",\n\n base + i * 16,\n\n chunk[0], chunk[1], chunk[2], chunk[3],\n\n chunk[4], chunk[5], chunk[6], chunk[7],\n\n chunk[8], chunk[9], chunk[10], chunk[11],\n\n chunk[12], chunk[13], chunk[14], chunk[15],\n\n ch(chunk[0]), ch(chunk[1]), ch(chunk[2]), ch(chunk[3]),\n\n ch(chunk[4]), ch(chunk[5]), ch(chunk[6]), ch(chunk[7]),\n\n ch(chunk[8]), ch(chunk[9]), ch(chunk[10]), ch(chunk[11]),\n\n ch(chunk[12]), ch(chunk[13]), ch(chunk[14]), ch(chunk[15]),\n\n ).unwrap();\n\n }\n\n}\n\n\n", "file_path": 
"rust/sdtest/src/main.rs", "rank": 32, "score": 162510.2683395633 }, { "content": "fn read_register<IF: I2C>(i2c: &IF, reg: reg) -> Result<u8, ()> {\n\n let mut reg_val = [0u8; 2];\n\n if i2c.recv_data(&[reg as u8], &mut reg_val).is_ok() {\n\n Ok(reg_val[0])\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n\n/** Set a register of the MSA300 via I2C */\n", "file_path": "rust/k210-shared/src/board/msa300.rs", "rank": 33, "score": 156826.10420897714 }, { "content": "pub fn init() {\n\n unsafe {\n\n // Enable interrupts in general\n\n mstatus::set_mie();\n\n // Set the Machine-Software bit in MIE\n\n mie::set_msoft();\n\n // Set the Machine-External bit in MIE\n\n mie::set_mext();\n\n }\n\n\n\n uart_init(115_200);\n\n uart_enable_intr(true);\n\n}\n", "file_path": "rust/buffered-uart/src/lib.rs", "rank": 34, "score": 154300.55089762935 }, { "content": "pub fn clock() -> u64 {\n\n let freq = sysctl::clock_get_freq(sysctl::clock::CPU) as u64;\n\n let cycles = mcycle::read64();\n\n return cycles * 1_000_000 / freq;\n\n}\n", "file_path": "rust/k210-shared/src/timing.rs", "rank": 35, "score": 145556.68762191737 }, { "content": "fn uart_enable_intr(recv: bool) {\n\n unsafe {\n\n let uart = pac::UART1::ptr();\n\n if recv {\n\n (*uart)\n\n .dlh_ier\n\n .modify(|r, w| w.bits(r.bits() | UART_IER_ERBFI));\n\n plic_set_priority(Interrupt::UART1, 6);\n\n plic_irq_enable(Interrupt::UART1, true);\n\n } else {\n\n (*uart)\n\n .dlh_ier\n\n .modify(|r, w| w.bits(r.bits() & !UART_IER_ERBFI));\n\n plic_set_priority(Interrupt::UART1, 0);\n\n plic_irq_enable(Interrupt::UART1, false);\n\n }\n\n }\n\n}\n\n\n\n/** Send data to UART (blocking) */\n", "file_path": "rust/buffered-uart/src/lib.rs", "rank": 36, "score": 143169.34243488486 }, { "content": "fn uart_init(baud_rate: u32) {\n\n let uart = pac::UART1::ptr();\n\n sysctl::clock_enable(sysctl::clock::UART1);\n\n sysctl::reset(sysctl::reset::UART1);\n\n\n\n // Hardcode these for now:\n\n let data_width = 8; // 8 data bits\n\n let stopbit_val = 0; // 
1 stop bit\n\n let parity_val = 0; // No parity\n\n let divisor = sysctl::clock_get_freq(sysctl::clock::APB0) / baud_rate;\n\n let dlh = ((divisor >> 12) & 0xff) as u8;\n\n let dll = ((divisor >> 4) & 0xff) as u8;\n\n let dlf = (divisor & 0xf) as u8;\n\n unsafe {\n\n // Set Divisor Latch Access Bit (enables DLL DLH) to set baudrate\n\n (*uart).lcr.write(|w| w.bits(1 << 7));\n\n (*uart).dlh_ier.write(|w| w.bits(dlh.into()));\n\n (*uart).rbr_dll_thr.write(|w| w.bits(dll.into()));\n\n (*uart).dlf.write(|w| w.bits(dlf.into()));\n\n // Clear Divisor Latch Access Bit after setting baudrate\n", "file_path": "rust/buffered-uart/src/lib.rs", "rank": 37, "score": 143164.62346516846 }, { "content": "fn ch(i: u8) -> char {\n\n if i >= 0x20 && i < 0x80 {\n\n i.into()\n\n } else {\n\n '.'\n\n }\n\n}\n\n\n", "file_path": "rust/sdtest/src/main.rs", "rank": 38, "score": 141655.45468791077 }, { "content": "pub fn usleep(n: usize) {\n\n let freq = sysctl::clock_get_freq(sysctl::clock::CPU) as usize;\n\n cycle_sleep(freq * n / 1000000);\n\n}\n", "file_path": "rust/k210-shared/src/soc/sleep.rs", "rank": 39, "score": 138436.24361316813 }, { "content": "fn is_memory(address: u64) -> bool {\n\n let mem_len = 6 * 1024 * 1024;\n\n let mem_no_cache_len = 8 * 1024 * 1024;\n\n // Note: This comes from the Kendryte SDK as-is. 
No, I have no idea why the AES accelerator\n\n // input address 0x50450040 is considered memory, either.\n\n ((address >= 0x80000000) && (address < 0x80000000 + mem_len))\n\n || ((address >= 0x40000000) && (address < 0x40000000 + mem_no_cache_len))\n\n || (address == 0x50450040)\n\n}\n\n\n\nimpl DMAC {\n\n fn new(dmac: pac::DMAC) -> Self {\n\n let rv = Self { dmac };\n\n rv.init();\n\n rv\n\n }\n\n\n\n /** Get DMAC ID */\n\n pub fn read_id(&self) -> u64 {\n\n return self.dmac.id.read().bits();\n", "file_path": "rust/k210-shared/src/soc/dmac.rs", "rank": 40, "score": 137442.76899383886 }, { "content": "pub fn init(dvp: &DVP) {\n\n let (_manuf_id, _device_id) = read_id(dvp);\n\n // TODO: do something with the IDs (like check it against expected and fail if different?)\n\n // printf(\"manuf_id:0x%04x,device_id:0x%04x\\n\", v_manuf_id, v_device_id);\n\n for &(register, value) in CONFIG {\n\n dvp.sccb_send_data(OV2640_ADDR, register.into(), value.into());\n\n }\n\n}\n\n\n\n/** Return (manuf_id, device_id) */\n", "file_path": "rust/k210-shared/src/board/ov2640.rs", "rank": 41, "score": 137063.13794491382 }, { "content": "pub fn reset(reset: reset) {\n\n reset_ctl(reset, true);\n\n usleep(10);\n\n reset_ctl(reset, false);\n\n}\n\n\n\n/** Select DMA handshake for a channel */\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 42, "score": 137063.13794491382 }, { "content": "pub fn cycle_sleep(n: usize) {\n\n let start = mcycle::read();\n\n while (mcycle::read().wrapping_sub(start)) < n {\n\n // IDLE\n\n }\n\n}\n\n\n", "file_path": "rust/k210-shared/src/soc/sleep.rs", "rank": 43, "score": 137063.13794491382 }, { "content": "pub fn clock_enable(clock: clock) {\n\n clock_bus_en(clock, true);\n\n clock_device_en(clock, true);\n\n}\n\n\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 44, "score": 135730.63131444683 }, { "content": "pub fn sysctl_clock_disable(clock: clock) {\n\n clock_bus_en(clock, false);\n\n clock_device_en(clock, 
false);\n\n}\n\n\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 45, "score": 134436.94934714393 }, { "content": "pub fn mainloop<P, F, X>(\n\n h: &mut SerialNetworkHandler<X>,\n\n port: &mut P,\n\n mut f: F,\n\n debug: &mut dyn fmt::Write,\n\n) -> io::Result<()>\n\nwhere\n\n P: io::Read,\n\n F: FnMut(&mut SerialNetworkHandler<X>, NetworkEvent, &mut dyn fmt::Write) -> bool,\n\n X: io::Write,\n\n{\n\n let mut serial_buf: Vec<u8> = vec![0; 2560]; // 2048 + some\n\n let mut ofs: usize = 0;\n\n let mut running: bool = true;\n\n while running {\n\n // Receive bytes into buffer\n\n match port.read(&mut serial_buf[ofs..]) {\n\n Ok(t) => {\n\n // io::stdout().write_all(&serial_buf[ofs..ofs+t]).unwrap();\n\n ofs += t;\n", "file_path": "util/esp8266at/src/mainloop.rs", "rank": 46, "score": 133847.12015490377 }, { "content": "fn pll2_cksel_to_source(bits: u8) -> clock_source {\n\n match bits {\n\n 0 => clock_source::IN0,\n\n 1 => clock_source::PLL0,\n\n 2 => clock_source::PLL1,\n\n _ => panic!(\"invalid value for PLL2 ckin_sel\"),\n\n }\n\n}\n\n\n\n/** Map clock source to PLL2 cksel value */\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 47, "score": 133507.9671412691 }, { "content": "fn pll2_source_to_cksel(source: clock_source) -> u8 {\n\n match source {\n\n clock_source::IN0 => 0,\n\n clock_source::PLL0 => 1,\n\n clock_source::PLL1 => 2,\n\n _ => panic!(\"unsupported clock source for PLL2\"),\n\n }\n\n}\n\n\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 48, "score": 133507.9671412691 }, { "content": "pub fn clampf(v: f32) -> f32 {\n\n if v < 0.0 {\n\n 0.0\n\n } else if v > 1.0 {\n\n 1.0\n\n } else {\n\n v\n\n }\n\n}\n", "file_path": "rust/k210-shared/src/board/lcd_colors.rs", "rank": 49, "score": 131141.50785618243 }, { "content": "fn plic_irq_enable(interrupt: Interrupt, enabled: bool) {\n\n let targetid = mhartid::read() * 2;\n\n let irq_nr = interrupt.nr();\n\n unsafe {\n\n let plic = pac::PLIC::ptr();\n\n let bit = 1 << 
((irq_nr as u32) % 32);\n\n if enabled {\n\n (*plic).target_enables[targetid].enable[(irq_nr as usize) / 32]\n\n .modify(|r, w| w.bits(r.bits() | bit));\n\n } else {\n\n (*plic).target_enables[targetid].enable[(irq_nr as usize) / 32]\n\n .modify(|r, w| w.bits(r.bits() & !bit));\n\n }\n\n }\n\n}\n\n\n\n/** Set interrupt priority (0-7) */\n", "file_path": "rust/buffered-uart/src/lib.rs", "rank": 50, "score": 130204.13058269524 }, { "content": "fn plic_set_priority(interrupt: Interrupt, priority: u32) {\n\n let irq_nr = interrupt.nr();\n\n unsafe {\n\n let plic = pac::PLIC::ptr();\n\n (*plic).priority[irq_nr as usize].write(|w| w.bits(priority));\n\n }\n\n}\n\n\n\n/** Initialize UART */\n", "file_path": "rust/buffered-uart/src/lib.rs", "rank": 51, "score": 130199.54910753648 }, { "content": "fn clock_device_en(clock: clock, en: bool) {\n\n unsafe {\n\n let ptr = pac::SYSCTL::ptr();\n\n match clock {\n\n clock::PLL0 => (*ptr).pll0.modify(|_, w| w.out_en().bit(en)),\n\n clock::PLL1 => (*ptr).pll1.modify(|_, w| w.out_en().bit(en)),\n\n clock::PLL2 => (*ptr).pll2.modify(|_, w| w.out_en().bit(en)),\n\n clock::CPU => (*ptr).clk_en_cent.modify(|_, w| w.cpu_clk_en().bit(en)),\n\n clock::SRAM0 => (*ptr).clk_en_cent.modify(|_, w| w.sram0_clk_en().bit(en)),\n\n clock::SRAM1 => (*ptr).clk_en_cent.modify(|_, w| w.sram1_clk_en().bit(en)),\n\n clock::APB0 => (*ptr).clk_en_cent.modify(|_, w| w.apb0_clk_en().bit(en)),\n\n clock::APB1 => (*ptr).clk_en_cent.modify(|_, w| w.apb1_clk_en().bit(en)),\n\n clock::APB2 => (*ptr).clk_en_cent.modify(|_, w| w.apb2_clk_en().bit(en)),\n\n clock::ROM => (*ptr).clk_en_peri.modify(|_, w| w.rom_clk_en().bit(en)),\n\n clock::DMA => (*ptr).clk_en_peri.modify(|_, w| w.dma_clk_en().bit(en)),\n\n clock::AI => (*ptr).clk_en_peri.modify(|_, w| w.ai_clk_en().bit(en)),\n\n clock::DVP => (*ptr).clk_en_peri.modify(|_, w| w.dvp_clk_en().bit(en)),\n\n clock::FFT => (*ptr).clk_en_peri.modify(|_, w| w.fft_clk_en().bit(en)),\n\n clock::SPI3 => 
(*ptr).clk_en_peri.modify(|_, w| w.spi3_clk_en().bit(en)),\n\n clock::GPIO => (*ptr).clk_en_peri.modify(|_, w| w.gpio_clk_en().bit(en)),\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 52, "score": 128944.79912890674 }, { "content": "fn clock_bus_en(clock: clock, en: bool) {\n\n /*\n\n * The timer is under APB0, to prevent apb0_clk_en1 and apb0_clk_en0\n\n * on same register, we split it to peripheral and central two\n\n * registers, to protect CPU close apb0 clock accidentally.\n\n *\n\n * The apb0_clk_en0 and apb0_clk_en1 have same function,\n\n * one of them set, the APB0 clock enable.\n\n */\n\n\n\n /* The APB clock should carefully disable */\n\n if en {\n\n match clock {\n\n /*\n\n * These peripheral devices are under APB0\n\n * GPIO, UART1, UART2, UART3, SPI_SLAVE, I2S0, I2S1,\n\n * I2S2, I2C0, I2C1, I2C2, FPIOA, SHA256, TIMER0,\n\n * TIMER1, TIMER2\n\n */\n\n clock::GPIO\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 53, "score": 128944.79912890674 }, { "content": "pub fn to(ch: char) -> (u16, u16) {\n\n (match ch {\n\n '\\u{0000}' => 0x00, // NUL\n\n '\\u{263a}' => 0x01, // WHITE SMILING FACE\n\n '\\u{263b}' => 0x02, // BLACK SMILING FACE\n\n '\\u{2665}' => 0x03, // BLACK HEART SUIT\n\n '\\u{2666}' => 0x04, // BLACK DIAMOND SUIT\n\n '\\u{2663}' => 0x05, // BLACK CLUB SUIT\n\n '\\u{2660}' => 0x06, // BLACK SPADE SUIT\n\n '\\u{2022}' => 0x07, // BULLET\n\n '\\u{25d8}' => 0x08, // INVERSE BULLET\n\n '\\u{25cb}' => 0x09, // WHITE CIRCLE\n\n '\\u{25d9}' => 0x0a, // INVERSE WHITE CIRCLE\n\n '\\u{2642}' => 0x0b, // MALE SIGN\n\n '\\u{2640}' => 0x0c, // FEMALE SIGN\n\n '\\u{266a}' => 0x0d, // EIGHTH NOTE\n\n '\\u{266b}' => 0x0e, // BEAMED EIGHTH NOTES\n\n '\\u{263c}' => 0x0f, // WHITE SUN WITH RAYS\n\n '\\u{25ba}' => 0x10, // BLACK RIGHT-POINTING POINTER\n\n '\\u{25c4}' => 0x11, // BLACK LEFT-POINTING POINTER\n", "file_path": "rust/k210-console/src/cp437.rs", "rank": 54, "score": 128702.4638407423 }, { "content": "#[entry]\n\nfn 
main() -> ! {\n\n let p = Peripherals::take().unwrap();\n\n sysctl::pll_set_freq(sysctl::pll::PLL0, 800_000_000).unwrap();\n\n sysctl::pll_set_freq(sysctl::pll::PLL1, 300_000_000).unwrap();\n\n sysctl::pll_set_freq(sysctl::pll::PLL2, 45_158_400).unwrap();\n\n let clocks = k210_hal::clock::Clocks::new();\n\n\n\n usleep(200000);\n\n\n\n let serial = p.UARTHS.configure((p.pins.pin5, p.pins.pin4), 115_200.bps(), &clocks);\n\n let (mut tx, _) = serial.split();\n\n\n\n let mut stdout = Stdout(&mut tx);\n\n\n\n let bufsize = unsafe { core::mem::size_of_val(&SECP256K1_BUF) };\n\n writeln!(stdout, \"testing {}(sign {} + verify {}) / {}\",\n\n Secp256k1::preallocate_size(),\n\n Secp256k1::preallocate_signing_size(),\n\n Secp256k1::preallocate_verification_size(),\n\n bufsize).unwrap();\n", "file_path": "rust/secp256k1-test/src/main.rs", "rank": 55, "score": 127443.11082920303 }, { "content": "pub fn read_id(dvp: &DVP) -> (u16, u16) {\n\n // 0xFF RA_DLMT - Register Bank Select: Sensor address\n\n dvp.sccb_send_data(OV2640_ADDR, 0xFF, 0x01);\n\n // 0x1C MIDH - Manufacturer ID Byte – High (Read only = 0x7F)\n\n // 0x1D MIDL - Manufacturer ID Byte – Low (Read only = 0xA2)\n\n let manuf_id = (u16::from(dvp.sccb_receive_data(OV2640_ADDR, 0x1C)) << 8) | u16::from(dvp.sccb_receive_data(OV2640_ADDR, 0x1D));\n\n // 0x0A PIDH - Product ID Number MSB (Read only)\n\n // 0x0B PIDL - Product ID Number LSB (Read only)\n\n let device_id = (u16::from(dvp.sccb_receive_data(OV2640_ADDR, 0x0A)) << 8) | u16::from(dvp.sccb_receive_data(OV2640_ADDR, 0x0B));\n\n (manuf_id, device_id)\n\n}\n", "file_path": "rust/k210-shared/src/board/ov2640.rs", "rank": 56, "score": 124819.74547785925 }, { "content": "pub fn dma_select(channel: dma_channel, select: dma_select)\n\n{\n\n unsafe {\n\n use dma_channel::*;\n\n let ptr = pac::SYSCTL::ptr();\n\n match channel {\n\n CHANNEL0 => (*ptr).dma_sel0.modify(|_,w| w.dma_sel0().variant(select)),\n\n CHANNEL1 => (*ptr).dma_sel0.modify(|_,w| 
w.dma_sel1().variant(select)),\n\n CHANNEL2 => (*ptr).dma_sel0.modify(|_,w| w.dma_sel2().variant(select)),\n\n CHANNEL3 => (*ptr).dma_sel0.modify(|_,w| w.dma_sel3().variant(select)),\n\n CHANNEL4 => (*ptr).dma_sel0.modify(|_,w| w.dma_sel4().variant(select)),\n\n CHANNEL5 => (*ptr).dma_sel1.modify(|_,w| w.dma_sel5().variant(select)),\n\n }\n\n }\n\n}\n\n\n\n/** Return whether the selected PLL has achieved lock */\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 57, "score": 122411.91960249591 }, { "content": "pub trait TruncU32 {\n\n fn trunc(val: u32)-> Self;\n\n}\n\nimpl TruncU32 for u32 { fn trunc(val: u32) -> u32 { return val; } }\n\nimpl TruncU32 for u16 { fn trunc(val: u32) -> u16 { return (val & 0xffff) as u16; } }\n\nimpl TruncU32 for u8 { fn trunc(val: u32) -> u8 { return (val & 0xff) as u8; } }\n\n\n\nimpl<IF: SPI01> SPI for SPIImpl<IF> {\n\n /// Configure SPI transaction\n\n fn configure(\n\n &self,\n\n work_mode: work_mode,\n\n frame_format: frame_format,\n\n data_bit_length: u8,\n\n endian: u32,\n\n instruction_length: u8,\n\n address_length: u8,\n\n wait_cycles: u8,\n\n instruction_address_trans_mode: aitm,\n\n tmod: tmod,\n", "file_path": "rust/k210-shared/src/soc/spi.rs", "rank": 58, "score": 122121.61830413397 }, { "content": "pub fn set_power_mode(power_bank: power_bank, mode: io_power_mode) {\n\n unsafe {\n\n (*pac::SYSCTL::ptr()).power_sel.modify(|r, w| {\n\n w.bits(set_bit(\n\n r.bits(),\n\n power_bank as u8,\n\n match mode {\n\n io_power_mode::V33 => false,\n\n io_power_mode::V18 => true,\n\n },\n\n ))\n\n });\n\n }\n\n}\n\n\n\n/** Route SPI0_D0-D7 DVP_D0-D7 functions to SPI and DVP data pins (bypassing FPIOA). */\n", "file_path": "rust/k210-shared/src/soc/sysctl.rs", "rank": 59, "score": 119042.11732904604 }, { "content": "pub fn rgbf565(r: f32, g: f32, b: f32) -> u16 {\n\n (((r * ALMOST_32) as u16) << 11) |\n\n (((g * ALMOST_64) as u16) << 5) |\n\n ((b * ALMOST_32) as u16)\n\n}\n\n\n\n/** HSV to RGB. 
`h` is 0.0..360.0, `s` and `v` are 0.0..1.0 output RGB will be 0.0..1.0 (all ranges\n\n * inclusive)\n\n */\n", "file_path": "rust/k210-shared/src/board/lcd_colors.rs", "rank": 60, "score": 116686.44660046755 }, { "content": "pub fn set_function<N: Into<usize>>(number: N, function: function) {\n\n // TODO: check for overlapping assignments and assign to RESV0 as the Kendryte SDK does?\n\n unsafe {\n\n let ptr = pac::FPIOA::ptr();\n\n (*ptr).io[number.into()].write(|w| w.bits(FUNCTION_DEFAULTS[function as usize]));\n\n }\n\n}\n\n\n", "file_path": "rust/k210-shared/src/soc/fpioa.rs", "rank": 61, "score": 114278.6207251042 }, { "content": "pub fn set_io_pull<N: Into<usize>>(number: N, pull: pull) {\n\n unsafe {\n\n (*pac::FPIOA::ptr()).io[number.into()].modify(|_, w| match pull {\n\n pull::NONE => w.pu().bit(false).pd().bit(false),\n\n pull::DOWN => w.pu().bit(false).pd().bit(true),\n\n pull::UP => w.pu().bit(true).pd().bit(false),\n\n });\n\n }\n\n}\n", "file_path": "rust/k210-shared/src/soc/fpioa.rs", "rank": 62, "score": 113124.42730325967 }, { "content": "pub fn read<IF: I2C>(i2c: &IF, cmd: command) -> Result<u16, ()>\n\n{\n\n let mut buf = [0u8; 2];\n\n if i2c.recv_data(&[cmd as u8], &mut buf).is_ok() {\n\n Ok((u16::from(buf[0]) << 4) | (u16::from(buf[1]) >> 4))\n\n } else {\n\n Err(())\n\n }\n\n}\n\n\n\n/* high level functions */\n\n\n\n/** Position filter */\n\npub struct TSFilter {\n\n mx: filters::Median<i32>,\n\n my: filters::Median<i32>,\n\n nx: filters::Mean<i32>,\n\n ny: filters::Mean<i32>,\n\n cal: [i32; 7],\n\n}\n", "file_path": "rust/k210-shared/src/board/ns2009.rs", "rank": 63, "score": 112279.74009758426 }, { "content": "pub fn hsv2rgb(h: f32, s: f32, v: f32) -> (f32, f32, f32) {\n\n let h = h / 60.0;\n\n let i = h.trunc();\n\n let f = h - i;\n\n\n\n let c = v * (1.0 - s * f);\n\n let b = v * (1.0 - s + s * f);\n\n let o = v * (1.0 - s);\n\n match i as u32 {\n\n // yellow to green\n\n 1 => (c, v, o),\n\n // green to cyan\n\n 2 => (o, v, b),\n\n // 
cyan to blue\n\n 3 => (o, c, v),\n\n // blue to magenta\n\n 4 => (b, o, v),\n\n // magenta to red\n\n 5 => (v, o, c),\n\n // red to yellow\n\n _ => (v, b, o),\n\n }\n\n}\n\n\n\n/** Clamp a float between 0 and 1 */\n", "file_path": "rust/k210-shared/src/board/lcd_colors.rs", "rank": 64, "score": 110527.23619024694 }, { "content": "/// Trait for generalizing over SPI0 and SPI1 (SPI2 is slave-only and SPI3 is !!!special!!!)\n\npub trait SPI01: Deref<Target = spi0::RegisterBlock> {\n\n #[doc(hidden)]\n\n const CLK: sysctl::clock;\n\n #[doc(hidden)]\n\n const DIV: sysctl::threshold;\n\n #[doc(hidden)]\n\n const DMA_RX: sysctl::dma_select;\n\n #[doc(hidden)]\n\n const DMA_TX: sysctl::dma_select;\n\n}\n\n\n\nimpl SPI01 for SPI0 {\n\n const CLK: sysctl::clock = sysctl::clock::SPI0;\n\n const DIV: sysctl::threshold = sysctl::threshold::SPI0;\n\n const DMA_RX: sysctl::dma_select = sysctl::dma_select::SSI0_RX_REQ;\n\n const DMA_TX: sysctl::dma_select = sysctl::dma_select::SSI0_TX_REQ;\n\n}\n\nimpl SPI01 for SPI1 {\n\n const CLK: sysctl::clock = sysctl::clock::SPI1;\n\n const DIV: sysctl::threshold = sysctl::threshold::SPI1;\n", "file_path": "rust/k210-shared/src/soc/spi.rs", "rank": 65, "score": 106146.70565898767 }, { "content": "pub trait TimerExt: Deref<Target = timer0::RegisterBlock> + Sized {\n\n #[doc(hidden)]\n\n const CLK: sysctl::clock;\n\n #[doc(hidden)]\n\n const DIV: sysctl::threshold;\n\n\n\n /// Constrains TIMER peripheral for PWM use\n\n /// A timer channel can either be used for PWM or as a normal timer (say, for interrupt\n\n /// generation). 
Currently this has a larger granularity than needed and\n\n /// constrains the entire peripheral for PWM use.\n\n fn constrain_pwm(self) -> PWMImpl<Self>;\n\n}\n\n\n\nimpl TimerExt for TIMER0 {\n\n const CLK: sysctl::clock = sysctl::clock::TIMER0;\n\n const DIV: sysctl::threshold = sysctl::threshold::TIMER0;\n\n\n\n fn constrain_pwm(self) -> PWMImpl<Self> { PWMImpl::<Self> { timer: self } }\n\n}\n\nimpl TimerExt for TIMER1 {\n", "file_path": "rust/k210-shared/src/soc/pwm.rs", "rank": 66, "score": 102139.02014855674 }, { "content": "/// Trait for generalizing over I2C0-2\n\npub trait I2CExt: Deref<Target = i2c0::RegisterBlock> + Sized {\n\n #[doc(hidden)]\n\n const CLK: sysctl::clock;\n\n #[doc(hidden)]\n\n const DIV: sysctl::threshold;\n\n #[doc(hidden)]\n\n const RESET: sysctl::reset;\n\n\n\n /// Constrains I2C peripheral so it plays nicely with the other abstractions\n\n fn constrain(self) -> I2CImpl<Self>;\n\n}\n\n\n\nimpl I2CExt for I2C0 {\n\n const CLK: sysctl::clock = sysctl::clock::I2C0;\n\n const DIV: sysctl::threshold = sysctl::threshold::I2C0;\n\n const RESET: sysctl::reset = sysctl::reset::I2C0;\n\n\n\n fn constrain(self) -> I2CImpl<Self> { I2CImpl::<Self> { i2c: self } }\n\n}\n\nimpl I2CExt for I2C1 {\n", "file_path": "rust/k210-shared/src/soc/i2c.rs", "rank": 67, "score": 100978.01245107138 }, { "content": "void test_num_mod(void) {\n\n int i;\n\n secp256k1_scalar s;\n\n secp256k1_num order, n;\n\n\n\n /* check that 0 mod anything is 0 */\n\n random_scalar_order_test(&s);\n\n secp256k1_scalar_get_num(&order, &s);\n\n secp256k1_scalar_set_int(&s, 0);\n\n secp256k1_scalar_get_num(&n, &s);\n\n secp256k1_num_mod(&n, &order);\n\n CHECK(secp256k1_num_is_zero(&n));\n\n\n\n /* check that anything mod 1 is 0 */\n\n secp256k1_scalar_set_int(&s, 1);\n\n secp256k1_scalar_get_num(&order, &s);\n\n secp256k1_scalar_get_num(&n, &s);\n\n secp256k1_num_mod(&n, &order);\n\n CHECK(secp256k1_num_is_zero(&n));\n\n\n\n /* check that increasing the number past 2^256 does not 
break this */\n\n random_scalar_order_test(&s);\n\n secp256k1_scalar_get_num(&n, &s);\n\n /* multiply by 2^8, which'll test this case with high probability */\n\n for (i = 0; i < 8; ++i) {\n\n secp256k1_num_add(&n, &n, &n);\n\n }\n\n secp256k1_num_mod(&n, &order);\n\n CHECK(secp256k1_num_is_zero(&n));\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 68, "score": 100626.99776641771 }, { "content": "void test_ecmult_constants(void) {\n\n /* Test ecmult_gen() for [0..36) and [order-36..0). */\n\n secp256k1_scalar x;\n\n secp256k1_gej r;\n\n secp256k1_ge ng;\n\n int i;\n\n int j;\n\n secp256k1_ge_neg(&ng, &secp256k1_ge_const_g);\n\n for (i = 0; i < 36; i++ ) {\n\n secp256k1_scalar_set_int(&x, i);\n\n secp256k1_ecmult_gen(&ctx->ecmult_gen_ctx, &r, &x);\n\n for (j = 0; j < i; j++) {\n\n if (j == i - 1) {\n\n ge_equals_gej(&secp256k1_ge_const_g, &r);\n\n }\n\n secp256k1_gej_add_ge(&r, &r, &ng);\n\n }\n\n CHECK(secp256k1_gej_is_infinity(&r));\n\n }\n\n for (i = 1; i <= 36; i++ ) {\n\n secp256k1_scalar_set_int(&x, i);\n\n secp256k1_scalar_negate(&x, &x);\n\n secp256k1_ecmult_gen(&ctx->ecmult_gen_ctx, &r, &x);\n\n for (j = 0; j < i; j++) {\n\n if (j == i - 1) {\n\n ge_equals_gej(&ng, &r);\n\n }\n\n secp256k1_gej_add_ge(&r, &r, &secp256k1_ge_const_g);\n\n }\n\n CHECK(secp256k1_gej_is_infinity(&r));\n\n }\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 69, "score": 100617.390998659 }, { "content": "void test_constant_wnaf(const secp256k1_scalar *number, int w) {\n\n secp256k1_scalar x, shift;\n\n int wnaf[256] = {0};\n\n int i;\n\n int skew;\n\n int bits = 256;\n\n secp256k1_scalar num = *number;\n\n\n\n secp256k1_scalar_set_int(&x, 0);\n\n secp256k1_scalar_set_int(&shift, 1 << w);\n\n /* With USE_ENDOMORPHISM on we only consider 128-bit numbers */\n\n#ifdef USE_ENDOMORPHISM\n\n for (i = 0; i < 16; ++i) {\n\n secp256k1_scalar_shr_int(&num, 8);\n\n }\n\n bits = 128;\n\n#endif\n\n skew = secp256k1_wnaf_const(wnaf, num, w, bits);\n\n\n\n for (i = 
WNAF_SIZE_BITS(bits, w); i >= 0; --i) {\n\n secp256k1_scalar t;\n\n int v = wnaf[i];\n\n CHECK(v != 0); /* check nonzero */\n\n CHECK(v & 1); /* check parity */\n\n CHECK(v > -(1 << w)); /* check range above */\n\n CHECK(v < (1 << w)); /* check range below */\n\n\n\n secp256k1_scalar_mul(&x, &x, &shift);\n\n if (v >= 0) {\n\n secp256k1_scalar_set_int(&t, v);\n\n } else {\n\n secp256k1_scalar_set_int(&t, -v);\n\n secp256k1_scalar_negate(&t, &t);\n\n }\n\n secp256k1_scalar_add(&x, &x, &t);\n\n }\n\n /* Skew num because when encoding numbers as odd we use an offset */\n\n secp256k1_scalar_cadd_bit(&num, skew == 2, 1);\n\n CHECK(secp256k1_scalar_eq(&x, &num));\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 70, "score": 100617.390998659 }, { "content": "static int nonce_function_test_fail(unsigned char *nonce32, const unsigned char *msg32, const unsigned char *key32, const unsigned char *algo16, void *data, unsigned int counter) {\n\n /* Dummy nonce generator that has a fatal error on the first counter value. 
*/\n\n if (counter == 0) {\n\n return 0;\n\n }\n\n return nonce_function_rfc6979(nonce32, msg32, key32, algo16, data, counter - 1);\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 71, "score": 99613.89247378618 }, { "content": "void test_constant_wnaf_negate(const secp256k1_scalar *number) {\n\n secp256k1_scalar neg1 = *number;\n\n secp256k1_scalar neg2 = *number;\n\n int sign1 = 1;\n\n int sign2 = 1;\n\n\n\n if (!secp256k1_scalar_get_bits(&neg1, 0, 1)) {\n\n secp256k1_scalar_negate(&neg1, &neg1);\n\n sign1 = -1;\n\n }\n\n sign2 = secp256k1_scalar_cond_negate(&neg2, secp256k1_scalar_is_even(&neg2));\n\n CHECK(sign1 == sign2);\n\n CHECK(secp256k1_scalar_eq(&neg1, &neg2));\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 72, "score": 99599.29552802427 }, { "content": "void run_ecmult_const_tests(void) {\n\n ecmult_const_mult_zero_one();\n\n ecmult_const_random_mult();\n\n ecmult_const_commutativity();\n\n ecmult_const_chain_multiply();\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 73, "score": 99562.57922517386 }, { "content": "void test_point_times_order(const secp256k1_gej *point) {\n\n /* X * (point + G) + (order-X) * (pointer + G) = 0 */\n\n secp256k1_scalar x;\n\n secp256k1_scalar nx;\n\n secp256k1_scalar zero = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 0);\n\n secp256k1_scalar one = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 1);\n\n secp256k1_gej res1, res2;\n\n secp256k1_ge res3;\n\n unsigned char pub[65];\n\n size_t psize = 65;\n\n random_scalar_order_test(&x);\n\n secp256k1_scalar_negate(&nx, &x);\n\n secp256k1_ecmult(&ctx->ecmult_ctx, &res1, point, &x, &x); /* calc res1 = x * point + x * G; */\n\n secp256k1_ecmult(&ctx->ecmult_ctx, &res2, point, &nx, &nx); /* calc res2 = (order - x) * point + (order - x) * G; */\n\n secp256k1_gej_add_var(&res1, &res1, &res2, NULL);\n\n CHECK(secp256k1_gej_is_infinity(&res1));\n\n CHECK(secp256k1_gej_is_valid_var(&res1) == 0);\n\n secp256k1_ge_set_gej(&res3, &res1);\n\n 
CHECK(secp256k1_ge_is_infinity(&res3));\n\n CHECK(secp256k1_ge_is_valid_var(&res3) == 0);\n\n CHECK(secp256k1_eckey_pubkey_serialize(&res3, pub, &psize, 0) == 0);\n\n psize = 65;\n\n CHECK(secp256k1_eckey_pubkey_serialize(&res3, pub, &psize, 1) == 0);\n\n /* check zero/one edge cases */\n\n secp256k1_ecmult(&ctx->ecmult_ctx, &res1, point, &zero, &zero);\n\n secp256k1_ge_set_gej(&res3, &res1);\n\n CHECK(secp256k1_ge_is_infinity(&res3));\n\n secp256k1_ecmult(&ctx->ecmult_ctx, &res1, point, &one, &zero);\n\n secp256k1_ge_set_gej(&res3, &res1);\n\n ge_equals_gej(&res3, point);\n\n secp256k1_ecmult(&ctx->ecmult_ctx, &res1, point, &zero, &one);\n\n secp256k1_ge_set_gej(&res3, &res1);\n\n ge_equals_ge(&res3, &secp256k1_ge_const_g);\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 74, "score": 99553.42167311785 }, { "content": "void test_ecmult_multi_pippenger_max_points(void) {\n\n size_t scratch_size = secp256k1_rand_int(256);\n\n size_t max_size = secp256k1_pippenger_scratch_size(secp256k1_pippenger_bucket_window_inv(PIPPENGER_MAX_BUCKET_WINDOW-1)+512, 12);\n\n secp256k1_scratch *scratch;\n\n size_t n_points_supported;\n\n int bucket_window = 0;\n\n\n\n for(; scratch_size < max_size; scratch_size+=256) {\n\n scratch = secp256k1_scratch_create(&ctx->error_callback, scratch_size);\n\n CHECK(scratch != NULL);\n\n n_points_supported = secp256k1_pippenger_max_points(scratch);\n\n if (n_points_supported == 0) {\n\n secp256k1_scratch_destroy(scratch);\n\n continue;\n\n }\n\n bucket_window = secp256k1_pippenger_bucket_window(n_points_supported);\n\n CHECK(secp256k1_scratch_allocate_frame(scratch, secp256k1_pippenger_scratch_size(n_points_supported, bucket_window), PIPPENGER_SCRATCH_OBJECTS));\n\n secp256k1_scratch_deallocate_frame(scratch);\n\n secp256k1_scratch_destroy(scratch);\n\n }\n\n CHECK(bucket_window == PIPPENGER_MAX_BUCKET_WINDOW);\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 75, "score": 97597.10262065115 }, { "content": "void 
run_ecmult_constants(void) {\n\n test_ecmult_constants();\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 76, "score": 96272.02132555543 }, { "content": "void ecmult_const_commutativity(void) {\n\n secp256k1_scalar a;\n\n secp256k1_scalar b;\n\n secp256k1_gej res1;\n\n secp256k1_gej res2;\n\n secp256k1_ge mid1;\n\n secp256k1_ge mid2;\n\n random_scalar_order_test(&a);\n\n random_scalar_order_test(&b);\n\n\n\n secp256k1_ecmult_const(&res1, &secp256k1_ge_const_g, &a, 256);\n\n secp256k1_ecmult_const(&res2, &secp256k1_ge_const_g, &b, 256);\n\n secp256k1_ge_set_gej(&mid1, &res1);\n\n secp256k1_ge_set_gej(&mid2, &res2);\n\n secp256k1_ecmult_const(&res1, &mid1, &b, 256);\n\n secp256k1_ecmult_const(&res2, &mid2, &a, 256);\n\n secp256k1_ge_set_gej(&mid1, &res1);\n\n secp256k1_ge_set_gej(&mid2, &res2);\n\n ge_equals_ge(&mid1, &mid2);\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 77, "score": 96234.76222159315 }, { "content": "static void secp256k1_ecmult_const(secp256k1_gej *r, const secp256k1_ge *a, const secp256k1_scalar *q, int bits);\n", "file_path": "src/secp256k1_tests/ecmult_const.h", "rank": 78, "score": 95836.51494022962 }, { "content": "void ecmult_const_random_mult(void) {\n\n /* random starting point A (on the curve) */\n\n secp256k1_ge a = SECP256K1_GE_CONST(\n\n 0x6d986544, 0x57ff52b8, 0xcf1b8126, 0x5b802a5b,\n\n 0xa97f9263, 0xb1e88044, 0x93351325, 0x91bc450a,\n\n 0x535c59f7, 0x325e5d2b, 0xc391fbe8, 0x3c12787c,\n\n 0x337e4a98, 0xe82a9011, 0x0123ba37, 0xdd769c7d\n\n );\n\n /* random initial factor xn */\n\n secp256k1_scalar xn = SECP256K1_SCALAR_CONST(\n\n 0x649d4f77, 0xc4242df7, 0x7f2079c9, 0x14530327,\n\n 0xa31b876a, 0xd2d8ce2a, 0x2236d5c6, 0xd7b2029b\n\n );\n\n /* expected xn * A (from sage) */\n\n secp256k1_ge expected_b = SECP256K1_GE_CONST(\n\n 0x23773684, 0x4d209dc7, 0x098a786f, 0x20d06fcd,\n\n 0x070a38bf, 0xc11ac651, 0x03004319, 0x1e2a8786,\n\n 0xed8c3b8e, 0xc06dd57b, 0xd06ea66e, 0x45492b0f,\n\n 0xb84e4e1b, 0xfb77e21f, 0x96baae2a, 
0x63dec956\n\n );\n\n secp256k1_gej b;\n\n secp256k1_ecmult_const(&b, &a, &xn, 256);\n\n\n\n CHECK(secp256k1_ge_is_valid_var(&a));\n\n ge_equals_gej(&expected_b, &b);\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 79, "score": 95117.62833945428 }, { "content": "void ecmult_const_chain_multiply(void) {\n\n /* Check known result (randomly generated test problem from sage) */\n\n const secp256k1_scalar scalar = SECP256K1_SCALAR_CONST(\n\n 0x4968d524, 0x2abf9b7a, 0x466abbcf, 0x34b11b6d,\n\n 0xcd83d307, 0x827bed62, 0x05fad0ce, 0x18fae63b\n\n );\n\n const secp256k1_gej expected_point = SECP256K1_GEJ_CONST(\n\n 0x5494c15d, 0x32099706, 0xc2395f94, 0x348745fd,\n\n 0x757ce30e, 0x4e8c90fb, 0xa2bad184, 0xf883c69f,\n\n 0x5d195d20, 0xe191bf7f, 0x1be3e55f, 0x56a80196,\n\n 0x6071ad01, 0xf1462f66, 0xc997fa94, 0xdb858435\n\n );\n\n secp256k1_gej point;\n\n secp256k1_ge res;\n\n int i;\n\n\n\n secp256k1_gej_set_ge(&point, &secp256k1_ge_const_g);\n\n for (i = 0; i < 100; ++i) {\n\n secp256k1_ge tmp;\n\n secp256k1_ge_set_gej(&tmp, &point);\n\n secp256k1_ecmult_const(&point, &tmp, &scalar, 256);\n\n }\n\n secp256k1_ge_set_gej(&res, &point);\n\n ge_equals_gej(&res, &expected_point);\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 80, "score": 95117.62833945428 }, { "content": "void run_point_times_order(void) {\n\n int i;\n\n secp256k1_fe x = SECP256K1_FE_CONST(0, 0, 0, 0, 0, 0, 0, 2);\n\n static const secp256k1_fe xr = SECP256K1_FE_CONST(\n\n 0x7603CB59, 0xB0EF6C63, 0xFE608479, 0x2A0C378C,\n\n 0xDB3233A8, 0x0F8A9A09, 0xA877DEAD, 0x31B38C45\n\n );\n\n for (i = 0; i < 500; i++) {\n\n secp256k1_ge p;\n\n if (secp256k1_ge_set_xo_var(&p, &x, 1)) {\n\n secp256k1_gej j;\n\n CHECK(secp256k1_ge_is_valid_var(&p));\n\n secp256k1_gej_set_ge(&j, &p);\n\n CHECK(secp256k1_gej_is_valid_var(&j));\n\n test_point_times_order(&j);\n\n }\n\n secp256k1_fe_sqr(&x, &x);\n\n }\n\n secp256k1_fe_normalize_var(&x);\n\n CHECK(secp256k1_fe_equal_var(&x, &xr));\n", "file_path": 
"src/secp256k1_tests/tests.c", "rank": 81, "score": 95108.47078739827 }, { "content": "static int secp256k1_wnaf_const(int *wnaf, secp256k1_scalar s, int w, int size) {\n\n int global_sign;\n\n int skew = 0;\n\n int word = 0;\n\n\n\n /* 1 2 3 */\n\n int u_last;\n\n int u;\n\n\n\n int flip;\n\n int bit;\n\n secp256k1_scalar neg_s;\n\n int not_neg_one;\n\n /* Note that we cannot handle even numbers by negating them to be odd, as is\n\n * done in other implementations, since if our scalars were specified to have\n\n * width < 256 for performance reasons, their negations would have width 256\n\n * and we'd lose any performance benefit. Instead, we use a technique from\n\n * Section 4.2 of the Okeya/Tagaki paper, which is to add either 1 (for even)\n\n * or 2 (for odd) to the number we are encoding, returning a skew value indicating\n\n * this, and having the caller compensate after doing the multiplication.\n\n *\n\n * In fact, we _do_ want to negate numbers to minimize their bit-lengths (and in\n\n * particular, to ensure that the outputs from the endomorphism-split fit into\n\n * 128 bits). If we negate, the parity of our number flips, inverting which of\n\n * {1, 2} we want to add to the scalar when ensuring that it's odd. Further\n\n * complicating things, -1 interacts badly with `secp256k1_scalar_cadd_bit` and\n\n * we need to special-case it in this logic. */\n\n flip = secp256k1_scalar_is_high(&s);\n\n /* We add 1 to even numbers, 2 to odd ones, noting that negation flips parity */\n\n bit = flip ^ !secp256k1_scalar_is_even(&s);\n\n /* We check for negative one, since adding 2 to it will cause an overflow */\n\n secp256k1_scalar_negate(&neg_s, &s);\n\n not_neg_one = !secp256k1_scalar_is_one(&neg_s);\n\n secp256k1_scalar_cadd_bit(&s, bit, not_neg_one);\n\n /* If we had negative one, flip == 1, s.d[0] == 0, bit == 1, so caller expects\n\n * that we added two to it and flipped it. In fact for -1 these operations are\n\n * identical. 
We only flipped, but since skewing is required (in the sense that\n\n * the skew must be 1 or 2, never zero) and flipping is not, we need to change\n\n * our flags to claim that we only skewed. */\n\n global_sign = secp256k1_scalar_cond_negate(&s, flip);\n\n global_sign *= not_neg_one * 2 - 1;\n\n skew = 1 << bit;\n\n\n\n /* 4 */\n\n u_last = secp256k1_scalar_shr_int(&s, w);\n\n while (word * w < size) {\n\n int sign;\n\n int even;\n\n\n\n /* 4.1 4.4 */\n\n u = secp256k1_scalar_shr_int(&s, w);\n\n /* 4.2 */\n\n even = ((u & 1) == 0);\n\n sign = 2 * (u_last > 0) - 1;\n\n u += sign * even;\n\n u_last -= sign * even * (1 << w);\n\n\n\n /* 4.3, adapted for global sign change */\n\n wnaf[word++] = u_last * global_sign;\n\n\n\n u_last = u;\n\n }\n\n wnaf[word] = u * global_sign;\n\n\n\n VERIFY_CHECK(secp256k1_scalar_is_zero(&s));\n\n VERIFY_CHECK(word == WNAF_SIZE_BITS(size, w));\n\n return skew;\n", "file_path": "src/secp256k1_tests/ecmult_const_impl.h", "rank": 82, "score": 94751.77613523536 }, { "content": "static void secp256k1_ecmult_const(secp256k1_gej *r, const secp256k1_ge *a, const secp256k1_scalar *scalar, int size) {\n\n secp256k1_ge pre_a[ECMULT_TABLE_SIZE(WINDOW_A)];\n\n secp256k1_ge tmpa;\n\n secp256k1_fe Z;\n\n\n\n int skew_1;\n\n#ifdef USE_ENDOMORPHISM\n\n secp256k1_ge pre_a_lam[ECMULT_TABLE_SIZE(WINDOW_A)];\n\n int wnaf_lam[1 + WNAF_SIZE(WINDOW_A - 1)];\n\n int skew_lam;\n\n secp256k1_scalar q_1, q_lam;\n\n#endif\n\n int wnaf_1[1 + WNAF_SIZE(WINDOW_A - 1)];\n\n\n\n int i;\n\n secp256k1_scalar sc = *scalar;\n\n\n\n /* build wnaf representation for q. 
*/\n\n int rsize = size;\n\n#ifdef USE_ENDOMORPHISM\n\n if (size > 128) {\n\n rsize = 128;\n\n /* split q into q_1 and q_lam (where q = q_1 + q_lam*lambda, and q_1 and q_lam are ~128 bit) */\n\n secp256k1_scalar_split_lambda(&q_1, &q_lam, &sc);\n\n skew_1 = secp256k1_wnaf_const(wnaf_1, q_1, WINDOW_A - 1, 128);\n\n skew_lam = secp256k1_wnaf_const(wnaf_lam, q_lam, WINDOW_A - 1, 128);\n\n } else\n\n#endif\n\n {\n\n skew_1 = secp256k1_wnaf_const(wnaf_1, sc, WINDOW_A - 1, size);\n\n#ifdef USE_ENDOMORPHISM\n\n skew_lam = 0;\n\n#endif\n\n }\n\n\n\n /* Calculate odd multiples of a.\n\n * All multiples are brought to the same Z 'denominator', which is stored\n\n * in Z. Due to secp256k1' isomorphism we can do all operations pretending\n\n * that the Z coordinate was 1, use affine addition formulae, and correct\n\n * the Z coordinate of the result once at the end.\n\n */\n\n secp256k1_gej_set_ge(r, a);\n\n secp256k1_ecmult_odd_multiples_table_globalz_windowa(pre_a, &Z, r);\n\n for (i = 0; i < ECMULT_TABLE_SIZE(WINDOW_A); i++) {\n\n secp256k1_fe_normalize_weak(&pre_a[i].y);\n\n }\n\n#ifdef USE_ENDOMORPHISM\n\n if (size > 128) {\n\n for (i = 0; i < ECMULT_TABLE_SIZE(WINDOW_A); i++) {\n\n secp256k1_ge_mul_lambda(&pre_a_lam[i], &pre_a[i]);\n\n }\n\n }\n\n#endif\n\n\n\n /* first loop iteration (separated out so we can directly set r, rather\n\n * than having it start at infinity, get doubled several times, then have\n\n * its new value added to it) */\n\n i = wnaf_1[WNAF_SIZE_BITS(rsize, WINDOW_A - 1)];\n\n VERIFY_CHECK(i != 0);\n\n ECMULT_CONST_TABLE_GET_GE(&tmpa, pre_a, i, WINDOW_A);\n\n secp256k1_gej_set_ge(r, &tmpa);\n\n#ifdef USE_ENDOMORPHISM\n\n if (size > 128) {\n\n i = wnaf_lam[WNAF_SIZE_BITS(rsize, WINDOW_A - 1)];\n\n VERIFY_CHECK(i != 0);\n\n ECMULT_CONST_TABLE_GET_GE(&tmpa, pre_a_lam, i, WINDOW_A);\n\n secp256k1_gej_add_ge(r, r, &tmpa);\n\n }\n\n#endif\n\n /* remaining loop iterations */\n\n for (i = WNAF_SIZE_BITS(rsize, WINDOW_A - 1) - 1; i >= 0; i--) {\n\n int 
n;\n\n int j;\n\n for (j = 0; j < WINDOW_A - 1; ++j) {\n\n secp256k1_gej_double_nonzero(r, r, NULL);\n\n }\n\n\n\n n = wnaf_1[i];\n\n ECMULT_CONST_TABLE_GET_GE(&tmpa, pre_a, n, WINDOW_A);\n\n VERIFY_CHECK(n != 0);\n\n secp256k1_gej_add_ge(r, r, &tmpa);\n\n#ifdef USE_ENDOMORPHISM\n\n if (size > 128) {\n\n n = wnaf_lam[i];\n\n ECMULT_CONST_TABLE_GET_GE(&tmpa, pre_a_lam, n, WINDOW_A);\n\n VERIFY_CHECK(n != 0);\n\n secp256k1_gej_add_ge(r, r, &tmpa);\n\n }\n\n#endif\n\n }\n\n\n\n secp256k1_fe_mul(&r->z, &r->z, &Z);\n\n\n\n {\n\n /* Correct for wNAF skew */\n\n secp256k1_ge correction = *a;\n\n secp256k1_ge_storage correction_1_stor;\n\n#ifdef USE_ENDOMORPHISM\n\n secp256k1_ge_storage correction_lam_stor;\n\n#endif\n\n secp256k1_ge_storage a2_stor;\n\n secp256k1_gej tmpj;\n\n secp256k1_gej_set_ge(&tmpj, &correction);\n\n secp256k1_gej_double_var(&tmpj, &tmpj, NULL);\n\n secp256k1_ge_set_gej(&correction, &tmpj);\n\n secp256k1_ge_to_storage(&correction_1_stor, a);\n\n#ifdef USE_ENDOMORPHISM\n\n if (size > 128) {\n\n secp256k1_ge_to_storage(&correction_lam_stor, a);\n\n }\n\n#endif\n\n secp256k1_ge_to_storage(&a2_stor, &correction);\n\n\n\n /* For odd numbers this is 2a (so replace it), for even ones a (so no-op) */\n\n secp256k1_ge_storage_cmov(&correction_1_stor, &a2_stor, skew_1 == 2);\n\n#ifdef USE_ENDOMORPHISM\n\n if (size > 128) {\n\n secp256k1_ge_storage_cmov(&correction_lam_stor, &a2_stor, skew_lam == 2);\n\n }\n\n#endif\n\n\n\n /* Apply the correction */\n\n secp256k1_ge_from_storage(&correction, &correction_1_stor);\n\n secp256k1_ge_neg(&correction, &correction);\n\n secp256k1_gej_add_ge(r, r, &correction);\n\n\n\n#ifdef USE_ENDOMORPHISM\n\n if (size > 128) {\n\n secp256k1_ge_from_storage(&correction, &correction_lam_stor);\n\n secp256k1_ge_neg(&correction, &correction);\n\n secp256k1_ge_mul_lambda(&correction, &correction);\n\n secp256k1_gej_add_ge(r, r, &correction);\n\n }\n\n#endif\n\n }\n", "file_path": "src/secp256k1_tests/ecmult_const_impl.h", "rank": 83, 
"score": 94751.77613523536 }, { "content": "static void uncounting_illegal_callback_fn(const char* str, void* data) {\n\n /* Dummy callback function that just counts (backwards). */\n\n int32_t *p;\n\n (void)str;\n\n p = data;\n\n (*p)--;\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 84, "score": 94628.47305580051 }, { "content": "static void counting_illegal_callback_fn(const char* str, void* data) {\n\n /* Dummy callback function that just counts. */\n\n int32_t *p;\n\n (void)str;\n\n p = data;\n\n (*p)++;\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 85, "score": 94628.47305580051 }, { "content": "void ecmult_const_mult_zero_one(void) {\n\n secp256k1_scalar zero = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 0);\n\n secp256k1_scalar one = SECP256K1_SCALAR_CONST(0, 0, 0, 0, 0, 0, 0, 1);\n\n secp256k1_scalar negone;\n\n secp256k1_gej res1;\n\n secp256k1_ge res2;\n\n secp256k1_ge point;\n\n secp256k1_scalar_negate(&negone, &one);\n\n\n\n random_group_element_test(&point);\n\n secp256k1_ecmult_const(&res1, &point, &zero, 3);\n\n secp256k1_ge_set_gej(&res2, &res1);\n\n CHECK(secp256k1_ge_is_infinity(&res2));\n\n secp256k1_ecmult_const(&res1, &point, &one, 2);\n\n secp256k1_ge_set_gej(&res2, &res1);\n\n ge_equals_ge(&res2, &point);\n\n secp256k1_ecmult_const(&res1, &point, &negone, 256);\n\n secp256k1_gej_neg(&res1, &res1);\n\n secp256k1_ge_set_gej(&res2, &res1);\n\n ge_equals_ge(&res2, &point);\n", "file_path": "src/secp256k1_tests/tests.c", "rank": 86, "score": 94027.54850556442 }, { "content": "static int secp256k1_test_rng_precomputed_used = 8;\n", "file_path": "src/secp256k1_tests/testrand_impl.h", "rank": 96, "score": 93013.36626517303 }, { "content": " secp256k1_pubkey point;\n", "file_path": "src/secp256k1_bench/main.c", "rank": 97, "score": 88161.56711788254 }, { "content": " * the comment in ecmult_gen_impl.h for rationale. 
*/ \\\n", "file_path": "src/secp256k1_tests/ecmult_const_impl.h", "rank": 98, "score": 87786.82732506307 }, { "content": "static int secp256k1_scalar_eq(const secp256k1_scalar *a, const secp256k1_scalar *b);\n", "file_path": "src/secp256k1_tests/scalar.h", "rank": 99, "score": 86521.5790645835 } ]
Rust
src/support/mod.rs
Twinklebear/tobj_viewer
c6f59993eb4bf0a7b1262602fec17884ec48c1f3
#![allow(dead_code)] extern crate clock_ticks; extern crate tobj; use glium::vertex::VertexBufferAny; use glium::{self, Display}; use std::f32; use std::path::Path; use std::thread; use std::time::{Duration, Instant}; pub mod camera; pub enum Action { Stop, Continue, } pub fn start_loop<F>(mut callback: F) where F: FnMut() -> Action, { let mut accumulator = Duration::new(0, 0); let mut previous_clock = Instant::now(); loop { match callback() { Action::Stop => break, Action::Continue => (), }; let now = Instant::now(); accumulator += now - previous_clock; previous_clock = now; let fixed_time_stamp = Duration::new(0, 16666667); while accumulator >= fixed_time_stamp { accumulator -= fixed_time_stamp; } thread::sleep(fixed_time_stamp - accumulator); } } pub fn load_wavefront(display: &Display, path: &Path) -> (VertexBufferAny, f32) { #[derive(Copy, Clone)] struct Vertex { position: [f32; 3], normal: [f32; 3], color_diffuse: [f32; 3], color_specular: [f32; 4], } implement_vertex!(Vertex, position, normal, color_diffuse, color_specular); let mut min_pos = [f32::INFINITY; 3]; let mut max_pos = [f32::NEG_INFINITY; 3]; let mut vertex_data = Vec::new(); match tobj::load_obj(path) { Ok((models, mats)) => { for model in &models { let mesh = &model.mesh; println!("Uploading model: {}", model.name); for idx in &mesh.indices { let i = *idx as usize; let pos = [ mesh.positions[3 * i], mesh.positions[3 * i + 1], mesh.positions[3 * i + 2], ]; let normal = if !mesh.normals.is_empty() { [ mesh.normals[3 * i], mesh.normals[3 * i + 1], mesh.normals[3 * i + 2], ] } else { [0.0, 0.0, 0.0] }; let (color_diffuse, color_specular) = match mesh.material_id { Some(i) => ( mats[i].diffuse, [ mats[i].specular[0], mats[i].specular[1], mats[i].specular[2], mats[i].shininess, ], ), None => ([0.8, 0.8, 0.8], [0.15, 0.15, 0.15, 15.0]), }; vertex_data.push(Vertex { position: pos, normal: normal, color_diffuse: color_diffuse, color_specular: color_specular, }); for i in 0..3 { min_pos[i] = 
f32::min(min_pos[i], pos[i]); max_pos[i] = f32::max(max_pos[i], pos[i]); } } } } Err(e) => panic!("Loading of {:?} failed due to {:?}", path, e), } let diagonal_len = 6.0; let current_len = f32::powf(max_pos[0] - min_pos[0], 2.0) + f32::powf(max_pos[1] - min_pos[1], 2.0) + f32::powf(max_pos[2] - min_pos[2], 2.0); let scale = f32::sqrt(diagonal_len / current_len); println!("Model scaled by {} to fit", scale); ( glium::vertex::VertexBuffer::new(display, &vertex_data) .unwrap() .into_vertex_buffer_any(), scale, ) }
#![allow(dead_code)] extern crate clock_ticks; extern crate tobj; use glium::vertex::VertexBufferAny; use glium::{self, Display}; use std::f32; use std::path::Path; use std::thread; use std::time::{Duration, Instant}; pub mod camera; pub enum Action { Stop, Continue, } pub fn start_loop<F>(mut callback: F) where F: FnMut() -> Action, { let mut accumulator = Duration::new(0, 0); let mut previous_clock = Instant::now(); loop { match callback() { Action::Stop => break, Action::Continue => (), }; let now = Instant::now(); accumulator += now - previous_clock; previous_clock = now; let fixed_time_stamp = Duration::new(0, 16666667); while accumulator >= fixed_time_stamp { accumulator -= fixed_time_stamp; } thread::sleep(fixed_time_stamp - accumulator); } } pub fn load_wavefront(display: &Display, path: &Path) -> (VertexBufferAny, f32) { #[derive(Copy, Clone)] struct Vertex { position: [f32; 3], normal: [f32; 3], color_diffuse: [f32; 3], color_specular: [f32; 4], } implement_vertex!(Vertex, position, normal, color_diffuse, color_specular); let mut min_pos = [f32::INFINITY; 3]; let mut max_pos = [f32::NEG_INFINITY; 3]; let mut vertex_data = Vec::new(); match tobj::load_obj(path) { Ok((models, mats)) => { for model in &models { let mesh = &model.mesh; println!("Uploading model: {}", model.name); for idx in &mesh.indices { let i = *idx as usize; let pos = [ mesh.positions[3 * i], mesh.positions[3 * i + 1], mesh.positions[3 * i + 2], ]; let normal = if !mesh.normals.is_empty() { [ mesh.normals[3 * i], mesh.normals[3 * i + 1], mesh.normals[3 * i + 2], ] } else { [0.0, 0.0, 0.0] }; let (color_diffuse, color_specular) = match mesh.material_id { Some(i) => ( mats[i].diffuse, [ mats[i].specular[0], mats[i].specular[1], mats[i].specular[2], mats[i].shininess, ], ), None => ([0.8, 0.8, 0.8], [0.15, 0.15, 0.15, 15.0]), }; vertex_data.push(Vertex { position: pos, normal: normal, color_diffuse: color_diffuse, color_specular: color_specular, }); for i in 0..3 { min_pos[i] = 
f32::min(min_pos[i], pos[i]); max_pos[i] = f32::max(max_pos[i], pos[i]); } } } } Err(e) => panic!("Loading of {:?} failed due to {:?}", path, e), } let diagonal_len = 6.0; let current_len = f32::powf(max_pos[0] - min_pos[0], 2.0) +
f32::powf(max_pos[1] - min_pos[1], 2.0) + f32::powf(max_pos[2] - min_pos[2], 2.0); let scale = f32::sqrt(diagonal_len / current_len); println!("Model scaled by {} to fit", scale); ( glium::vertex::VertexBuffer::new(display, &vertex_data) .unwrap() .into_vertex_buffer_any(), scale, ) }
function_block-function_prefix_line
[ { "content": "fn show_test_window<'a>(ui: &Ui<'a>, state: &mut State, opened: &mut bool) {\n\n if state.show_app_metrics {\n\n ui.show_metrics_window(&mut state.show_app_metrics);\n\n }\n\n if state.show_app_main_menu_bar { show_example_app_main_menu_bar(ui, state) }\n\n if state.show_app_auto_resize {\n\n show_example_app_auto_resize(ui, &mut state.auto_resize_state, &mut state.show_app_auto_resize);\n\n }\n\n if state.show_app_fixed_overlay {\n\n show_example_app_fixed_overlay(ui, &mut state.show_app_fixed_overlay);\n\n }\n\n if state.show_app_manipulating_window_title {\n\n show_example_app_manipulating_window_title(ui);\n\n }\n\n if state.show_app_about {\n\n ui.window()\n\n .name(im_str!(\"About ImGui\"))\n\n .always_auto_resize(true)\n\n .opened(&mut state.show_app_about)\n\n .build(|| {\n", "file_path": "src/test_window.rs", "rank": 2, "score": 46621.11853199529 }, { "content": "fn show_example_app_auto_resize<'a>(ui: &Ui<'a>, state: &mut AutoResizeState, opened: &mut bool) {\n\n ui.window()\n\n .name(im_str!(\"Example: Auto-resizing window\"))\n\n .opened(opened)\n\n .always_auto_resize(true)\n\n .build(|| {\n\n ui.text(im_str!(\"Window will resize every-ui to the size of its content.\n\nNote that you probably don't want to query the window size to\n\noutput your content because that would create a feedback loop.\"));\n\n ui.slider_i32(im_str!(\"Number of lines\"), &mut state.lines, 1, 20).build();\n\n for i in 0 .. 
state.lines {\n\n ui.text(im_str!(\"{:2$}This is line {}\", \"\", i, i as usize * 4));\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/test_window.rs", "rank": 3, "score": 43251.91867691147 }, { "content": "fn show_example_app_fixed_overlay<'a>(ui: &Ui<'a>, opened: &mut bool) {\n\n ui.window()\n\n .name(im_str!(\"Example: Fixed Overlay\"))\n\n .opened(opened)\n\n .bg_alpha(0.3)\n\n .title_bar(false)\n\n .resizable(false)\n\n .movable(false)\n\n .save_settings(false)\n\n .build(|| {\n\n ui.text(im_str!(\"Simple overlay\\non the top-left side of the screen.\"));\n\n ui.separator();\n\n let mouse_pos = ui.imgui().mouse_pos();\n\n ui.text(im_str!(\"Mouse Position: ({:.1},{:.1})\", mouse_pos.0, mouse_pos.1));\n\n })\n\n}\n\n\n", "file_path": "src/test_window.rs", "rank": 4, "score": 39623.73386025416 }, { "content": "fn show_example_menu_file<'a>(ui: &Ui<'a>, state: &mut FileMenuState) {\n\n ui.menu_item(im_str!(\"(dummy menu)\")).enabled(false).build();\n\n ui.menu_item(im_str!(\"New\")).build();\n\n ui.menu_item(im_str!(\"Open\")).shortcut(im_str!(\"Ctrl+O\")).build();\n\n ui.menu(im_str!(\"Open Recent\")).build(|| {\n\n ui.menu_item(im_str!(\"fish_hat.c\")).build();\n\n ui.menu_item(im_str!(\"fish_hat.inl\")).build();\n\n ui.menu_item(im_str!(\"fish_hat.h\")).build();\n\n ui.menu(im_str!(\"More..\")).build(|| {\n\n ui.menu_item(im_str!(\"Hello\")).build();\n\n ui.menu_item(im_str!(\"Sailor\")).build();\n\n ui.menu(im_str!(\"Recurse..\")).build(|| {\n\n show_example_menu_file(ui, state);\n\n });\n\n });\n\n });\n\n ui.menu_item(im_str!(\"Save\")).shortcut(im_str!(\"Ctrl+S\")).build();\n\n ui.menu_item(im_str!(\"Save As..\")).build();\n\n ui.separator();\n\n ui.menu(im_str!(\"Options\")).build(|| {\n", "file_path": "src/test_window.rs", "rank": 5, "score": 38705.6278173344 }, { "content": "fn show_example_app_main_menu_bar<'a>(ui: &Ui<'a>, state: &mut State) {\n\n ui.main_menu_bar(|| {\n\n ui.menu(im_str!(\"File\")).build(|| {\n\n show_example_menu_file(ui, &mut 
state.file_menu);\n\n });\n\n ui.menu(im_str!(\"Edit\")).build(|| {\n\n ui.menu_item(im_str!(\"Undo\")).shortcut(im_str!(\"CTRL+Z\")).build();\n\n ui.menu_item(im_str!(\"Redo\"))\n\n .shortcut(im_str!(\"CTRL+Y\")).enabled(false).build();\n\n ui.separator();\n\n ui.menu_item(im_str!(\"Cut\")).shortcut(im_str!(\"CTRL+X\")).build();\n\n ui.menu_item(im_str!(\"Copy\")).shortcut(im_str!(\"CTRL+C\")).build();\n\n ui.menu_item(im_str!(\"Paste\")).shortcut(im_str!(\"CTRL+V\")).build();\n\n });\n\n });\n\n}\n\n\n", "file_path": "src/test_window.rs", "rank": 6, "score": 38705.6278173344 }, { "content": "struct State {\n\n clear_color: (f32, f32, f32, f32),\n\n show_app_metrics: bool,\n\n show_app_main_menu_bar: bool,\n\n show_app_console: bool,\n\n show_app_layout: bool,\n\n show_app_long_text: bool,\n\n show_app_auto_resize: bool,\n\n show_app_fixed_overlay: bool,\n\n show_app_custom_rendering: bool,\n\n show_app_manipulating_window_title: bool,\n\n show_app_about: bool,\n\n no_titlebar: bool,\n\n no_border: bool,\n\n no_resize: bool,\n\n no_move: bool,\n\n no_scrollbar: bool,\n\n no_collapse: bool,\n\n no_menu: bool,\n\n bg_alpha: f32,\n", "file_path": "src/test_window.rs", "rank": 7, "score": 35672.93574036775 }, { "content": "struct AutoResizeState {\n\n lines: i32\n\n}\n\n\n\nimpl Default for AutoResizeState {\n\n fn default() -> Self {\n\n AutoResizeState {\n\n lines: 10\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test_window.rs", "rank": 8, "score": 33662.28177646053 }, { "content": "struct FileMenuState {\n\n enabled: bool\n\n}\n\n\n\nimpl Default for FileMenuState {\n\n fn default() -> Self {\n\n FileMenuState {\n\n enabled: true\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/test_window.rs", "rank": 9, "score": 33662.28177646053 }, { "content": "// This code is essentially straight from the glium teapot example\n\nfn main() {\n\n let model_file = match std::env::args().nth(1) {\n\n Some(arg) => arg,\n\n None => panic!(\"Usage: ./exe model_file\"),\n\n };\n\n\n\n // 
building the display, ie. the main object\n\n let mut events_loop = glutin::EventsLoop::new();\n\n let window = glutin::WindowBuilder::new();\n\n let context = glutin::ContextBuilder::new().with_depth_buffer(24);\n\n let display = glium::Display::new(window, context, &events_loop).unwrap();\n\n\n\n // building the vertex and index buffers\n\n let (mut vertex_buffer, mut scale) = support::load_wavefront(&display, Path::new(&model_file));\n\n\n\n // the program\n\n let program = program!(&display,\n\n 140 => {\n\n vertex: \"\n\n #version 140\n", "file_path": "src/main.rs", "rank": 10, "score": 30794.821271908862 }, { "content": "fn main() {\n\n let mut state = State::default();\n\n let mut support = Support::init();\n\n let mut opened = true;\n\n\n\n loop {\n\n let active = support.render(state.clear_color, |ui| {\n\n show_test_window(ui, &mut state, &mut opened);\n\n });\n\n if !active || !opened { break }\n\n }\n\n}\n\n\n", "file_path": "src/test_window.rs", "rank": 11, "score": 29663.706751047415 }, { "content": "fn show_user_guide<'a>(ui: &Ui<'a>) {\n\n ui.bullet_text(im_str!(\"Double-click on title bar to collapse window.\"));\n\n ui.bullet_text(im_str!(\"Click and drag on lower right corner to resize window.\"));\n\n ui.bullet_text(im_str!(\"Click and drag on any empty space to move window.\"));\n\n ui.bullet_text(im_str!(\"Mouse Wheel to scroll.\"));\n\n ui.bullet_text(im_str!(\"TAB/SHIFT+TAB to cycle through keyboard editable fields.\"));\n\n ui.bullet_text(im_str!(\"CTRL+Click on a slider or drag box to input text.\"));\n\n ui.bullet_text(im_str!(\n\n\"While editing text:\n\n- Hold SHIFT or use mouse to select text\n\n- CTRL+Left/Right to word jump\n\n- CTRL+A or double-click to select all\n\n- CTRL+X,CTRL+C,CTRL+V clipboard\n\n- CTRL+Z,CTRL+Y undo/redo\n\n- ESCAPE to revert\n\n- You can apply arithmetic operators +,*,/ on numerical values.\n\n Use +- to subtract.\"));\n\n}\n\n\n", "file_path": "src/test_window.rs", "rank": 12, "score": 22876.38951071436 }, 
{ "content": "fn show_example_app_manipulating_window_title<'a>(ui: &Ui<'a>) {\n\n ui.window()\n\n .name(im_str!(\"Same title as another window##1\"))\n\n .position((100.0, 100.0), ImGuiSetCond_FirstUseEver)\n\n .build(|| {\n\n ui.text(im_str!(\"This is window 1.\n\nMy title is the same as window 2, but my identifier is unique.\"));\n\n });\n\n ui.window()\n\n .name(im_str!(\"Same title as another window##2\"))\n\n .position((100.0, 200.0), ImGuiSetCond_FirstUseEver)\n\n .build(|| {\n\n ui.text(im_str!(\"This is window 2.\n\nMy title is the same as window 1, but my identifier is unique.\"));\n\n });\n\n let chars = ['|', '/', '-', '\\\\'];\n\n let ch_idx = (ui.imgui().get_time() / 0.25) as usize & 3;\n\n let num = ui.imgui().get_frame_count(); // The C++ version uses rand() here\n\n let title = im_str!(\"Animated title {} {}###AnimatedTitle\", chars[ch_idx], num);\n\n ui.window()\n\n .name(title)\n\n .position((100.0, 300.0), ImGuiSetCond_FirstUseEver)\n\n .build(|| {\n\n ui.text(im_str!(\"This window has a changing title\"));\n\n });\n\n}\n\n\n", "file_path": "src/test_window.rs", "rank": 13, "score": 21005.17226177211 }, { "content": "use glium::glutin;\n\n\n\npub struct CameraState {\n\n aspect_ratio: f32,\n\n position: (f32, f32, f32),\n\n direction: (f32, f32, f32),\n\n\n\n moving_up: bool,\n\n moving_left: bool,\n\n moving_down: bool,\n\n moving_right: bool,\n\n moving_forward: bool,\n\n moving_backward: bool,\n\n}\n\n\n\nimpl CameraState {\n\n pub fn new() -> CameraState {\n\n CameraState {\n\n aspect_ratio: 1024.0 / 768.0,\n\n position: (0.1, 0.1, 1.0),\n", "file_path": "src/support/camera.rs", "rank": 18, "score": 20285.118177919805 }, { "content": " direction: (0.0, 0.0, -1.0),\n\n moving_up: false,\n\n moving_left: false,\n\n moving_down: false,\n\n moving_right: false,\n\n moving_forward: false,\n\n moving_backward: false,\n\n }\n\n }\n\n\n\n pub fn set_position(&mut self, pos: (f32, f32, f32)) {\n\n self.position = pos;\n\n }\n\n\n\n pub fn 
set_direction(&mut self, dir: (f32, f32, f32)) {\n\n self.direction = dir;\n\n }\n\n\n\n pub fn get_perspective(&self) -> [[f32; 4]; 4] {\n\n let fov: f32 = 3.141592 / 2.0;\n", "file_path": "src/support/camera.rs", "rank": 19, "score": 20283.737152800535 }, { "content": " );\n\n\n\n let p = (\n\n -self.position.0 * s.0 - self.position.1 * s.1 - self.position.2 * s.2,\n\n -self.position.0 * u.0 - self.position.1 * u.1 - self.position.2 * u.2,\n\n -self.position.0 * f.0 - self.position.1 * f.1 - self.position.2 * f.2,\n\n );\n\n\n\n // note: remember that this is column-major, so the lines of code are actually columns\n\n [\n\n [s_norm.0, u.0, f.0, 0.0],\n\n [s_norm.1, u.1, f.1, 0.0],\n\n [s_norm.2, u.2, f.2, 0.0],\n\n [p.0, p.1, p.2, 1.0],\n\n ]\n\n }\n\n\n\n pub fn update(&mut self) {\n\n let f = {\n\n let f = self.direction;\n", "file_path": "src/support/camera.rs", "rank": 20, "score": 20281.295649003252 }, { "content": " }\n\n\n\n pub fn process_input(&mut self, event: &glutin::WindowEvent) {\n\n let input = match *event {\n\n glutin::WindowEvent::KeyboardInput { input, .. 
} => input,\n\n _ => return,\n\n };\n\n let pressed = input.state == glutin::ElementState::Pressed;\n\n let key = match input.virtual_keycode {\n\n Some(key) => key,\n\n None => return,\n\n };\n\n match key {\n\n glutin::VirtualKeyCode::Up => self.moving_up = pressed,\n\n glutin::VirtualKeyCode::Down => self.moving_down = pressed,\n\n glutin::VirtualKeyCode::A => self.moving_left = pressed,\n\n glutin::VirtualKeyCode::D => self.moving_right = pressed,\n\n glutin::VirtualKeyCode::W => self.moving_forward = pressed,\n\n glutin::VirtualKeyCode::S => self.moving_backward = pressed,\n\n _ => (),\n\n };\n\n }\n\n}\n", "file_path": "src/support/camera.rs", "rank": 21, "score": 20279.59222121054 }, { "content": " self.position.2 -= u.2 * 0.01;\n\n }\n\n\n\n if self.moving_right {\n\n self.position.0 += s.0 * 0.01;\n\n self.position.1 += s.1 * 0.01;\n\n self.position.2 += s.2 * 0.01;\n\n }\n\n\n\n if self.moving_forward {\n\n self.position.0 += f.0 * 0.01;\n\n self.position.1 += f.1 * 0.01;\n\n self.position.2 += f.2 * 0.01;\n\n }\n\n\n\n if self.moving_backward {\n\n self.position.0 -= f.0 * 0.01;\n\n self.position.1 -= f.1 * 0.01;\n\n self.position.2 -= f.2 * 0.01;\n\n }\n", "file_path": "src/support/camera.rs", "rank": 22, "score": 20278.10780491874 }, { "content": " s.1 * f.2 - s.2 * f.1,\n\n s.2 * f.0 - s.0 * f.2,\n\n s.0 * f.1 - s.1 * f.0,\n\n );\n\n\n\n if self.moving_up {\n\n self.position.0 += u.0 * 0.01;\n\n self.position.1 += u.1 * 0.01;\n\n self.position.2 += u.2 * 0.01;\n\n }\n\n\n\n if self.moving_left {\n\n self.position.0 -= s.0 * 0.01;\n\n self.position.1 -= s.1 * 0.01;\n\n self.position.2 -= s.2 * 0.01;\n\n }\n\n\n\n if self.moving_down {\n\n self.position.0 -= u.0 * 0.01;\n\n self.position.1 -= u.1 * 0.01;\n", "file_path": "src/support/camera.rs", "rank": 23, "score": 20278.096074726454 }, { "content": " let zfar = 1024.0;\n\n let znear = 0.1;\n\n\n\n let f = 1.0 / (fov / 2.0).tan();\n\n\n\n // note: remember that this is column-major, so the lines of 
code are actually columns\n\n [\n\n [f / self.aspect_ratio, 0.0, 0.0, 0.0],\n\n [0.0, f, 0.0, 0.0],\n\n [0.0, 0.0, (zfar + znear) / (zfar - znear), 1.0],\n\n [0.0, 0.0, -(2.0 * zfar * znear) / (zfar - znear), 0.0],\n\n ]\n\n }\n\n\n\n pub fn get_view(&self) -> [[f32; 4]; 4] {\n\n let f = {\n\n let f = self.direction;\n\n let len = f.0 * f.0 + f.1 * f.1 + f.2 * f.2;\n\n let len = len.sqrt();\n\n (f.0 / len, f.1 / len, f.2 / len)\n", "file_path": "src/support/camera.rs", "rank": 24, "score": 20277.947257008098 }, { "content": " let len = f.0 * f.0 + f.1 * f.1 + f.2 * f.2;\n\n let len = len.sqrt();\n\n (f.0 / len, f.1 / len, f.2 / len)\n\n };\n\n\n\n let up = (0.0, 1.0, 0.0);\n\n\n\n let s = (\n\n f.1 * up.2 - f.2 * up.1,\n\n f.2 * up.0 - f.0 * up.2,\n\n f.0 * up.1 - f.1 * up.0,\n\n );\n\n\n\n let s = {\n\n let len = s.0 * s.0 + s.1 * s.1 + s.2 * s.2;\n\n let len = len.sqrt();\n\n (s.0 / len, s.1 / len, s.2 / len)\n\n };\n\n\n\n let u = (\n", "file_path": "src/support/camera.rs", "rank": 25, "score": 20275.06323879429 }, { "content": " };\n\n\n\n let up = (0.0, 1.0, 0.0);\n\n\n\n let s = (\n\n f.1 * up.2 - f.2 * up.1,\n\n f.2 * up.0 - f.0 * up.2,\n\n f.0 * up.1 - f.1 * up.0,\n\n );\n\n\n\n let s_norm = {\n\n let len = s.0 * s.0 + s.1 * s.1 + s.2 * s.2;\n\n let len = len.sqrt();\n\n (s.0 / len, s.1 / len, s.2 / len)\n\n };\n\n\n\n let u = (\n\n s_norm.1 * f.2 - s_norm.2 * f.1,\n\n s_norm.2 * f.0 - s_norm.0 * f.2,\n\n s_norm.0 * f.1 - s_norm.1 * f.0,\n", "file_path": "src/support/camera.rs", "rank": 26, "score": 20275.06323879429 }, { "content": "tobj viewer\n\n===\n\nA simple Wavefront OBJ viewer that uses [tobj](https://github.com/Twinklebear/tobj) to load models and\n\n[glium](https://github.com/tomaka/glium) to render them. The rendering code is essentially straight out of\n\nthe glium teapot demo but will also re-scale models to fit within a unit cube so it's easier to view a wide\n\nvariety of models at potentially very different scales. 
To run pass the OBJ file to the viewer, e.g. through\n\nCargo you can run the provided Utah teapot example:\n\n\n\n```bash\n\ncargo run -- teapot.obj\n\n```\n\n\n\nYou can then use WASD to move forward/backward/sideways and the up/down arrows to move vertically.\n\n\n\nSamples\n\n---\n\nThe rendering quality is extremely basic, this program is mostly used to check that tobj is loading things properly\n\non some bigger scenes.\n\n\n\n![Stanford Buddha](http://i.imgur.com/eUsqZd8.png)\n\n![Rust Logo](http://i.imgur.com/uJbca2d.png)\n\n![Rungholt](http://i.imgur.com/wImyNG4.png)\n\n\n\nThe Buddha is from the [Stanford Scanning Repository](http://graphics.stanford.edu/data/3Dscanrep/) and the Rust logo\n\nwas modeled [Nylithius on BlenderArtists](http://blenderartists.org/forum/showthread.php?362836-Rust-language-3D-logo).\n\nThe [Rungholt](http://graphics.cs.williams.edu/data/meshes.xml) model can be found on Morgan McGuire's meshes page and\n\nwas originally built by kescha.\n\n\n", "file_path": "README.md", "rank": 27, "score": 13203.750751531801 }, { "content": "The MIT License (MIT)\n\n\n\nCopyright (c) 2017 Will Usher\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in\n\nall copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n\nTHE SOFTWARE.\n", "file_path": "LICENSE.md", "rank": 28, "score": 13200.426566622697 }, { "content": " let mut action = support::Action::Continue;\n\n\n\n // polling and handling the events received by the window\n\n events_loop.poll_events(|event| match event {\n\n glutin::Event::WindowEvent { event, .. } => match event {\n\n glutin::WindowEvent::Closed => action = support::Action::Stop,\n\n ev => camera.process_input(&ev),\n\n },\n\n _ => (),\n\n });\n\n\n\n action\n\n });\n\n\n\n /*\n\n // the main loop\n\n support::start_loop(|| {\n\n camera.update();\n\n\n\n // building the uniforms\n", "file_path": "src/main.rs", "rank": 29, "score": 12.230794181778036 }, { "content": "extern crate cgmath;\n\n#[macro_use]\n\nextern crate glium;\n\n\n\nuse std::default::Default;\n\nuse std::path::Path;\n\n\n\nuse glium::{glutin, Surface};\n\n\n\nmod support;\n\n\n\n// This code is essentially straight from the glium teapot example\n", "file_path": "src/main.rs", "rank": 30, "score": 11.003395030966312 }, { "content": "#[macro_use]\n\nextern crate glium;\n\n#[macro_use]\n\nextern crate imgui;\n\nextern crate time;\n\n\n\nuse imgui::*;\n\n\n\nuse self::support::Support;\n\n\n\nmod support;\n\n\n", "file_path": "src/test_window.rs", "rank": 31, "score": 10.13604436180961 }, { "content": " },\n\n glutin::Event::MouseInput(state, glutin::MouseButton::Right) => {\n\n mouse_pressed[1] = state == glutin::ElementState::Pressed;\n\n },\n\n glutin::Event::MouseInput(state, glutin::MouseButton::Middle) => {\n\n mouse_pressed[2] = state == glutin::ElementState::Pressed;\n\n },\n\n glutin::Event::DroppedFile(path) => {\n\n println!(\"Dropped file {}\", path.display());\n\n match path.extension() {\n\n Some(ext) if ext == \"obj\" => {\n\n 
let load = support::load_wavefront(&display, path.as_path());\n\n vertex_buffer = load.0;\n\n scale = load.1;\n\n },\n\n _ => println!(\"Invalid file\"),\n\n }\n\n },\n\n ev => camera.process_input(&ev),\n\n }\n\n }\n\n\n\n support::Action::Continue\n\n });\n\n */\n\n}\n", "file_path": "src/main.rs", "rank": 32, "score": 8.69482818251003 }, { "content": " // drawing a frame\n\n target.clear_color_and_depth((0.0, 0.0, 0.0, 0.0), 1.0);\n\n target.draw(&vertex_buffer,\n\n &glium::index::NoIndices(glium::index::PrimitiveType::TrianglesList),\n\n &program, &uniforms, &params).unwrap();\n\n\n\n target.finish().unwrap();\n\n\n\n // polling and handling the events received by the window\n\n for event in display.poll_events() {\n\n match event {\n\n glutin::Event::Closed => return support::Action::Stop,\n\n glutin::Event::KeyboardInput(glutin::ElementState::Pressed, _, Some(glutin::VirtualKeyCode::Escape)) => {\n\n return support::Action::Stop;\n\n },\n\n glutin::Event::MouseMoved(x, y) => {\n\n mouse_pos = (x as f32, y as f32);\n\n },\n\n glutin::Event::MouseInput(state, glutin::MouseButton::Left) => {\n\n mouse_pressed[0] = state == glutin::ElementState::Pressed;\n", "file_path": "src/main.rs", "rank": 33, "score": 6.806994310226537 }, { "content": " .unwrap();\n\n\n\n let mut camera = support::camera::CameraState::new();\n\n let mut mouse_pressed = [false; 3];\n\n let mut mouse_pos = (0.0, 0.0);\n\n\n\n // the main loop\n\n support::start_loop(|| {\n\n camera.update();\n\n\n\n // building the uniforms\n\n let uniforms = uniform! {\n\n persp_matrix: camera.get_perspective(),\n\n view_matrix: camera.get_view(),\n\n };\n\n\n\n // draw parameters\n\n let params = glium::DrawParameters {\n\n depth: glium::Depth {\n\n test: glium::DepthTest::IfLess,\n", "file_path": "src/main.rs", "rank": 34, "score": 6.542410561775423 }, { "content": " let uniforms = uniform! 
{\n\n persp_matrix: camera.get_perspective(),\n\n view_matrix: camera.get_view(),\n\n scaling: scale,\n\n eye_pos: camera.get_position(),\n\n light_dir: camera.get_direction(),\n\n };\n\n\n\n // draw parameters\n\n let params = glium::DrawParameters {\n\n depth: glium::Depth {\n\n test: glium::DepthTest::IfLess,\n\n write: true,\n\n .. Default::default()\n\n },\n\n .. Default::default()\n\n };\n\n\n\n let mut target = display.draw();\n\n\n", "file_path": "src/main.rs", "rank": 35, "score": 5.9144034407006 }, { "content": " vec3 color = (0.3 + 0.7 * lum) * vec3(1.0, 1.0, 1.0);\n\n gl_FragColor = vec4(color, 1.0);\n\n }\n\n \",\n\n },\n\n\n\n 100 => {\n\n vertex: \"\n\n #version 100\n\n\n\n uniform lowp mat4 persp_matrix;\n\n uniform lowp mat4 view_matrix;\n\n\n\n attribute lowp vec3 position;\n\n attribute lowp vec3 normal;\n\n varying lowp vec3 v_position;\n\n varying lowp vec3 v_normal;\n\n\n\n void main() {\n\n v_position = position;\n", "file_path": "src/main.rs", "rank": 36, "score": 5.536905478590463 }, { "content": " attribute vec3 normal;\n\n varying vec3 v_position;\n\n varying vec3 v_normal;\n\n\n\n void main() {\n\n v_position = position;\n\n v_normal = normal;\n\n gl_Position = persp_matrix * view_matrix * vec4(v_position * 0.005, 1.0);\n\n }\n\n \",\n\n\n\n fragment: \"\n\n #version 110\n\n\n\n varying vec3 v_normal;\n\n\n\n const vec3 LIGHT = vec3(-0.2, 0.8, 0.1);\n\n\n\n void main() {\n\n float lum = max(dot(normalize(v_normal), normalize(LIGHT)), 0.0);\n", "file_path": "src/main.rs", "rank": 37, "score": 5.396643860287711 }, { "content": "\n\n uniform mat4 persp_matrix;\n\n uniform mat4 view_matrix;\n\n\n\n in vec3 position;\n\n in vec3 normal;\n\n out vec3 v_position;\n\n out vec3 v_normal;\n\n\n\n void main() {\n\n v_position = position;\n\n v_normal = normal;\n\n gl_Position = persp_matrix * view_matrix * vec4(v_position * 0.005, 1.0);\n\n }\n\n \",\n\n\n\n fragment: \"\n\n #version 140\n\n\n\n in vec3 v_normal;\n", "file_path": "src/main.rs", 
"rank": 38, "score": 5.386333967459973 }, { "content": " out vec4 f_color;\n\n\n\n const vec3 LIGHT = vec3(-0.2, 0.8, 0.1);\n\n\n\n void main() {\n\n float lum = max(dot(normalize(v_normal), normalize(LIGHT)), 0.0);\n\n vec3 color = (0.3 + 0.7 * lum) * vec3(1.0, 1.0, 1.0);\n\n f_color = vec4(color, 1.0);\n\n }\n\n \",\n\n },\n\n\n\n 110 => {\n\n vertex: \"\n\n #version 110\n\n\n\n uniform mat4 persp_matrix;\n\n uniform mat4 view_matrix;\n\n\n\n attribute vec3 position;\n", "file_path": "src/main.rs", "rank": 39, "score": 5.146905046900063 }, { "content": " v_normal = normal;\n\n gl_Position = persp_matrix * view_matrix * vec4(v_position * 0.005, 1.0);\n\n }\n\n \",\n\n\n\n fragment: \"\n\n #version 100\n\n\n\n varying lowp vec3 v_normal;\n\n\n\n const lowp vec3 LIGHT = vec3(-0.2, 0.8, 0.1);\n\n\n\n void main() {\n\n lowp float lum = max(dot(normalize(v_normal), normalize(LIGHT)), 0.0);\n\n lowp vec3 color = (0.3 + 0.7 * lum) * vec3(1.0, 1.0, 1.0);\n\n gl_FragColor = vec4(color, 1.0);\n\n }\n\n \",\n\n },\n\n )\n", "file_path": "src/main.rs", "rank": 40, "score": 4.572494392179845 }, { "content": " write: true,\n\n ..Default::default()\n\n },\n\n ..Default::default()\n\n };\n\n\n\n // drawing a frame\n\n let mut target = display.draw();\n\n target.clear_color_and_depth((0.0, 0.0, 0.0, 0.0), 1.0);\n\n target\n\n .draw(\n\n &vertex_buffer,\n\n &glium::index::NoIndices(glium::index::PrimitiveType::TrianglesList),\n\n &program,\n\n &uniforms,\n\n &params,\n\n )\n\n .unwrap();\n\n target.finish().unwrap();\n\n\n", "file_path": "src/main.rs", "rank": 41, "score": 4.134306267194636 }, { "content": " ui.checkbox(im_str!(\"no resize\"), &mut state.no_resize);\n\n ui.checkbox(im_str!(\"no move\"), &mut state.no_move);\n\n ui.same_line(150.0);\n\n ui.checkbox(im_str!(\"no scrollbar\"), &mut state.no_scrollbar);\n\n ui.same_line(300.0);\n\n ui.checkbox(im_str!(\"no collapse\"), &mut state.no_collapse);\n\n ui.checkbox(im_str!(\"no menu\"), &mut state.no_menu);\n\n 
ui.slider_f32(im_str!(\"bg alpha\"), &mut state.bg_alpha, 0.0, 1.0).build();\n\n\n\n ui.tree_node(im_str!(\"Style\")).build(|| {\n\n // TODO: Reimplement style editor\n\n ui.show_default_style_editor();\n\n });\n\n ui.tree_node(im_str!(\"Fonts\"))\n\n .label(im_str!(\"Fonts ({})\", \"TODO\"))\n\n .build(|| {\n\n ui.text_wrapped(im_str!(\"Tip: Load fonts with io.Fonts->AddFontFromFileTTF().\"));\n\n ui.tree_node(im_str!(\"Atlas texture\")).build(|| {\n\n // TODO\n\n });\n", "file_path": "src/test_window.rs", "rank": 42, "score": 2.769473019970877 }, { "content": " ui.menu_bar(|| {\n\n ui.menu(im_str!(\"Menu\")).build(|| {\n\n show_example_menu_file(ui, &mut state.file_menu);\n\n });\n\n ui.menu(im_str!(\"Examples\")).build(|| {\n\n ui.menu_item(im_str!(\"Main menu bar\"))\n\n .selected(&mut state.show_app_main_menu_bar).build();\n\n ui.menu_item(im_str!(\"Console\"))\n\n .selected(&mut state.show_app_console).build();\n\n ui.menu_item(im_str!(\"Simple layout\"))\n\n .selected(&mut state.show_app_layout).build();\n\n ui.menu_item(im_str!(\"Long text display\"))\n\n .selected(&mut state.show_app_long_text).build();\n\n ui.menu_item(im_str!(\"Auto-resizing window\"))\n\n .selected(&mut state.show_app_auto_resize).build();\n\n ui.menu_item(im_str!(\"Simple overlay\"))\n\n .selected(&mut state.show_app_fixed_overlay).build();\n\n ui.menu_item(im_str!(\"Manipulating window title\"))\n\n .selected(&mut state.show_app_manipulating_window_title).build();\n\n ui.menu_item(im_str!(\"Custom rendering\"))\n", "file_path": "src/test_window.rs", "rank": 43, "score": 2.6814271934944283 }, { "content": " ui.menu_item(im_str!(\"Enabled\")).selected(&mut state.enabled).build();\n\n // TODO\n\n });\n\n ui.menu(im_str!(\"Colors\")).build(|| {\n\n // TODO\n\n });\n\n ui.menu(im_str!(\"Disabled\")).enabled(false).build(|| {\n\n unreachable!();\n\n });\n\n let mut checked = true;\n\n ui.menu_item(im_str!(\"Checked\")).selected(&mut checked).build();\n\n 
ui.menu_item(im_str!(\"Quit\")).shortcut(im_str!(\"Alt+F4\")).build();\n\n}\n\n\n", "file_path": "src/test_window.rs", "rank": 44, "score": 1.9992842478382766 }, { "content": " .selected(&mut state.show_app_custom_rendering).build();\n\n });\n\n ui.menu(im_str!(\"Help\")).build(|| {\n\n ui.menu_item(im_str!(\"Metrics\"))\n\n .selected(&mut state.show_app_metrics).build();\n\n ui.menu_item(im_str!(\"About ImGui\"))\n\n .selected(&mut state.show_app_about).build();\n\n });\n\n });\n\n ui.spacing();\n\n if ui.collapsing_header(im_str!(\"Help\")).build() {\n\n ui.text_wrapped(im_str!(\"This window is being created by the show_test_window() function. Please refer to the code for programming reference.\\n\\nUser Guide:\"));\n\n show_user_guide(ui);\n\n }\n\n\n\n if ui.collapsing_header(im_str!(\"Window options\")).build() {\n\n ui.checkbox(im_str!(\"no titlebar\"), &mut state.no_titlebar);\n\n ui.same_line(150.0);\n\n ui.checkbox(im_str!(\"no border\"), &mut state.no_border);\n\n ui.same_line(300.0);\n", "file_path": "src/test_window.rs", "rank": 45, "score": 1.9635768539541543 }, { "content": " ui.text(im_str!(\"ImGui {}\", imgui::get_version()));\n\n ui.separator();\n\n ui.text(im_str!(\"By Omar Cornut and all github contributors.\"));\n\n ui.text(im_str!(\"ImGui is licensed under the MIT License, see LICENSE for more information.\"));\n\n })\n\n }\n\n\n\n ui.window().name(im_str!(\"ImGui Demo\"))\n\n .title_bar(!state.no_titlebar)\n\n .show_borders(!state.no_border)\n\n .resizable(!state.no_resize)\n\n .movable(!state.no_move)\n\n .scroll_bar(!state.no_scrollbar)\n\n .collapsible(!state.no_collapse)\n\n .menu_bar(!state.no_menu)\n\n .bg_alpha(state.bg_alpha)\n\n .size((550.0, 680.0), ImGuiSetCond_FirstUseEver)\n\n .opened(opened)\n\n .build(|| {\n\n ui.text(im_str!(\"ImGui says hello.\"));\n", "file_path": "src/test_window.rs", "rank": 46, "score": 0.8302223670547324 } ]
Rust
src/ast/pp_visitor.rs
ffwff/iro
2e92ab30bee7f3ab9036726e383edd8ec788fdd9
use crate::ast::*; use crate::compiler::sources; use bit_set::BitSet; use std::path::PathBuf; pub struct PreprocessState { imported: BitSet<u32>, total_imported_statements: Vec<NodeBox>, prelude: Option<PathBuf>, } impl PreprocessState { pub fn new(prelude: Option<PathBuf>) -> Self { Self { imported: BitSet::new(), total_imported_statements: vec![], prelude, } } } pub struct PreprocessVisitor<'a> { file: sources::FileIndex, working_path: Option<PathBuf>, state: Option<&'a RefCell<PreprocessState>>, sources: &'a mut sources::Sources, } impl<'a> PreprocessVisitor<'a> { pub fn postprocess( program: &mut Program, file: sources::FileIndex, working_path: Option<PathBuf>, state: Option<&'a RefCell<PreprocessState>>, sources: &'a mut sources::Sources, ) -> VisitorResult { let mut visitor = Self { file, working_path, state, sources, }; visitor.visit_program(program) } fn fill_box(&self, boxed: &NodeBox) { boxed.span_ref().update(|mut span| { span.file = self.file; span }); } fn ungenerate_retvar(b: &NodeBox) { b.generate_retvar.set(false); } fn import(&mut self, working_path: PathBuf) -> VisitorResult { if let Some(mut state) = self.state.as_ref().map(|x| x.borrow_mut()) { let sources = &mut self.sources; let (index, _) = sources .read(&working_path) .map_err(|error| compiler::Error::io_error(error))?; if state.imported.insert(index) { std::mem::drop(state); let ast = compiler::parse_file_to_ast( index, working_path, sources, self.state.clone().unwrap(), ) .map_err(|err| { err.span.map(|mut span| { span.file = index; span }); err })?; let mut state = self.state.as_ref().unwrap().borrow_mut(); state .total_imported_statements .extend(ast.exprs.into_iter().filter(|ast| ast.can_import())); } } Ok(()) } } impl<'a> Visitor for PreprocessVisitor<'a> { fn visit_program(&mut self, n: &mut Program) -> VisitorResult { for node in &n.exprs { Self::ungenerate_retvar(node); node.visit(self)?; } if self.file == 0 { if let Some(state_cell) = self.state { { let state = state_cell.borrow(); 
if let Some(prelude) = &state.prelude { let mut working_path = std::env::current_dir() .map_err(|error| compiler::Error::io_error(error))?; working_path.push(prelude); working_path = std::fs::canonicalize(working_path) .map_err(|error| compiler::Error::io_error(error))?; std::mem::drop(state); self.import(working_path)?; } } let mut state = state_cell.borrow_mut(); n.exprs.extend(std::mem::replace( &mut state.total_imported_statements, vec![], )); } } Ok(()) } fn visit_import(&mut self, n: &ImportStatement, b: &NodeBox) -> VisitorResult { self.fill_box(b); let mut working_path = self.working_path.clone().unwrap(); working_path.pop(); working_path.push(&n.path); working_path.set_extension("iro"); working_path = std::fs::canonicalize(working_path) .map_err(|error| compiler::Error::io_error(error))?; self.import(working_path)?; Ok(()) } fn visit_class(&mut self, _n: &ClassStatement, b: &NodeBox) -> VisitorResult { self.fill_box(b); Ok(()) } fn visit_class_init(&mut self, n: &ClassInitExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); for (_, boxed) in &n.inits { boxed.visit(self)?; } Ok(()) } fn visit_modstmt(&mut self, n: &ModStatement, b: &NodeBox) -> VisitorResult { self.fill_box(b); for expr in &n.exprs { expr.visit(self)?; } Ok(()) } fn visit_defstmt(&mut self, n: &DefStatement, b: &NodeBox) -> VisitorResult { self.fill_box(b); let last_idx = n.exprs.len().wrapping_sub(1); for (idx, expr) in n.exprs.iter().enumerate() { if idx != last_idx { Self::ungenerate_retvar(expr); } expr.visit(self)?; } Ok(()) } fn visit_return(&mut self, n: &ReturnExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.expr.visit(self)?; Ok(()) } fn visit_whileexpr(&mut self, n: &WhileExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.cond.visit(self)?; let last_idx = n.exprs.len().wrapping_sub(1); for (idx, expr) in n.exprs.iter().enumerate() { if idx != last_idx { Self::ungenerate_retvar(expr); } expr.visit(self)?; } Ok(()) } fn visit_ifexpr(&mut self, n: &IfExpr, b: 
&NodeBox) -> VisitorResult { self.fill_box(b); n.cond.visit(self)?; let last_idx = n.exprs.len().wrapping_sub(1); for (idx, expr) in n.exprs.iter().enumerate() { if idx != last_idx && !b.generate_retvar.get() { Self::ungenerate_retvar(expr); } expr.visit(self)?; } let last_idx = n.elses.len().wrapping_sub(1); for (idx, expr) in n.elses.iter().enumerate() { if idx != last_idx && !b.generate_retvar.get() { Self::ungenerate_retvar(expr); } expr.visit(self)?; } Ok(()) } fn visit_callexpr(&mut self, n: &CallExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); for expr in &n.args { expr.visit(self)?; } Ok(()) } fn visit_letexpr(&mut self, n: &LetExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.left.visit(self)?; n.right.visit(self)?; Ok(()) } fn visit_binexpr(&mut self, n: &BinExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.left.visit(self)?; n.right.visit(self)?; Ok(()) } fn visit_asexpr(&mut self, n: &AsExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.left.visit(self)?; Ok(()) } fn visit_member_expr(&mut self, n: &MemberExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.left.visit(self)?; for arm in &n.right { if let MemberExprArm::Index(boxed) = arm { boxed.visit(self)?; } } Ok(()) } fn visit_value(&mut self, n: &Value, b: &NodeBox) -> VisitorResult { self.fill_box(b); if let Value::Slice(vec) = &n { for expr in vec { expr.visit(self)?; } } Ok(()) } fn visit_typeid(&mut self, _n: &TypeId, b: &NodeBox) -> VisitorResult { self.fill_box(b); Ok(()) } fn visit_break(&mut self, _n: &BreakExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); Ok(()) } fn visit_borrow(&mut self, n: &BorrowExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.expr.visit(self)?; Ok(()) } fn visit_deref(&mut self, n: &DerefExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.expr.visit(self)?; Ok(()) } fn visit_unary(&mut self, n: &UnaryExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.expr.visit(self)?; Ok(()) } fn visit_path(&mut self, 
_n: &PathExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); Ok(()) } }
use crate::ast::*; use crate::compiler::sources; use bit_set::BitSet; use std::path::PathBuf; pub struct PreprocessState { imported: BitSet<u32>, total_imported_statements: Vec<NodeBox>, prelude: Option<PathBuf>, } impl PreprocessState { pub fn new(prelude: Option<PathBuf>) -> Self { Self { imported: BitSet::new(), total_imported_statements: vec![], prelude, } } } pub struct PreprocessVisitor<'a> { file: sources::FileIndex, working_path: Option<PathBuf>, state: Option<&'a RefCell<PreprocessState>>, sources: &'a mut sources::Sources, } impl<'a> PreprocessVisitor<'a> { pub fn postprocess( program: &mut Program, file: sources::FileIndex, working_path: Option<PathBuf>, state: Option<&'a RefCell<PreprocessState>>, sources: &'a mut sources::Sources, ) -> VisitorResult { let mut visitor = Self { file, working_path, state, sources, }; visitor.visit_program(program) } fn fill_box(&self, boxed: &NodeBox) { boxed.span_ref().update(|mut span| { span.file = self.file; span }); } fn ungenerate_retvar(b: &NodeBox) { b.generate_retvar.set(false); }
} impl<'a> Visitor for PreprocessVisitor<'a> { fn visit_program(&mut self, n: &mut Program) -> VisitorResult { for node in &n.exprs { Self::ungenerate_retvar(node); node.visit(self)?; } if self.file == 0 { if let Some(state_cell) = self.state { { let state = state_cell.borrow(); if let Some(prelude) = &state.prelude { let mut working_path = std::env::current_dir() .map_err(|error| compiler::Error::io_error(error))?; working_path.push(prelude); working_path = std::fs::canonicalize(working_path) .map_err(|error| compiler::Error::io_error(error))?; std::mem::drop(state); self.import(working_path)?; } } let mut state = state_cell.borrow_mut(); n.exprs.extend(std::mem::replace( &mut state.total_imported_statements, vec![], )); } } Ok(()) } fn visit_import(&mut self, n: &ImportStatement, b: &NodeBox) -> VisitorResult { self.fill_box(b); let mut working_path = self.working_path.clone().unwrap(); working_path.pop(); working_path.push(&n.path); working_path.set_extension("iro"); working_path = std::fs::canonicalize(working_path) .map_err(|error| compiler::Error::io_error(error))?; self.import(working_path)?; Ok(()) } fn visit_class(&mut self, _n: &ClassStatement, b: &NodeBox) -> VisitorResult { self.fill_box(b); Ok(()) } fn visit_class_init(&mut self, n: &ClassInitExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); for (_, boxed) in &n.inits { boxed.visit(self)?; } Ok(()) } fn visit_modstmt(&mut self, n: &ModStatement, b: &NodeBox) -> VisitorResult { self.fill_box(b); for expr in &n.exprs { expr.visit(self)?; } Ok(()) } fn visit_defstmt(&mut self, n: &DefStatement, b: &NodeBox) -> VisitorResult { self.fill_box(b); let last_idx = n.exprs.len().wrapping_sub(1); for (idx, expr) in n.exprs.iter().enumerate() { if idx != last_idx { Self::ungenerate_retvar(expr); } expr.visit(self)?; } Ok(()) } fn visit_return(&mut self, n: &ReturnExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.expr.visit(self)?; Ok(()) } fn visit_whileexpr(&mut self, n: &WhileExpr, b: &NodeBox) -> 
VisitorResult { self.fill_box(b); n.cond.visit(self)?; let last_idx = n.exprs.len().wrapping_sub(1); for (idx, expr) in n.exprs.iter().enumerate() { if idx != last_idx { Self::ungenerate_retvar(expr); } expr.visit(self)?; } Ok(()) } fn visit_ifexpr(&mut self, n: &IfExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.cond.visit(self)?; let last_idx = n.exprs.len().wrapping_sub(1); for (idx, expr) in n.exprs.iter().enumerate() { if idx != last_idx && !b.generate_retvar.get() { Self::ungenerate_retvar(expr); } expr.visit(self)?; } let last_idx = n.elses.len().wrapping_sub(1); for (idx, expr) in n.elses.iter().enumerate() { if idx != last_idx && !b.generate_retvar.get() { Self::ungenerate_retvar(expr); } expr.visit(self)?; } Ok(()) } fn visit_callexpr(&mut self, n: &CallExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); for expr in &n.args { expr.visit(self)?; } Ok(()) } fn visit_letexpr(&mut self, n: &LetExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.left.visit(self)?; n.right.visit(self)?; Ok(()) } fn visit_binexpr(&mut self, n: &BinExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.left.visit(self)?; n.right.visit(self)?; Ok(()) } fn visit_asexpr(&mut self, n: &AsExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.left.visit(self)?; Ok(()) } fn visit_member_expr(&mut self, n: &MemberExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.left.visit(self)?; for arm in &n.right { if let MemberExprArm::Index(boxed) = arm { boxed.visit(self)?; } } Ok(()) } fn visit_value(&mut self, n: &Value, b: &NodeBox) -> VisitorResult { self.fill_box(b); if let Value::Slice(vec) = &n { for expr in vec { expr.visit(self)?; } } Ok(()) } fn visit_typeid(&mut self, _n: &TypeId, b: &NodeBox) -> VisitorResult { self.fill_box(b); Ok(()) } fn visit_break(&mut self, _n: &BreakExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); Ok(()) } fn visit_borrow(&mut self, n: &BorrowExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.expr.visit(self)?; 
Ok(()) } fn visit_deref(&mut self, n: &DerefExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.expr.visit(self)?; Ok(()) } fn visit_unary(&mut self, n: &UnaryExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); n.expr.visit(self)?; Ok(()) } fn visit_path(&mut self, _n: &PathExpr, b: &NodeBox) -> VisitorResult { self.fill_box(b); Ok(()) } }
fn import(&mut self, working_path: PathBuf) -> VisitorResult { if let Some(mut state) = self.state.as_ref().map(|x| x.borrow_mut()) { let sources = &mut self.sources; let (index, _) = sources .read(&working_path) .map_err(|error| compiler::Error::io_error(error))?; if state.imported.insert(index) { std::mem::drop(state); let ast = compiler::parse_file_to_ast( index, working_path, sources, self.state.clone().unwrap(), ) .map_err(|err| { err.span.map(|mut span| { span.file = index; span }); err })?; let mut state = self.state.as_ref().unwrap().borrow_mut(); state .total_imported_statements .extend(ast.exprs.into_iter().filter(|ast| ast.can_import())); } } Ok(()) }
function_block-full_function
[ { "content": "pub fn mangle_string(source: &str, dest: &mut String) {\n\n for ch in source.chars() {\n\n assert!((0x20..0x7e).contains(&(ch as _)));\n\n dest.push(ch);\n\n }\n\n}\n\n\n", "file_path": "src/codegen/mangler.rs", "rank": 0, "score": 158156.51224327856 }, { "content": "pub fn cleanup_high_level_instructions(_: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n for block in &mut context.blocks {\n\n let old_len = block.ins.len();\n\n let old_ins = std::mem::replace(&mut block.ins, Vec::with_capacity(old_len));\n\n\n\n for ins in old_ins {\n\n match ins.typed {\n\n InsType::Drop(x) => {\n\n let source_location = ins.source_location();\n\n fn insert_destructor(\n\n x: Variable,\n\n variables: &mut [Type],\n\n block: &mut Block,\n\n source_location: u32,\n\n ) {\n\n match &variables[usize::from(x)] {\n\n Type::Pointer(data) => {\n\n if data.tag == BorrowModifier::Unique {\n\n block.ins.push(Ins::empty_ret(\n\n InsType::DeallocHeap(x),\n", "file_path": "src/ssa/passes/postprocess.rs", "rank": 1, "score": 152657.60340998153 }, { "content": "pub fn parse_to_ssa(source: &str) -> Result<isa::Program, compiler::Error> {\n\n compiler::parse_source_to_ssa(source)\n\n}\n", "file_path": "tests/utils.rs", "rank": 2, "score": 149171.5286570544 }, { "content": "pub fn parse_source_to_ssa(source: &str) -> Result<ssa::isa::Program, compiler::Error> {\n\n let mut sources = Sources::new();\n\n let tokenizer = lexer::Lexer::new(source, 0);\n\n let mut ast = parser::TopParser::new().parse(tokenizer)?;\n\n PreprocessVisitor::postprocess(&mut ast, 0, None, None, &mut sources)?;\n\n process_program(&mut sources, ast)\n\n}\n\n\n", "file_path": "src/compiler/mod.rs", "rank": 3, "score": 148106.570357742 }, { "content": "pub fn parse_file_to_ast(\n\n file: usize,\n\n working_path: PathBuf,\n\n sources: &mut Sources,\n\n state: &RefCell<PreprocessState>,\n\n) -> Result<ast::Program, compiler::Error> {\n\n let tokenizer = lexer::Lexer::new(sources.file(file).unwrap(), 
file);\n\n let mut ast = parser::TopParser::new().parse(tokenizer)?;\n\n PreprocessVisitor::postprocess(&mut ast, file, Some(working_path), Some(state), sources)?;\n\n Ok(ast)\n\n}\n\n\n", "file_path": "src/compiler/mod.rs", "rank": 4, "score": 141371.75390416943 }, { "content": "pub fn preprocess(_: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n dbg_println!(\"start: {}\", context.print());\n\n let len = context.blocks.len();\n\n if len == 0 {\n\n return Flow::Break;\n\n }\n\n for (idx, block) in context.blocks.iter_mut().enumerate() {\n\n if let Some(ins) = block.ins.last() {\n\n if !ins.typed.is_jmp() {\n\n block.ins.push(Ins::empty_ret(InsType::Jmp(idx + 1), 0));\n\n }\n\n } else if idx + 1 != len {\n\n block.ins.push(Ins::empty_ret(InsType::Jmp(idx + 1), 0));\n\n }\n\n }\n\n context.block_vars = vec![BlockVars::new(); context.blocks.len()];\n\n Flow::Continue\n\n}\n\n\n", "file_path": "src/ssa/passes/graph.rs", "rank": 5, "score": 137007.26593392726 }, { "content": "pub fn fuse_postlude(_: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n for block in &mut context.blocks {\n\n block.ins.push(block.postlude.take().unwrap());\n\n }\n\n Flow::Continue\n\n}\n", "file_path": "src/ssa/passes/postlude.rs", "rank": 6, "score": 134705.47419601167 }, { "content": "pub fn check(_: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n fn walk(\n\n block: &Block,\n\n block_idx: usize,\n\n context: &Context,\n\n previous_mem_state: Option<&Paths>,\n\n drops: &mut Drops,\n\n ) -> Result<Paths, Code> {\n\n let mut mem_state = Paths::default();\n\n for ins in &block.ins {\n\n match &ins.typed {\n\n InsType::Drop(var) => {\n\n if let Some(state) = mem_state\n\n .insert(*var, MemoryState::FullyMoved(ins.source_location()))\n\n .map(|x| x.into_opt())\n\n .flatten()\n\n {\n\n return Err(Code::MemoryError {\n\n position: ins.source_location(),\n\n var: Variable::from(*var),\n", "file_path": "src/ssa/passes/memcheck/checker.rs", "rank": 7, "score": 
134705.47419601167 }, { "content": "pub fn register_to_memory(_: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n dbg_println!(\"before r2m: {}\", context.print());\n\n\n\n // Turn primitive variables with borrows into memory registers\n\n let mut borrowed_vars: BTreeSet<Variable> = BTreeSet::new();\n\n for block in context.blocks.iter_mut() {\n\n for ins in &block.ins {\n\n if let InsType::Borrow { var, .. } = &ins.typed {\n\n borrowed_vars.insert(*var);\n\n }\n\n }\n\n }\n\n if borrowed_vars.is_empty() {\n\n return Flow::Continue;\n\n }\n\n let borrowed_vars = borrowed_vars\n\n .into_iter()\n\n .filter(|var| context.variable(*var).is_primitive())\n\n .collect::<Vec<Variable>>();\n\n for &var in &borrowed_vars {\n", "file_path": "src/ssa/passes/mem.rs", "rank": 8, "score": 134705.47419601167 }, { "content": "pub fn fold_constants(_: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n dbg_println!(\"before folding: {}\", context.print());\n\n\n\n let mut var_to_const = BTreeMap::new();\n\n for block in &mut context.blocks {\n\n for ins in &mut block.ins {\n\n match &ins.typed {\n\n const_ins if const_ins.is_const() => {\n\n var_to_const.insert(ins.retvar().unwrap(), const_ins.clone());\n\n }\n\n InsType::Copy(x) => {\n\n if let Some(k) = var_to_const.get(&x).cloned() {\n\n ins.typed = k.clone();\n\n var_to_const.insert(ins.retvar().unwrap(), k);\n\n }\n\n }\n\n InsType::Cast { var, typed } => {\n\n if let Some(const_ins) = var_to_const.get(&var) {\n\n if let Some(casted) = const_ins.const_cast(typed) {\n\n var_to_const.insert(ins.retvar().unwrap(), casted.clone());\n", "file_path": "src/ssa/passes/fold.rs", "rank": 9, "score": 134705.47419601167 }, { "content": "pub fn separate_postlude(_: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n for block in &mut context.blocks {\n\n if let Some(ins) = block.ins.last() {\n\n if ins.typed.is_jmp() {\n\n block.postlude = block.ins.pop();\n\n } else {\n\n unreachable!()\n\n }\n\n }\n\n }\n\n 
Flow::Continue\n\n}\n\n\n", "file_path": "src/ssa/passes/postlude.rs", "rank": 10, "score": 134705.47419601167 }, { "content": "pub fn eliminate_phi(_: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n if context.blocks.len() < 2 {\n\n return Flow::Continue;\n\n }\n\n\n\n let mut replacements: BTreeMap<Variable, SmallVec<[Variable; 4]>> = BTreeMap::new();\n\n for block in &mut context.blocks {\n\n for ins in &block.ins {\n\n let retvar = ins.retvar();\n\n if let InsType::Phi { vars, .. } = &ins.typed {\n\n let retvar = retvar.unwrap();\n\n for var in vars.iter() {\n\n if let Some(vec) = replacements.get_mut(var) {\n\n vec.push(retvar);\n\n } else {\n\n replacements.insert(*var, smallvec![retvar]);\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/ssa/passes/mem.rs", "rank": 11, "score": 134705.47419601167 }, { "content": "pub fn reference_drop_insertion(_: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n for idx in 0..context.blocks.len() {\n\n let block = &mut context.blocks[idx];\n\n let mut vars_total_imported = btreeset![];\n\n\n\n for &pred in &block.preds {\n\n let pred_block_vars = &mut context.block_vars[pred];\n\n vars_total_imported.extend(pred_block_vars.vars_exported.iter().cloned());\n\n }\n\n\n\n let block_vars = &mut context.block_vars[idx];\n\n block_vars.vars_total_imported = vars_total_imported;\n\n }\n\n\n\n for (block, block_vars) in context.blocks.iter_mut().zip(context.block_vars.iter()) {\n\n // Variables that die in this block are variables which\n\n // flow into the block or are declared in this block, and are never exported\n\n let mut dead_vars = block_vars\n\n .vars_declared_in_this_block\n\n .union(&block_vars.vars_total_imported)\n", "file_path": "src/ssa/passes/mem.rs", "rank": 12, "score": 132530.1345585031 }, { "content": "pub fn build_graph(data: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n if context.blocks.len() < 2 || !data.invalid_build_graph {\n\n return Flow::Continue;\n\n }\n\n 
data.invalid_build_graph = false;\n\n\n\n let num_blocks = context.blocks.len();\n\n\n\n // Build the successor/predecessor set corresponding to each block\n\n let mut predecessors_map: Vec<SmallVec<[usize; 2]>> = vec![smallvec![]; num_blocks];\n\n let mut successors_map: Vec<SmallVec<[usize; 2]>> = vec![smallvec![]; num_blocks];\n\n let mut insert_node = |succ: usize, pred: usize| {\n\n predecessors_map[succ].push(pred);\n\n successors_map[pred].push(succ);\n\n };\n\n\n\n // Filter out nops and build the graph maps\n\n for (idx, block) in context.blocks.iter_mut().enumerate() {\n\n let mut jumped = false;\n\n block.ins.retain(|ins| {\n", "file_path": "src/ssa/passes/graph.rs", "rank": 13, "score": 132530.1345585031 }, { "content": "pub fn calculate_data_flow(_: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n if context.blocks.len() < 2 {\n\n return Flow::Continue;\n\n }\n\n\n\n fn walk(\n\n block_idx: usize,\n\n blocks: &[Block],\n\n block_vars: &mut Vec<BlockVars>,\n\n prev_vars_exported: Option<&mut BTreeSet<Variable>>,\n\n ) {\n\n let mut vars_exported = btreeset![];\n\n let block = &blocks[block_idx];\n\n for &succ in &block.succs {\n\n if succ > block_idx {\n\n walk(succ, blocks, block_vars, Some(&mut vars_exported));\n\n }\n\n }\n\n let block_vars = &mut block_vars[block_idx];\n\n block_vars.vars_exported = vars_exported;\n", "file_path": "src/ssa/passes/mem.rs", "rank": 14, "score": 132530.1345585031 }, { "content": "pub fn cleanup_jump_blocks(data: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n if context.blocks.len() < 2 {\n\n return Flow::Continue;\n\n }\n\n data.invalid_build_graph = true;\n\n\n\n // map of new block idx -> old block idx\n\n let mut idx_map: Vec<usize> = (0..context.blocks.len()).collect();\n\n // map of old jmp location -> new jmp location (in old block indices)\n\n let mut jmp_map: BTreeMap<usize, usize> = btreemap![];\n\n\n\n let len = context.blocks.len();\n\n let old_blocks = std::mem::replace(&mut 
context.blocks, Vec::with_capacity(len));\n\n let old_block_vars = std::mem::replace(&mut context.block_vars, Vec::with_capacity(len));\n\n\n\n for (idx, block) in old_blocks.iter().enumerate() {\n\n if let InsType::Jmp(n) = block.postlude.as_ref().unwrap().typed {\n\n if block.ins.is_empty() {\n\n jmp_map.insert(idx, n);\n\n }\n", "file_path": "src/ssa/passes/graph.rs", "rank": 15, "score": 130470.92631473993 }, { "content": "pub fn collect_garbage_vars(data: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n dbg_println!(\"before tracing: {}\", context.print());\n\n let mut var_to_ins: Vec<Option<&Ins>> = vec![None; context.variables.len()];\n\n let mut roots = vec![];\n\n for block in &context.blocks {\n\n for ins in &block.ins {\n\n if let Some(retvar) = ins.retvar() {\n\n var_to_ins[usize::from(retvar)] = Some(ins);\n\n }\n\n if ins.typed.is_jmp() || ins.typed.has_side_effects() {\n\n if let Some(retvar) = ins.retvar() {\n\n roots.push(usize::from(retvar));\n\n }\n\n ins.each_used_var(|var| roots.push(usize::from(var)));\n\n }\n\n }\n\n }\n\n let mut alive: BitSet = BitSet::with_capacity(context.variables.len());\n\n fn trace(var: usize, var_to_ins: &[Option<&Ins>], alive: &mut BitSet) {\n\n if alive.contains(var) {\n", "file_path": "src/ssa/passes/gc.rs", "rank": 16, "score": 130470.92631473993 }, { "content": "pub fn rename_vars_and_insert_phis(_: &mut ContextLocalData, context: &mut Context) -> Flow {\n\n let num_blocks = context.blocks.len();\n\n\n\n if num_blocks > 1 {\n\n let mut defsites: Vec<BTreeSet<usize>> = vec![btreeset![]; context.variables.len()];\n\n\n\n for (idx, (block, block_vars)) in context\n\n .blocks\n\n .iter()\n\n .zip(context.block_vars.iter_mut())\n\n .enumerate()\n\n {\n\n let mut vars_used = BTreeSet::new();\n\n for ins in &block.ins {\n\n if let Some(retvar) = ins.retvar() {\n\n let set = &mut defsites[usize::from(retvar)];\n\n set.insert(idx);\n\n }\n\n ins.each_used_var(|used| {\n\n vars_used.insert(used);\n", 
"file_path": "src/ssa/passes/ssa.rs", "rank": 17, "score": 130470.92631473993 }, { "content": "pub fn mangle_type(typed: &isa::Type, dest: &mut String) {\n\n match typed {\n\n isa::Type::NoReturn => dest.push('R'),\n\n isa::Type::Nil => dest.push('N'),\n\n isa::Type::Bool => dest.push('O'),\n\n isa::Type::I8 => dest.push('B'),\n\n isa::Type::I16 => dest.push('H'),\n\n isa::Type::I32 => dest.push('W'),\n\n isa::Type::I64 => dest.push('Q'),\n\n isa::Type::ISize => dest.push('S'),\n\n isa::Type::F64 => dest.push('D'),\n\n isa::Type::Pointer(x) => {\n\n dest.push('P');\n\n dest.push(match x.tag {\n\n isa::BorrowModifier::Immutable => 'i',\n\n isa::BorrowModifier::Mutable => 'm',\n\n isa::BorrowModifier::Unique => 'u',\n\n });\n\n mangle_type(&x.typed, dest);\n\n }\n", "file_path": "src/codegen/mangler.rs", "rank": 18, "score": 125501.99580479585 }, { "content": "pub trait Visitor {\n\n fn visit_program(&mut self, n: &mut Program) -> VisitorResult;\n\n visit_func!(visit_import, ImportStatement);\n\n visit_func!(visit_class, ClassStatement);\n\n visit_func!(visit_class_init, ClassInitExpr);\n\n visit_func!(visit_defstmt, DefStatement);\n\n visit_func!(visit_return, ReturnExpr);\n\n visit_func!(visit_whileexpr, WhileExpr);\n\n visit_func!(visit_ifexpr, IfExpr);\n\n visit_func!(visit_callexpr, CallExpr);\n\n visit_func!(visit_letexpr, LetExpr);\n\n visit_func!(visit_binexpr, BinExpr);\n\n visit_func!(visit_asexpr, AsExpr);\n\n visit_func!(visit_member_expr, MemberExpr);\n\n visit_func!(visit_value, Value);\n\n visit_func!(visit_typeid, TypeId);\n\n visit_func!(visit_break, BreakExpr);\n\n visit_func!(visit_borrow, BorrowExpr);\n\n visit_func!(visit_deref, DerefExpr);\n\n visit_func!(visit_unary, UnaryExpr);\n\n visit_func!(visit_modstmt, ModStatement);\n\n visit_func!(visit_path, PathExpr);\n\n}\n\n\n", "file_path": "src/ast/mod.rs", "rank": 19, "score": 121314.7550657652 }, { "content": "pub fn const_to_value(builder: &mut FunctionBuilder, c: &isa::Constant) -> Value 
{\n\n match c {\n\n isa::Constant::Bool(x) => builder.ins().bconst(types::B1, *x),\n\n isa::Constant::I32(x) => builder.ins().iconst(types::I32, *x as i64),\n\n isa::Constant::I64(x) => builder.ins().iconst(types::I64, *x),\n\n isa::Constant::F64(x) => builder.ins().f64const(*x),\n\n }\n\n}\n\n\n", "file_path": "src/codegen/cranelift/translator.rs", "rank": 20, "score": 117830.57767218884 }, { "content": "pub fn parse_and_run(source: &str, runtime: Runtime) -> Result<(), compiler::Error> {\n\n let program = parse_to_ssa(source)?;\n\n let settings = Settings::default();\n\n unsafe { (CraneliftBackend {}).run(&program, &settings, &runtime) }\n\n}\n\n\n", "file_path": "tests/utils.rs", "rank": 21, "score": 106403.79469963029 }, { "content": "pub fn calculate_block_variable_declaration(\n\n _: &mut ContextLocalData,\n\n context: &mut Context,\n\n) -> Flow {\n\n for (block, block_vars) in context.blocks.iter_mut().zip(context.block_vars.iter_mut()) {\n\n let mut vars_declared_in_this_block = BTreeSet::new();\n\n let mut vars_used = BTreeSet::new();\n\n for ins in block.ins.iter() {\n\n if let Some(retvar) = ins.retvar() {\n\n vars_declared_in_this_block.insert(retvar);\n\n }\n\n ins.each_used_var(|used| {\n\n vars_used.insert(used);\n\n });\n\n }\n\n // Phi assignments are not declared in this block\n\n vars_declared_in_this_block = vars_declared_in_this_block\n\n .difference(&block_vars.vars_phi)\n\n .cloned()\n\n .collect();\n", "file_path": "src/ssa/passes/mem.rs", "rank": 22, "score": 105215.6592821441 }, { "content": "struct ConstPrinter<'a>(pub &'a Constant);\n\n\n\nimpl<'a> std::fmt::Display for ConstPrinter<'a> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self.0 {\n\n Constant::Bool(k) => write!(f, \"Bool({})\", *k),\n\n Constant::I32(k) => write!(f, \"I32({})\", *k),\n\n Constant::I64(k) => write!(f, \"I64({})\", *k),\n\n Constant::F64(k) => write!(f, \"F64({})\", f64::from_bits(*k)),\n\n }\n\n }\n\n}\n\n\n\npub struct 
InsPrinter<'a>(pub &'a Ins);\n\n\n\nimpl<'a> std::fmt::Display for InsPrinter<'a> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n if let Some(retvar) = self.0.retvar() {\n\n write!(f, \"v{} = \", retvar)?;\n\n }\n", "file_path": "src/ssa/isa/print.rs", "rank": 23, "score": 102025.69139186219 }, { "content": "pub trait LoadFunction = FnMut(LoadFunctionArg);\n\n\n\nimpl<B> Codegen<B>\n\nwhere\n\n B: Backend,\n\n{\n\n fn generate_function_signature_x86_64_sysv<F>(\n\n &self,\n\n program: &isa::Program,\n\n arg_types: &[isa::Type],\n\n return_type: &isa::Type,\n\n sig: &mut Signature,\n\n mut load_function: F,\n\n ) where\n\n F: LoadFunction,\n\n {\n\n match *return_type {\n\n isa::Type::NoReturn => (),\n\n isa::Type::Nil => (),\n\n _ => {\n", "file_path": "src/codegen/cranelift/abi.rs", "rank": 24, "score": 99358.06422455116 }, { "content": "fn usage(program: &str, commands: &BTreeMap<&str, (&str, CommandFn)>) {\n\n println!(\"Usage:\\n\\t{} [options] <command> [file]\\n\", program);\n\n println!(\"Commands:\");\n\n for (name, (desc, _)) in commands {\n\n println!(\"\\t{}: {}\", name, desc);\n\n }\n\n println!(\"\\nOptions:\");\n\n let mut options = [\n\n (\n\n \"-backend\",\n\n \"select backend for code generation (cranelift, c)\",\n\n ),\n\n (\"-obj\", \"generate an object file instead of executable\"),\n\n (\"-O\", \"optimize for size and speed\"),\n\n (\"-Onone\", \"don't optimize at all\"),\n\n (\"-Ospeed\", \"optimize for speed\"),\n\n (\"-prelude\", \"insert a prelude file\"),\n\n ];\n\n options.sort_by_key(|k| k.0);\n\n for (name, desc) in &options {\n\n println!(\"\\t{}: {}\", name, desc)\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 25, "score": 92376.09400832622 }, { "content": "pub fn to_var(x: isa::Variable) -> Variable {\n\n Variable::with_u32(x.into())\n\n}\n\n\n", "file_path": "src/codegen/cranelift/translator.rs", "rank": 26, "score": 92265.70585617438 }, { "content": "fn process_program(\n\n sources: &mut 
Sources,\n\n mut ast: ast::Program,\n\n) -> Result<ssa::isa::Program, compiler::Error> {\n\n let mut program = ssa::visitor::SSAVisitor::generate(&mut ast, &RefCell::new(sources))?;\n\n for context in program.contexts.values_mut() {\n\n let mut data = passes::ContextLocalData::new();\n\n for pass in SSA_PASSES {\n\n match pass(&mut data, context) {\n\n Flow::Continue => (),\n\n Flow::Break => {\n\n break;\n\n }\n\n Flow::Err(code) => {\n\n use crate::compiler::error::Code;\n\n let span = match &code {\n\n Code::MemoryError { position, .. } => sources.get_span(*position),\n\n _ => None,\n\n };\n\n return Err(compiler::Error { error: code, span });\n", "file_path": "src/compiler/mod.rs", "rank": 27, "score": 91679.52566063241 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn struct_init() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_substr(substring: FatPointer<u8>, age: i32) {\n\n assert_eq!(substring.len(), 3);\n\n unsafe {\n\n assert_eq!(std::str::from_utf8(substring.slice()).unwrap(), \"ABC\");\n\n }\n\n assert_eq!(age, 20);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\n\n \"record_substr\",\n\n record_substr as extern \"C\" fn(FatPointer<u8>, i32),\n\n );\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_substr\\\"(name: &Substring, age: I32): Nil\n\n\n\n struct Person =>\n", "file_path": "tests/compiler/class.rs", "rank": 28, "score": 91496.6440260513 }, { "content": "#[test]\n\nfn move_struct() {\n\n extern \"C\" fn noop(_: i32) {}\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"noop\", noop as extern \"C\" fn(i32));\n\n runtime.insert_func(\"noop_i32\", noop as extern \"C\" fn(i32));\n\n let err = utils::parse_and_run(\n\n \"\\\n\n struct Other =>\n\n a_int: I32\n\n \n\n extern def noop(n: Other): Nil\n\n extern def noop_i32(n: I32): Nil\n\n\n\n x := Other {\n\n a_int: 10,\n\n }\n\n noop(x)\n\n noop_i32(x.a_int)\n\n 
\",\n\n runtime,\n\n )\n\n .unwrap_err();\n\n match err.error {\n\n error::Code::MemoryError { typed, .. } => assert_eq!(typed, error::MemoryErrorType::Move),\n\n other => panic!(\"error isn't memory error: {}\", other),\n\n }\n\n}\n\n\n", "file_path": "tests/compiler/move.rs", "rank": 29, "score": 91496.6440260513 }, { "content": "#[test]\n\nfn module_struct() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_i32(n: i32) {\n\n assert_eq!(n, 1);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(n: I32): Nil\n\n\n\n mod Thing => \n\n struct Thing =>\n\n n: I32\n\n\n\n thing := Thing::Thing {\n\n n: 1,\n\n }\n\n record(thing.n)\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n assert!(RUN_FLAG.load(Ordering::Relaxed));\n\n}\n", "file_path": "tests/compiler/modules.rs", "rank": 30, "score": 91496.6440260513 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn nested_struct() {\n\n extern \"C\" fn noop(_: i32) {}\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"noop\", noop as extern \"C\" fn(i32));\n\n let err = utils::parse_and_run(\n\n \"\\\n\n extern def noop(n: I32): Nil\n\n\n\n struct Other =>\n\n a_int: I32\n\n\n\n struct Thing =>\n\n other: Other\n\n \n\n thing := Thing {\n\n other: Other {\n\n a_int: 10,\n\n },\n\n }\n\n j := thing.other\n", "file_path": "tests/compiler/move.rs", "rank": 31, "score": 91496.6440260513 }, { "content": "#[test]\n\nfn borrow_after_borrow_mut() {\n\n extern \"C\" fn record_i32(i: i32) {\n\n assert_eq!(i, 10);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n let err = utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(i: I32): Nil\n\n\n\n i := 10\n\n x := &i\n\n y := &mut i\n\n 
record(*x)\n\n \",\n\n runtime,\n\n )\n\n .expect_err(\"error out on parse_and_run\");\n\n match err.error {\n\n error::Code::MemoryError { typed, .. } => assert_eq!(typed, error::MemoryErrorType::Borrow),\n\n other => panic!(\"error isn't memory error: {}\", other),\n\n }\n\n}\n\n\n", "file_path": "tests/compiler/borrow.rs", "rank": 32, "score": 88633.90661634895 }, { "content": "#[test]\n\nfn nested_struct_init() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_substr(first_name: FatPointer<u8>, last_name: FatPointer<u8>, age: i32) {\n\n unsafe {\n\n assert_eq!(std::str::from_utf8(first_name.slice()).unwrap(), \"Abc\");\n\n assert_eq!(std::str::from_utf8(last_name.slice()).unwrap(), \"Def\");\n\n }\n\n assert_eq!(age, 20);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\n\n \"record_substr\",\n\n record_substr as extern \"C\" fn(FatPointer<u8>, FatPointer<u8>, i32),\n\n );\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_substr\\\"(first_name: &Substring, last_name: &Substring, age: I32): Nil\n\n\n\n struct Name =>\n", "file_path": "tests/compiler/class.rs", "rank": 33, "score": 88572.54706211567 }, { "content": "pub fn const_to_type(c: &isa::Constant) -> types::Type {\n\n match c {\n\n isa::Constant::Bool(_) => types::B1,\n\n isa::Constant::I32(_) => types::I32,\n\n isa::Constant::I64(_) => types::I64,\n\n isa::Constant::F64(_) => types::F64,\n\n }\n\n}\n", "file_path": "src/codegen/cranelift/translator.rs", "rank": 34, "score": 85981.60899149382 }, { "content": "pub fn mangle(unmangled: &Rc<isa::FunctionName>) -> String {\n\n let mut name = \"_I\".to_string();\n\n let id = unmangled\n\n .path\n\n .iter()\n\n .map(|path| {\n\n let mut id = String::new();\n\n mangle_string(&path, &mut id);\n\n id\n\n })\n\n .collect::<Vec<String>>()\n\n .join(\"$$\");\n\n write!(name, \"{}{}\", id.len(), id).unwrap();\n\n for typed in &unmangled.arg_types 
{\n\n mangle_type(typed, &mut name);\n\n }\n\n name\n\n}\n", "file_path": "src/codegen/mangler.rs", "rank": 35, "score": 85981.60899149382 }, { "content": "#[test]\n\nfn nested_slice_struct_init() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record(n: i32, slice: [i32; 2]) {\n\n assert_eq!(n, 1337);\n\n assert_eq!(slice[0], 100);\n\n assert_eq!(slice[1], 200);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record\", record as extern \"C\" fn(i32, [i32; 2]));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record(n: I32, slice: [I32; 2]): Nil\n\n\n\n struct Thing =>\n\n a_int: I32\n\n a_slice: [I32; 2]\n\n \n\n thing := Thing {\n\n a_int: 1337,\n\n a_slice: [100, 200],\n\n }\n\n record(thing.a_int, thing.a_slice)\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n assert!(RUN_FLAG.load(Ordering::Relaxed));\n\n}\n\n\n", "file_path": "tests/compiler/class.rs", "rank": 36, "score": 85891.23624733822 }, { "content": "#[test]\n\nfn nested_struct_init_with_forward_decl() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_substr(first_name: FatPointer<u8>, last_name: FatPointer<u8>, age: i32) {\n\n unsafe {\n\n assert_eq!(std::str::from_utf8(first_name.slice()).unwrap(), \"Abc\");\n\n assert_eq!(std::str::from_utf8(last_name.slice()).unwrap(), \"Def\");\n\n }\n\n assert_eq!(age, 20);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\n\n \"record_substr\",\n\n record_substr as extern \"C\" fn(FatPointer<u8>, FatPointer<u8>, i32),\n\n );\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_substr\\\"(first_name: &Substring, last_name: &Substring, age: I32): Nil\n\n\n\n struct Person =>\n", "file_path": "tests/compiler/class.rs", "rank": 37, "score": 83423.67925187504 }, { "content": "fn compiler_error(opts: Options, sources: 
Option<Sources>, error: compiler::Error) -> ! {\n\n if let Some(span) = error.span {\n\n let sources = sources.unwrap();\n\n let mut message = String::new();\n\n write!(&mut message, \"{}\", error.error).unwrap();\n\n\n\n let mut labels = vec![Label::primary(span.file, span.start..span.end)];\n\n for (span_idx, msg) in error.error.diagnostics() {\n\n let span = sources.get_span(span_idx).unwrap();\n\n labels.push(\n\n Label::secondary(span.file, span.start..span.end).with_message(msg.to_string()),\n\n );\n\n }\n\n\n\n let diagnostic = Diagnostic::error()\n\n .with_message(message)\n\n .with_labels(labels);\n\n let writer = StandardStream::stderr(ColorChoice::Always);\n\n let config = codespan_reporting::term::Config::default();\n\n codespan_reporting::term::emit(&mut writer.lock(), &config, &sources, &diagnostic).unwrap();\n\n } else {\n\n eprintln!(\"\\x1b[1m\\x1b[38;5;11m{}:\\x1b[0m {:?}\", opts.args[0], error);\n\n }\n\n std::process::exit(-1)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 38, "score": 75722.82733864483 }, { "content": "fn align(value: u32, to: u32) -> u32 {\n\n (value + to - 1) & !(to - 1)\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum StructFieldType {\n\n Void,\n\n I8,\n\n I16,\n\n I32,\n\n I64,\n\n F64,\n\n Struct(Rc<StructData>),\n\n}\n\n\n\nimpl StructFieldType {\n\n pub fn size_of(&self) -> u32 {\n\n match self {\n\n StructFieldType::Void => 1,\n\n StructFieldType::I8 => 1,\n", "file_path": "src/codegen/structs.rs", "rank": 39, "score": 75349.9981214826 }, { "content": "struct Options<'a> {\n\n pub backend: Backend,\n\n pub opt_level: OptLevel,\n\n pub output_type: OutputType,\n\n pub args: &'a Vec<String>,\n\n pub command_idx: usize,\n\n pub prelude: Option<&'a str>,\n\n}\n\n\n\nimpl<'a> Options<'a> {\n\n pub fn to_settings(&self) -> Settings {\n\n Settings {\n\n opt_level: self.opt_level,\n\n prelude: self.prelude.map(|x| PathBuf::from(x)),\n\n }\n\n }\n\n\n\n pub fn to_compiler(&self) -> compiler::Compiler {\n\n let settings = 
self.to_settings();\n\n compiler::Compiler::new(self.backend.to_backend(), settings)\n\n }\n\n\n\n pub fn arg(&self, offset: usize) -> Option<&String> {\n\n self.args.get(self.command_idx + offset)\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 40, "score": 64868.386232410805 }, { "content": "struct Codegen<'a> {\n\n program: &'a isa::Program,\n\n mangled_cache: FnvHashMap<Rc<isa::FunctionName>, String>,\n\n function_decls: Vec<String>,\n\n function_bodies: Vec<String>,\n\n typedefs: FnvHashMap<isa::Type, String>,\n\n}\n\n\n\nimpl<'a> Codegen<'a> {\n\n fn new(program: &'a isa::Program) -> Self {\n\n Self {\n\n program,\n\n mangled_cache: fnv_hashmap![],\n\n function_decls: vec![],\n\n function_bodies: vec![],\n\n typedefs: fnv_hashmap![],\n\n }\n\n }\n\n\n\n fn get_struct_data(&self, typed: &isa::Type) -> Option<Rc<StructData>> {\n", "file_path": "src/codegen/c/codegen.rs", "rank": 41, "score": 63092.97795682885 }, { "content": "struct MemberRefData {\n\n pub accessor: String,\n\n pub bounds_checks: Vec<String>,\n\n}\n\n\n", "file_path": "src/codegen/c/codegen.rs", "rank": 42, "score": 62787.027731914 }, { "content": "fn main() {\n\n lalrpop::process_root().unwrap();\n\n}\n", "file_path": "build.rs", "rank": 43, "score": 62706.27517046523 }, { "content": "type CommandFn = fn(Options);\n\n\n", "file_path": "src/main.rs", "rank": 44, "score": 62545.43751722455 }, { "content": "struct InsContext<'a> {\n\n context: &'a isa::Context,\n\n}\n\n\n", "file_path": "src/codegen/c/codegen.rs", "rank": 45, "score": 61478.32856680262 }, { "content": "struct StackLoadIns {\n\n pub slot: StackSlot,\n\n pub loads: Vec<(Offset32, Value)>,\n\n}\n\n\n", "file_path": "src/codegen/cranelift/codegen.rs", "rank": 46, "score": 61434.69855964434 }, { "content": "#[test]\n\nfn if_flat() {\n\n utils::parse_to_ssa(\n\n \"\n\n if true => pass\n\n else => pass\n\n \",\n\n )\n\n .expect(\"able to parse_to_ssa\");\n\n}\n\n\n", "file_path": "tests/compiler/if.rs", "rank": 47, "score": 
60778.70535487005 }, { "content": "fn main() {\n\n let args: Vec<String> = std::env::args().collect();\n\n let program = &args[0];\n\n let commands: BTreeMap<&str, (&str, CommandFn)> = btreemap![\n\n \"run\" => (\"Runs the specified program\", run as CommandFn),\n\n \"build\" => (\"Builds the specified program\", build as CommandFn),\n\n ];\n\n let mut opts = Options {\n\n backend: Backend::Cranelift,\n\n opt_level: OptLevel::SpeedAndSize,\n\n output_type: OutputType::Executable,\n\n args: &args,\n\n command_idx: 0,\n\n prelude: None,\n\n };\n\n let mut state = ArgParseState::None;\n\n for (idx, arg) in args.iter().skip(1).enumerate() {\n\n match state {\n\n ArgParseState::None => match arg.as_str() {\n\n \"-backend\" => {\n", "file_path": "src/main.rs", "rank": 48, "score": 60778.70535487005 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn while_loop() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_i32(n: i32) {\n\n assert_eq!(n, 10);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(n: I32): Nil\n\n\n\n mut i := 0\n\n while i < 10 =>\n\n i += 1\n\n record(i)\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n assert!(RUN_FLAG.load(Ordering::Relaxed));\n\n}\n\n\n", "file_path": "tests/compiler/while.rs", "rank": 49, "score": 60778.70535487005 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn if_expr() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_i32(n: i32) {\n\n assert_eq!(n, 1);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(n: I32): Nil\n\n\n", "file_path": "tests/compiler/if.rs", 
"rank": 50, "score": 60778.70535487005 }, { "content": "struct InsContext<'a> {\n\n pub context: &'a isa::Context,\n\n pub program: &'a isa::Program,\n\n pub bblocks: &'a Vec<Block>,\n\n pub stack_loads_ins: &'a BTreeMap<usize, StackLoadIns>,\n\n pub struct_return: Option<Value>,\n\n}\n\n\n\n/// Code generator\n\npub struct Codegen<B: Backend> {\n\n pub(super) module: Module<B>,\n\n mangled_cache: FnvHashMap<Rc<isa::FunctionName>, String>,\n\n string_mapping: FnvHashMap<Rc<str>, DataId>,\n\n}\n\n\n\nimpl<B> Codegen<B>\n\nwhere\n\n B: Backend,\n\n{\n\n pub fn from_builder(builder: B::Builder) -> Self {\n", "file_path": "src/codegen/cranelift/codegen.rs", "rank": 51, "score": 60003.54928903972 }, { "content": "#[test]\n\nfn while_expr_nil() {\n\n let program = utils::parse_to_ssa(\n\n \"\n", "file_path": "tests/compiler/while.rs", "rank": 52, "score": 59033.94893417937 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn len() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_i64(n: i64) {\n\n assert_eq!(n, 3);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i64\", record_i64 as extern \"C\" fn(i64));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i64\\\"(n: ISize): Nil\n\n\n\n record(\\\"ABC\\\".len)\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n assert!(RUN_FLAG.load(Ordering::Relaxed));\n\n}\n\n\n", "file_path": "tests/compiler/string.rs", "rank": 53, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn if_flat_with_expr() {\n\n utils::parse_to_ssa(\n\n \"\n\n if true => 1\n\n else => 2\n\n \",\n\n )\n\n .expect(\"able to parse_to_ssa\");\n\n}\n", "file_path": "tests/compiler/if.rs", "rank": 54, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn while_loop() {\n\n let runtime = Runtime::new();\n\n assert!(utils::parse_and_run(\n\n \"\\\n\n extern def noop(n: I32): Nil\n\n\n\n mut i := 0\n\n while i < 10 
=>\n\n i += 1\n\n \",\n\n runtime,\n\n )\n\n .is_ok());\n\n}\n", "file_path": "tests/compiler/move.rs", "rank": 55, "score": 59033.94893417937 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn index() {\n\n extern \"C\" fn record_i32(i: i32, n: i32) {\n\n assert_eq!(i + 1, n);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32, i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(i: I32, n: I32): Nil\n\n\n\n x := [1, 2, 3]\n\n record(0, x[0])\n\n record(1, x[1])\n\n record(2, x[2])\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n}\n\n\n", "file_path": "tests/compiler/slice.rs", "rank": 56, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn while_expr_type() {\n\n let program = utils::parse_to_ssa(\n\n \"\n", "file_path": "tests/compiler/while.rs", "rank": 57, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn if_expr_elsif() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_i32(i: i32, n: i32) {\n\n assert_eq!(i, n);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32, i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(i: I32, n: I32): Nil\n\n\n", "file_path": "tests/compiler/if.rs", "rank": 58, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn while_loop_set() {\n\n static RUN_IT: AtomicI32 = AtomicI32::new(0);\n\n extern \"C\" fn record_i32(n: i32) {\n\n let prev = RUN_IT.fetch_add(1, Ordering::Relaxed);\n\n assert_eq!(prev, n);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(n: I32): Nil\n\n\n\n mut i := 0\n\n while i < 10 =>\n\n record(i)\n\n i += 1\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to 
parse_and_run\");\n\n assert_eq!(RUN_IT.load(Ordering::Relaxed), 10);\n\n}\n\n\n", "file_path": "tests/compiler/while.rs", "rank": 59, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn while_loop_break() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_i32(n: i32) {\n\n assert_eq!(n, 0);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(n: I32): Nil\n\n\n\n mut i := 0\n\n while i < 10 =>\n\n break\n\n record(i)\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n assert!(RUN_FLAG.load(Ordering::Relaxed));\n\n}\n\n\n", "file_path": "tests/compiler/while.rs", "rank": 60, "score": 59033.94893417937 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn and_expr() {\n\n extern \"C\" fn record_i32(i: i32, n: i32) {\n\n assert_eq!(i, n);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32, i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(i: I32, n: I32): Nil\n\n\n", "file_path": "tests/compiler/logical.rs", "rank": 61, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn ffi() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_substr(slice: [i32; 4]) {\n\n assert_eq!(slice[0], 10);\n\n assert_eq!(slice[1], 20);\n\n assert_eq!(slice[2], 30);\n\n assert_eq!(slice[3], 40);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_slice\", record_substr as extern \"C\" fn([i32; 4]));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_slice\\\"(n: [I32; 4]): Nil\n\n\n\n x := [10,20,30,40]\n\n record(x)\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n 
assert!(RUN_FLAG.load(Ordering::Relaxed));\n\n}\n\n\n", "file_path": "tests/compiler/slice.rs", "rank": 62, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn or_expr() {\n\n extern \"C\" fn record_i32(i: i32, n: i32) {\n\n assert_eq!(i, n);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32, i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(i: I32, n: I32): Nil\n\n\n", "file_path": "tests/compiler/logical.rs", "rank": 63, "score": 59033.94893417937 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn fib() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_i32(n: i32) {\n\n assert_eq!(n, 89);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(n: I32): Nil\n\n\n", "file_path": "tests/compiler/integration.rs", "rank": 64, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn while_loop_nested_x() {\n\n use std::cell::RefCell;\n\n use std::sync::Mutex;\n\n lazy_static! 
{\n\n static ref OUTPUT: Mutex<RefCell<Vec<(i32, i32, i32)>>> = Mutex::new(RefCell::new(vec![]));\n\n }\n\n extern \"C\" fn record_i32(i: i32, j: i32, x: i32) {\n\n OUTPUT.lock().unwrap().borrow_mut().push((i, j, x));\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32, i32, i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(i: I32, j: I32, x: I32): Nil\n\n\n\n mut i := 0\n\n mut x := 0\n\n while i < 10 =>\n\n mut j := 0\n\n while j < 5 =>\n", "file_path": "tests/compiler/while.rs", "rank": 65, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn if_expr_nil() {\n\n let program = utils::parse_to_ssa(\n\n \"\n", "file_path": "tests/compiler/if.rs", "rank": 66, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn while_loop_nested_with_if() {\n\n extern \"C\" fn record_i32(n: i32) {\n\n assert_eq!(n, 5);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(x: I32): Nil\n\n\n\n mut i := 0\n\n mut x := 0\n\n while i < 10 =>\n\n if i < 5 =>\n\n x += 1\n\n i += 1\n\n record(x)\n\n \",\n\n runtime,\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/compiler/while.rs", "rank": 67, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn if_expr() {\n\n let runtime = Runtime::new();\n\n assert!(utils::parse_and_run(\n\n \"\\\n\n mut i := 0\n\n if i < 10 =>\n\n i += 1\n\n else =>\n\n i += 2\n\n \",\n\n runtime,\n\n )\n\n .is_ok());\n\n}\n\n\n", "file_path": "tests/compiler/move.rs", "rank": 68, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn while_loop_nested() {\n\n use std::cell::RefCell;\n\n use std::sync::Mutex;\n\n lazy_static! 
{\n\n static ref OUTPUT: Mutex<RefCell<Vec<(i32, i32)>>> = Mutex::new(RefCell::new(vec![]));\n\n }\n\n extern \"C\" fn record_i32(i: i32, j: i32) {\n\n OUTPUT.lock().unwrap().borrow_mut().push((i, j));\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32, i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(i: I32, j: I32): Nil\n\n\n\n mut i := 0\n\n while i < 10 =>\n\n mut j := 0\n\n while j < 5 =>\n\n record(i, j)\n", "file_path": "tests/compiler/while.rs", "rank": 69, "score": 59033.94893417937 }, { "content": "#[test]\n\nfn if_expr_unify() {\n\n let program = utils::parse_to_ssa(\n\n \"\n", "file_path": "tests/compiler/if.rs", "rank": 70, "score": 59033.94893417937 }, { "content": "pub trait ToGenericFunction {\n\n fn to_generic(&self) -> GenericFunction;\n\n}\n\n\n\n// Impls for function pointers\n\nmacro_rules! fnptr_impls_safety_abi {\n\n ($FnTy: ty, $($Arg: ident),*) => {\n\n impl<Ret, $($Arg),*> ToGenericFunction for $FnTy {\n\n fn to_generic(&self) -> GenericFunction {\n\n GenericFunction(*self as _)\n\n }\n\n }\n\n }\n\n}\n\n\n\nmacro_rules! fnptr_impls_args {\n\n ($($Arg: ident),+) => {\n\n fnptr_impls_safety_abi! { extern \"C\" fn($($Arg),+) -> Ret, $($Arg),+ }\n\n fnptr_impls_safety_abi! 
{ unsafe extern \"C\" fn($($Arg),+) -> Ret, $($Arg),+ }\n\n };\n", "file_path": "src/runtime/mod.rs", "rank": 71, "score": 58850.40248642377 }, { "content": "pub trait JitBackend {\n\n /// # Safety\n\n ///\n\n /// This function calls into possibly unsafe iroha code.\n\n unsafe fn run(\n\n &self,\n\n program: &isa::Program,\n\n settings: &Settings,\n\n runtime: &Runtime,\n\n ) -> Result<(), compiler::Error>;\n\n}\n\n\n", "file_path": "src/codegen/backend.rs", "rank": 72, "score": 58850.40248642377 }, { "content": "pub trait ObjectBackend {\n\n fn generate_object(\n\n &self,\n\n program: &isa::Program,\n\n settings: &Settings,\n\n ) -> Result<Vec<u8>, compiler::Error>;\n\n}\n\n\n\npub struct Backend {\n\n pub jit_backend: Option<Box<dyn JitBackend>>,\n\n pub object_backend: Option<Box<dyn ObjectBackend>>,\n\n}\n\n\n\nimpl Backend {\n\n pub fn with_jit<T: 'static + JitBackend>(backend: T) -> Self {\n\n Self {\n\n jit_backend: Some(Box::new(backend)),\n\n object_backend: None,\n\n }\n\n }\n", "file_path": "src/codegen/backend.rs", "rank": 73, "score": 58850.40248642377 }, { "content": "#[test]\n\nfn substring_ffi() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_substr(substring: FatPointer<u8>) {\n\n assert_eq!(substring.len(), 3);\n\n unsafe {\n\n assert_eq!(std::str::from_utf8(substring.slice()).unwrap(), \"ABC\");\n\n }\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\n\n \"record_substr\",\n\n record_substr as extern \"C\" fn(FatPointer<u8>),\n\n );\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_substr\\\"(n: &Substring): Nil\n\n\n\n record(\\\"ABC\\\")\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n assert!(RUN_FLAG.load(Ordering::Relaxed));\n\n}\n\n\n", "file_path": "tests/compiler/string.rs", "rank": 74, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn explicit_cast() {\n\n static RUN_FLAG: AtomicBool = 
AtomicBool::new(false);\n\n extern \"C\" fn record_i64(n: i64) {\n\n assert_eq!(n, 10);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i64\", record_i64 as extern \"C\" fn(i64));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i64\\\"(n: I64): Nil\n\n\n\n record(10 as I64)\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n assert!(RUN_FLAG.load(Ordering::Relaxed));\n\n}\n\n\n", "file_path": "tests/compiler/typing.rs", "rank": 75, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn substring_return() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_substr(substring: FatPointer<u8>) {\n\n assert_eq!(substring.len(), 3);\n\n unsafe {\n\n assert_eq!(std::str::from_utf8(substring.slice()).unwrap(), \"ABC\");\n\n }\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\n\n \"record_substr\",\n\n record_substr as extern \"C\" fn(FatPointer<u8>),\n\n );\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_substr\\\"(n: &Substring): Nil\n\n \n", "file_path": "tests/compiler/string.rs", "rank": 76, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn slice_return() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_substr(slice: [i32; 4]) {\n\n assert_eq!(slice[0], 10);\n\n assert_eq!(slice[1], 20);\n\n assert_eq!(slice[2], 30);\n\n assert_eq!(slice[3], 40);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_slice\", record_substr as extern \"C\" fn([i32; 4]));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_slice\\\"(n: [I32; 4]): Nil\n\n \n", "file_path": "tests/compiler/slice.rs", "rank": 77, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn while_loop_break_nested_if() {\n\n static RUN_FLAG: AtomicBool = 
AtomicBool::new(false);\n\n extern \"C\" fn record_i32(n: i32) {\n\n assert_eq!(n, 5);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(n: I32): Nil\n\n\n\n mut i := 0\n\n while i < 10 =>\n\n if i == 5 =>\n\n break\n\n i += 1\n\n record(i)\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n assert!(RUN_FLAG.load(Ordering::Relaxed));\n\n}\n", "file_path": "tests/compiler/while.rs", "rank": 78, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn while_loop_nested_post_x() {\n\n extern \"C\" fn record_i32(n: i32) {\n\n assert_eq!(n, 50);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(x: I32): Nil\n\n\n\n mut i := 0\n\n mut x := 0\n\n while i < 10 =>\n\n mut j := 0\n\n while j < 5 =>\n\n x += 1\n\n j += 1\n\n i += 1\n\n record(x)\n\n \",\n\n runtime,\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/compiler/while.rs", "rank": 79, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn ptr_access() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_i8(n: i8) {\n\n assert_eq!(n, 'A' as i8);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i8\", record_i8 as extern \"C\" fn(i8));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i8\\\"(n: I8): Nil\n\n\n\n record(\\\"ABC\\\"[0])\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n assert!(RUN_FLAG.load(Ordering::Relaxed));\n\n}\n\n\n", "file_path": "tests/compiler/string.rs", "rank": 80, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn cast_on_binop() {\n\n static RUN_FLAG: AtomicBool = 
AtomicBool::new(false);\n\n extern \"C\" fn record_i64(n: i64) {\n\n assert_eq!(n, 10);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i64\", record_i64 as extern \"C\" fn(i64));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i64\\\"(n: I64): Nil\n\n\n", "file_path": "tests/compiler/typing.rs", "rank": 81, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn while_expr_cond_return() {\n\n let program = utils::parse_to_ssa(\n\n \"\n", "file_path": "tests/compiler/while.rs", "rank": 82, "score": 57447.17594840565 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn int_borrow() {\n\n extern \"C\" fn record_i32(i: i32) {\n\n assert_eq!(i, 10);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(i: I32): Nil\n\n\n\n i := 10\n\n x := &i\n\n record(*x)\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n}\n\n\n", "file_path": "tests/compiler/borrow.rs", "rank": 83, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn module_function() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_i32(n: i32) {\n\n assert_eq!(n, 1);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(n: I32): Nil\n\n\n\n mod Thing => \n", "file_path": "tests/compiler/modules.rs", "rank": 84, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn if_expr_both_branch_return() {\n\n let program = utils::parse_to_ssa(\n\n \"\n", "file_path": "tests/compiler/if.rs", "rank": 85, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn if_expr_cond_return() {\n\n let program = utils::parse_to_ssa(\n\n \"\n", "file_path": 
"tests/compiler/if.rs", "rank": 86, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn index_assign() {\n\n extern \"C\" fn record_i32(n: i32) {\n\n assert_eq!(n, 100);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(i: I32): Nil\n\n\n\n x:=[1,2,3]\n\n x[0] = 100\n\n record(x[0])\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n}\n\n\n", "file_path": "tests/compiler/slice.rs", "rank": 87, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn borrow_asg() {\n\n extern \"C\" fn record_i32(i: i32) {\n\n assert_eq!(i, 1000);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(i: I32): Nil\n\n\n\n mut i := 10\n\n y := &mut i\n\n *y = 1000\n\n record(i)\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n}\n", "file_path": "tests/compiler/borrow.rs", "rank": 88, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn while_expr_body_return() {\n\n let program = utils::parse_to_ssa(\n\n \"\n", "file_path": "tests/compiler/while.rs", "rank": 89, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn uninit_assign() {\n\n extern \"C\" fn record_i32(i: i32, n: i32) {\n\n assert_eq!(i + 1, n);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32, i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(i: I32, n: I32): Nil\n\n\n\n mut x : [I32; 3] = uninitialized\n\n x[0] = 1\n\n x[1] = 2\n\n x[2] = 3\n\n record(0, x[0])\n\n record(1, x[1])\n\n record(2, x[2])\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n}\n", "file_path": "tests/compiler/slice.rs", "rank": 90, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn 
string_dereference() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_i32(n: i32) {\n\n assert_eq!(n, 0x41);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i32\\\"(n: I32): Nil\n\n\n\n record(\\\"A\\\"[0])\n\n \",\n\n runtime,\n\n )\n\n .expect(\"able to parse_and_run\");\n\n assert!(RUN_FLAG.load(Ordering::Relaxed));\n\n}\n\n\n", "file_path": "tests/compiler/string.rs", "rank": 91, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn return_nil() {\n\n let runtime = Runtime::new();\n\n utils::parse_and_run(\n\n \"\\\n", "file_path": "tests/compiler/typing.rs", "rank": 92, "score": 57447.17594840565 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn cast_on_argument() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_i64(n: i64) {\n\n assert_eq!(n, 10);\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\"record_i64\", record_i64 as extern \"C\" fn(i64));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_i64\\\"(n: I64): Nil\n\n\n", "file_path": "tests/compiler/typing.rs", "rank": 93, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn substring_passing() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n extern \"C\" fn record_substr(substring: FatPointer<u8>) {\n\n assert_eq!(substring.len(), 3);\n\n unsafe {\n\n assert_eq!(std::str::from_utf8(substring.slice()).unwrap(), \"ABC\");\n\n }\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\n\n \"record_substr\",\n\n record_substr as extern \"C\" fn(FatPointer<u8>),\n\n );\n\n utils::parse_and_run(\n\n \"\\\n\n extern def record=\\\"record_substr\\\"(n: &Substring): Nil\n\n\n", "file_path": 
"tests/compiler/string.rs", "rank": 94, "score": 57447.17594840565 }, { "content": "#[test]\n\nfn uni_alloc() {\n\n static RUN_FLAG: AtomicBool = AtomicBool::new(false);\n\n static mut MALLOC_I32: UnsafeCell<i32> = UnsafeCell::new(0);\n\n extern \"C\" fn fake_malloc(_size: i32, _align: i32) -> *mut i32 {\n\n unsafe { UnsafeCell::get(&MALLOC_I32) }\n\n }\n\n extern \"C\" fn fake_dealloc(address: isize) {\n\n unsafe {\n\n assert_eq!(address, MALLOC_I32.get() as isize);\n\n }\n\n RUN_FLAG.store(true, Ordering::Relaxed);\n\n }\n\n extern \"C\" fn record_i32(i: i32) {\n\n assert_eq!(i, 10);\n\n }\n\n let mut runtime = Runtime::new();\n\n runtime.insert_func(\n\n \"fake_malloc\",\n\n fake_malloc as extern \"C\" fn(i32, i32) -> *mut i32,\n\n );\n\n runtime.insert_func(\"fake_dealloc\", fake_dealloc as extern \"C\" fn(isize));\n\n runtime.insert_func(\"record_i32\", record_i32 as extern \"C\" fn(i32));\n\n utils::parse_and_run(\n\n \"\\\n\n extern def fake_malloc(): ISize\n\n extern def fake_dealloc(address: ISize): Nil\n\n extern def record=\\\"record_i32\\\"(i: I32): Nil\n\n\n\n @[Public]\n", "file_path": "tests/compiler/borrow.rs", "rank": 95, "score": 57447.17594840565 }, { "content": "pub trait Node: Downcast {\n\n fn print(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n Ok(())\n\n }\n\n\n\n fn visit(&self, visitor: &mut dyn Visitor, b: &NodeBox) -> VisitorResult;\n\n}\n\nimpl_downcast!(Node);\n\n\n\nimpl std::fmt::Display for dyn Node {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.print(f)\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for dyn Node {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n self.print(f)\n\n }\n\n}\n", "file_path": "src/ast/mod.rs", "rank": 96, "score": 56630.55578516127 }, { "content": "#[test]\n\nfn if_expr_unify_false_branch() {\n\n let program = utils::parse_to_ssa(\n\n \"\n", "file_path": "tests/compiler/if.rs", "rank": 97, "score": 55997.85826223293 }, { "content": "#[test]\n\nfn 
if_expr_true_branch_return() {\n\n let program = utils::parse_to_ssa(\n\n \"\n", "file_path": "tests/compiler/if.rs", "rank": 98, "score": 55997.85826223293 }, { "content": "#[test]\n\nfn if_expr_unify_true_branch() {\n\n let program = utils::parse_to_ssa(\n\n \"\n", "file_path": "tests/compiler/if.rs", "rank": 99, "score": 55997.85826223293 } ]
Rust
contracts/pylon/gov/src/executions/mod.rs
kyscott18/interest_split
e24541f6ecfb228ab1397a1f058bb45149c413a9
use cosmwasm_std::{ from_binary, to_binary, CanonicalAddr, CosmosMsg, Decimal, DepsMut, Env, MessageInfo, Order, Response, Uint128, WasmMsg, }; use cosmwasm_storage::{ReadonlyBucket, ReadonlySingleton}; use cw20::Cw20ReceiveMsg; use pylon_token::gov_msg::{ AirdropMsg, Cw20HookMsg, ExecuteMsg, InstantiateMsg, MigrateMsg, StakingMsg, }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use crate::error::ContractError; use crate::state::config::Config; use crate::state::poll::{ExecuteData, Poll, PollCategory, PollStatus}; use crate::state::state::State; pub type ExecuteResult = Result<Response, ContractError>; pub mod airdrop; pub mod poll; pub mod staking; pub fn instantiate( deps: DepsMut, _env: Env, info: MessageInfo, msg: InstantiateMsg, ) -> ExecuteResult { let response = Response::default().add_attribute("action", "instantiate"); let config = Config { pylon_token: deps.api.addr_canonicalize(msg.voting_token.as_str())?, owner: deps.api.addr_canonicalize(info.sender.as_str())?, quorum: msg.quorum, threshold: msg.threshold, voting_period: msg.voting_period, timelock_period: msg.timelock_period, expiration_period: 0u64, proposal_deposit: msg.proposal_deposit, snapshot_period: msg.snapshot_period, }; config.validate()?; let state = State { poll_count: 0, total_share: Uint128::zero(), total_deposit: Uint128::zero(), total_airdrop_count: 0, airdrop_update_candidates: vec![], }; Config::save(deps.storage, &config)?; State::save(deps.storage, &state)?; Ok(response) } pub fn receive( deps: DepsMut, env: Env, info: MessageInfo, cw20_msg: Cw20ReceiveMsg, ) -> ExecuteResult { let config = Config::load(deps.storage)?; if config.pylon_token != deps.api.addr_canonicalize(info.sender.as_str())? 
{ return Err(ContractError::Unauthorized {}); } match from_binary(&cw20_msg.msg) { Ok(Cw20HookMsg::Stake {}) => Ok(Response::new() .add_message(CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: env.contract.address.to_string(), msg: to_binary(&ExecuteMsg::Airdrop(AirdropMsg::Update { target: Some(cw20_msg.sender.to_string()), }))?, funds: vec![], })) .add_message(CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: env.contract.address.to_string(), msg: to_binary(&ExecuteMsg::Staking(StakingMsg::StakeInternal { sender: cw20_msg.sender.to_string(), amount: cw20_msg.amount, }))?, funds: vec![], }))), Ok(Cw20HookMsg::CreatePoll { title, category, description, link, execute_msgs, }) => poll::create( deps, env, cw20_msg.sender, cw20_msg.amount, title, category.into(), description, link, execute_msgs, ), _ => Err(ContractError::DataShouldBeGiven {}), } } #[allow(clippy::too_many_arguments)] pub fn update_config( deps: DepsMut, info: MessageInfo, owner: Option<String>, quorum: Option<Decimal>, threshold: Option<Decimal>, voting_period: Option<u64>, timelock_period: Option<u64>, proposal_deposit: Option<Uint128>, snapshot_period: Option<u64>, ) -> ExecuteResult { let response = Response::new().add_attribute("action", "update_config"); let api = deps.api; let mut config = Config::load(deps.storage)?; if config.owner != api.addr_canonicalize(info.sender.as_str())? 
{ return Err(ContractError::Unauthorized {}); } if let Some(owner) = owner { config.owner = api.addr_canonicalize(&owner)?; } if let Some(quorum) = quorum { config.quorum = quorum; } if let Some(threshold) = threshold { config.threshold = threshold; } if let Some(voting_period) = voting_period { config.voting_period = voting_period; } if let Some(timelock_period) = timelock_period { config.timelock_period = timelock_period; } if let Some(proposal_deposit) = proposal_deposit { config.proposal_deposit = proposal_deposit; } if let Some(period) = snapshot_period { config.snapshot_period = period; } Config::save(deps.storage, &config)?; Ok(response) } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct LegacyState { pub poll_count: u64, pub total_share: Uint128, pub total_deposit: Uint128, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct LegacyPoll { pub id: u64, pub creator: CanonicalAddr, pub status: PollStatus, pub yes_votes: Uint128, pub no_votes: Uint128, pub end_height: u64, pub title: String, pub description: String, pub link: Option<String>, pub execute_data: Option<Vec<ExecuteData>>, pub deposit_amount: Uint128, pub total_balance_at_end_poll: Option<Uint128>, pub staked_amount: Option<Uint128>, } pub fn migrate(deps: DepsMut, _env: Env, msg: MigrateMsg) -> ExecuteResult { match msg { MigrateMsg::State {} => { let state: LegacyState = ReadonlySingleton::new(deps.storage, b"state") .load() .unwrap(); State::save( deps.storage, &State { poll_count: state.poll_count, total_share: state.total_share, total_deposit: state.total_deposit, total_airdrop_count: 0, airdrop_update_candidates: vec![], }, ) .unwrap(); let legacy_poll_store: ReadonlyBucket<LegacyPoll> = ReadonlyBucket::new(deps.storage, b"poll"); let legacy_polls: Vec<LegacyPoll> = legacy_poll_store .range(None, None, Order::Descending) .take(100) .map(|item| -> LegacyPoll { let (_, v) = item.unwrap(); v }) .collect(); for poll in 
legacy_polls.iter() { Poll::save( deps.storage, &poll.id, &Poll { id: poll.id, creator: poll.creator.clone(), status: poll.status.clone(), yes_votes: poll.yes_votes, no_votes: poll.no_votes, end_height: poll.end_height, title: poll.title.clone(), category: PollCategory::None, description: poll.description.clone(), link: poll.link.clone(), execute_data: poll.execute_data.clone(), deposit_amount: poll.deposit_amount, total_balance_at_end_poll: poll.total_balance_at_end_poll, staked_amount: poll.staked_amount, }, )?; } } MigrateMsg::General {} => {} } Ok(Response::default()) }
use cosmwasm_std::{ from_binary, to_binary, CanonicalAddr, CosmosMsg, Decimal, DepsMut, Env, MessageInfo, Order, Response, Uint128, WasmMsg, }; use cosmwasm_storage::{ReadonlyBucket, ReadonlySingleton}; use cw20::Cw20ReceiveMsg; use pylon_token::gov_msg::{ AirdropMsg, Cw20HookMsg, ExecuteMsg, InstantiateMsg, MigrateMsg, StakingMsg, }; use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use crate::error::ContractError; use crate::state::config::Config; use crate::state::poll::{ExecuteData, Poll, PollCategory, PollStatus}; use crate::state::state::State; pub type ExecuteResult = Result<Response, ContractError>; pub mod airdrop; pub mod poll; pub mod staking; pub fn instantiate( deps: DepsMut, _env: Env, info: MessageInfo, msg: InstantiateMsg, ) -> ExecuteResult { let response = Response::default().add_attribute("action", "instantiate"); let config = Config { pylon_token: deps.api.addr_canonicalize(msg.voting_token.as_str())?, owner: deps.api.addr_canonicalize(info.sender.as_str())?, quorum: msg.quorum, threshold: msg.threshold, voting_period: msg.voting_period, timelock_period: msg.timelock_period, expiration_period: 0u64, proposal_deposit: msg.proposal_deposit, snapshot_period: msg.snapshot_period, }; config.validate()?; let state = State { poll_count: 0, total_share: Uint128::zero(), total_deposit: Uint128::zero(), total_airdrop_count: 0, airdrop_update_candidates: vec![], }; Config::save(deps.storage, &config)?; State::save(deps.storage, &state)?; Ok(response) } pub fn receive( deps: DepsMut, env: Env, info: MessageInfo, cw20_msg: Cw20ReceiveMsg, ) -> ExecuteResult { let config = Config::load(deps.storage)?; if config.pylon_token != deps.api.addr_canonicalize(info.sender.as_str())? 
{ return Err(ContractError::Unauthorized {}); } match from_binary(&cw20_msg.msg) { Ok(Cw20HookMsg::Stake {}) => Ok(Response::new() .add_message(CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: env.contract.address.to_string(), msg: to_binary(&ExecuteMsg::Airdrop(AirdropMsg::Update { target: Some(cw20_msg.sender.to_string()), }))?, funds: vec![], })) .add_message(CosmosMsg::Wasm(WasmMsg::Execute { contract_addr: env.contract.address.to_string(), msg: to_binary(&ExecuteMsg::Staking(StakingMsg::StakeInternal { sender: cw20_msg.sender.to_string(), amount: cw20_msg.amount, }))?, funds: vec![], }))), Ok(Cw20HookMsg::CreatePoll { title, category, description, link, execute_msgs, }) => poll::create( deps, env, cw20_msg.sender, cw20_msg.amount, title, category.into(), description, link, execute_msgs, ), _ => Err(ContractError::DataShouldBeGiven {}), } } #[allow(clippy::too_many_arguments)] pub fn update_config( deps: DepsMut, info: MessageInfo, owner: Option<String>, quorum: Option<Decimal>, threshold: Option<Decimal>, voting_period: Option<u64>, timelock_period: Option<u64>, proposal_deposit: Option<Uint128>, snapshot_period: Option<u64>, ) -> ExecuteResult { let response = Response::new().add_attribute("action", "update_config"); let api = deps.api; let mut config = Config::load(deps.storage)?; if config.owner != api.addr_canonicalize(info.sender.as_str())? 
{ return Err(ContractError::Unauthorized {}); } if let Some(owner) = owner { config.owner = api.addr_canonicalize(&owner)?; } if let Some(quorum) = quorum { config.quorum = quorum; } if let Some(threshold) = threshold { config.threshold = threshold; } if let Some(voting_period) = voting_period { config.voting_period = voting_period; } if let Some(timelock_period) = timelock_period { config.timelock_period = timelock_period; } if let Some(proposal_deposit) = proposal_deposit { config.proposal_deposit = proposal_deposit; } if let Some(period) = snapshot_period { config.snapshot_period = period; } Config::save(deps.storage, &config)?; Ok(response) } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct LegacyState { pub poll_count: u64, pub total_share: Uint128, pub total_deposit: Uint128, } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct LegacyPoll { pub id: u64, pub creator: CanonicalAddr, pub status: PollStatus, pub yes_votes: Uint128, pub no_votes: Uint128, pub end_height: u64, pub title: String, pub description: String, pub link: Option<String>, pub execute_data: Option<Vec<ExecuteData>>, pub deposit_amount: Uint128, pub total_balance_at_end_poll: Option<Uint128>, pub staked_amount: Option<Uint128>, }
pub fn migrate(deps: DepsMut, _env: Env, msg: MigrateMsg) -> ExecuteResult { match msg { MigrateMsg::State {} => { let state: LegacyState = ReadonlySingleton::new(deps.storage, b"state") .load() .unwrap(); State::save( deps.storage, &State { poll_count: state.poll_count, total_share: state.total_share, total_deposit: state.total_deposit, total_airdrop_count: 0, airdrop_update_candidates: vec![], }, ) .unwrap(); let legacy_poll_store: ReadonlyBucket<LegacyPoll> = ReadonlyBucket::new(deps.storage, b"poll"); let legacy_polls: Vec<LegacyPoll> = legacy_poll_store .range(None, None, Order::Descending) .take(100) .map(|item| -> LegacyPoll { let (_, v) = item.unwrap(); v }) .collect(); for poll in legacy_polls.iter() { Poll::save( deps.storage, &poll.id, &Poll { id: poll.id, creator: poll.creator.clone(), status: poll.status.clone(), yes_votes: poll.yes_votes, no_votes: poll.no_votes, end_height: poll.end_height, title: poll.title.clone(), category: PollCategory::None, description: poll.description.clone(), link: poll.link.clone(), execute_data: poll.execute_data.clone(), deposit_amount: poll.deposit_amount, total_balance_at_end_poll: poll.total_balance_at_end_poll, staked_amount: poll.staked_amount, }, )?; } } MigrateMsg::General {} => {} } Ok(Response::default()) }
function_block-full_function
[ { "content": "pub fn claim(deps: DepsMut, env: Env, info: MessageInfo, sender: Option<String>) -> ExecuteResult {\n\n let sender = sender\n\n .map(|x| deps.api.addr_validate(x.as_str()).unwrap())\n\n .unwrap_or(info.sender);\n\n\n\n let state = State::load(deps.storage).unwrap();\n\n let token_manager =\n\n TokenManager::load(deps.storage, &deps.api.addr_canonicalize(sender.as_str())?)?;\n\n\n\n let airdrop_rewards =\n\n AirdropReward::load_range(deps.storage, &sender, None, Some(MAX_QUERY_LIMIT), None)?;\n\n\n\n let response = Response::new().add_message(CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: env.contract.address.to_string(),\n\n msg: to_binary(&ExecuteMsg::Airdrop(AirdropMsg::Update {\n\n target: Some(sender.to_string()),\n\n }))?,\n\n funds: vec![],\n\n }));\n\n\n", "file_path": "contracts/pylon/gov/src/executions/airdrop.rs", "rank": 0, "score": 665639.3745318111 }, { "content": "pub fn execute_messages(deps: DepsMut, env: Env, info: MessageInfo, poll_id: u64) -> ExecuteResult {\n\n let response = Response::new().add_attribute(\"action\", \"execute_poll\");\n\n\n\n if env.contract.address != info.sender {\n\n return Err(ContractError::Unauthorized {});\n\n }\n\n\n\n let mut poll = Poll::load(deps.storage, &poll_id)?;\n\n\n\n poll.status = PollStatus::Executed;\n\n\n\n Poll::deindex_status(deps.storage, &poll_id, &PollStatus::Passed);\n\n Poll::index_status(deps.storage, &poll_id, &PollStatus::Executed)?;\n\n Poll::save(deps.storage, &poll_id, &poll)?;\n\n\n\n let mut messages: Vec<CosmosMsg> = vec![];\n\n if let Some(all_msgs) = poll.execute_data {\n\n let mut msgs = all_msgs;\n\n msgs.sort();\n\n for msg in msgs {\n", "file_path": "contracts/pylon/gov/src/executions/poll.rs", "rank": 1, "score": 648087.5979261093 }, { "content": "pub fn unbond(deps: DepsMut, env: Env, info: MessageInfo, amount: Uint128) -> StdResult<Response> {\n\n let config: Config = read_config(deps.storage)?;\n\n let sender_addr_raw: CanonicalAddr = 
deps.api.addr_canonicalize(info.sender.as_str())?;\n\n\n\n let mut state: State = read_state(deps.storage)?;\n\n let mut staker_info: StakerInfo = read_staker_info(deps.storage, &sender_addr_raw)?;\n\n\n\n if staker_info.bond_amount < amount {\n\n return Err(StdError::generic_err(\"Cannot unbond more than bond amount\"));\n\n }\n\n\n\n // Compute global reward & staker reward\n\n compute_reward(&config, &mut state, env.block.height);\n\n compute_staker_reward(&state, &mut staker_info)?;\n\n\n\n // Decrease bond_amount\n\n decrease_bond_amount(&mut state, &mut staker_info, amount)?;\n\n\n\n // Store or remove updated rewards info\n\n // depends on the left pending reward and bond amount\n", "file_path": "contracts/pylon/staking/src/contract.rs", "rank": 2, "score": 638843.5849903917 }, { "content": "pub fn exec(deps: &mut MockDeps, env: Env, info: MessageInfo, poll_id: u64) -> ExecuteResult {\n\n snapshot(deps.as_mut(), env, info, poll_id)\n\n}\n\n\n", "file_path": "contracts/pylon/gov/src/testing/executions/poll_snapshot.rs", "rank": 3, "score": 633980.657391618 }, { "content": "#[allow(dead_code)]\n\npub fn exec(deps: &mut MockDeps, _env: Env, _info: MessageInfo, poll_id: u64) -> ExecuteResult {\n\n fail(deps.as_mut(), poll_id)\n\n}\n\n\n\n// use crate::entrypoints;\n\n// use cosmwasm_std::testing::{mock_env, mock_info, MOCK_CONTRACT_ADDR};\n\n// use cosmwasm_std::{\n\n// attr, coins, from_binary, to_binary, ContractResult, CosmosMsg, Reply, SubMsg, Uint128, WasmMsg,\n\n// };\n\n// use cw20::{Cw20ExecuteMsg, Cw20ReceiveMsg};\n\n// use pylon_token::common::OrderBy;\n\n// use pylon_token::gov_msg::{\n\n// Cw20HookMsg, ExecuteMsg, PollExecuteMsg, PollMsg, PollStatus, QueryMsg, VoteOption,\n\n// };\n\n// use pylon_token::gov_resp::{PollResponse, PollsResponse};\n\n//\n\n// use crate::error::ContractError;\n\n// use crate::testing::assert::{assert_create_poll_result, assert_stake_tokens_result};\n\n// use crate::testing::constants::*;\n\n// use 
crate::testing::message::create_poll_msg;\n", "file_path": "contracts/pylon/gov/src/testing/executions/poll_fail.rs", "rank": 4, "score": 623337.8950607749 }, { "content": "/// SnapshotPoll is used to take a snapshot of the staked amount for quorum calculation\n\npub fn snapshot(deps: DepsMut, env: Env, _info: MessageInfo, poll_id: u64) -> ExecuteResult {\n\n let response = Response::new().add_attribute(\"action\", \"snapshot_poll\");\n\n\n\n let config = Config::load(deps.storage)?;\n\n let state = State::load(deps.storage)?;\n\n let staked_amount = query_token_balance(\n\n &deps.querier,\n\n deps.api.addr_humanize(&config.pylon_token)?,\n\n env.contract.address,\n\n )?\n\n .checked_sub(state.total_deposit)?;\n\n\n\n let mut poll = Poll::load(deps.storage, &poll_id)?;\n\n if poll.status != PollStatus::InProgress {\n\n return Err(ContractError::PollNotInProgress {});\n\n }\n\n\n\n if poll.staked_amount.is_some() {\n\n return Err(ContractError::SnapshotAlreadyOccurred {});\n\n }\n", "file_path": "contracts/pylon/gov/src/executions/poll.rs", "rank": 5, "score": 619955.2741134202 }, { "content": "pub fn exec(deps: &mut MockDeps, _env: Env, info: MessageInfo, msg: Message) -> ExecuteResult {\n\n update_config(\n\n deps.as_mut(),\n\n info,\n\n msg.owner,\n\n msg.quorum,\n\n msg.threshold,\n\n msg.voting_period,\n\n msg.timelock_period,\n\n msg.proposal_deposit,\n\n msg.snapshot_period,\n\n )\n\n}\n\n\n", "file_path": "contracts/pylon/gov/src/testing/executions/update_config.rs", "rank": 6, "score": 607387.8074645757 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::Receive(msg) => receive_cw20(deps, env, info, msg),\n\n ExecuteMsg::Unbond { amount } => unbond(deps, env, info, amount),\n\n ExecuteMsg::Withdraw {} => withdraw(deps, env, info),\n\n ExecuteMsg::MigrateStaking {\n\n new_staking_contract,\n\n } => 
migrate_staking(deps, env, info, new_staking_contract),\n\n }\n\n}\n\n\n", "file_path": "contracts/pylon/staking/src/contract.rs", "rank": 7, "score": 605167.6705066136 }, { "content": "#[allow(dead_code)]\n\npub fn exec(deps: &mut MockDeps, env: Env, _info: MessageInfo, poll_id: u64) -> ExecuteResult {\n\n execute(deps.as_mut(), env, poll_id)\n\n}\n\n\n", "file_path": "contracts/pylon/gov/src/testing/executions/poll_execute.rs", "rank": 8, "score": 602830.340509129 }, { "content": "pub fn exec(deps: &mut MockDeps, env: Env, _info: MessageInfo, poll_id: u64) -> ExecuteResult {\n\n end(deps.as_mut(), env, poll_id)\n\n}\n\n\n", "file_path": "contracts/pylon/gov/src/testing/executions/poll_end.rs", "rank": 9, "score": 602830.340509129 }, { "content": "pub fn default(deps: &mut MockDeps, end_height: u64, poll_id: u64) -> (Env, MessageInfo, Response) {\n\n let env = mock_env_height(end_height, 0);\n\n let info = mock_info(TEST_CREATOR, &[]);\n\n\n\n let response = exec(deps, env.clone(), info.clone(), poll_id).unwrap();\n\n\n\n (env, info, response)\n\n}\n\n\n", "file_path": "contracts/pylon/gov/src/testing/executions/poll_end.rs", "rank": 10, "score": 591573.2866773884 }, { "content": "pub fn bond(deps: DepsMut, env: Env, sender_addr: Addr, amount: Uint128) -> StdResult<Response> {\n\n let sender_addr_raw: CanonicalAddr = deps.api.addr_canonicalize(sender_addr.as_str())?;\n\n\n\n let config: Config = read_config(deps.storage)?;\n\n let mut state: State = read_state(deps.storage)?;\n\n let mut staker_info: StakerInfo = read_staker_info(deps.storage, &sender_addr_raw)?;\n\n\n\n // Compute global reward & staker reward\n\n compute_reward(&config, &mut state, env.block.height);\n\n compute_staker_reward(&state, &mut staker_info)?;\n\n\n\n // Increase bond_amount\n\n increase_bond_amount(&mut state, &mut staker_info, amount);\n\n\n\n // Store updated state with staker's staker_info\n\n store_staker_info(deps.storage, &sender_addr_raw, &staker_info)?;\n\n 
store_state(deps.storage, &state)?;\n\n\n\n Ok(Response::new().add_attributes(vec![\n\n (\"action\", \"bond\"),\n\n (\"owner\", sender_addr.as_str()),\n\n (\"amount\", amount.to_string().as_str()),\n\n ]))\n\n}\n\n\n", "file_path": "contracts/pylon/staking/src/contract.rs", "rank": 12, "score": 565462.8628373243 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::UpdateConfig { reward_factor } => update_config(deps, info, reward_factor),\n\n ExecuteMsg::Sweep { denom } => sweep(deps, env, denom),\n\n }\n\n}\n\n\n", "file_path": "contracts/pylon/collector/src/contract.rs", "rank": 13, "score": 565142.2896750415 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(deps: DepsMut, env: Env, info: MessageInfo, msg: ExecuteMsg) -> StdResult<Response> {\n\n match msg {\n\n ExecuteMsg::Claim {} => claim(deps, env, info),\n\n _ => {\n\n assert_owner_privilege(deps.storage, deps.api, info.sender)?;\n\n match msg {\n\n ExecuteMsg::UpdateConfig {\n\n owner,\n\n pylon_token,\n\n genesis_time,\n\n } => update_config(deps, owner, pylon_token, genesis_time),\n\n ExecuteMsg::RegisterVestingAccounts { vesting_accounts } => {\n\n register_vesting_accounts(deps, vesting_accounts)\n\n }\n\n _ => panic!(\"DO NOT ENTER HERE\"),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/pylon/vesting/src/contract.rs", "rank": 14, "score": 565142.2896750415 }, { "content": "pub fn end(deps: DepsMut, env: Env, poll_id: u64) -> ExecuteResult {\n\n let response = Response::new().add_attribute(\"action\", \"end_poll\");\n\n\n\n let mut poll = Poll::load(deps.storage, &poll_id)?;\n\n if poll.status != PollStatus::InProgress {\n\n return Err(ContractError::PollNotInProgress {});\n\n }\n\n\n\n if poll.end_height > env.block.height {\n\n return Err(ContractError::PollVotingPeriod {});\n\n }\n\n\n\n let no = 
poll.no_votes.u128();\n\n let yes = poll.yes_votes.u128();\n\n let tallied_weight = yes + no;\n\n\n\n let mut poll_status = PollStatus::Rejected;\n\n let mut rejected_reason = \"\";\n\n let mut passed = false;\n\n\n", "file_path": "contracts/pylon/gov/src/executions/poll.rs", "rank": 15, "score": 557582.6781753842 }, { "content": "pub fn execute(deps: DepsMut, env: Env, poll_id: u64) -> ExecuteResult {\n\n let config = Config::load(deps.storage)?;\n\n let poll = Poll::load(deps.storage, &poll_id)?;\n\n\n\n if poll.status != PollStatus::Passed {\n\n return Err(ContractError::PollNotPassed {});\n\n }\n\n\n\n if poll.end_height + config.timelock_period > env.block.height {\n\n return Err(ContractError::TimelockNotExpired {});\n\n }\n\n\n\n Poll::save_temp_id(deps.storage, &poll.id)?;\n\n\n\n Ok(Response::new().add_submessage(SubMsg::reply_on_error(\n\n CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: env.contract.address.to_string(),\n\n msg: to_binary(&ExecuteMsg::Poll(PollMsg::ExecuteMsgs { poll_id }))?,\n\n funds: vec![],\n\n }),\n\n POLL_EXECUTE_REPLY_ID,\n\n )))\n\n}\n\n\n\n/*\n\n * Execute a msgs of a poll\n\n */\n", "file_path": "contracts/pylon/gov/src/executions/poll.rs", "rank": 16, "score": 557582.6781753842 }, { "content": "pub fn exec(deps: &mut MockDeps, env: Env, _info: MessageInfo, msg: Message) -> ExecuteResult {\n\n create(\n\n deps.as_mut(),\n\n env,\n\n msg.proposer,\n\n msg.deposit,\n\n msg.title,\n\n msg.category,\n\n msg.description,\n\n msg.link,\n\n msg.execute_msg,\n\n )\n\n}\n\n\n", "file_path": "contracts/pylon/gov/src/testing/executions/poll_create.rs", "rank": 17, "score": 555370.5514988573 }, { "content": "pub fn default(deps: &mut MockDeps, token: &str, amount: u128) -> (Env, MessageInfo, Response) {\n\n let env = mock_env();\n\n let info = mock_info(TEST_CREATOR, &[]);\n\n\n\n let response = exec(\n\n deps,\n\n env.clone(),\n\n info.clone(),\n\n env.block.time.seconds(),\n\n 86400,\n\n token.to_string(),\n\n 
Uint128::from(amount),\n\n )\n\n .unwrap();\n\n\n\n (env, info, response)\n\n}\n\n\n", "file_path": "contracts/pylon/gov/src/testing/executions/airdrop_instantiate.rs", "rank": 18, "score": 534820.8636583718 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> Result<Response, ContractError> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/pylon/airdrop/src/contract.rs", "rank": 19, "score": 523627.6646923208 }, { "content": "pub fn deposit_amount(deps: Deps, _env: Env, owner: String) -> StdResult<Binary> {\n\n let config: config::Config = config::read(deps.storage).unwrap();\n\n\n\n to_binary(&resp::DepositAmountResponse {\n\n amount: token::balance_of(\n\n deps,\n\n deps.api\n\n .addr_humanize(&config.dp_token)\n\n .unwrap()\n\n .to_string(),\n\n owner,\n\n )?,\n\n })\n\n}\n\n\n", "file_path": "contracts/core/pool/src/handler/query.rs", "rank": 20, "score": 523232.42908901663 }, { "content": "pub fn deposit_amount(deps: Deps, _env: Env, owner: String) -> StdResult<Binary> {\n\n let config = config::read(deps.storage)?;\n\n\n\n to_binary(&resp::DepositAmountResponse {\n\n amount: token::balance_of(deps, config.dp_token, owner)?,\n\n })\n\n}\n\n\n", "file_path": "contracts/core/pool-v2/src/handler/query.rs", "rank": 21, "score": 519910.85951998376 }, { "content": "pub fn deposit(deps: DepsMut, _env: Env, info: MessageInfo) -> Result<Response, ContractError> {\n\n let config = config::read(deps.storage).unwrap();\n\n\n\n // check deposit\n\n let received: Uint256 = info\n\n .funds\n\n .iter()\n\n .find(|c| c.denom == config.stable_denom)\n\n .map(|c| Uint256::from(c.amount))\n\n .unwrap_or_else(Uint256::zero);\n\n\n\n if received.is_zero() {\n\n return Err(ContractError::NotAllowZeroAmount {});\n\n }\n\n if info.funds.len() > 1 {\n\n return Err(ContractError::NotAllowOtherDenoms {\n\n denom: config.stable_denom,\n\n });\n\n }\n\n\n", "file_path": 
"contracts/core/pool/src/handler/core.rs", "rank": 22, "score": 512239.96690246946 }, { "content": "fn increase_bond_amount(state: &mut State, staker_info: &mut StakerInfo, amount: Uint128) {\n\n state.total_bond_amount += amount;\n\n staker_info.bond_amount += amount;\n\n}\n\n\n", "file_path": "contracts/pylon/staking/src/contract.rs", "rank": 23, "score": 511532.1693884275 }, { "content": "// withdraw rewards to executor\n\npub fn withdraw(deps: DepsMut, env: Env, info: MessageInfo) -> StdResult<Response> {\n\n let sender_addr_raw = deps.api.addr_canonicalize(info.sender.as_str())?;\n\n\n\n let config: Config = read_config(deps.storage)?;\n\n let mut state: State = read_state(deps.storage)?;\n\n let mut staker_info = read_staker_info(deps.storage, &sender_addr_raw)?;\n\n\n\n // Compute global reward & staker reward\n\n compute_reward(&config, &mut state, env.block.height);\n\n compute_staker_reward(&state, &mut staker_info)?;\n\n\n\n let amount = staker_info.pending_reward;\n\n staker_info.pending_reward = Uint128::zero();\n\n\n\n // Store or remove updated rewards info\n\n // depends on the left pending reward and bond amount\n\n if staker_info.bond_amount.is_zero() {\n\n remove_staker_info(deps.storage, &sender_addr_raw);\n\n } else {\n\n store_staker_info(deps.storage, &sender_addr_raw, &staker_info)?;\n", "file_path": "contracts/pylon/staking/src/contract.rs", "rank": 24, "score": 510669.2325496596 }, { "content": "pub fn deposit(deps: DepsMut, _env: Env, info: MessageInfo) -> Result<Response, ContractError> {\n\n let config = config::read(deps.storage).unwrap();\n\n\n\n // check deposit\n\n let received: Uint256 = info\n\n .funds\n\n .iter()\n\n .find(|c| c.denom == config.input_denom)\n\n .map(|c| Uint256::from(c.amount))\n\n .unwrap_or_else(Uint256::zero);\n\n\n\n if received.is_zero() {\n\n return Err(ContractError::NotAllowZeroAmount {});\n\n }\n\n if info.funds.len() > 1 {\n\n return Err(ContractError::NotAllowOtherDenoms {\n\n denom: 
config.input_denom,\n\n });\n\n }\n\n\n", "file_path": "contracts/core/pool-v2/src/handler/core.rs", "rank": 25, "score": 509641.4737335164 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> StdResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/pylon/staking/src/contract.rs", "rank": 26, "score": 500034.5485929446 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(deps: DepsMut, env: Env, msg: MigrateMsg) -> Result<Response, ContractError> {\n\n executions::migrate(deps, env, msg)\n\n}\n", "file_path": "contracts/pylon/gov/src/entrypoints.rs", "rank": 27, "score": 493107.4836308657 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(deps: DepsMut, env: Env, msg: MigrateMsg) -> Result<Response, ContractError> {\n\n match msg {\n\n MigrateMsg::Refund {} => MigrateHandler::refund(deps, env),\n\n MigrateMsg::General {} => Ok(Response::default()),\n\n }\n\n}\n", "file_path": "contracts/gateway/swap/src/contract.rs", "rank": 28, "score": 493107.4836308657 }, { "content": "pub fn earn(deps: DepsMut, env: Env, info: MessageInfo) -> Result<Response, ContractError> {\n\n // calculate deduct(total_aust_amount * exchange_rate) - (total_dp_balance)\n\n let config = config::read(deps.storage).unwrap();\n\n if config.beneficiary != deps.api.addr_canonicalize(info.sender.as_str()).unwrap() {\n\n return Err(ContractError::Unauthorized {\n\n action: \"earn\".to_string(),\n\n expected: deps\n\n .api\n\n .addr_humanize(&config.beneficiary)\n\n .unwrap()\n\n .to_string(),\n\n actual: info.sender.to_string(),\n\n });\n\n }\n\n\n\n // assets\n\n let epoch_state = anchor::epoch_state(deps.as_ref(), &config.moneymarket)?;\n\n let atoken_balance = token::balance_of(\n\n deps.as_ref(),\n\n deps.api.addr_humanize(&config.atoken).unwrap().to_string(),\n", "file_path": 
"contracts/core/pool/src/handler/core.rs", "rank": 29, "score": 490752.2827897478 }, { "content": "pub fn claim(_deps: DepsMut, env: Env, info: MessageInfo) -> Result<Response, ContractError> {\n\n Ok(Response::new()\n\n .add_message(CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: env.contract.address.to_string(),\n\n msg: to_binary(&ExecuteMsg::Update {\n\n target: Option::Some(info.sender.to_string()),\n\n })?,\n\n funds: vec![],\n\n }))\n\n .add_message(CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: env.contract.address.to_string(),\n\n msg: to_binary(&ExecuteMsg::ClaimInternal {\n\n sender: info.sender.to_string(),\n\n })?,\n\n\n\n funds: vec![],\n\n })))\n\n}\n", "file_path": "contracts/gateway/pool/src/handler/router.rs", "rank": 30, "score": 490752.2827897478 }, { "content": "pub fn deposit(deps: DepsMut, env: Env, info: MessageInfo) -> Result<Response, ContractError> {\n\n let config = config::read(deps.storage).load()?;\n\n let state = state::read(deps.storage).load()?;\n\n\n\n if env.block.time.seconds() < config.start {\n\n return Err(ContractError::SwapNotStarted {\n\n start: config.start,\n\n });\n\n }\n\n if config.finish < env.block.time.seconds() {\n\n return Err(ContractError::SwapFinished {\n\n finish: config.finish,\n\n });\n\n }\n\n\n\n // 1:1\n\n let swapped_in: Uint256 = info\n\n .funds\n\n .iter()\n\n .find(|c| c.denom == state.x_denom)\n", "file_path": "contracts/gateway/swap/src/handler/execute.rs", "rank": 31, "score": 490752.2827897478 }, { "content": "pub fn earn(deps: DepsMut, env: Env, info: MessageInfo) -> Result<Response, ContractError> {\n\n let state = state::read(deps.storage).load().unwrap();\n\n let config = config::read(deps.storage).load().unwrap();\n\n if config.beneficiary != info.sender {\n\n return Err(ContractError::Unauthorized {\n\n action: \"earn\".to_string(),\n\n expected: config.beneficiary,\n\n actual: info.sender.to_string(),\n\n });\n\n }\n\n\n\n if env.block.time.seconds() < config.finish + 
EARN_LOCK_PERIOD {\n\n return Err(ContractError::NotAllowEarnBeforeLockPeriod {});\n\n }\n\n\n\n Ok(Response::new()\n\n .add_message(CosmosMsg::Bank(BankMsg::Send {\n\n to_address: config.beneficiary,\n\n amount: vec![deduct_tax(\n\n deps.as_ref(),\n\n deps.querier\n\n .query_balance(env.contract.address, state.x_denom)\n\n .unwrap(),\n\n )?],\n\n }))\n\n .add_attribute(\"action\", \"earn\")\n\n .add_attribute(\"sender\", info.sender.to_string()))\n\n}\n", "file_path": "contracts/gateway/swap/src/handler/execute.rs", "rank": 32, "score": 490752.2827897478 }, { "content": "pub fn claim(deps: DepsMut, env: Env, info: MessageInfo) -> Result<Response, ContractError> {\n\n let sender = &deps.api.addr_canonicalize(info.sender.as_str()).unwrap();\n\n let mut user = user::read(deps.storage, sender).unwrap();\n\n let mut state = state::read(deps.storage).load().unwrap();\n\n\n\n let claimable_token = strategy::claimable_token_of(\n\n deps.as_ref(),\n\n env.block.time.seconds(),\n\n info.sender.to_string(),\n\n )?;\n\n\n\n user.swapped_out_claimed += claimable_token;\n\n state.total_claimed += claimable_token;\n\n\n\n user::store(deps.storage, sender, &user)?;\n\n state::store(deps.storage).save(&state)?;\n\n\n\n Ok(Response::new()\n\n .add_message(CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: state.y_addr,\n", "file_path": "contracts/gateway/swap/src/handler/execute.rs", "rank": 33, "score": 490752.2827897478 }, { "content": "pub fn earn(deps: DepsMut, env: Env, info: MessageInfo) -> Result<Response, ContractError> {\n\n // calculate deduct(total_aust_amount * exchange_rate) - (total_dp_balance)\n\n let config = config::read(deps.storage).unwrap();\n\n if config.beneficiary.ne(&info.sender.to_string()) {\n\n return Err(ContractError::Unauthorized {\n\n action: \"earn\".to_string(),\n\n expected: config.beneficiary,\n\n actual: info.sender.to_string(),\n\n });\n\n }\n\n\n\n let adapter_config = adapter::config(deps.as_ref(), config.yield_adapter.clone())?;\n\n let 
factory_config = factory::config(deps.as_ref(), config.factory.clone())?;\n\n let reward = pool::claimable_rewards(deps.as_ref(), env)?;\n\n let exchange_rate = adapter::exchange_rate(\n\n deps.as_ref(),\n\n config.yield_adapter.clone(),\n\n config.input_denom.clone(),\n\n )?;\n\n\n", "file_path": "contracts/core/pool-v2/src/handler/core.rs", "rank": 34, "score": 488153.7896207947 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> Result<Response, ContractError> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/gateway/pool/src/contract.rs", "rank": 35, "score": 482659.85066333995 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> Result<Response, ContractError> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/core/pool/src/contract.rs", "rank": 36, "score": 482659.85066333995 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> Result<Response, ContractError> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/core/factory/src/contract.rs", "rank": 37, "score": 482659.85066333995 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> Result<Response, ContractError> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/gateway/factory/src/contract.rs", "rank": 38, "score": 482659.85066333995 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> Result<Response, ContractError> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/mocks/market/src/contract.rs", "rank": 39, "score": 
482659.8506633399 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> Result<Response, ContractError> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/core/pool-v2/src/contract.rs", "rank": 40, "score": 480010.60223360965 }, { "content": "pub fn fail(deps: DepsMut, poll_id: u64) -> ExecuteResult {\n\n let response = Response::new().add_attribute(\"action\", \"fail_poll\");\n\n\n\n let mut poll = Poll::load(deps.storage, &poll_id)?;\n\n\n\n poll.status = PollStatus::Failed;\n\n\n\n Poll::deindex_status(deps.storage, &poll_id, &PollStatus::Passed);\n\n Poll::index_status(deps.storage, &poll_id, &PollStatus::Failed)?;\n\n Poll::save(deps.storage, &poll_id, &poll)?;\n\n\n\n Ok(response.add_attributes(vec![(\"poll_id\", poll_id.to_string().as_str())]))\n\n}\n\n\n\n/*\n\n * Ends a poll.\n\n */\n", "file_path": "contracts/pylon/gov/src/executions/poll.rs", "rank": 41, "score": 478338.2308256678 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn reply(deps: DepsMut, _env: Env, msg: Reply) -> Result<Response, ContractError> {\n\n match msg.id {\n\n POLL_EXECUTE_REPLY_ID => {\n\n let poll_id: u64 = Poll::load_temp_id(deps.storage)?;\n\n executions::poll::fail(deps, poll_id)\n\n }\n\n _ => Err(ContractError::InvalidReplyId {}),\n\n }\n\n}\n\n\n", "file_path": "contracts/pylon/gov/src/entrypoints.rs", "rank": 42, "score": 474176.71844436333 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn reply(deps: DepsMut, _env: Env, msg: Reply) -> Result<Response, ContractError> {\n\n match msg.id {\n\n REPLY_INIT_OUTPUT_TOKEN => {\n\n // output token\n\n // get new token's contract address\n\n let res: MsgInstantiateContractResponse = Message::parse_from_bytes(\n\n msg.result.unwrap().data.unwrap().as_slice(),\n\n )\n\n .map_err(|_| {\n\n ContractError::Std(StdError::parse_err(\n\n 
\"MsgInstantiateContractResponse\",\n\n \"failed to parse data\",\n\n ))\n\n })?;\n\n let token_addr = Addr::unchecked(res.get_contract_address());\n\n\n\n state::config_w(deps.storage).update(|mut config| {\n\n if !config.output_token.is_empty() {\n\n return Err(ContractError::Unauthorized {\n\n action: \"reply_init_output_token\".to_string(),\n", "file_path": "contracts/mocks/market/src/contract.rs", "rank": 43, "score": 474176.71844436345 }, { "content": "pub fn claim(deps: DepsMut, env: Env, info: MessageInfo) -> StdResult<Response> {\n\n let current_time = env.block.time.nanos() / 1_000_000_000;\n\n let address = info.sender;\n\n let address_raw = deps.api.addr_canonicalize(&address.to_string())?;\n\n\n\n let config: Config = read_config(deps.storage)?;\n\n let mut vesting_info: VestingInfo = read_vesting_info(deps.storage, &address_raw)?;\n\n\n\n let claim_amount = compute_claim_amount(current_time, &vesting_info);\n\n let messages: Vec<CosmosMsg> = if claim_amount.is_zero() {\n\n vec![]\n\n } else {\n\n vec![CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: deps.api.addr_humanize(&config.pylon_token)?.to_string(),\n\n funds: vec![],\n\n msg: to_binary(&Cw20ExecuteMsg::Transfer {\n\n recipient: address.to_string(),\n\n amount: claim_amount,\n\n })?,\n\n })]\n", "file_path": "contracts/pylon/vesting/src/contract.rs", "rank": 44, "score": 469159.0675283016 }, { "content": "pub fn redeem(deps: Deps, adapter: String, amount: Uint256) -> StdResult<Vec<CosmosMsg>> {\n\n deps.querier\n\n .query::<Vec<CosmosMsg>>(&QueryRequest::Wasm(WasmQuery::Smart {\n\n contract_addr: adapter,\n\n msg: to_binary(&AdapterQueryMsg::Redeem { amount })?,\n\n }))\n\n}\n", "file_path": "contracts/core/pool-v2/src/querier/adapter.rs", "rank": 45, "score": 468321.0639740332 }, { "content": "pub fn deposit(deps: Deps, adapter: String, amount: Uint256) -> StdResult<Vec<CosmosMsg>> {\n\n deps.querier\n\n .query::<Vec<CosmosMsg>>(&QueryRequest::Wasm(WasmQuery::Smart {\n\n contract_addr: 
adapter,\n\n msg: to_binary(&AdapterQueryMsg::Deposit { amount })?,\n\n }))\n\n}\n\n\n", "file_path": "contracts/core/pool-v2/src/querier/adapter.rs", "rank": 46, "score": 468321.0639740332 }, { "content": "// compute distributed rewards and update global reward index\n\nfn compute_reward(config: &Config, state: &mut State, block_height: u64) {\n\n if state.total_bond_amount.is_zero() {\n\n state.last_distributed = block_height;\n\n return;\n\n }\n\n\n\n let mut distributed_amount: Uint128 = Uint128::zero();\n\n for s in config.distribution_schedule.iter() {\n\n if s.0 > block_height || s.1 < state.last_distributed {\n\n continue;\n\n }\n\n\n\n // min(s.1, block_height) - max(s.0, last_distributed)\n\n let passed_blocks =\n\n std::cmp::min(s.1, block_height) - std::cmp::max(s.0, state.last_distributed);\n\n\n\n let num_blocks = s.1 - s.0;\n\n let distribution_amount_per_block: Decimal = Decimal::from_ratio(s.2, num_blocks);\n\n distributed_amount += distribution_amount_per_block * Uint128::from(passed_blocks as u128);\n\n }\n\n\n\n state.last_distributed = block_height;\n\n state.global_reward_index = state.global_reward_index\n\n + Decimal::from_ratio(distributed_amount, state.total_bond_amount);\n\n}\n\n\n", "file_path": "contracts/pylon/staking/src/contract.rs", "rank": 47, "score": 459901.23083754303 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> StdResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/pylon/collector/src/contract.rs", "rank": 48, "score": 458524.38357158663 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> StdResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/pylon/community/src/contract.rs", "rank": 49, "score": 458524.38357158663 }, { "content": "pub fn default(deps: &mut MockDeps) -> (Env, MessageInfo, Response) 
{\n\n let env = mock_env();\n\n let info = mock_info(TEST_CREATOR, &[]);\n\n\n\n let response = exec(deps, env.clone(), info.clone(), default_msg()).unwrap();\n\n\n\n (env, info, response)\n\n}\n\n\n", "file_path": "contracts/pylon/gov/src/testing/executions/instantiate.rs", "rank": 50, "score": 458203.4701151878 }, { "content": "pub fn default(deps: &mut MockDeps) -> (Env, MessageInfo, Response) {\n\n let env = mock_env();\n\n let info = mock_info(TEST_CREATOR, &[]);\n\n\n\n let response = exec(deps, env.clone(), info.clone(), default_msg()).unwrap();\n\n\n\n (env, info, response)\n\n}\n\n\n", "file_path": "contracts/pylon/gov/src/testing/executions/poll_create.rs", "rank": 51, "score": 454161.7028953115 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> StdResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/gateway/strategies/cap-min/src/contract.rs", "rank": 52, "score": 453171.3095700882 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> StdResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/gateway/strategies/cap-fixed/src/contract.rs", "rank": 53, "score": 453171.3095700882 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> StdResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/gateway/strategies/cap-staged/src/contract.rs", "rank": 54, "score": 453171.3095700882 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn reply(deps: DepsMut, env: Env, msg: Reply) -> Result<Response, ContractError> {\n\n match msg.id {\n\n INSTANTIATE_REPLY_ID => {\n\n // get new token's contract address\n\n let res: 
MsgInstantiateContractResponse = Message::parse_from_bytes(\n\n msg.result.unwrap().data.unwrap().as_slice(),\n\n )\n\n .map_err(|_| {\n\n ContractError::Std(StdError::parse_err(\n\n \"MsgInstantiateContractResponse\",\n\n \"failed to parse data\",\n\n ))\n\n })?;\n\n let token_addr = Addr::unchecked(res.get_contract_address());\n\n\n\n CoreHandler::register_dp_token(deps, env, token_addr)\n\n }\n\n _ => Err(ContractError::InvalidReplyId { id: msg.id }),\n\n }\n\n}\n\n\n", "file_path": "contracts/core/pool/src/contract.rs", "rank": 55, "score": 452689.03433164174 }, { "content": "/// remove staker_info of the given owner\n\npub fn remove_staker_info(storage: &mut dyn Storage, owner: &CanonicalAddr) {\n\n Bucket::<StakerInfo>::new(storage, PREFIX_REWARD).remove(owner.as_slice())\n\n}\n\n\n", "file_path": "contracts/pylon/staking/src/state.rs", "rank": 56, "score": 451905.705996803 }, { "content": "pub fn balance_of(deps: Deps, _env: Env, owner: String) -> StdResult<Binary> {\n\n let user = user::read(\n\n deps.storage,\n\n &deps.api.addr_canonicalize(owner.as_str()).unwrap(),\n\n )\n\n .unwrap();\n\n\n\n to_binary(&resp::BalanceOfResponse {\n\n amount: user.amount,\n\n })\n\n}\n\n\n", "file_path": "contracts/gateway/pool/src/handler/query.rs", "rank": 57, "score": 451069.98221127945 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> Result<Response, StdError> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/mocks/adapter/src/contract.rs", "rank": 58, "score": 450670.60424200416 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn reply(deps: DepsMut, env: Env, msg: Reply) -> Result<Response, ContractError> {\n\n match msg.id {\n\n 1 => {\n\n // get new token's contract address\n\n let res: MsgInstantiateContractResponse = Message::parse_from_bytes(\n\n 
msg.result.unwrap().data.unwrap().as_slice(),\n\n )\n\n .map_err(|_| {\n\n ContractError::Std(StdError::parse_err(\n\n \"MsgInstantiateContractResponse\",\n\n \"failed to parse data\",\n\n ))\n\n })?;\n\n let token_addr = Addr::unchecked(res.get_contract_address());\n\n\n\n register_dp_token(deps, env, token_addr)\n\n }\n\n _ => Err(ContractError::InvalidReplyId { id: msg.id }),\n\n }\n\n}\n\n\n", "file_path": "contracts/core/pool-v2/src/contract.rs", "rank": 59, "score": 449886.6319450628 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(_deps: DepsMut, _env: Env, _msg: MigrateMsg) -> Result<Response, StdError> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/core/adapters/anchor/src/contract.rs", "rank": 60, "score": 448021.35581227386 }, { "content": "pub fn initialize(deps: &mut OwnedDeps<MockStorage, MockApi, MockQuerier>) -> (Env, MessageInfo) {\n\n let env = mock_env();\n\n let info = mock_info(TEST_OWNER, &[]);\n\n let msg = init_msg();\n\n contract::instantiate(deps.as_mut(), env.clone(), info.clone(), msg)\n\n .expect(\"testing: contract initialized\");\n\n\n\n (env, info)\n\n}\n\n\n", "file_path": "contracts/gateway/pool/src/testing/utils.rs", "rank": 61, "score": 435673.07982600527 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::State { block_height } => to_binary(&query_state(deps, block_height)?),\n\n QueryMsg::StakerInfo {\n\n staker,\n\n block_height,\n\n } => to_binary(&query_staker_info(deps, staker, block_height)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/pylon/staking/src/contract.rs", "rank": 62, "score": 428399.17671445815 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg 
{\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n QueryMsg::MerkleRoot { stage } => to_binary(&query_merkle_root(deps, stage)?),\n\n QueryMsg::LatestStage {} => to_binary(&query_latest_stage(deps)?),\n\n QueryMsg::IsClaimed { stage, address } => {\n\n to_binary(&query_is_claimed(deps, stage, address)?)\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/pylon/airdrop/src/contract.rs", "rank": 63, "score": 428362.95946773887 }, { "content": "/// Sweep\n\n/// Anyone can execute sweep function to swap\n\n/// asset token => ANC token and distribute\n\n/// result ANC token to gov contract\n\npub fn sweep(deps: DepsMut, env: Env, denom: String) -> StdResult<Response> {\n\n let config: Config = read_config(deps.storage)?;\n\n let pylon_token = deps.api.addr_humanize(&config.pylon_token)?;\n\n let terraswap_factory_addr = deps.api.addr_humanize(&config.terraswap_factory)?;\n\n\n\n let pair_info: PairInfo = query_pair_info(\n\n &deps.querier,\n\n terraswap_factory_addr,\n\n &[\n\n AssetInfo::NativeToken {\n\n denom: denom.to_string(),\n\n },\n\n AssetInfo::Token {\n\n contract_addr: pylon_token.to_string(),\n\n },\n\n ],\n\n )?;\n\n\n\n let amount = query_balance(&deps.querier, env.contract.address, denom.to_string())?;\n\n\n", "file_path": "contracts/pylon/collector/src/contract.rs", "rank": 64, "score": 427886.8632774609 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn reply(deps: DepsMut, env: Env, msg: Reply) -> StdResult<Response> {\n\n if msg.id == SWEEP_REPLY_ID {\n\n // send tokens on successful callback\n\n return distribute(deps, env);\n\n }\n\n\n\n Err(StdError::generic_err(\"not supported reply\"))\n\n}\n\n\n", "file_path": "contracts/pylon/collector/src/contract.rs", "rank": 65, "score": 427657.9700310567 }, { "content": "pub fn claimable_reward(deps: Deps, env: Env, owner: String) -> StdResult<Binary> {\n\n let config = config::read(deps.storage).unwrap();\n\n let reward = reward::read(deps.storage).unwrap();\n\n let 
user = user::read(\n\n deps.storage,\n\n &deps.api.addr_canonicalize(owner.as_str()).unwrap(),\n\n )\n\n .unwrap();\n\n\n\n to_binary(&resp::ClaimableRewardResponse {\n\n amount: util_staking::calculate_rewards(\n\n deps,\n\n &reward,\n\n &user,\n\n config\n\n .distribution_config\n\n .applicable_reward_time(env.block.time.seconds()),\n\n )?,\n\n })\n\n}\n\n\n", "file_path": "contracts/gateway/pool/src/handler/query.rs", "rank": 66, "score": 425656.1067830338 }, { "content": "pub fn remove(storage: &mut dyn Storage, id: u64) {\n\n let key = &id.to_be_bytes()[..];\n\n let mut pool_bucket: Bucket<Pool> = bucket(storage, PREFIX_POOL);\n\n\n\n pool_bucket.remove(key)\n\n}\n\n\n", "file_path": "contracts/core/factory/src/state/pool.rs", "rank": 67, "score": 416002.9980135161 }, { "content": "pub fn refund(deps: DepsMut, _: Env) -> Result<Response, ContractError> {\n\n let config = config::read(deps.storage).load().unwrap();\n\n let state = state::read(deps.storage).load().unwrap();\n\n\n\n Singleton::new(deps.storage, KEY_CONFIG)\n\n .save(&NewRefundConfig {\n\n manager: config.owner.clone(),\n\n swap_price: config.price,\n\n refund_denom: state.x_denom,\n\n })\n\n .unwrap();\n\n\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/gateway/swap/src/handler/migrate.rs", "rank": 68, "score": 405577.51528673037 }, { "content": "pub fn store_config(storage: &mut dyn Storage, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/pylon/staking/src/state.rs", "rank": 69, "score": 395731.9477262323 }, { "content": "pub fn store_config(storage: &mut dyn Storage, config: &Config) -> StdResult<()> {\n\n singleton(storage, KEY_CONFIG).save(config)\n\n}\n\n\n", "file_path": "contracts/pylon/airdrop/src/state.rs", "rank": 70, "score": 395695.2712841542 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> Result<Binary, ContractError> 
{\n\n match msg {\n\n QueryMsg::ApiVersion {} => queries::query_api_version(deps),\n\n QueryMsg::Config {} => queries::config::query_config(deps),\n\n QueryMsg::State {} => queries::state::query_state(deps),\n\n QueryMsg::Staker { address } => queries::bank::query_staker(deps, env, address),\n\n QueryMsg::Stakers {\n\n start_after,\n\n limit,\n\n order,\n\n } => queries::bank::query_stakers(deps, env, start_after, limit, order),\n\n QueryMsg::Airdrop { airdrop_id } => queries::airdrop::query_airdrop(deps, airdrop_id),\n\n QueryMsg::Airdrops {\n\n start_after,\n\n limit,\n\n order_by,\n\n } => queries::airdrop::query_airdrops(deps, start_after, limit, order_by),\n\n QueryMsg::Poll { poll_id } => queries::poll::query_poll(deps, poll_id),\n\n QueryMsg::Polls {\n\n start_after,\n", "file_path": "contracts/pylon/gov/src/entrypoints.rs", "rank": 71, "score": 390421.49330345995 }, { "content": "pub fn store(storage: &mut dyn Storage, id: u64, pool: &Pool) -> StdResult<()> {\n\n let key = &id.to_be_bytes()[..];\n\n let mut pool_bucket: Bucket<Pool> = bucket(storage, PREFIX_POOL);\n\n\n\n pool_bucket.save(key, pool)\n\n}\n\n\n", "file_path": "contracts/core/factory/src/state/pool.rs", "rank": 72, "score": 389697.16433339287 }, { "content": "pub fn config(deps: Deps, _env: Env) -> StdResult<Binary> {\n\n let config = config::read(deps.storage).unwrap();\n\n\n\n to_binary(&config) // TODO: marshal config\n\n}\n\n\n", "file_path": "contracts/gateway/pool/src/handler/query.rs", "rank": 73, "score": 389337.43788800825 }, { "content": "pub fn config(deps: Deps, _env: Env) -> StdResult<Binary> {\n\n let config: config::Config = config::read(deps.storage).unwrap();\n\n\n\n to_binary(&resp::ConfigResponse {\n\n beneficiary: deps\n\n .api\n\n .addr_humanize(&config.beneficiary)\n\n .unwrap()\n\n .to_string(),\n\n fee_collector: deps\n\n .api\n\n .addr_humanize(&config.fee_collector)\n\n .unwrap()\n\n .to_string(),\n\n moneymarket: deps\n\n .api\n\n 
.addr_humanize(&config.moneymarket)\n\n .unwrap()\n\n .to_string(),\n\n stable_denom: config.stable_denom,\n\n anchor_token: deps.api.addr_humanize(&config.atoken).unwrap().to_string(),\n\n dp_token: deps\n\n .api\n\n .addr_humanize(&config.dp_token)\n\n .unwrap()\n\n .to_string(),\n\n })\n\n}\n\n\n", "file_path": "contracts/core/pool/src/handler/query.rs", "rank": 74, "score": 389337.4378880082 }, { "content": "pub fn config(deps: Deps, _env: Env) -> StdResult<Binary> {\n\n let config = config::read(deps.storage)?;\n\n\n\n to_binary(&resp::ConfigResponse {\n\n id: config.id,\n\n name: config.name,\n\n factory: config.factory,\n\n beneficiary: config.beneficiary,\n\n yield_adapter: config.yield_adapter,\n\n input_denom: config.input_denom,\n\n yield_token: config.yield_token,\n\n dp_token: config.dp_token,\n\n })\n\n}\n\n\n", "file_path": "contracts/core/pool-v2/src/handler/query.rs", "rank": 75, "score": 386887.3511172926 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => Ok(to_binary(&query_config(deps)?)?),\n\n QueryMsg::VestingAccount { address } => {\n\n Ok(to_binary(&query_vesting_account(deps, address)?)?)\n\n }\n\n QueryMsg::VestingAccounts {\n\n start_after,\n\n limit,\n\n order_by,\n\n } => Ok(to_binary(&query_vesting_accounts(\n\n deps,\n\n start_after,\n\n limit,\n\n order_by,\n\n )?)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/pylon/vesting/src/contract.rs", "rank": 76, "score": 386369.2972921516 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => {\n\n let config = config::read(deps.storage)?;\n\n\n\n to_binary(&adapter_resp::ConfigResponse {\n\n input_denom: config.input_denom.clone(),\n\n yield_token: config.yield_token,\n\n })\n\n }\n\n 
QueryMsg::ExchangeRate { input_denom: _ } => {\n\n let config = config::read(deps.storage)?;\n\n let epoch_state = market::epoch_state(deps, config.moneymarket)?;\n\n\n\n to_binary(&adapter_resp::ExchangeRateResponse {\n\n exchange_rate: epoch_state.exchange_rate,\n\n yield_token_supply: Uint256::zero(),\n\n })\n\n }\n\n QueryMsg::Deposit { amount } => {\n", "file_path": "contracts/mocks/adapter/src/contract.rs", "rank": 77, "score": 386369.2972921516 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/pylon/community/src/contract.rs", "rank": 78, "score": 386369.2972921516 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => to_binary(&query_config(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/pylon/collector/src/contract.rs", "rank": 79, "score": 386369.2972921516 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => {\n\n let config = state::config_r(deps.storage).load().unwrap();\n\n to_binary(&ConfigResponse {\n\n owner_addr: config.owner,\n\n aterra_contract: config.output_token,\n\n interest_model: \"\".to_string(),\n\n distribution_model: \"\".to_string(),\n\n overseer_contract: \"\".to_string(),\n\n collector_contract: \"\".to_string(),\n\n distributor_contract: \"\".to_string(),\n\n stable_denom: config.input_denom,\n\n max_borrow_factor: Default::default(),\n\n })\n\n }\n\n QueryMsg::EpochState { .. 
} => {\n\n let config = state::config_r(deps.storage).load().unwrap();\n\n to_binary(&EpochStateResponse {\n\n exchange_rate: config.exchange_rate,\n\n aterra_supply: Default::default(),\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/mocks/market/src/contract.rs", "rank": 80, "score": 386369.2972921516 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => {\n\n let config = config::read(deps.storage)?;\n\n\n\n to_binary(&adapter_resp::ConfigResponse {\n\n input_denom: config.input_denom.clone(),\n\n yield_token: config.yield_token,\n\n })\n\n }\n\n QueryMsg::ExchangeRate { input_denom: _ } => {\n\n let config = config::read(deps.storage)?;\n\n let epoch_state = anchor::epoch_state(deps, config.moneymarket)?;\n\n\n\n to_binary(&adapter_resp::ExchangeRateResponse {\n\n exchange_rate: epoch_state.exchange_rate,\n\n yield_token_supply: epoch_state.aterra_supply,\n\n })\n\n }\n\n QueryMsg::Deposit { amount } => {\n", "file_path": "contracts/core/adapters/anchor/src/contract.rs", "rank": 81, "score": 384129.5432351361 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::AvailableCapOf { address, amount } => {\n\n let config = state::config_r(deps.storage).load().unwrap();\n\n let staked: StakerResponse = deps\n\n .querier\n\n .query_wasm_smart(config.gov, &GovQueryMsg::Staker { address })?;\n\n\n\n let mut max_cap = Uint256::zero();\n\n for stage in config.stages.iter() {\n\n if stage.from <= Uint256::from(staked.balance) {\n\n if let Some(to) = stage.to {\n\n if Uint256::from(staked.balance) < to {\n\n max_cap = max(max_cap, stage.max_cap);\n\n }\n\n } else {\n\n max_cap = max(max_cap, stage.max_cap);\n\n }\n\n }\n\n }\n\n 
to_binary(&resp::AvailableCapOfResponse {\n\n amount: Option::Some(max_cap - min(max_cap, amount)),\n\n unlimited: false,\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/gateway/strategies/cap-staged/src/contract.rs", "rank": 82, "score": 381933.44040743785 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::AvailableCapOf { address, .. } => {\n\n let config = state::config_r(deps.storage).load().unwrap();\n\n let staked: StakerResponse = deps\n\n .querier\n\n .query_wasm_smart(config.gov, &GovQueryMsg::Staker { address })?;\n\n\n\n if config.minimum_stake_amount <= Uint256::from(staked.balance) {\n\n to_binary(&resp::AvailableCapOfResponse {\n\n amount: Option::None,\n\n unlimited: true,\n\n })\n\n } else {\n\n to_binary(&resp::AvailableCapOfResponse {\n\n amount: Option::Some(Uint256::from(0u64)),\n\n unlimited: false,\n\n })\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/gateway/strategies/cap-min/src/contract.rs", "rank": 83, "score": 381933.44040743785 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::AvailableCapOf { amount, .. 
} => {\n\n let config = state::config_r(deps.storage).load().unwrap();\n\n to_binary(&resp::AvailableCapOfResponse {\n\n amount: Option::Some(config.max_user_cap - min(config.max_user_cap, amount)),\n\n unlimited: false,\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/gateway/strategies/cap-fixed/src/contract.rs", "rank": 84, "score": 381933.44040743785 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> Result<Binary, StdError> {\n\n match msg {\n\n QueryMsg::Config {} => QueryHandler::config(deps),\n\n QueryMsg::PoolInfo { pool_id } => QueryHandler::pool_info(deps, pool_id),\n\n QueryMsg::PoolInfos { start_after, limit } => {\n\n QueryHandler::pool_infos(deps, start_after, limit)\n\n }\n\n QueryMsg::AdapterInfo { address } => QueryHandler::adapter_info(deps, address),\n\n QueryMsg::AdapterInfos { start_after, limit } => {\n\n QueryHandler::adapter_infos(deps, start_after, limit)\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/core/factory/src/contract.rs", "rank": 85, "score": 379919.9309948459 }, { "content": "pub fn available_cap_of(deps: Deps, _env: Env, address: String) -> StdResult<Binary> {\n\n let config = config::read(deps.storage).unwrap();\n\n let user = user::read(\n\n deps.storage,\n\n &deps.api.addr_canonicalize(address.as_str()).unwrap(),\n\n )\n\n .unwrap();\n\n\n\n if let Some(strategy) = config.cap_strategy {\n\n let resp: resp::AvailableCapOfResponse = deps.querier.query_wasm_smart(\n\n strategy,\n\n &QueryMsg::AvailableCapOf {\n\n address,\n\n amount: user.amount,\n\n },\n\n )?;\n\n to_binary(&resp)\n\n } else {\n\n to_binary(&resp::AvailableCapOfResponse {\n\n amount: None,\n\n unlimited: true,\n\n })\n\n }\n\n}\n", "file_path": "contracts/gateway/pool/src/handler/query.rs", "rank": 86, "score": 373275.3823659949 }, { "content": "/// returns rewards owned by this owner\n\n/// (read-only version for queries)\n\npub fn 
read_staker_info(storage: &dyn Storage, owner: &CanonicalAddr) -> StdResult<StakerInfo> {\n\n match ReadonlyBucket::new(storage, PREFIX_REWARD).may_load(owner.as_slice())? {\n\n Some(staker_info) => Ok(staker_info),\n\n None => Ok(StakerInfo {\n\n reward_index: Decimal::zero(),\n\n bond_amount: Uint128::zero(),\n\n pending_reward: Uint128::zero(),\n\n }),\n\n }\n\n}\n", "file_path": "contracts/pylon/staking/src/state.rs", "rank": 87, "score": 372806.56214237056 }, { "content": "pub fn remove(storage: &mut dyn Storage, owner: &CanonicalAddr) {\n\n let mut user_bucket: Bucket<User> = bucket(storage, PREFIX_USER);\n\n user_bucket.remove(owner.as_slice())\n\n}\n\n\n", "file_path": "contracts/gateway/swap/src/state/user.rs", "rank": 88, "score": 369187.10814142536 }, { "content": "pub fn remove(storage: &mut dyn Storage, owner: &CanonicalAddr) {\n\n let mut user_bucket: Bucket<User> = bucket(storage, PREFIX_USER);\n\n\n\n user_bucket.remove(owner.as_slice())\n\n}\n\n\n", "file_path": "contracts/gateway/pool/src/state/user.rs", "rank": 89, "score": 369187.1081414253 }, { "content": "pub fn query_airdrop(deps: Deps, airdrop_id: u64) -> QueryResult {\n\n let airdrop = Airdrop::load(deps.storage, &airdrop_id).unwrap();\n\n\n\n Ok(to_binary(&AirdropResponse {\n\n start: airdrop.config.start,\n\n period: airdrop.config.period,\n\n reward_token: airdrop.config.reward_token.to_string(),\n\n reward_rate: airdrop.config.reward_rate,\n\n })?)\n\n}\n\n\n", "file_path": "contracts/pylon/gov/src/queries/airdrop.rs", "rank": 90, "score": 368884.73842041346 }, { "content": "pub fn query_poll(deps: Deps, poll_id: u64) -> QueryResult {\n\n let poll = match Poll::may_load(deps.storage, &poll_id)? 
{\n\n Some(poll) => Some(poll),\n\n None => return Err(ContractError::PollNotFound {}),\n\n }\n\n .unwrap();\n\n\n\n Ok(to_binary(&to_response(deps, &poll)?)?)\n\n}\n\n\n", "file_path": "contracts/pylon/gov/src/queries/poll.rs", "rank": 91, "score": 368510.4496352284 }, { "content": "pub fn initialize(deps: &mut OwnedDeps<MockStorage, MockApi, CustomMockQuerier>) -> Env {\n\n let env = mock_env();\n\n let info = mock_info(TEST_FACTORY, &[]);\n\n let msg = init_msg();\n\n let _res = contract::instantiate(deps.as_mut(), env.clone(), info, msg)\n\n .expect(\"testing: contract initialized\");\n\n\n\n let mut token_inst_res = MsgInstantiateContractResponse::new();\n\n token_inst_res.set_contract_address(TEST_TOKEN_POOL.to_string());\n\n let reply_msg = Reply {\n\n id: 1,\n\n result: ContractResult::Ok(SubMsgExecutionResponse {\n\n events: vec![],\n\n data: Some(token_inst_res.write_to_bytes().unwrap().into()),\n\n }),\n\n };\n\n let _res = contract::reply(deps.as_mut(), env.clone(), reply_msg)\n\n .expect(\"testing: dp token address registered\");\n\n\n\n env\n\n}\n", "file_path": "contracts/core/pool-v2/src/testing/utils.rs", "rank": 92, "score": 365961.3723081848 }, { "content": "// Only contract itself can execute distribute function\n\npub fn distribute(deps: DepsMut, env: Env) -> StdResult<Response> {\n\n let config: Config = read_config(deps.storage)?;\n\n let amount = query_token_balance(\n\n &deps.querier,\n\n deps.api.addr_humanize(&config.pylon_token)?,\n\n env.contract.address,\n\n )?;\n\n\n\n let distribute_amount = amount * config.reward_factor;\n\n let left_amount = amount.checked_sub(distribute_amount)?;\n\n\n\n let mut messages: Vec<CosmosMsg> = vec![];\n\n\n\n if !distribute_amount.is_zero() {\n\n messages.push(CosmosMsg::Wasm(WasmMsg::Execute {\n\n contract_addr: deps.api.addr_humanize(&config.pylon_token)?.to_string(),\n\n msg: to_binary(&Cw20ExecuteMsg::Transfer {\n\n recipient: deps.api.addr_humanize(&config.gov_contract)?.to_string(),\n\n amount: 
distribute_amount,\n\n })?,\n", "file_path": "contracts/pylon/collector/src/contract.rs", "rank": 93, "score": 364393.4796520804 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => QueryHandler::config(deps),\n\n QueryMsg::BalanceOf { owner } => QueryHandler::balance_of(deps, owner),\n\n QueryMsg::IsWhitelisted { address } => QueryHandler::is_whitelisted(deps, address),\n\n QueryMsg::AvailableCapOf { address } => QueryHandler::available_cap_of(deps, address),\n\n QueryMsg::ClaimableTokenOf { address } => {\n\n QueryHandler::claimable_token_of(deps, env, address)\n\n }\n\n QueryMsg::TotalSupply {} => QueryHandler::total_supply(deps),\n\n QueryMsg::CurrentPrice {} => QueryHandler::current_price(deps),\n\n QueryMsg::SimulateWithdraw { amount, address } => {\n\n QueryHandler::simulate_withdraw(deps, address, amount)\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/gateway/swap/src/contract.rs", "rank": 94, "score": 363827.9857054072 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::DepositAmountOf { owner } => QueryHandler::deposit_amount(deps, env, owner), // dp_token.balanceOf(msg.sender)\n\n QueryMsg::TotalDepositAmount {} => QueryHandler::total_deposit_amount(deps, env), // dp_token.totalSupply()\n\n QueryMsg::Config {} => QueryHandler::config(deps, env), // config\n\n QueryMsg::ClaimableReward {} => QueryHandler::claimable_reward(deps, env), // config.strategy.reward()\n\n }\n\n}\n\n\n", "file_path": "contracts/core/pool/src/contract.rs", "rank": 95, "score": 363827.9857054072 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg 
{\n\n QueryMsg::Config {} => Query::config(deps, env),\n\n QueryMsg::Stakers { start_after, limit } => Query::stakers(deps, env, start_after, limit),\n\n QueryMsg::Reward {} => Query::reward(deps, env),\n\n QueryMsg::BalanceOf { owner } => Query::balance_of(deps, env, owner),\n\n QueryMsg::ClaimableReward { owner } => Query::claimable_reward(deps, env, owner),\n\n QueryMsg::AvailableCapOf { address } => Query::available_cap_of(deps, env, address),\n\n }\n\n}\n\n\n", "file_path": "contracts/gateway/pool/src/contract.rs", "rank": 96, "score": 363827.9857054072 }, { "content": "#[allow(dead_code)]\n\n#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<Binary> {\n\n match msg {\n\n QueryMsg::Config {} => QueryHandler::config(deps, env),\n\n QueryMsg::DepositAmountOf { owner } => QueryHandler::deposit_amount(deps, env, owner),\n\n QueryMsg::TotalDepositAmount {} => QueryHandler::total_deposit_amount(deps, env),\n\n QueryMsg::ClaimableReward {} => QueryHandler::claimable_reward(deps, env),\n\n }\n\n}\n\n\n", "file_path": "contracts/core/pool-v2/src/contract.rs", "rank": 97, "score": 361588.2316483917 }, { "content": "pub fn balance_of(deps: Deps, token: String, owner: String) -> StdResult<Uint256> {\n\n let balance: BalanceResponse = deps.querier.query(&QueryRequest::Wasm(WasmQuery::Smart {\n\n contract_addr: token,\n\n msg: to_binary(&Cw20QueryMsg::Balance { address: owner })?,\n\n }))?;\n\n\n\n Ok(Uint256::from(balance.balance))\n\n}\n\n\n", "file_path": "packages/pylon_utils/src/token.rs", "rank": 98, "score": 356826.7880068789 }, { "content": "pub fn query_staker(deps: Deps, env: Env, address: String) -> QueryResult {\n\n let config = Config::load(deps.storage)?;\n\n let state = State::load(deps.storage)?;\n\n let token_manager = TokenManager::load(deps.storage, &deps.api.addr_canonicalize(&address)?)?;\n\n\n\n let total_balance = query_token_balance(\n\n &deps.querier,\n\n 
deps.api.addr_humanize(&config.pylon_token)?,\n\n env.contract.address.clone(),\n\n )?\n\n .checked_sub(state.total_deposit)?;\n\n\n\n Ok(to_binary(&to_response(\n\n &deps,\n\n &env,\n\n address.as_str(),\n\n &state.total_share,\n\n &total_balance,\n\n &token_manager,\n\n ))?)\n\n}\n\n\n", "file_path": "contracts/pylon/gov/src/queries/bank.rs", "rank": 99, "score": 356411.29884163226 } ]
Rust
src/util/conversions.rs
paigereeves/mmtk-core
d45c5155d3f20bdc4f667c3b08a78f641507966c
use crate::util::constants::*; use crate::util::heap::layout::vm_layout_constants::*; use crate::util::Address; /* Alignment */ pub fn is_address_aligned(addr: Address) -> bool { addr.is_aligned_to(BYTES_IN_ADDRESS) } pub fn page_align_down(address: Address) -> Address { address.align_down(BYTES_IN_PAGE) } pub fn is_page_aligned(address: Address) -> bool { address.is_aligned_to(BYTES_IN_PAGE) } pub const fn chunk_align_up(addr: Address) -> Address { addr.align_up(BYTES_IN_CHUNK) } pub const fn chunk_align_down(addr: Address) -> Address { addr.align_down(BYTES_IN_CHUNK) } pub const fn mmap_chunk_align_up(addr: Address) -> Address { addr.align_up(MMAP_CHUNK_BYTES) } pub const fn mmap_chunk_align_down(addr: Address) -> Address { addr.align_down(MMAP_CHUNK_BYTES) } pub fn bytes_to_chunks_up(bytes: usize) -> usize { (bytes + BYTES_IN_CHUNK - 1) >> LOG_BYTES_IN_CHUNK } pub fn address_to_chunk_index(addr: Address) -> usize { addr >> LOG_BYTES_IN_CHUNK } pub fn chunk_index_to_address(chunk: usize) -> Address { unsafe { Address::from_usize(chunk << LOG_BYTES_IN_CHUNK) } } pub const fn raw_align_up(val: usize, align: usize) -> usize { val.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1) } pub const fn raw_align_down(val: usize, align: usize) -> usize { val & !align.wrapping_sub(1) } pub const fn raw_is_aligned(val: usize, align: usize) -> bool { val & align.wrapping_sub(1) == 0 } /* Conversion */ pub fn pages_to_bytes(pages: usize) -> usize { pages << LOG_BYTES_IN_PAGE } pub fn bytes_to_pages_up(bytes: usize) -> usize { (bytes + BYTES_IN_PAGE - 1) >> LOG_BYTES_IN_PAGE } pub fn bytes_to_pages(bytes: usize) -> usize { let pages = bytes_to_pages_up(bytes); if cfg!(debug = "true") { let computed_extent = pages << LOG_BYTES_IN_PAGE; let bytes_match_pages = computed_extent == bytes; assert!( bytes_match_pages, "ERROR: number of bytes computed from pages must match original byte amount!\ bytes = {}\ pages = {}\ bytes computed from pages = {}", bytes, pages, 
computed_extent ); } pages } pub fn bytes_to_formatted_string(bytes: usize) -> String { const UNITS: [&str; 6] = ["B", "KiB", "MiB", "GiB", "TiB", "PiB"]; let mut i = 0; let mut num = bytes; while i < UNITS.len() - 1 { let new_num = num >> 10; if new_num == 0 { return format!("{}{}", num, UNITS[i]); } num = new_num; i += 1; } return format!("{}{}", num, UNITS.last().unwrap()); } #[cfg(test)] mod tests { use crate::util::conversions::*; use crate::util::Address; #[test] fn test_page_align() { let addr = unsafe { Address::from_usize(0x2345_6789) }; assert_eq!(page_align_down(addr), unsafe { Address::from_usize(0x2345_6000) }); assert!(!is_page_aligned(addr)); assert!(is_page_aligned(page_align_down(addr))); } #[test] fn test_chunk_align() { let addr = unsafe { Address::from_usize(0x2345_6789) }; assert_eq!(chunk_align_down(addr), unsafe { Address::from_usize(0x2340_0000) }); assert_eq!(chunk_align_up(addr), unsafe { Address::from_usize(0x2380_0000) }); } #[test] fn test_bytes_to_formatted_string() { assert_eq!(bytes_to_formatted_string(0), "0B"); assert_eq!(bytes_to_formatted_string(1023), "1023B"); assert_eq!(bytes_to_formatted_string(1024), "1KiB"); assert_eq!(bytes_to_formatted_string(1025), "1KiB"); assert_eq!(bytes_to_formatted_string(1 << 20), "1MiB"); assert_eq!(bytes_to_formatted_string(1 << 30), "1GiB"); #[cfg(target_pointer_width = "64")] { assert_eq!(bytes_to_formatted_string(1 << 40), "1TiB"); assert_eq!(bytes_to_formatted_string(1 << 50), "1PiB"); assert_eq!(bytes_to_formatted_string(1 << 60), "1024PiB"); assert_eq!(bytes_to_formatted_string(1 << 63), "8192PiB"); } } }
use crate::util::constants::*; use crate::util::heap::layout::vm_layout_constants::*; use crate::util::Address; /* Alignment */ pub fn is_address_aligned(addr: Address) -> bool { addr.is_aligned_to(BYTES_IN_ADDRESS) } pub fn page_align_down(ad
o_chunk_index(addr: Address) -> usize { addr >> LOG_BYTES_IN_CHUNK } pub fn chunk_index_to_address(chunk: usize) -> Address { unsafe { Address::from_usize(chunk << LOG_BYTES_IN_CHUNK) } } pub const fn raw_align_up(val: usize, align: usize) -> usize { val.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1) } pub const fn raw_align_down(val: usize, align: usize) -> usize { val & !align.wrapping_sub(1) } pub const fn raw_is_aligned(val: usize, align: usize) -> bool { val & align.wrapping_sub(1) == 0 } /* Conversion */ pub fn pages_to_bytes(pages: usize) -> usize { pages << LOG_BYTES_IN_PAGE } pub fn bytes_to_pages_up(bytes: usize) -> usize { (bytes + BYTES_IN_PAGE - 1) >> LOG_BYTES_IN_PAGE } pub fn bytes_to_pages(bytes: usize) -> usize { let pages = bytes_to_pages_up(bytes); if cfg!(debug = "true") { let computed_extent = pages << LOG_BYTES_IN_PAGE; let bytes_match_pages = computed_extent == bytes; assert!( bytes_match_pages, "ERROR: number of bytes computed from pages must match original byte amount!\ bytes = {}\ pages = {}\ bytes computed from pages = {}", bytes, pages, computed_extent ); } pages } pub fn bytes_to_formatted_string(bytes: usize) -> String { const UNITS: [&str; 6] = ["B", "KiB", "MiB", "GiB", "TiB", "PiB"]; let mut i = 0; let mut num = bytes; while i < UNITS.len() - 1 { let new_num = num >> 10; if new_num == 0 { return format!("{}{}", num, UNITS[i]); } num = new_num; i += 1; } return format!("{}{}", num, UNITS.last().unwrap()); } #[cfg(test)] mod tests { use crate::util::conversions::*; use crate::util::Address; #[test] fn test_page_align() { let addr = unsafe { Address::from_usize(0x2345_6789) }; assert_eq!(page_align_down(addr), unsafe { Address::from_usize(0x2345_6000) }); assert!(!is_page_aligned(addr)); assert!(is_page_aligned(page_align_down(addr))); } #[test] fn test_chunk_align() { let addr = unsafe { Address::from_usize(0x2345_6789) }; assert_eq!(chunk_align_down(addr), unsafe { Address::from_usize(0x2340_0000) }); 
assert_eq!(chunk_align_up(addr), unsafe { Address::from_usize(0x2380_0000) }); } #[test] fn test_bytes_to_formatted_string() { assert_eq!(bytes_to_formatted_string(0), "0B"); assert_eq!(bytes_to_formatted_string(1023), "1023B"); assert_eq!(bytes_to_formatted_string(1024), "1KiB"); assert_eq!(bytes_to_formatted_string(1025), "1KiB"); assert_eq!(bytes_to_formatted_string(1 << 20), "1MiB"); assert_eq!(bytes_to_formatted_string(1 << 30), "1GiB"); #[cfg(target_pointer_width = "64")] { assert_eq!(bytes_to_formatted_string(1 << 40), "1TiB"); assert_eq!(bytes_to_formatted_string(1 << 50), "1PiB"); assert_eq!(bytes_to_formatted_string(1 << 60), "1024PiB"); assert_eq!(bytes_to_formatted_string(1 << 63), "8192PiB"); } } }
dress: Address) -> Address { address.align_down(BYTES_IN_PAGE) } pub fn is_page_aligned(address: Address) -> bool { address.is_aligned_to(BYTES_IN_PAGE) } pub const fn chunk_align_up(addr: Address) -> Address { addr.align_up(BYTES_IN_CHUNK) } pub const fn chunk_align_down(addr: Address) -> Address { addr.align_down(BYTES_IN_CHUNK) } pub const fn mmap_chunk_align_up(addr: Address) -> Address { addr.align_up(MMAP_CHUNK_BYTES) } pub const fn mmap_chunk_align_down(addr: Address) -> Address { addr.align_down(MMAP_CHUNK_BYTES) } pub fn bytes_to_chunks_up(bytes: usize) -> usize { (bytes + BYTES_IN_CHUNK - 1) >> LOG_BYTES_IN_CHUNK } pub fn address_t
random
[ { "content": "/// Is the address in the mapped memory? The runtime can use this function to check\n\n/// if an address is mapped by MMTk. Note that this is different than is_mapped_object().\n\n/// For malloc spaces, MMTk does not map those addresses (malloc does the mmap), so\n\n/// this function will return false, but is_mapped_object will return true if the address\n\n/// is actually a valid object in malloc spaces. To check if an object is in our heap,\n\n/// the runtime should always use is_mapped_object(). This function is_mapped_address()\n\n/// may get removed at some point.\n\n///\n\n/// Arguments:\n\n/// * `address`: The address to query.\n\n// TODO: Do we really need this function? Can a runtime always use is_mapped_object()?\n\npub fn is_mapped_address(address: Address) -> bool {\n\n address.is_mapped()\n\n}\n\n\n", "file_path": "src/memory_manager.rs", "rank": 2, "score": 294525.0153673882 }, { "content": "pub fn is_alloced_object(address: Address) -> bool {\n\n side_metadata::load_atomic(&ALLOC_SIDE_METADATA_SPEC, address, Ordering::SeqCst) == 1\n\n}\n\n\n\n/// # Safety\n\n///\n\n/// This is unsafe: check the comment on `side_metadata::load`\n\n///\n\npub unsafe fn is_alloced_object_unsafe(address: Address) -> bool {\n\n side_metadata::load(&ALLOC_SIDE_METADATA_SPEC, address) == 1\n\n}\n\n\n", "file_path": "src/util/alloc_bit.rs", "rank": 3, "score": 285093.5472023228 }, { "content": "pub fn is_alloced_object(address: Address) -> bool {\n\n alloc_bit::is_alloced_object(address)\n\n}\n\n\n\npub unsafe fn is_alloced_object_unsafe(address: Address) -> bool {\n\n alloc_bit::is_alloced_object_unsafe(address)\n\n}\n\n\n", "file_path": "src/policy/mallocspace/metadata.rs", "rank": 4, "score": 285093.5472023228 }, { "content": "pub fn is_alloced_object(address: Address) -> bool {\n\n alloc_bit::is_alloced_object(address)\n\n}\n\n\n\npub unsafe fn is_alloced_object_unsafe(address: Address) -> bool {\n\n 
alloc_bit::is_alloced_object_unsafe(address)\n\n}\n\n\n", "file_path": "src/policy/marksweepspace/metadata.rs", "rank": 5, "score": 285093.5472023228 }, { "content": "pub fn is_meta_space_mapped(address: Address) -> bool {\n\n let chunk_start = conversions::chunk_align_down(address);\n\n is_chunk_mapped(chunk_start) && is_chunk_marked(chunk_start)\n\n}\n\n\n", "file_path": "src/policy/marksweepspace/metadata.rs", "rank": 6, "score": 281402.929717236 }, { "content": "/// allocate `size` bytes, which is aligned to `align` at `offset`\n\n/// return the address, and whether it is an offset allocation\n\npub fn alloc<VM: VMBinding>(size: usize, align: usize, offset: isize) -> (Address, bool) {\n\n let address: Address;\n\n let mut is_offset_malloc = false;\n\n // malloc returns 16 bytes aligned address.\n\n // So if the alignment is smaller than 16 bytes, we do not need to align.\n\n if align <= 16 && offset == 0 {\n\n let raw = unsafe { calloc(1, size) };\n\n address = Address::from_mut_ptr(raw);\n\n debug_assert!(address.is_aligned_to(align));\n\n } else if align > 16 && offset == 0 {\n\n address = align_alloc::<VM>(size, align);\n\n #[cfg(feature = \"malloc_hoard\")]\n\n {\n\n is_offset_malloc = true;\n\n }\n\n debug_assert!(\n\n address.is_aligned_to(align),\n\n \"Address: {:x} is not aligned to the given alignment: {}\",\n\n address,\n\n align\n", "file_path": "src/util/malloc.rs", "rank": 7, "score": 276663.05521124817 }, { "content": "/// Check if metadata is mapped for a range [addr, addr + size). Metadata is mapped per chunk,\n\n/// we will go through all the chunks for [address, address + size), and check if they are mapped.\n\n/// If any of the chunks is not mapped, return false. 
Otherwise return true.\n\npub fn is_meta_space_mapped(address: Address, size: usize) -> bool {\n\n let mut chunk = conversions::chunk_align_down(address);\n\n while chunk < address + size {\n\n if !is_meta_space_mapped_for_address(chunk) {\n\n return false;\n\n }\n\n chunk += BYTES_IN_CHUNK;\n\n }\n\n true\n\n}\n\n\n", "file_path": "src/policy/mallocspace/metadata.rs", "rank": 9, "score": 265929.08546067204 }, { "content": "/// get malloc usable size of an address\n\n/// is_offset_malloc: whether the address is allocated with some offset\n\npub fn get_malloc_usable_size(address: Address, is_offset_malloc: bool) -> usize {\n\n if is_offset_malloc {\n\n offset_malloc_usable_size(address)\n\n } else {\n\n unsafe { malloc_usable_size(address.to_mut_ptr()) }\n\n }\n\n}\n\n\n", "file_path": "src/util/malloc.rs", "rank": 10, "score": 262690.494122141 }, { "content": "pub fn is_chunk_marked(chunk_start: Address) -> bool {\n\n side_metadata::load_atomic(&ACTIVE_CHUNK_METADATA_SPEC, chunk_start, Ordering::SeqCst) == 1\n\n}\n\n\n\npub unsafe fn is_chunk_marked_unsafe(chunk_start: Address) -> bool {\n\n side_metadata::load(&ACTIVE_CHUNK_METADATA_SPEC, chunk_start) == 1\n\n}\n\n\n", "file_path": "src/policy/mallocspace/metadata.rs", "rank": 11, "score": 260861.0882324981 }, { "content": "pub fn is_chunk_mapped(chunk_start: Address) -> bool {\n\n // Since `address_to_meta_address` will translate a data address to a metadata address without caring\n\n // if it goes across metadata boundaries, we have to check if we have accidentally gone over the bounds\n\n // of the active chunk metadata spec before we check if the metadata has been mapped or not\n\n let meta_address =\n\n side_metadata::address_to_meta_address(&ACTIVE_CHUNK_METADATA_SPEC, chunk_start);\n\n if meta_address < *MAX_METADATA_ADDRESS {\n\n meta_address.is_mapped()\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/policy/mallocspace/metadata.rs", "rank": 12, "score": 260861.08823249806 }, { "content": "pub 
fn is_chunk_mapped(chunk_start: Address) -> bool {\n\n side_metadata::address_to_meta_address(&ACTIVE_CHUNK_METADATA_SPEC, chunk_start).is_mapped()\n\n}\n\n\n", "file_path": "src/policy/marksweepspace/metadata.rs", "rank": 13, "score": 260861.08823249806 }, { "content": "pub fn is_chunk_marked(chunk_start: Address) -> bool {\n\n side_metadata::load_atomic(&ACTIVE_CHUNK_METADATA_SPEC, chunk_start, Ordering::SeqCst) == 1\n\n}\n\n\n\npub unsafe fn is_chunk_marked_unsafe(chunk_start: Address) -> bool {\n\n side_metadata::load(&ACTIVE_CHUNK_METADATA_SPEC, chunk_start) == 1\n\n}\n\n\n", "file_path": "src/policy/marksweepspace/metadata.rs", "rank": 14, "score": 260861.0882324981 }, { "content": "/// Check if metadata is mapped for a given address. We check if the active chunk metadata is mapped,\n\n/// and if the active chunk bit is marked as well. If the chunk is mapped and marked, we consider the\n\n/// metadata for the chunk is properly mapped.\n\nfn is_meta_space_mapped_for_address(address: Address) -> bool {\n\n let chunk_start = conversions::chunk_align_down(address);\n\n is_chunk_mapped(chunk_start) && is_chunk_marked(chunk_start)\n\n}\n\n\n", "file_path": "src/policy/mallocspace/metadata.rs", "rank": 15, "score": 229608.46233723016 }, { "content": "#[inline(always)]\n\npub fn fill_alignment_gap<VM: VMBinding>(immut_start: Address, end: Address) {\n\n let mut start = immut_start;\n\n\n\n if VM::MAX_ALIGNMENT - VM::MIN_ALIGNMENT == BYTES_IN_INT {\n\n // At most a single hole\n\n if end - start != 0 {\n\n unsafe {\n\n start.store(VM::ALIGNMENT_VALUE);\n\n }\n\n }\n\n } else {\n\n while start < end {\n\n unsafe {\n\n start.store(VM::ALIGNMENT_VALUE);\n\n }\n\n start += BYTES_IN_INT;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/util/alloc/allocator.rs", "rank": 16, "score": 226240.28182895377 }, { "content": "/// free an address that is allocated with some offset\n\npub fn offset_free(address: Address) {\n\n let malloc_res_ptr: *mut usize = (address - 
BYTES_IN_ADDRESS).to_mut_ptr();\n\n let malloc_res = unsafe { *malloc_res_ptr } as *mut libc::c_void;\n\n unsafe { free(malloc_res) };\n\n}\n\n\n", "file_path": "src/util/malloc.rs", "rank": 17, "score": 223940.85152554704 }, { "content": "/// Return the ending address of the heap. *Note that currently MMTk uses\n\n/// a fixed address range as heap.*\n\npub fn last_heap_address() -> Address {\n\n HEAP_END\n\n}\n\n\n", "file_path": "src/memory_manager.rs", "rank": 18, "score": 219737.75483616354 }, { "content": "/// Return the starting address of the heap. *Note that currently MMTk uses\n\n/// a fixed address range as heap.*\n\npub fn starting_heap_address() -> Address {\n\n HEAP_START\n\n}\n\n\n", "file_path": "src/memory_manager.rs", "rank": 19, "score": 219737.75483616354 }, { "content": "#[inline]\n\npub fn get_metadata_base(address: Address) -> Address {\n\n address.align_down(BYTES_IN_REGION)\n\n}\n\n\n", "file_path": "src/util/alloc/embedded_meta_data.rs", "rank": 20, "score": 217033.66079182227 }, { "content": "#[inline]\n\npub fn get_metadata_offset(address: Address, log_coverage: usize, log_align: usize) -> usize {\n\n ((address & REGION_MASK) >> (log_coverage + log_align)) << log_align\n\n}\n", "file_path": "src/util/alloc/embedded_meta_data.rs", "rank": 21, "score": 215462.9021729938 }, { "content": "pub fn unset_addr_alloc_bit(address: Address) {\n\n debug_assert!(\n\n is_alloced_object(address),\n\n \"{:x}: alloc bit not set\",\n\n address\n\n );\n\n side_metadata::store_atomic(&ALLOC_SIDE_METADATA_SPEC, address, 0, Ordering::SeqCst);\n\n}\n\n\n", "file_path": "src/util/alloc_bit.rs", "rank": 22, "score": 215386.316483289 }, { "content": "/// Logs an edge.\n\n/// Panics if the edge was already logged.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `edge` - The edge to log.\n\n///\n\npub fn log_edge(edge: Address) {\n\n trace!(\"log_edge({})\", edge);\n\n let mut edge_log = EDGE_LOG.write().unwrap();\n\n assert!(edge_log.insert(edge), \"duplicate edge ({}) 
detected\", edge);\n\n}\n\n\n", "file_path": "src/util/edge_logger.rs", "rank": 25, "score": 200998.56036752486 }, { "content": "pub fn result_is_mapped(result: Result<()>) -> bool {\n\n match result {\n\n Ok(_) => false,\n\n Err(err) => err.raw_os_error().unwrap() == libc::EEXIST,\n\n }\n\n}\n\n\n", "file_path": "src/util/memory.rs", "rank": 26, "score": 197986.3864835213 }, { "content": "#[cfg(feature = \"malloc_hoard\")]\n\nfn align_alloc<VM: VMBinding>(size: usize, align: usize) -> Address {\n\n align_offset_alloc::<VM>(size, align, 0)\n\n}\n\n\n", "file_path": "src/util/malloc.rs", "rank": 27, "score": 196968.68794149885 }, { "content": "/// Is the object in the mapped memory? The runtime can use this function to check\n\n/// if an object is in MMTk heap.\n\n///\n\n/// Arguments:\n\n/// * `object`: The object reference to query.\n\npub fn is_mapped_object(object: ObjectReference) -> bool {\n\n object.is_mapped()\n\n}\n\n\n", "file_path": "src/memory_manager.rs", "rank": 28, "score": 194972.94324549235 }, { "content": "pub fn is_alloced(object: ObjectReference) -> bool {\n\n is_alloced_object(object.to_address())\n\n}\n\n\n", "file_path": "src/policy/mallocspace/metadata.rs", "rank": 29, "score": 194968.20069268064 }, { "content": "/// Is the object alive?\n\n///\n\n/// Arguments:\n\n/// * `object`: The object reference to query.\n\npub fn is_live_object(object: ObjectReference) -> bool {\n\n object.is_live()\n\n}\n\n\n", "file_path": "src/memory_manager.rs", "rank": 30, "score": 194968.20069268064 }, { "content": "pub fn is_alloced(object: ObjectReference) -> bool {\n\n is_alloced_object(object.to_address())\n\n}\n\n\n", "file_path": "src/policy/marksweepspace/metadata.rs", "rank": 31, "score": 194968.20069268064 }, { "content": "pub fn is_alloced(object: ObjectReference) -> bool {\n\n is_alloced_object(object.to_address())\n\n}\n\n\n", "file_path": "src/util/alloc_bit.rs", "rank": 32, "score": 194968.20069268064 }, { "content": "pub fn zero(start: Address, 
len: usize) {\n\n let ptr = start.to_mut_ptr();\n\n wrap_libc_call(&|| unsafe { libc::memset(ptr, 0, len) }, ptr).unwrap()\n\n}\n\n\n\n/// Demand-zero mmap:\n\n/// This function mmaps the memory and guarantees to zero all mapped memory.\n\n/// This function WILL overwrite existing memory mapping. The user of this function\n\n/// needs to be aware of this, and use it cautiously.\n\n///\n\n/// # Safety\n\n/// This function WILL overwrite existing memory mapping if there is any. So only use this function if you know\n\n/// the memory has been reserved by mmtk (e.g. after the use of mmap_noreserve()). Otherwise using this function\n\n/// may corrupt others' data.\n\n#[allow(clippy::let_and_return)] // Zeroing is not neceesary for some OS/s\n\npub unsafe fn dzmmap(start: Address, size: usize) -> Result<()> {\n\n let prot = PROT_READ | PROT_WRITE | PROT_EXEC;\n\n let flags = libc::MAP_ANON | libc::MAP_PRIVATE | libc::MAP_FIXED;\n\n let ret = mmap_fixed(start, size, prot, flags);\n\n // We do not need to explicitly zero for Linux (memory is guaranteed to be zeroed)\n\n #[cfg(not(target_os = \"linux\"))]\n\n if ret.is_ok() {\n\n zero(start, size)\n\n }\n\n ret\n\n}\n\n\n\n/// Demand-zero mmap (no replace):\n\n/// This function mmaps the memory and guarantees to zero all mapped memory.\n\n/// This function will not overwrite existing memory mapping, and it will result Err if there is an existing mapping.\n", "file_path": "src/util/memory.rs", "rank": 33, "score": 194012.4991408107 }, { "content": "pub fn state_is_being_forwarded(forwarding_bits: usize) -> bool {\n\n forwarding_bits == BEING_FORWARDED\n\n}\n\n\n", "file_path": "src/util/object_forwarding.rs", "rank": 34, "score": 192095.64004341362 }, { "content": "// Check if a given object was allocated by malloc\n\npub fn is_alloced_by_malloc(object: ObjectReference) -> bool {\n\n is_meta_space_mapped_for_address(object.to_address()) && is_alloced(object)\n\n}\n\n\n", "file_path": "src/policy/mallocspace/metadata.rs", 
"rank": 35, "score": 192095.64004341362 }, { "content": "// Check if a given object was allocated by malloc\n\npub fn is_alloced_by_malloc(object: ObjectReference) -> bool {\n\n is_meta_space_mapped(object.to_address()) && is_alloced(object)\n\n}\n\n\n", "file_path": "src/policy/marksweepspace/metadata.rs", "rank": 36, "score": 192095.64004341362 }, { "content": "/// Checks if the memory has already been mapped. If not, we panic.\n\n// Note that the checking has a side effect that it will map the memory if it was unmapped. So we panic if it was unmapped.\n\n// Be very careful about using this function.\n\npub fn panic_if_unmapped(start: Address, size: usize) {\n\n let prot = PROT_READ | PROT_WRITE;\n\n // MAP_FIXED_NOREPLACE returns EEXIST if already mapped\n\n let flags = libc::MAP_ANON | libc::MAP_PRIVATE | libc::MAP_FIXED_NOREPLACE;\n\n match mmap_fixed(start, size, prot, flags) {\n\n Ok(_) => panic!(\"{} of size {} is not mapped\", start, size),\n\n Err(e) => {\n\n assert!(\n\n e.kind() == std::io::ErrorKind::AlreadyExists,\n\n \"Failed to check mapped: {:?}\",\n\n e\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/util/memory.rs", "rank": 37, "score": 191003.15462982515 }, { "content": "pub fn state_is_forwarded_or_being_forwarded(forwarding_bits: usize) -> bool {\n\n forwarding_bits != FORWARDING_NOT_TRIGGERED_YET\n\n}\n\n\n", "file_path": "src/util/object_forwarding.rs", "rank": 38, "score": 189358.41335498643 }, { "content": "pub fn mprotect(start: Address, size: usize) -> Result<()> {\n\n wrap_libc_call(\n\n &|| unsafe { libc::mprotect(start.to_mut_ptr(), size, PROT_NONE) },\n\n 0,\n\n )\n\n}\n\n\n", "file_path": "src/util/memory.rs", "rank": 39, "score": 187948.72781706252 }, { "content": "pub fn munprotect(start: Address, size: usize) -> Result<()> {\n\n wrap_libc_call(\n\n &|| unsafe { libc::mprotect(start.to_mut_ptr(), size, PROT_READ | PROT_WRITE | PROT_EXEC) },\n\n 0,\n\n )\n\n}\n\n\n", "file_path": "src/util/memory.rs", "rank": 40, "score": 
187948.72781706252 }, { "content": "pub fn munmap(start: Address, size: usize) -> Result<()> {\n\n wrap_libc_call(&|| unsafe { libc::munmap(start.to_mut_ptr(), size) }, 0)\n\n}\n\n\n", "file_path": "src/util/memory.rs", "rank": 41, "score": 187948.72781706252 }, { "content": "pub fn bzero_alloc_bit(start: Address, size: usize) {\n\n side_metadata::bzero_metadata(&ALLOC_SIDE_METADATA_SPEC, start, size);\n\n}\n", "file_path": "src/util/alloc_bit.rs", "rank": 42, "score": 185396.33939811093 }, { "content": "/// mmap with no swap space reserve:\n\n/// This function does not reserve swap space for this mapping, which means there is no guarantee that writes to the\n\n/// mapping can always be successful. In case of out of physical memory, one may get a segfault for writing to the mapping.\n\n/// We can use this to reserve the address range, and then later overwrites the mapping with dzmmap().\n\npub fn mmap_noreserve(start: Address, size: usize) -> Result<()> {\n\n let prot = PROT_NONE;\n\n let flags =\n\n libc::MAP_ANON | libc::MAP_PRIVATE | libc::MAP_FIXED_NOREPLACE | libc::MAP_NORESERVE;\n\n mmap_fixed(start, size, prot, flags)\n\n}\n\n\n", "file_path": "src/util/memory.rs", "rank": 43, "score": 185088.52399655478 }, { "content": "#[allow(clippy::let_and_return)] // Zeroing is not neceesary for some OS/s\n\npub fn dzmmap_noreplace(start: Address, size: usize) -> Result<()> {\n\n let prot = PROT_READ | PROT_WRITE | PROT_EXEC;\n\n let flags = libc::MAP_ANON | libc::MAP_PRIVATE | libc::MAP_FIXED_NOREPLACE;\n\n let ret = mmap_fixed(start, size, prot, flags);\n\n // We do not need to explicitly zero for Linux (memory is guaranteed to be zeroed)\n\n #[cfg(not(target_os = \"linux\"))]\n\n if ret.is_ok() {\n\n zero(start, size)\n\n }\n\n ret\n\n}\n\n\n", "file_path": "src/util/memory.rs", "rank": 44, "score": 185080.10026619287 }, { "content": "// Beside returning the allocation result,\n\n// this will store the malloc result at (result - BYTES_IN_ADDRESS)\n\nfn 
align_offset_alloc<VM: VMBinding>(size: usize, align: usize, offset: isize) -> Address {\n\n // we allocate extra `align` bytes here, so we are able to handle offset\n\n let actual_size = size + align + BYTES_IN_ADDRESS;\n\n let raw = unsafe { calloc(1, actual_size) };\n\n let address = Address::from_mut_ptr(raw);\n\n if address.is_zero() {\n\n return address;\n\n }\n\n let mod_offset = (offset % (align as isize)) as isize;\n\n let mut result =\n\n crate::util::alloc::allocator::align_allocation_no_fill::<VM>(address, align, mod_offset);\n\n if result - BYTES_IN_ADDRESS < address {\n\n result += align;\n\n }\n\n let malloc_res_ptr: *mut usize = (result - BYTES_IN_ADDRESS).to_mut_ptr();\n\n unsafe { *malloc_res_ptr = address.as_usize() };\n\n result\n\n}\n\n\n", "file_path": "src/util/malloc.rs", "rank": 45, "score": 184510.86570807704 }, { "content": "pub fn is_forwarded<VM: VMBinding>(object: ObjectReference) -> bool {\n\n get_forwarding_status::<VM>(object) == FORWARDED\n\n}\n\n\n", "file_path": "src/util/object_forwarding.rs", "rank": 46, "score": 180325.65792152222 }, { "content": "pub fn is_forwarded_or_being_forwarded<VM: VMBinding>(object: ObjectReference) -> bool {\n\n get_forwarding_status::<VM>(object) != FORWARDING_NOT_TRIGGERED_YET\n\n}\n\n\n", "file_path": "src/util/object_forwarding.rs", "rank": 47, "score": 177831.90854218468 }, { "content": "pub fn map_meta_space_for_chunk(metadata: &SideMetadataContext, chunk_start: Address) {\n\n eprintln!(\"map meta space for chunk {}\", chunk_start);\n\n let mmap_metadata_result = metadata.try_map_metadata_space(chunk_start, BYTES_IN_CHUNK);\n\n debug_assert!(\n\n mmap_metadata_result.is_ok(),\n\n \"mmap sidemetadata failed for chunk_start ({})\",\n\n chunk_start\n\n );\n\n}\n\n\n", "file_path": "src/util/alloc_bit.rs", "rank": 48, "score": 175639.34278663507 }, { "content": "// We map the active chunk metadata (if not previously mapped), as well as the alloc bit metadata\n\n// and active page metadata 
here\n\npub fn map_meta_space_for_chunk(metadata: &SideMetadataContext, chunk_start: Address) {\n\n {\n\n // In order to prevent race conditions, we synchronize on the lock first and then\n\n // check if we need to map the active chunk metadata for `chunk_start`\n\n let _lock = CHUNK_MAP_LOCK.lock().unwrap();\n\n if !is_chunk_mapped(chunk_start) {\n\n map_active_chunk_metadata(chunk_start);\n\n }\n\n }\n\n\n\n if is_chunk_marked(chunk_start) {\n\n return;\n\n }\n\n\n\n set_chunk_mark(chunk_start);\n\n let mmap_metadata_result = metadata.try_map_metadata_space(chunk_start, BYTES_IN_CHUNK);\n\n trace!(\"set chunk mark bit for {}\", chunk_start);\n\n debug_assert!(\n\n mmap_metadata_result.is_ok(),\n\n \"mmap sidemetadata failed for chunk_start ({})\",\n\n chunk_start\n\n );\n\n}\n\n\n", "file_path": "src/policy/marksweepspace/metadata.rs", "rank": 49, "score": 175639.34278663504 }, { "content": "#[cfg(test)]\n\npub fn ensure_munmap_metadata_chunk(start: Address, local_per_chunk: usize) {\n\n if local_per_chunk != 0 {\n\n let policy_meta_start = address_to_meta_chunk_addr(start);\n\n assert!(memory::munmap(policy_meta_start, local_per_chunk).is_ok())\n\n }\n\n}\n\n\n", "file_path": "src/util/metadata/side_metadata/helpers_32.rs", "rank": 50, "score": 173457.11947285986 }, { "content": "/// Add a callback to be notified after the transitive closure is finished.\n\n/// The callback should return true if it add more work packets to the closure bucket.\n\npub fn on_closure_end<VM: VMBinding>(mmtk: &'static MMTK<VM>, f: Box<dyn Send + Fn() -> bool>) {\n\n mmtk.scheduler.on_closure_end(f)\n\n}\n", "file_path": "src/memory_manager.rs", "rank": 51, "score": 172476.9285043531 }, { "content": "// Used only for debugging\n\n// Panics in the required metadata for data_addr is not mapped\n\npub fn ensure_metadata_is_mapped(metadata_spec: &SideMetadataSpec, data_addr: Address) {\n\n let meta_start = address_to_meta_address(metadata_spec, 
data_addr).align_down(BYTES_IN_PAGE);\n\n\n\n // debug!(\n\n // \"ensure_metadata_is_mapped({}).meta_start({})\",\n\n // data_addr, meta_start\n\n // );\n\n\n\n memory::panic_if_unmapped(meta_start, BYTES_IN_PAGE);\n\n}\n\n\n", "file_path": "src/util/metadata/side_metadata/global.rs", "rank": 52, "score": 171370.04462937912 }, { "content": "/// We map the active chunk metadata (if not previously mapped), as well as the alloc bit metadata\n\n/// and active page metadata here. Note that if [addr, addr + size) crosses multiple chunks, we\n\n/// will map for each chunk.\n\npub fn map_meta_space(metadata: &SideMetadataContext, addr: Address, size: usize) {\n\n // In order to prevent race conditions, we synchronize on the lock first and then\n\n // check if we need to map the active chunk metadata for `chunk_start`\n\n let _lock = CHUNK_MAP_LOCK.lock().unwrap();\n\n\n\n let map_metadata_space_for_chunk = |start: Address| {\n\n debug_assert!(start.is_aligned_to(BYTES_IN_CHUNK));\n\n // Check if the chunk bit metadata is mapped. If it is not mapped, map it.\n\n // Note that the chunk bit metadata is global. It may have been mapped because other policy mapped it.\n\n if !is_chunk_mapped(start) {\n\n map_active_chunk_metadata(start);\n\n }\n\n\n\n // If we have set the chunk bit, return. 
This is needed just in case another thread has done this before\n\n // we can acquire the lock.\n\n if is_chunk_marked(start) {\n\n return;\n\n }\n\n\n\n // Attempt to map the local metadata for the policy.\n", "file_path": "src/policy/mallocspace/metadata.rs", "rank": 53, "score": 169787.46732798277 }, { "content": "#[inline(always)]\n\npub fn align_allocation<VM: VMBinding>(\n\n region: Address,\n\n alignment: usize,\n\n offset: isize,\n\n known_alignment: usize,\n\n fillalignmentgap: bool,\n\n) -> Address {\n\n debug_assert!(known_alignment >= VM::MIN_ALIGNMENT);\n\n // Make sure MIN_ALIGNMENT is reasonable.\n\n #[allow(clippy::assertions_on_constants)]\n\n {\n\n debug_assert!(VM::MIN_ALIGNMENT >= BYTES_IN_INT);\n\n }\n\n debug_assert!(!(fillalignmentgap && region.is_zero()));\n\n debug_assert!(alignment <= VM::MAX_ALIGNMENT);\n\n debug_assert!(offset >= 0);\n\n debug_assert!(region.is_aligned_to(VM::ALLOC_END_ALIGNMENT));\n\n debug_assert!((alignment & (VM::MIN_ALIGNMENT - 1)) == 0);\n\n debug_assert!((offset & (VM::MIN_ALIGNMENT - 1) as isize) == 0);\n\n\n", "file_path": "src/util/alloc/allocator.rs", "rank": 54, "score": 166950.1700250518 }, { "content": "#[test]\n\npub fn issue139_alloc_non_multiple_of_min_alignment() {\n\n gc_init(200*1024*1024);\n\n let handle = bind_mutator(VMMutatorThread(VMThread::UNINITIALIZED));\n\n\n\n // Allocate 6 bytes with 8 bytes ailgnment required\n\n let addr = alloc(handle, 14, 8, 0, AllocationSemantics::Default);\n\n assert!(addr.is_aligned_to(8));\n\n // After the allocation, the cursor is not MIN_ALIGNMENT aligned. 
If we have the assertion in the next allocation to check if the cursor is aligned to MIN_ALIGNMENT, it fails.\n\n // We have to remove that assertion.\n\n let addr2 = alloc(handle, 14, 8, 0, AllocationSemantics::Default);\n\n assert!(addr2.is_aligned_to(8));\n\n}", "file_path": "vmbindings/dummyvm/src/tests/issue139.rs", "rank": 55, "score": 166469.99467424987 }, { "content": "/// Bulk-zero a specific metadata for a chunk.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `metadata_spec` - The specification of the target side metadata.\n\n///\n\n/// * `chunk_start` - The starting address of the chunk whose metadata is being zeroed.\n\n///\n\npub fn bzero_metadata(metadata_spec: &SideMetadataSpec, start: Address, size: usize) {\n\n #[cfg(feature = \"extreme_assertions\")]\n\n let _lock = sanity::SANITY_LOCK.lock().unwrap();\n\n\n\n // yiluowei: Not Sure but this assertion seems too strict for Immix recycled lines\n\n #[cfg(not(feature = \"global_alloc_bit\"))]\n\n debug_assert!(\n\n start.is_aligned_to(BYTES_IN_PAGE) && meta_byte_lshift(metadata_spec, start) == 0\n\n );\n\n\n\n #[cfg(feature = \"extreme_assertions\")]\n\n sanity::verify_bzero(metadata_spec, start, size);\n\n\n\n let meta_start = address_to_meta_address(metadata_spec, start);\n\n if cfg!(target_pointer_width = \"64\") || metadata_spec.is_global {\n\n memory::zero(\n\n meta_start,\n\n address_to_meta_address(metadata_spec, start + size) - meta_start,\n\n );\n\n }\n", "file_path": "src/util/metadata/side_metadata/global.rs", "rank": 56, "score": 165517.7082586378 }, { "content": "#[cfg(feature = \"extreme_assertions\")]\n\npub fn verify_bzero(metadata_spec: &SideMetadataSpec, start: Address, size: usize) {\n\n let sanity_map = &mut CONTENT_SANITY_MAP.write().unwrap();\n\n match sanity_map.get_mut(metadata_spec) {\n\n Some(spec_sanity_map) => {\n\n // zero entries where the key (data_addr) is in the range (start, start+size)\n\n for (k, v) in spec_sanity_map.iter_mut() {\n\n // If the source address is in the 
bzero's range\n\n if *k >= start && *k < start + size {\n\n *v = 0;\n\n }\n\n }\n\n }\n\n None => {\n\n panic!(\"Invalid Metadata Spec!\");\n\n }\n\n }\n\n}\n\n\n\n/// Ensures a side metadata load operation returns the correct side metadata content.\n\n/// Panics if:\n\n/// 1 - the metadata spec is not valid,\n\n/// 2 - data address is not valid,\n\n/// 3 - the loaded side metadata content is not equal to the correct content.\n\n///\n\n/// Arguments:\n\n/// * `metadata_spec`: the metadata spec to verify the loaded content for\n\n/// * `data_addr`: the address of the source data\n\n/// * `actual_val`: the actual content returned by the side metadata load operation\n\n///\n", "file_path": "src/util/metadata/side_metadata/sanity.rs", "rank": 57, "score": 165513.09948393182 }, { "content": "pub fn is_marked<VM: VMBinding>(object: ObjectReference, ordering: Option<Ordering>) -> bool {\n\n load_metadata::<VM>(\n\n &VM::VMObjectModel::LOCAL_MARK_BIT_SPEC,\n\n object,\n\n None,\n\n ordering,\n\n ) == 1\n\n}\n\n\n\n#[allow(unused)]\n\npub(super) fn is_page_marked(page_addr: Address) -> bool {\n\n side_metadata::load_atomic(&ACTIVE_PAGE_METADATA_SPEC, page_addr, Ordering::SeqCst) == 1\n\n}\n\n\n\n#[allow(unused)]\n\npub(super) unsafe fn is_page_marked_unsafe(page_addr: Address) -> bool {\n\n side_metadata::load(&ACTIVE_PAGE_METADATA_SPEC, page_addr) == 1\n\n}\n\n\n", "file_path": "src/policy/mallocspace/metadata.rs", "rank": 58, "score": 165342.2271973708 }, { "content": "pub fn is_marked<VM: VMBinding>(object: ObjectReference, ordering: Option<Ordering>) -> bool {\n\n load_metadata::<VM>(\n\n &VM::VMObjectModel::LOCAL_MARK_BIT_SPEC,\n\n object,\n\n None,\n\n ordering,\n\n ) == 1\n\n}\n\n\n\n#[allow(unused)]\n\npub(super) fn is_page_marked(page_addr: Address) -> bool {\n\n side_metadata::load_atomic(&ACTIVE_PAGE_METADATA_SPEC, page_addr, Ordering::SeqCst) == 1\n\n}\n\n\n\n#[allow(unused)]\n\npub(super) unsafe fn is_page_marked_unsafe(page_addr: Address) -> bool {\n\n 
side_metadata::load(&ACTIVE_PAGE_METADATA_SPEC, page_addr) == 1\n\n}\n\n\n", "file_path": "src/policy/marksweepspace/metadata.rs", "rank": 59, "score": 165342.2271973708 }, { "content": "#[test]\n\nfn test_is_aligned() {\n\n let addr = unsafe { Address::zero() };\n\n assert!(addr.is_aligned_to(8));\n\n\n\n let addr = unsafe { Address::from_usize(8) };\n\n assert!(addr.is_aligned_to(8));\n\n}\n", "file_path": "tests/test_address.rs", "rank": 60, "score": 164338.01900018394 }, { "content": "#[test]\n\nfn test_align_up() {\n\n let addr = unsafe { Address::zero() };\n\n let aligned = addr.align_up(8);\n\n\n\n assert_eq!(addr, aligned);\n\n}\n\n\n", "file_path": "tests/test_address.rs", "rank": 61, "score": 164338.01900018394 }, { "content": "#[inline(always)]\n\npub fn align_allocation_no_fill<VM: VMBinding>(\n\n region: Address,\n\n alignment: usize,\n\n offset: isize,\n\n) -> Address {\n\n align_allocation::<VM>(region, alignment, offset, VM::MIN_ALIGNMENT, false)\n\n}\n\n\n", "file_path": "src/util/alloc/allocator.rs", "rank": 62, "score": 164076.34958355877 }, { "content": "#[cfg(feature = \"extreme_assertions\")]\n\npub fn verify_store(metadata_spec: &SideMetadataSpec, data_addr: Address, metadata: usize) {\n\n verify_metadata_address_bound(metadata_spec, data_addr);\n\n let sanity_map = &mut CONTENT_SANITY_MAP.write().unwrap();\n\n match sanity_map.get_mut(metadata_spec) {\n\n Some(spec_sanity_map) => {\n\n // Newly mapped memory including the side metadata memory is zeroed\n\n let content = spec_sanity_map.entry(data_addr).or_insert(0);\n\n *content = metadata;\n\n }\n\n None => panic!(\"Invalid Metadata Spec: {:#?}\", metadata_spec),\n\n }\n\n}\n\n\n\n/// A helper function encapsulating the common parts of addition and subtraction\n", "file_path": "src/util/metadata/side_metadata/sanity.rs", "rank": 63, "score": 163505.56905585638 }, { "content": "/// Whether we should check duplicate edges. 
This depends on the actual plan.\n\npub fn should_check_duplicate_edges<VM: VMBinding>(plan: &dyn Plan<VM = VM>) -> bool {\n\n // If a plan allows tracing duplicate edges, we should not run this check.\n\n !plan.constraints().may_trace_duplicate_edges\n\n}\n\n\n", "file_path": "src/util/edge_logger.rs", "rank": 64, "score": 163157.01189566203 }, { "content": "#[cfg(feature = \"extreme_assertions\")]\n\npub fn verify_load(metadata_spec: &SideMetadataSpec, data_addr: Address, actual_val: usize) {\n\n verify_metadata_address_bound(metadata_spec, data_addr);\n\n let sanity_map = &mut CONTENT_SANITY_MAP.read().unwrap();\n\n match sanity_map.get(metadata_spec) {\n\n Some(spec_sanity_map) => {\n\n // A content of None is Ok because we may load before store\n\n let expected_val = if let Some(expected_val) = spec_sanity_map.get(&data_addr) {\n\n *expected_val\n\n } else {\n\n 0usize\n\n };\n\n assert!(\n\n expected_val == actual_val,\n\n \"verify_load({:#?}, {}) -> Expected (0x{:x}) but found (0x{:x})\",\n\n metadata_spec,\n\n data_addr,\n\n expected_val,\n\n actual_val\n\n );\n\n }\n", "file_path": "src/util/metadata/side_metadata/sanity.rs", "rank": 65, "score": 161577.62131135818 }, { "content": "#[inline(always)]\n\npub fn get_maximum_aligned_size<VM: VMBinding>(\n\n size: usize,\n\n alignment: usize,\n\n known_alignment: usize,\n\n) -> usize {\n\n trace!(\n\n \"size={}, alignment={}, known_alignment={}, MIN_ALIGNMENT={}\",\n\n size,\n\n alignment,\n\n known_alignment,\n\n VM::MIN_ALIGNMENT\n\n );\n\n debug_assert!(size == size & !(known_alignment - 1));\n\n debug_assert!(known_alignment >= VM::MIN_ALIGNMENT);\n\n\n\n if VM::MAX_ALIGNMENT <= VM::MIN_ALIGNMENT || alignment <= known_alignment {\n\n size\n\n } else {\n\n size + alignment - known_alignment\n\n }\n\n}\n\n\n", "file_path": "src/util/alloc/allocator.rs", "rank": 66, "score": 161337.92245506297 }, { "content": "#[inline(always)]\n\npub fn load_atomic(metadata_spec: &SideMetadataSpec, data_addr: Address, order: 
Ordering) -> usize {\n\n #[cfg(feature = \"extreme_assertions\")]\n\n let _lock = sanity::SANITY_LOCK.lock().unwrap();\n\n\n\n let meta_addr = address_to_meta_address(metadata_spec, data_addr);\n\n if cfg!(debug_assertions) {\n\n ensure_metadata_is_mapped(metadata_spec, data_addr);\n\n }\n\n\n\n let bits_num_log = metadata_spec.log_num_of_bits;\n\n\n\n let res = if bits_num_log <= 3 {\n\n let lshift = meta_byte_lshift(metadata_spec, data_addr);\n\n let mask = meta_byte_mask(metadata_spec) << lshift;\n\n let byte_val = unsafe { meta_addr.atomic_load::<AtomicU8>(order) };\n\n\n\n ((byte_val & mask) as usize) >> lshift\n\n } else if bits_num_log == 4 {\n\n unsafe { meta_addr.atomic_load::<AtomicU16>(order) as usize }\n\n } else if bits_num_log == 5 {\n", "file_path": "src/util/metadata/side_metadata/global.rs", "rank": 67, "score": 158994.05662491766 }, { "content": "/// Process MMTk run-time options.\n\n///\n\n/// Arguments:\n\n/// * `mmtk`: A reference to an MMTk instance.\n\n/// * `name`: The name of the option.\n\n/// * `value`: The value of the option (as a string).\n\npub fn process<VM: VMBinding>(mmtk: &'static MMTK<VM>, name: &str, value: &str) -> bool {\n\n // Note that currently we cannot process options for setting plan,\n\n // as we have set plan when creating an MMTK instance, and processing options is after creating on an instance.\n\n // The only way to set plan is to use the env var 'MMTK_PLAN'.\n\n // FIXME: We should remove this function, and ask for options when creating an MMTk instance.\n\n assert!(name != \"plan\");\n\n\n\n unsafe { mmtk.options.process(name, value) }\n\n}\n\n\n", "file_path": "src/memory_manager.rs", "rank": 68, "score": 156916.68239231396 }, { "content": "fn get_super_page(cell: Address) -> Address {\n\n cell.align_down(BYTES_IN_PAGE)\n\n}\n", "file_path": "src/policy/largeobjectspace.rs", "rank": 69, "score": 156499.73363402515 }, { "content": "fn offset_malloc_usable_size(address: Address) -> usize {\n\n let malloc_res_ptr: 
*mut usize = (address - BYTES_IN_ADDRESS).to_mut_ptr();\n\n let malloc_res = unsafe { *malloc_res_ptr } as *mut libc::c_void;\n\n unsafe { malloc_usable_size(malloc_res) }\n\n}\n\n\n", "file_path": "src/util/malloc.rs", "rank": 70, "score": 154654.00114100197 }, { "content": "/// Return used memory in bytes.\n\n///\n\n/// Arguments:\n\n/// * `mmtk`: A reference to an MMTk instance.\n\npub fn used_bytes<VM: VMBinding>(mmtk: &MMTK<VM>) -> usize {\n\n mmtk.plan.get_pages_used() << LOG_BYTES_IN_PAGE\n\n}\n\n\n", "file_path": "src/memory_manager.rs", "rank": 71, "score": 151017.9828328553 }, { "content": "pub fn mmap_fixed(\n\n start: Address,\n\n size: usize,\n\n prot: libc::c_int,\n\n flags: libc::c_int,\n\n) -> Result<()> {\n\n let ptr = start.to_mut_ptr();\n\n wrap_libc_call(\n\n &|| unsafe { libc::mmap(start.to_mut_ptr(), size, prot, flags, -1, 0) },\n\n ptr,\n\n )\n\n}\n\n\n", "file_path": "src/util/memory.rs", "rank": 72, "score": 140611.2939730739 }, { "content": "/// Reset the edge logger by clearing the hash-set of edges.\n\n/// This function is called at the end of each GC iteration.\n\n///\n\npub fn reset() {\n\n let mut edge_log = EDGE_LOG.write().unwrap();\n\n edge_log.clear();\n\n}\n", "file_path": "src/util/edge_logger.rs", "rank": 73, "score": 140611.2939730739 }, { "content": "fn always_valid<T>(_: &T) -> bool {\n\n true\n\n}\n\n\n\nmacro_rules! 
options {\n\n ($($name:ident: $type:ty[$validator:expr] = $default:expr),*,) => [\n\n options!($($name: $type[$validator] = $default),*);\n\n ];\n\n ($($name:ident: $type:ty[$validator:expr] = $default:expr),*) => [\n\n pub struct Options {\n\n $(pub $name: $type),*\n\n }\n\n impl Options {\n\n pub fn set_from_str(&mut self, s: &str, val: &str)->bool {\n\n match s {\n\n // Parse the given value from str (by env vars or by calling process()) to the right type\n\n $(stringify!($name) => if let Ok(ref val) = val.parse::<$type>() {\n\n // Validate\n\n let validate_fn = $validator;\n\n let is_valid = validate_fn(val);\n", "file_path": "src/util/options.rs", "rank": 74, "score": 136758.79320318135 }, { "content": "pub fn scan_region() {\n\n loop {\n\n let mut buf = String::new();\n\n println!(\"start end <value>\");\n\n let bytes = std::io::stdin().read_line(&mut buf).unwrap();\n\n let mut iter = buf.split_whitespace();\n\n let start = iter.next();\n\n let end = iter.next();\n\n let value = iter.next();\n\n if start.is_none() || bytes == 0 {\n\n break;\n\n }\n\n let mut start = usize::from_str_radix(&start.unwrap()[2..], 16).unwrap();\n\n let end = usize::from_str_radix(&end.unwrap()[2..], 16).unwrap();\n\n\n\n while start < end {\n\n let slot = unsafe { Address::from_usize(start) };\n\n let object: ObjectReference = unsafe { slot.load() };\n\n if let Some(value) = value {\n\n let value = usize::from_str_radix(&value[2..], 16).unwrap();\n", "file_path": "src/util/sanity/memory_scan.rs", "rank": 75, "score": 136052.13851028442 }, { "content": "#[inline(always)]\n\npub fn load_metadata(\n\n metadata_spec: &HeaderMetadataSpec,\n\n object: ObjectReference,\n\n optional_mask: Option<usize>,\n\n atomic_ordering: Option<Ordering>,\n\n) -> usize {\n\n debug_assert!(optional_mask.is_none() || metadata_spec.num_of_bits >= 8,\"optional_mask is only supported for 8X-bits in-header metadata. 
Problematic MetadataSpec: ({:?})\", metadata_spec);\n\n\n\n // metadata smaller than 8-bits is special in that more than one metadata value may be included in one AtomicU8 operation, and extra shift and mask is required\n\n let res: usize = if metadata_spec.num_of_bits < BITS_IN_BYTE {\n\n debug_assert!(\n\n (metadata_spec.bit_offset >> LOG_BITS_IN_BYTE)\n\n == ((metadata_spec.bit_offset + metadata_spec.num_of_bits as isize - 1)\n\n >> LOG_BITS_IN_BYTE),\n\n \"Metadata << 8-bits: ({:?}) stretches over two bytes!\",\n\n metadata_spec\n\n );\n\n let byte_offset = metadata_spec.bit_offset >> LOG_BITS_IN_BYTE;\n\n let bit_shift = metadata_spec.bit_offset - (byte_offset << LOG_BITS_IN_BYTE);\n\n let mask = ((1u8 << metadata_spec.num_of_bits) - 1) << bit_shift;\n", "file_path": "src/util/metadata/header_metadata.rs", "rank": 76, "score": 136052.13851028442 }, { "content": "#[inline(always)]\n\npub fn store_metadata(\n\n metadata_spec: &HeaderMetadataSpec,\n\n object: ObjectReference,\n\n val: usize,\n\n optional_mask: Option<usize>,\n\n atomic_ordering: Option<Ordering>,\n\n) {\n\n debug_assert!(optional_mask.is_none() || metadata_spec.num_of_bits >= 8,\"optional_mask is only supported for 8X-bits in-header metadata. 
Problematic MetadataSpec: ({:?})\", metadata_spec);\n\n\n\n // metadata smaller than 8-bits is special in that more than one metadata value may be included in one AtomicU8 operation, and extra shift and mask, and compare_exchange is required\n\n if metadata_spec.num_of_bits < 8 {\n\n debug_assert!(\n\n (metadata_spec.bit_offset >> LOG_BITS_IN_BYTE)\n\n == ((metadata_spec.bit_offset + metadata_spec.num_of_bits as isize - 1)\n\n >> LOG_BITS_IN_BYTE),\n\n \"Metadata << 8-bits: ({:?}) stretches over two bytes!\",\n\n metadata_spec\n\n );\n\n let byte_offset = metadata_spec.bit_offset >> LOG_BITS_IN_BYTE;\n\n let bit_shift = metadata_spec.bit_offset - (byte_offset << LOG_BITS_IN_BYTE);\n", "file_path": "src/util/metadata/header_metadata.rs", "rank": 77, "score": 136052.13851028442 }, { "content": "/// Eagerly map the active chunk metadata surrounding `chunk_start`\n\nfn map_active_chunk_metadata(chunk_start: Address) {\n\n debug_assert!(chunk_start.is_aligned_to(BYTES_IN_CHUNK));\n\n // We eagerly map 16Gb worth of space for the chunk mark bytes on 64-bits\n\n // We require saturating subtractions in order to not overflow the chunk_start by\n\n // accident when subtracting if we have been allocated a very low base address by `malloc()`\n\n #[cfg(target_pointer_width = \"64\")]\n\n let start = chunk_start.saturating_sub(2048 * BYTES_IN_CHUNK);\n\n #[cfg(target_pointer_width = \"64\")]\n\n let size = 4096 * BYTES_IN_CHUNK;\n\n\n\n // We eagerly map 2Gb (i.e. 
half the address space) worth of space for the chunk mark bytes on 32-bits\n\n #[cfg(target_pointer_width = \"32\")]\n\n let start = chunk_start.saturating_sub(256 * BYTES_IN_CHUNK);\n\n #[cfg(target_pointer_width = \"32\")]\n\n let size = 512 * BYTES_IN_CHUNK;\n\n\n\n debug!(\n\n \"chunk_start = {} mapping space for {} -> {}\",\n\n chunk_start,\n\n start,\n\n chunk_start + (size / 2)\n\n );\n\n\n\n assert!(\n\n CHUNK_METADATA.try_map_metadata_space(start, size).is_ok(),\n\n \"failed to mmap meta memory\"\n\n );\n\n}\n\n\n", "file_path": "src/policy/mallocspace/metadata.rs", "rank": 78, "score": 135419.30467025674 }, { "content": "// Eagerly map the active chunk metadata surrounding `chunk_start`\n\nfn map_active_chunk_metadata(chunk_start: Address) {\n\n // We eagerly map 16Gb worth of space for the chunk mark bytes on 64-bits\n\n // We require saturating subtractions in order to not overflow the chunk_start by\n\n // accident when subtracting if we have been allocated a very low base address by `malloc()`\n\n #[cfg(target_pointer_width = \"64\")]\n\n let start = chunk_start.saturating_sub(2048 * BYTES_IN_CHUNK);\n\n #[cfg(target_pointer_width = \"64\")]\n\n let size = 4096 * BYTES_IN_CHUNK;\n\n\n\n // We eagerly map 2Gb (i.e. 
half the address space) worth of space for the chunk mark bytes on 32-bits\n\n #[cfg(target_pointer_width = \"32\")]\n\n let start = chunk_start.saturating_sub(256 * BYTES_IN_CHUNK);\n\n #[cfg(target_pointer_width = \"32\")]\n\n let size = 512 * BYTES_IN_CHUNK;\n\n\n\n debug!(\n\n \"chunk_start = {} mapping space for {} -> {}\",\n\n chunk_start,\n\n start,\n\n chunk_start + (size / 2)\n\n );\n\n\n\n if CHUNK_METADATA.try_map_metadata_space(start, size).is_err() {\n\n panic!(\"failed to mmap meta memory\");\n\n }\n\n}\n\n\n", "file_path": "src/policy/marksweepspace/metadata.rs", "rank": 79, "score": 135419.30467025674 }, { "content": "#[inline(always)]\n\npub fn fetch_sub_metadata(\n\n metadata_spec: &HeaderMetadataSpec,\n\n object: ObjectReference,\n\n val: usize,\n\n order: Ordering,\n\n) -> usize {\n\n // metadata smaller than 8-bits is special in that more than one metadata value may be included in one AtomicU8 operation, and extra shift and mask is required\n\n if metadata_spec.num_of_bits < 8 {\n\n debug_assert!(\n\n (metadata_spec.bit_offset >> LOG_BITS_IN_BYTE)\n\n == ((metadata_spec.bit_offset + metadata_spec.num_of_bits as isize - 1)\n\n >> LOG_BITS_IN_BYTE),\n\n \"Metadata << 8-bits: ({:?}) stretches over two bytes!\",\n\n metadata_spec\n\n );\n\n let byte_offset = metadata_spec.bit_offset >> LOG_BITS_IN_BYTE;\n\n let bit_shift = metadata_spec.bit_offset - (byte_offset << LOG_BITS_IN_BYTE);\n\n let mask = ((1u8 << metadata_spec.num_of_bits) - 1) << bit_shift;\n\n\n\n // let new_metadata = ((val as u8) << bit_shift);\n", "file_path": "src/util/metadata/header_metadata.rs", "rank": 80, "score": 133945.90637732667 }, { "content": "#[cfg(feature = \"extreme_assertions\")]\n\npub fn verify_add(\n\n metadata_spec: &SideMetadataSpec,\n\n data_addr: Address,\n\n val_to_add: usize,\n\n actual_old_val: usize,\n\n) {\n\n verify_metadata_address_bound(metadata_spec, data_addr);\n\n match do_math(metadata_spec, data_addr, val_to_add, MathOp::Add) {\n\n 
Ok(expected_old_val) => {\n\n assert!(\n\n actual_old_val == expected_old_val,\n\n \"Expected (0x{:x}) but found (0x{:x})\",\n\n expected_old_val,\n\n actual_old_val\n\n );\n\n }\n\n Err(e) => panic!(\"{}\", e),\n\n }\n\n}\n\n\n", "file_path": "src/util/metadata/side_metadata/sanity.rs", "rank": 81, "score": 133945.90637732667 }, { "content": "#[inline(always)]\n\npub fn compare_exchange_metadata(\n\n metadata_spec: &HeaderMetadataSpec,\n\n object: ObjectReference,\n\n old_metadata: usize,\n\n new_metadata: usize,\n\n optional_mask: Option<usize>,\n\n success_order: Ordering,\n\n failure_order: Ordering,\n\n) -> bool {\n\n // metadata smaller than 8-bits is special in that more than one metadata value may be included in one AtomicU8 operation, and extra shift and mask is required\n\n if metadata_spec.num_of_bits < 8 {\n\n debug_assert!(\n\n (metadata_spec.bit_offset >> LOG_BITS_IN_BYTE as isize)\n\n == ((metadata_spec.bit_offset + metadata_spec.num_of_bits as isize - 1)\n\n >> LOG_BITS_IN_BYTE),\n\n \"Metadata << 8-bits: ({:?}) stretches over two bytes!\",\n\n metadata_spec\n\n );\n\n let byte_offset = metadata_spec.bit_offset >> LOG_BITS_IN_BYTE;\n\n let bit_shift = metadata_spec.bit_offset - (byte_offset << LOG_BITS_IN_BYTE);\n", "file_path": "src/util/metadata/header_metadata.rs", "rank": 82, "score": 133945.90637732667 }, { "content": "#[inline(always)]\n\npub fn fetch_add_metadata(\n\n metadata_spec: &HeaderMetadataSpec,\n\n object: ObjectReference,\n\n val: usize,\n\n order: Ordering,\n\n) -> usize {\n\n // metadata smaller than 8-bits is special in that more than one metadata value may be included in one AtomicU8 operation, and extra shift and mask is required\n\n if metadata_spec.num_of_bits < 8 {\n\n debug_assert!(\n\n (metadata_spec.bit_offset >> LOG_BITS_IN_BYTE)\n\n == ((metadata_spec.bit_offset + metadata_spec.num_of_bits as isize - 1)\n\n >> LOG_BITS_IN_BYTE),\n\n \"Metadata << 8-bits: ({:?}) stretches over two bytes!\",\n\n metadata_spec\n\n );\n\n 
let byte_offset = metadata_spec.bit_offset >> LOG_BITS_IN_BYTE;\n\n let bit_shift = metadata_spec.bit_offset - (byte_offset << LOG_BITS_IN_BYTE);\n\n let mask = ((1u8 << metadata_spec.num_of_bits) - 1) << bit_shift;\n\n\n\n // let new_metadata = ((val as u8) << bit_shift);\n", "file_path": "src/util/metadata/header_metadata.rs", "rank": 83, "score": 133945.90637732667 }, { "content": "#[cfg(feature = \"extreme_assertions\")]\n\npub fn verify_sub(\n\n metadata_spec: &SideMetadataSpec,\n\n data_addr: Address,\n\n val_to_sub: usize,\n\n actual_old_val: usize,\n\n) {\n\n verify_metadata_address_bound(metadata_spec, data_addr);\n\n match do_math(metadata_spec, data_addr, val_to_sub, MathOp::Sub) {\n\n Ok(expected_old_val) => {\n\n assert!(\n\n actual_old_val == expected_old_val,\n\n \"Expected (0x{:x}) but found (0x{:x})\",\n\n expected_old_val,\n\n actual_old_val\n\n );\n\n }\n\n Err(e) => panic!(\"{}\", e),\n\n }\n\n}\n\n\n", "file_path": "src/util/metadata/side_metadata/sanity.rs", "rank": 84, "score": 133945.90637732667 }, { "content": "#[inline(always)]\n\npub fn store_atomic(\n\n metadata_spec: &SideMetadataSpec,\n\n data_addr: Address,\n\n metadata: usize,\n\n order: Ordering,\n\n) {\n\n #[cfg(feature = \"extreme_assertions\")]\n\n let _lock = sanity::SANITY_LOCK.lock().unwrap();\n\n trace!(\"addr = {}\", data_addr);\n\n let meta_addr = address_to_meta_address(metadata_spec, data_addr);\n\n trace!(\"addr = {}, meta_addr = {}\", data_addr, meta_addr);\n\n if cfg!(debug_assertions) {\n\n ensure_metadata_is_mapped(metadata_spec, data_addr);\n\n }\n\n\n\n let bits_num_log = metadata_spec.log_num_of_bits;\n\n\n\n if bits_num_log < 3 {\n\n let lshift = meta_byte_lshift(metadata_spec, data_addr);\n\n let mask = meta_byte_mask(metadata_spec) << lshift;\n", "file_path": "src/util/metadata/side_metadata/global.rs", "rank": 85, "score": 133945.90637732667 }, { "content": "/// This verifies two things:\n\n/// 1. 
Check if data_addr is within the address space that we are supposed to use (LOG_ADDRESS_SPACE). If this fails, we log a warning.\n\n/// 2. Check if metadata address is out of bounds. If this fails, we will panic.\n\nfn verify_metadata_address_bound(spec: &SideMetadataSpec, data_addr: Address) {\n\n #[cfg(target_pointer_width = \"32\")]\n\n assert_eq!(LOG_ADDRESS_SPACE, 32, \"We assume we use all address space in 32 bits. This seems not true any more, we need a proper check here.\");\n\n #[cfg(target_pointer_width = \"32\")]\n\n let data_addr_in_address_space = true;\n\n #[cfg(target_pointer_width = \"64\")]\n\n let data_addr_in_address_space =\n\n data_addr <= unsafe { Address::from_usize(1usize << LOG_ADDRESS_SPACE) };\n\n\n\n if !data_addr_in_address_space {\n\n warn!(\n\n \"We try get metadata {} for {}, which is not within the address space we should use\",\n\n data_addr, spec.name\n\n );\n\n }\n\n\n\n let metadata_addr =\n\n crate::util::metadata::side_metadata::address_to_meta_address(spec, data_addr);\n\n let metadata_addr_bound = if spec.is_absolute_offset() {\n\n spec.upper_bound_address_for_contiguous()\n", "file_path": "src/util/metadata/side_metadata/sanity.rs", "rank": 86, "score": 132825.60979217637 }, { "content": "#[inline(always)]\n\npub fn fetch_add_atomic(\n\n metadata_spec: &SideMetadataSpec,\n\n data_addr: Address,\n\n val: usize,\n\n order: Ordering,\n\n) -> usize {\n\n #[cfg(feature = \"extreme_assertions\")]\n\n let _lock = sanity::SANITY_LOCK.lock().unwrap();\n\n\n\n let meta_addr = address_to_meta_address(metadata_spec, data_addr);\n\n if cfg!(debug_assertions) {\n\n ensure_metadata_is_mapped(metadata_spec, data_addr);\n\n }\n\n\n\n let bits_num_log = metadata_spec.log_num_of_bits;\n\n\n\n #[allow(clippy::let_and_return)]\n\n let old_val = if bits_num_log < 3 {\n\n let lshift = meta_byte_lshift(metadata_spec, data_addr);\n\n let mask = meta_byte_mask(metadata_spec) << lshift;\n", "file_path": "src/util/metadata/side_metadata/global.rs", 
"rank": 87, "score": 131943.8105726484 }, { "content": "#[inline(always)]\n\npub fn fetch_sub_atomic(\n\n metadata_spec: &SideMetadataSpec,\n\n data_addr: Address,\n\n val: usize,\n\n order: Ordering,\n\n) -> usize {\n\n #[cfg(feature = \"extreme_assertions\")]\n\n let _lock = sanity::SANITY_LOCK.lock().unwrap();\n\n\n\n let meta_addr = address_to_meta_address(metadata_spec, data_addr);\n\n if cfg!(debug_assertions) {\n\n ensure_metadata_is_mapped(metadata_spec, data_addr);\n\n }\n\n\n\n let bits_num_log = metadata_spec.log_num_of_bits;\n\n\n\n #[allow(clippy::let_and_return)]\n\n let old_val = if bits_num_log < 3 {\n\n let lshift = meta_byte_lshift(metadata_spec, data_addr);\n\n let mask = meta_byte_mask(metadata_spec) << lshift;\n", "file_path": "src/util/metadata/side_metadata/global.rs", "rank": 88, "score": 131943.8105726484 }, { "content": "#[inline(always)]\n\npub fn compare_exchange_atomic(\n\n metadata_spec: &SideMetadataSpec,\n\n data_addr: Address,\n\n old_metadata: usize,\n\n new_metadata: usize,\n\n success_order: Ordering,\n\n failure_order: Ordering,\n\n) -> bool {\n\n #[cfg(feature = \"extreme_assertions\")]\n\n let _lock = sanity::SANITY_LOCK.lock().unwrap();\n\n\n\n debug!(\n\n \"compare_exchange_atomic({:?}, {}, {}, {})\",\n\n metadata_spec, data_addr, old_metadata, new_metadata\n\n );\n\n let meta_addr = address_to_meta_address(metadata_spec, data_addr);\n\n if cfg!(debug_assertions) {\n\n ensure_metadata_is_mapped(metadata_spec, data_addr);\n\n }\n\n\n", "file_path": "src/util/metadata/side_metadata/global.rs", "rank": 89, "score": 131943.8105726484 }, { "content": "#[cfg(debug_assertions)]\n\n#[cfg(target_os = \"linux\")]\n\npub fn get_process_memory_maps() -> String {\n\n // print map\n\n use std::fs::File;\n\n use std::io::Read;\n\n let mut data = String::new();\n\n let mut f = File::open(\"/proc/self/maps\").unwrap();\n\n f.read_to_string(&mut data).unwrap();\n\n data\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use 
crate::util::constants::BYTES_IN_PAGE;\n\n use crate::util::test_util::MEMORY_TEST_REGION;\n\n use crate::util::test_util::{serial_test, with_cleanup};\n\n\n\n // In the tests, we will mmap this address. This address should not be in our heap (in case we mess up with other tests)\n\n const START: Address = MEMORY_TEST_REGION.start;\n\n\n", "file_path": "src/util/memory.rs", "rank": 90, "score": 131249.20442403315 }, { "content": "/// Allocate memory for an object. For performance reasons, a VM should\n\n/// implement the allocation fast-path on their side rather than just calling this function.\n\n///\n\n/// Arguments:\n\n/// * `mutator`: The mutator to perform this allocation request.\n\n/// * `size`: The number of bytes required for the object.\n\n/// * `align`: Required alignment for the object.\n\n/// * `offset`: Offset associated with the alignment.\n\n/// * `semantics`: The allocation semantic required for the allocation.\n\npub fn alloc<VM: VMBinding>(\n\n mutator: &mut Mutator<VM>,\n\n size: usize,\n\n align: usize,\n\n offset: isize,\n\n semantics: AllocationSemantics,\n\n) -> Address {\n\n // MMTk has assumptions about minimal object size.\n\n // We need to make sure that all allocations comply with the min object size.\n\n // Ideally, we check the allocation size, and if it is smaller, we transparently allocate the min\n\n // object size (the VM does not need to know this). 
However, for the VM bindings we support at the moment,\n\n // their object sizes are all larger than MMTk's min object size, so we simply put an assertion here.\n\n // If you plan to use MMTk with a VM with its object size smaller than MMTk's min object size, you should\n\n // meet the min object size in the fastpath.\n\n debug_assert!(size >= MIN_OBJECT_SIZE);\n\n mutator.alloc(size, align, offset, semantics)\n\n}\n\n\n", "file_path": "src/memory_manager.rs", "rank": 91, "score": 130925.92423423941 }, { "content": "#[test]\n\n#[should_panic(expected = \"block_for_gc is not implemented\")]\n\npub fn allocate_with_initialize_collection() {\n\n const MB: usize = 1024 * 1024;\n\n // 1MB heap\n\n gc_init(MB);\n\n initialize_collection(VMThread::UNINITIALIZED);\n\n let handle = bind_mutator(VMMutatorThread(VMThread::UNINITIALIZED));\n\n // Attempt to allocate 2MB. This will trigger GC.\n\n let addr = alloc(handle, 2 * MB, 8, 0, AllocationSemantics::Default);\n\n assert!(!addr.is_zero());\n\n}\n", "file_path": "vmbindings/dummyvm/src/tests/allocate_with_initialize_collection.rs", "rank": 92, "score": 130038.31434970727 }, { "content": "#[test]\n\npub fn allocate_with_disable_collection() {\n\n const MB: usize = 1024 * 1024;\n\n // 1MB heap\n\n gc_init(MB);\n\n initialize_collection(VMThread::UNINITIALIZED);\n\n let handle = bind_mutator(VMMutatorThread(VMThread::UNINITIALIZED));\n\n // Allocate 1MB. It should be fine.\n\n let addr = alloc(handle, MB, 8, 0, AllocationSemantics::Default);\n\n assert!(!addr.is_zero());\n\n // Disable GC\n\n disable_collection();\n\n // Allocate another MB. This exceeds the heap size. 
But as we have disabled GC, MMTk will not trigger a GC, and allow this allocation.\n\n let addr = alloc(handle, MB, 8, 0, AllocationSemantics::Default);\n\n assert!(!addr.is_zero());\n\n}\n", "file_path": "vmbindings/dummyvm/src/tests/allcoate_with_disable_collection.rs", "rank": 93, "score": 130038.31434970727 }, { "content": "// Try to map side metadata for the chunk starting at `start`\n\npub fn try_mmap_metadata_chunk(\n\n start: Address,\n\n local_per_chunk: usize,\n\n no_reserve: bool,\n\n) -> Result<()> {\n\n debug_assert!(start.is_aligned_to(BYTES_IN_CHUNK));\n\n\n\n let policy_meta_start = address_to_meta_chunk_addr(start);\n\n let pages = crate::util::conversions::bytes_to_pages_up(local_per_chunk);\n\n if !no_reserve {\n\n // We have reserved the memory\n\n MMAPPER.ensure_mapped(policy_meta_start, pages)\n\n } else {\n\n MMAPPER.quarantine_address_range(policy_meta_start, pages)\n\n }\n\n}\n", "file_path": "src/util/metadata/side_metadata/helpers_32.rs", "rank": 94, "score": 130038.31434970727 }, { "content": "/// Run the main loop of a GC worker. This method does not return.\n\n///\n\n/// Arguments:\n\n/// * `tls`: The thread that will be used as the GC worker.\n\n/// * `worker`: A reference to the GC worker.\n\n/// * `mmtk`: A reference to an MMTk instance.\n\npub fn start_worker<VM: VMBinding>(\n\n tls: VMWorkerThread,\n\n worker: &mut GCWorker<VM>,\n\n mmtk: &'static MMTK<VM>,\n\n) {\n\n worker.init(tls);\n\n worker.set_local(mmtk.plan.create_worker_local(tls, mmtk));\n\n worker.run(mmtk);\n\n}\n\n\n", "file_path": "src/memory_manager.rs", "rank": 95, "score": 128818.38110884855 }, { "content": "/// Perform post-allocation actions, usually initializing object metadata. For many allocators none are\n\n/// required. 
For performance reasons, a VM should implement the post alloc fast-path on their side\n\n/// rather than just calling this function.\n\n///\n\n/// Arguments:\n\n/// * `mutator`: The mutator to perform post-alloc actions.\n\n/// * `refer`: The newly allocated object.\n\n/// * `bytes`: The size of the space allocated for the object (in bytes).\n\n/// * `semantics`: The allocation semantics used for the allocation.\n\npub fn post_alloc<VM: VMBinding>(\n\n mutator: &mut Mutator<VM>,\n\n refer: ObjectReference,\n\n bytes: usize,\n\n semantics: AllocationSemantics,\n\n) {\n\n mutator.post_alloc(refer, bytes, semantics);\n\n}\n\n\n", "file_path": "src/memory_manager.rs", "rank": 96, "score": 128817.73719980777 }, { "content": "/// Request MMTk to create a mutator for the given thread. For performance reasons, A VM should\n\n/// store the returned mutator in a thread local storage that can be accessed efficiently.\n\n///\n\n/// Arguments:\n\n/// * `mmtk`: A reference to an MMTk instance.\n\n/// * `tls`: The thread that will be associated with the mutator.\n\npub fn bind_mutator<VM: VMBinding>(\n\n mmtk: &'static MMTK<VM>,\n\n tls: VMMutatorThread,\n\n) -> Box<Mutator<VM>> {\n\n crate::plan::create_mutator(tls, mmtk)\n\n}\n\n\n", "file_path": "src/memory_manager.rs", "rank": 97, "score": 128813.83415813977 }, { "content": "\n\n /// aligns down the address to the given alignment\n\n #[inline(always)]\n\n pub const fn align_down(self, align: ByteSize) -> Address {\n\n use crate::util::conversions;\n\n Address(conversions::raw_align_down(self.0, align))\n\n }\n\n\n\n /// is this address aligned to the given alignment\n\n pub fn is_aligned_to(self, align: usize) -> bool {\n\n use crate::util::conversions;\n\n conversions::raw_is_aligned(self.0, align)\n\n }\n\n\n\n /// converts the Address into an ObjectReference\n\n /// # Safety\n\n /// We would expect ObjectReferences point to valid objects,\n\n /// but an arbitrary Address may not reside an object. 
This conversion is unsafe,\n\n /// and it is the user's responsibility to ensure the safety.\n\n #[inline(always)]\n", "file_path": "src/util/address.rs", "rank": 98, "score": 25.840980962945277 }, { "content": " new: T::Type,\n\n success: Ordering,\n\n failure: Ordering,\n\n ) -> Result<T::Type, T::Type> {\n\n let loc = &*(self.0 as *const T);\n\n loc.compare_exchange(old, new, success, failure)\n\n }\n\n\n\n /// is this address zero?\n\n #[inline(always)]\n\n pub fn is_zero(self) -> bool {\n\n self.0 == 0\n\n }\n\n\n\n /// aligns up the address to the given alignment\n\n #[inline(always)]\n\n pub const fn align_up(self, align: ByteSize) -> Address {\n\n use crate::util::conversions;\n\n Address(conversions::raw_align_up(self.0, align))\n\n }\n", "file_path": "src/util/address.rs", "rank": 99, "score": 24.82544458276081 } ]
Rust
src/threaded.rs
jneem/dfa-runner
b3e926f79274e0254ecc61c86ec7d8b9874948b9
use Engine; use prefix::{Prefix, PrefixSearcher}; use program::{Program, Instructions}; use std::mem; use std::cell::RefCell; use std::ops::DerefMut; #[derive(Clone, Debug, PartialEq)] struct Thread { state: usize, start_idx: usize, } #[derive(Clone, Debug, PartialEq)] struct Threads { threads: Vec<Thread>, states: Vec<u8>, } impl Threads { fn with_capacity(n: usize) -> Threads { Threads { threads: Vec::with_capacity(n), states: vec![0; n], } } fn add(&mut self, state: usize, start_idx: usize) { if self.states[state] == 0 { self.states[state] = 1; self.threads.push(Thread { state: state, start_idx: start_idx }); } } fn starts_after(&self, start_idx: usize) -> bool { self.threads.is_empty() || self.threads[0].start_idx >= start_idx } } #[derive(Clone, Debug, PartialEq)] struct ProgThreads { cur: Threads, next: Threads, } impl ProgThreads { fn with_capacity(n: usize) -> ProgThreads { ProgThreads { cur: Threads::with_capacity(n), next: Threads::with_capacity(n), } } fn swap(&mut self) { mem::swap(&mut self.cur, &mut self.next); self.next.threads.clear(); } fn clear(&mut self) { self.cur.threads.clear(); self.next.threads.clear(); for s in &mut self.cur.states { *s = 0; } for s in &mut self.next.states { *s = 0; } } } #[derive(Clone, Debug)] pub struct ThreadedEngine<Insts: Instructions> { prog: Program<Insts>, threads: RefCell<ProgThreads>, prefix: Prefix, } impl<Insts: Instructions> ThreadedEngine<Insts> { pub fn new(prog: Program<Insts>, pref: Prefix) -> ThreadedEngine<Insts> { let len = prog.num_states(); ThreadedEngine { prog: prog, threads: RefCell::new(ProgThreads::with_capacity(len)), prefix: pref, } } fn advance_thread(&self, threads: &mut ProgThreads, acc: &mut Option<(usize, usize)>, i: usize, input: &[u8], pos: usize) { let state = threads.cur.threads[i].state; let start_idx = threads.cur.threads[i].start_idx; threads.cur.states[state] = 0; let (next_state, accept) = self.prog.step(state, &input[pos..]); if let Some(bytes_ago) = accept { let acc_idx = 
start_idx.saturating_sub(bytes_ago as usize); if acc.is_none() || acc_idx < acc.unwrap().0 { *acc = Some((acc_idx, pos)); } } if let Some(next_state) = next_state { threads.next.add(next_state, start_idx); } } fn shortest_match_from_searcher<'a>(&'a self, s: &[u8], skip: &mut PrefixSearcher) -> Option<(usize, usize)> { let mut acc: Option<(usize, usize)> = None; let mut pos = match skip.search() { Some(x) => x.start_pos, None => return None, }; let mut threads_guard = self.threads.borrow_mut(); let threads = threads_guard.deref_mut(); threads.clear(); threads.cur.threads.push(Thread { state: 0, start_idx: pos }); while pos < s.len() { for i in 0..threads.cur.threads.len() { self.advance_thread(threads, &mut acc, i, s, pos); } threads.swap(); if acc.is_some() && threads.cur.starts_after(acc.unwrap().0) { return acc; } pos += 1; if threads.cur.threads.is_empty() { skip.skip_to(pos); if let Some(search_result) = skip.search() { pos = search_result.start_pos; threads.cur.add(0, pos); } else { return None } } else { threads.cur.add(0, pos); } } for th in &threads.cur.threads { if let Some(bytes_ago) = self.prog.check_eoi(th.state) { return Some((th.start_idx, s.len().saturating_sub(bytes_ago))); } } None } } impl<I: Instructions + 'static> Engine for ThreadedEngine<I> { fn shortest_match(&self, s: &str) -> Option<(usize, usize)> { if self.prog.num_states() == 0 { return None; } let s = s.as_bytes(); let mut searcher = self.prefix.make_searcher(s); self.shortest_match_from_searcher(s, &mut *searcher) } fn clone_box(&self) -> Box<Engine> { Box::new(self.clone()) } }
use Engine; use prefix::{Prefix, PrefixSearcher}; use program::{Program, Instructions}; use std::mem; use std::cell::RefCell; use std::ops::DerefMut; #[derive(Clone, Debug, PartialEq)] struct Thread { state: usize, start_idx: usize, } #[derive(Clone, Debug, PartialEq)] struct Threads { threads: Vec<Thread>, states: Vec<u8>, } impl Threads { fn with_capacity(n: usize) -> Threads { Threads { threads: Vec::with_capacity(n), states: vec![0; n], } } fn add(&mut self, state: usize, start_idx: usize) { if self.states[state] == 0 { self.states[state] = 1; self.threads.push(Thread { state: state, start_idx: start_idx }); } } fn starts_after(&self, start_idx: usize) -> bool { self.threads.is_empty() || self.threads[0].start_idx >= start_idx } } #[derive(Clone, Debug, PartialEq)] struct ProgThreads { cur: Threads, next: Threads, } impl ProgThreads { fn with_capacity(n: usize) -> ProgThreads { ProgThreads { cur: Threads::with_capacity(n), next: Threads::with_capacity(n), } } fn swap(&mut self) { mem::swap(&mut self.cur, &mut self.next); self.next.threads.clear(); } fn clear(&mut self) { self.cur.threads.clear(); self.next.threads.clear(); for s in &mut self.cur.states { *s = 0; } for s in &mut self.next.states { *s = 0; } } } #[derive(Clone, Debug)] pub struct ThreadedEngine<Insts: Instructions> { prog: Program<Insts>, threads: RefCell<ProgThreads>, prefix: Prefix, } impl<Insts: Instructions> ThreadedEngine<Insts> { pub fn new(prog: Program<Insts>, pref: Prefix) -> ThreadedEngine<Insts> { let len = prog.num_states(); ThreadedEngine { prog: prog, threads: RefCell::new(ProgThreads::with_capacity(len)), prefix: pref, } } fn advance_thread(&self, threads: &mut ProgThreads, acc: &mut Option<(usize, usize)>, i: usize, input: &[u8], pos: usize) { let state = thre
_bytes(); let mut searcher = self.prefix.make_searcher(s); self.shortest_match_from_searcher(s, &mut *searcher) } fn clone_box(&self) -> Box<Engine> { Box::new(self.clone()) } }
ads.cur.threads[i].state; let start_idx = threads.cur.threads[i].start_idx; threads.cur.states[state] = 0; let (next_state, accept) = self.prog.step(state, &input[pos..]); if let Some(bytes_ago) = accept { let acc_idx = start_idx.saturating_sub(bytes_ago as usize); if acc.is_none() || acc_idx < acc.unwrap().0 { *acc = Some((acc_idx, pos)); } } if let Some(next_state) = next_state { threads.next.add(next_state, start_idx); } } fn shortest_match_from_searcher<'a>(&'a self, s: &[u8], skip: &mut PrefixSearcher) -> Option<(usize, usize)> { let mut acc: Option<(usize, usize)> = None; let mut pos = match skip.search() { Some(x) => x.start_pos, None => return None, }; let mut threads_guard = self.threads.borrow_mut(); let threads = threads_guard.deref_mut(); threads.clear(); threads.cur.threads.push(Thread { state: 0, start_idx: pos }); while pos < s.len() { for i in 0..threads.cur.threads.len() { self.advance_thread(threads, &mut acc, i, s, pos); } threads.swap(); if acc.is_some() && threads.cur.starts_after(acc.unwrap().0) { return acc; } pos += 1; if threads.cur.threads.is_empty() { skip.skip_to(pos); if let Some(search_result) = skip.search() { pos = search_result.start_pos; threads.cur.add(0, pos); } else { return None } } else { threads.cur.add(0, pos); } } for th in &threads.cur.threads { if let Some(bytes_ago) = self.prog.check_eoi(th.state) { return Some((th.start_idx, s.len().saturating_sub(bytes_ago))); } } None } } impl<I: Instructions + 'static> Engine for ThreadedEngine<I> { fn shortest_match(&self, s: &str) -> Option<(usize, usize)> { if self.prog.num_states() == 0 { return None; } let s = s.as
random
[ { "content": "fn loop_searcher<'i, 'lo>(loop_while: &'lo [bool], input: &'i [u8])\n\n-> SimpleSearcher<'i, LoopWhile<'lo>> {\n\n SimpleSearcher {\n\n skip_fn: LoopWhile(loop_while),\n\n input: input,\n\n pos: 0,\n\n }\n\n}\n\n\n\nimpl<'a, Sk: SkipFn> PrefixSearcher for SimpleSearcher<'a, Sk> {\n\n fn search(&mut self) -> Option<PrefixResult> {\n\n if self.pos > self.input.len() {\n\n None\n\n } else if let Some((start_off, end_off)) = self.skip_fn.skip(&self.input[self.pos..]) {\n\n let start = self.pos + start_off;\n\n let end = self.pos + end_off;\n\n self.pos += end_off + 1;\n\n\n\n Some(PrefixResult {\n\n start_pos: start,\n", "file_path": "src/prefix.rs", "rank": 0, "score": 89697.29223506487 }, { "content": "fn lit_searcher<'i, 'lit>(lit: &'lit [u8], input: &'i [u8])\n\n-> SimpleSearcher<'i, TwoWaySearcher<'lit>> {\n\n SimpleSearcher {\n\n skip_fn: TwoWaySearcher::new(lit),\n\n input: input,\n\n pos: 0,\n\n }\n\n}\n\n\n", "file_path": "src/prefix.rs", "rank": 1, "score": 80546.63442638828 }, { "content": "pub trait Engine: Debug {\n\n fn shortest_match(&self, s: &str) -> Option<(usize, usize)>;\n\n fn clone_box(&self) -> Box<Engine>;\n\n}\n\n\n\npub mod backtracking;\n\npub mod prefix;\n\npub mod program;\n\npub mod threaded;\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 72670.58556162674 }, { "content": "struct LoopWhile<'a>(&'a [bool]);\n\nimpl<'a> SkipFn for LoopWhile<'a> {\n\n fn skip(&self, input: &[u8]) -> Option<(usize, usize)> {\n\n Some((0, input.iter().position(|c| !self.0[*c as usize]).unwrap_or(input.len())))\n\n }\n\n}\n\n\n", "file_path": "src/prefix.rs", "rank": 5, "score": 70478.31904033154 }, { "content": "pub trait Instructions: Clone + Debug {\n\n /// Returns (next_state, accept), where\n\n /// - next_state is the next state to try\n\n /// - accept gives some data associated with the acceptance.\n\n fn step(&self, state: usize, input: &[u8]) -> (Option<usize>, Option<usize>);\n\n\n\n /// The number of states in this program.\n\n 
fn num_states(&self) -> usize;\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Program<Insts: Instructions> {\n\n pub accept_at_eoi: Vec<usize>,\n\n pub instructions: Insts,\n\n pub is_anchored: bool,\n\n}\n\n\n\nimpl<Insts: Instructions> Instructions for Program<Insts> {\n\n fn step(&self, state: usize, input: &[u8]) -> (Option<usize>, Option<usize>) {\n\n self.instructions.step(state, input)\n", "file_path": "src/program.rs", "rank": 6, "score": 67452.82609180236 }, { "content": "struct SimpleSearcher<'a, Skip: SkipFn> {\n\n skip_fn: Skip,\n\n input: &'a [u8],\n\n pos: usize,\n\n}\n\n\n\nimpl<'a, Sk: SkipFn> SimpleSearcher<'a, Sk> {\n\n fn new(skip_fn: Sk, input: &'a [u8]) -> SimpleSearcher<'a, Sk> {\n\n SimpleSearcher {\n\n skip_fn: skip_fn,\n\n input: input,\n\n pos: 0,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/prefix.rs", "rank": 8, "score": 51106.60654987109 }, { "content": "/// Encapsulates the `Prefix` and the input string, and allows iteration over all matches.\n\npub trait PrefixSearcher {\n\n /// Moves the \"cursor\" to the given position in the input.\n\n fn skip_to(&mut self, pos: usize);\n\n /// From the current position in the input, finds the next substring matching the `Prefix`\n\n /// and advances the \"cursor\" past that point.\n\n fn search(&mut self) -> Option<PrefixResult>;\n\n}\n\n\n\nimpl Prefix {\n\n /// Converts a set of `(string, state)` pairs into a `Prefix` that matches any of the strings.\n\n ///\n\n /// The `state` part of each `(string, state)` pair is the DFA state that we would be in after\n\n /// matching the `string`.\n\n pub fn from_strings<P: AsRef<[u8]>, I: Iterator<Item=(P, usize)>>(it: I) -> Prefix {\n\n let strings: Vec<(Vec<u8>, usize)> = it\n\n .filter(|x| !x.0.as_ref().is_empty())\n\n .map(|(s, x)| (s.as_ref().to_vec(), x))\n\n .collect();\n\n\n\n if strings.is_empty() {\n", "file_path": "src/prefix.rs", "rank": 9, "score": 49148.4339296891 }, { "content": "struct AcSearcher<'ac, 'i, 'st> {\n\n ac: &'ac 
FullAcAutomaton<Vec<u8>>,\n\n state_map: &'st [usize],\n\n input: &'i [u8],\n\n pos: usize,\n\n iter: MatchesOverlapping<'ac, 'i, Vec<u8>, FullAcAutomaton<Vec<u8>>>,\n\n}\n\n\n\nimpl<'ac, 'i, 'st> AcSearcher<'ac, 'i, 'st> {\n\n fn new(ac: &'ac FullAcAutomaton<Vec<u8>>, state_map: &'st [usize], input: &'i [u8])\n\n -> AcSearcher<'ac, 'i, 'st> {\n\n AcSearcher {\n\n ac: ac,\n\n state_map: state_map,\n\n input: input,\n\n pos: 0,\n\n iter: ac.find_overlapping(input),\n\n }\n\n }\n\n}\n", "file_path": "src/prefix.rs", "rank": 10, "score": 38230.114580176945 }, { "content": "trait SkipFn {\n\n fn skip(&self, input: &[u8]) -> Option<(usize, usize)>;\n\n}\n\n\n", "file_path": "src/prefix.rs", "rank": 11, "score": 32846.42045096507 }, { "content": "trait SimpleSkipFn {\n\n fn simple_skip(&self, input: &[u8]) -> Option<usize>;\n\n}\n\n\n\nimpl<Sk: SimpleSkipFn> SkipFn for Sk {\n\n fn skip(&self, input: &[u8]) -> Option<(usize, usize)> {\n\n self.simple_skip(input).map(|x| (x, x))\n\n }\n\n}\n\n\n\nimpl SimpleSkipFn for () {\n\n fn simple_skip(&self, _: &[u8]) -> Option<usize> { Some(0) }\n\n}\n\n\n\nimpl SimpleSkipFn for u8 {\n\n fn simple_skip(&self, input: &[u8]) -> Option<usize> { memchr(*self, input) }\n\n}\n\n\n\nimpl<'a> SimpleSkipFn for TwoWaySearcher<'a> {\n\n fn simple_skip(&self, input: &[u8]) -> Option<usize> { self.search_in(input) }\n\n}\n\n\n\nimpl<'a> SimpleSkipFn for &'a [bool] {\n\n fn simple_skip(&self, input: &[u8]) -> Option<usize> {\n\n input.iter().position(|c| self[*c as usize])\n\n }\n\n}\n\n\n", "file_path": "src/prefix.rs", "rank": 12, "score": 30769.381245505418 }, { "content": "pub trait RegexSearcher {\n\n fn shortest_match(&self, haystack: &str) -> Option<(usize, usize)>;\n\n}\n\n\n\n// TODO: get rid of this in favor of a bool saying whether we're anchored. 
From now on,\n\n// start state is always zero.\n\n#[derive(Clone, Debug)]\n\npub enum InitStates {\n\n Anchored(usize),\n\n Constant(usize),\n\n}\n\n\n\nimpl InitStates {\n\n /// Returns the starting state if we are at the given pos in the input.\n\n pub fn state_at_pos(&self, _: &[u8], pos: usize) -> Option<usize> {\n\n use program::InitStates::*;\n\n\n\n match self {\n\n &Anchored(s) => if pos == 0 { Some(s) } else { None },\n\n &Constant(s) => Some(s),\n", "file_path": "src/program.rs", "rank": 13, "score": 28930.385289855738 }, { "content": "\n\nimpl<'ac, 'i, 'st> PrefixSearcher for AcSearcher<'ac, 'i, 'st> {\n\n fn skip_to(&mut self, pos: usize) {\n\n self.pos = pos;\n\n let input: &'i [u8] = if pos > self.input.len() {\n\n &[]\n\n } else {\n\n &self.input[self.pos..]\n\n };\n\n self.iter = self.ac.find_overlapping(input);\n\n }\n\n\n\n fn search(&mut self) -> Option<PrefixResult> {\n\n self.iter.next().map(|mat| PrefixResult {\n\n start_pos: mat.start,\n\n end_pos: mat.end,\n\n end_state: self.state_map[mat.pati],\n\n })\n\n }\n\n}\n", "file_path": "src/prefix.rs", "rank": 21, "score": 17834.562120808117 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use ::prefix::*;\n\n\n\n impl<'a> Iterator for Box<PrefixSearcher + 'a> {\n\n type Item = PrefixResult;\n\n fn next(&mut self) -> Option<PrefixResult> {\n\n self.search()\n\n }\n\n }\n\n\n\n fn search(pref: Prefix, input: &str) -> Vec<PrefixResult> {\n\n pref.make_searcher(input.as_bytes()).collect::<Vec<_>>()\n\n }\n\n\n\n fn result(pos: usize) -> PrefixResult {\n\n PrefixResult {\n\n start_pos: pos,\n\n end_pos: pos,\n", "file_path": "src/prefix.rs", "rank": 22, "score": 17833.464787370598 }, { "content": " // match from starting at the beginning of the sequence.\n\n LoopWhile(Vec<bool>),\n\n}\n\n\n\n/// The result of scanning through the input for a `Prefix`.\n\n///\n\n/// The semi-open interval `[start_pos, end_pos)` is the part of the interval that was consumed by\n\n/// the `Prefix`. 
The state `end_state` is the DFA state at which we should start to continue\n\n/// matching; that is, the DFA should begin at position `end_pos` in state `end_state`.\n\n///\n\n/// Note that some `Prefix`es return empty intervals (`start_pos == end_pos`). This doesn't mean\n\n/// necessarily that the `Prefix` didn't match any input, only that it's simpler (and fast) just\n\n/// to have the DFA re-match from the beginning.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct PrefixResult {\n\n pub start_pos: usize,\n\n pub end_pos: usize,\n\n pub end_state: usize,\n\n}\n\n\n\n/// Encapsulates the `Prefix` and the input string, and allows iteration over all matches.\n", "file_path": "src/prefix.rs", "rank": 23, "score": 17832.22689276366 }, { "content": " Prefix::Empty\n\n } else if strings.len() == 1 {\n\n if strings[0].0.len() == 1 {\n\n Prefix::Byte(strings[0].0[0])\n\n } else {\n\n Prefix::Lit(strings.into_iter().next().unwrap().0)\n\n }\n\n } else if strings.iter().map(|x| x.0.len()).min() == Some(1) {\n\n let mut bs = vec![false; 256];\n\n for (s, _) in strings.into_iter() {\n\n bs[s[0] as usize] = true;\n\n }\n\n Prefix::ByteSet(bs)\n\n } else {\n\n let state_map: Vec<_> = strings.iter().map(|x| x.1).collect();\n\n let ac = FullAcAutomaton::new(AcAutomaton::new(strings.into_iter().map(|x| x.0)));\n\n Prefix::Ac(ac, state_map)\n\n }\n\n }\n\n\n", "file_path": "src/prefix.rs", "rank": 24, "score": 17829.91751200995 }, { "content": " PrefixResult { start_pos: 4, end_pos: 6, end_state: 1 },\n\n PrefixResult { start_pos: 5, end_pos: 7, end_state: 1 },\n\n ]);\n\n assert_eq!(search(ac_pref(vec![\"baa\", \"aa\"]), \"\"), vec![]);\n\n }\n\n\n\n #[test]\n\n fn test_prefix_choice() {\n\n use ::prefix::Prefix::*;\n\n\n\n fn pref(strs: Vec<&str>) -> Prefix {\n\n let len = strs.len();\n\n Prefix::from_strings(strs.into_iter().zip(0..len))\n\n }\n\n\n\n assert!(matches!(pref(vec![]), Empty));\n\n assert!(matches!(pref(vec![\"\"]), Empty));\n\n 
assert!(matches!(pref(vec![\"a\"]), Byte(_)));\n\n assert!(matches!(pref(vec![\"\", \"a\", \"\"]), Byte(_)));\n\n assert!(matches!(pref(vec![\"abc\"]), Lit(_)));\n\n assert!(matches!(pref(vec![\"abc\", \"\"]), Lit(_)));\n\n assert!(matches!(pref(vec![\"a\", \"b\", \"c\"]), ByteSet(_)));\n\n assert!(matches!(pref(vec![\"a\", \"b\", \"\", \"c\"]), ByteSet(_)));\n\n assert!(matches!(pref(vec![\"a\", \"baa\", \"\", \"c\"]), ByteSet(_)));\n\n assert!(matches!(pref(vec![\"ab\", \"baa\", \"\", \"cb\"]), Ac(_, _)));\n\n }\n\n}\n\n\n", "file_path": "src/prefix.rs", "rank": 25, "score": 17829.302654539224 }, { "content": " assert_eq!(search(bs_pref(\"aeiou\"), \"quick brown\"), results(vec![1, 2, 8]));\n\n assert_eq!(search(bs_pref(\"aeiou\"), \"aabaa\"), results(vec![0, 1, 3, 4]));\n\n assert_eq!(search(bs_pref(\"aeiou\"), \"\"), vec![]);\n\n }\n\n\n\n fn pair_results(posns: Vec<(usize, usize)>) -> Vec<PrefixResult> {\n\n posns.into_iter()\n\n .map(|(s, e)| PrefixResult { start_pos: s, end_pos: e, end_state: 0 })\n\n .collect()\n\n }\n\n\n\n #[test]\n\n fn test_loop_search() {\n\n fn loop_pref(s: &str) -> Prefix {\n\n let mut bytes = vec![false; 256];\n\n for &b in s.as_bytes().iter() {\n\n bytes[b as usize] = true;\n\n }\n\n Prefix::LoopWhile(bytes)\n\n }\n", "file_path": "src/prefix.rs", "rank": 26, "score": 17828.773266834403 }, { "content": " assert_eq!(search(loop_pref(\"aeiou\"), \"quick\"),\n\n pair_results(vec![(0, 0), (1, 3), (4, 4), (5, 5)]));\n\n assert_eq!(search(loop_pref(\"aeiou\"), \"aabaa\"),\n\n pair_results(vec![(0, 2), (3, 5)]));\n\n assert_eq!(search(loop_pref(\"aeiou\"), \"\"), pair_results(vec![(0, 0)]));\n\n }\n\n\n\n #[test]\n\n fn test_ac_search() {\n\n fn ac_pref(strs: Vec<&str>) -> Prefix {\n\n let len = strs.len();\n\n let pref = Prefix::from_strings(strs.into_iter().zip(0..len));\n\n assert!(matches!(pref, Prefix::Ac(_, _)));\n\n pref\n\n }\n\n\n\n assert_eq!(search(ac_pref(vec![\"baa\", \"aa\"]), \"baa aaa black sheep\"),\n\n vec![\n\n 
PrefixResult { start_pos: 0, end_pos: 3, end_state: 0 },\n\n PrefixResult { start_pos: 1, end_pos: 3, end_state: 1 },\n", "file_path": "src/prefix.rs", "rank": 27, "score": 17828.586565718626 }, { "content": " /// Takes an input string and prepares for quickly finding matches in it.\n\n pub fn make_searcher<'a>(&'a self, input: &'a [u8]) -> Box<PrefixSearcher + 'a> {\n\n use prefix::Prefix::*;\n\n\n\n match self {\n\n &Empty => Box::new(SimpleSearcher::new((), input)),\n\n &ByteSet(ref bs) => Box::new(SimpleSearcher::new(&bs[..], input)),\n\n &Byte(b) => Box::new(SimpleSearcher::new(b, input)),\n\n &Lit(ref l) => Box::new(lit_searcher(l, input)),\n\n &LoopWhile(ref bs) => Box::new(loop_searcher(&bs[..], input)),\n\n &Ac(ref ac, ref map) => Box::new(AcSearcher::new(ac, map, input)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/prefix.rs", "rank": 28, "score": 17828.184760078053 }, { "content": " Empty,\n\n // Matches a single byte in a particular set.\n\n ByteSet(Vec<bool>),\n\n // Matches one specific byte.\n\n Byte(u8),\n\n // Matches a specific sequence of bytes.\n\n Lit(Vec<u8>),\n\n // Matches one of several sequences of bytes. The sequences are contained in the\n\n // `FullAcAutomaton`. The `Vec<usize>` tells us which state the DFA should start in after\n\n // matching each sequence. That is, `vec[i] == s` if after finding sequence `i` we should\n\n // start in state `s`.\n\n Ac(FullAcAutomaton<Vec<u8>>, Vec<usize>),\n\n // Matches a maximal (but possibly non-empty) sequence of bytes each of which belong to a\n\n // particular set of bytes.\n\n //\n\n // The interesting thing about this prefix is that it will only look for non-overlapping\n\n // matches (whereas most prefixes look for overlapping matches). This is intended for use when\n\n // the first state of a DFA contains self-transitions. 
In that case, if there's a sequence of\n\n // bytes that keeps us in the first state then there's no point in trying to start in the\n\n // middle of that sequence of bytes: even if that would give a match, we would get an earlier\n", "file_path": "src/prefix.rs", "rank": 29, "score": 17828.10889243504 }, { "content": "// Copyright 2015 Joe Neeman.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse aho_corasick::{Automaton, AcAutomaton, FullAcAutomaton, MatchesOverlapping};\n\nuse memchr::memchr;\n\nuse memmem::{Searcher, TwoWaySearcher};\n\n\n\n/// A `Prefix` is the first part of a DFA. Anything matching the DFA should start with\n\n/// something matching the `Prefix`.\n\n///\n\n/// The purpose of a `Prefix` is that scanning through the input looking for the `Prefix` should be\n\n/// much faster than running the DFA naively.\n\n#[derive(Clone, Debug)]\n\npub enum Prefix {\n\n // Matches every position.\n", "file_path": "src/prefix.rs", "rank": 30, "score": 17827.449335417343 }, { "content": "\n\n #[test]\n\n fn test_str_search() {\n\n fn lit_pref(s: &str) -> Prefix {\n\n Prefix::Lit(s.as_bytes().to_vec())\n\n }\n\n assert_eq!(search(lit_pref(\"aa\"), \"baa baa black sheep aa\"), results(vec![1, 5, 20]));\n\n assert_eq!(search(lit_pref(\"aa\"), \"aaa baaa black sheep\"), results(vec![0, 1, 5, 6]));\n\n assert_eq!(search(lit_pref(\"aa\"), \"\"), vec![]);\n\n }\n\n\n\n #[test]\n\n fn test_byteset_search() {\n\n fn bs_pref(s: &str) -> Prefix {\n\n let mut bytes = vec![false; 256];\n\n for &b in s.as_bytes().iter() {\n\n bytes[b as usize] = true;\n\n }\n\n Prefix::ByteSet(bytes)\n\n }\n", "file_path": "src/prefix.rs", "rank": 31, "score": 17827.29043932839 }, { "content": " end_pos: end,\n\n 
end_state: 0,\n\n })\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn skip_to(&mut self, pos: usize) { self.pos = pos; }\n\n}\n\n\n", "file_path": "src/prefix.rs", "rank": 32, "score": 17826.574584804428 }, { "content": " end_state: 0,\n\n }\n\n }\n\n\n\n fn results(posns: Vec<usize>) -> Vec<PrefixResult> {\n\n posns.into_iter().map(result).collect()\n\n }\n\n\n\n #[test]\n\n fn test_empty_search() {\n\n assert_eq!(search(Prefix::Empty, \"blah\"), results(vec![0, 1, 2, 3, 4]));\n\n assert_eq!(search(Prefix::Empty, \"\"), results(vec![0]));\n\n }\n\n\n\n #[test]\n\n fn test_byte_search() {\n\n assert_eq!(search(Prefix::Byte(b'a'), \"abracadabra\"), results(vec![0, 3, 5, 7, 10]));\n\n assert_eq!(search(Prefix::Byte(b'a'), \"abracadabr\"), results(vec![0, 3, 5, 7]));\n\n assert_eq!(search(Prefix::Byte(b'a'), \"\"), vec![]);\n\n }\n", "file_path": "src/prefix.rs", "rank": 33, "score": 17825.097239317212 }, { "content": " pub fn new(prog: Program<Insts>, pref: Prefix) -> BacktrackingEngine<Insts> {\n\n BacktrackingEngine {\n\n prog: prog,\n\n prefix: pref,\n\n }\n\n }\n\n\n\n fn shortest_match_from<'a>(&self, input: &[u8], pos: usize, mut state: usize)\n\n -> Option<usize> {\n\n for pos in pos..input.len() {\n\n let (next_state, accepted) = self.prog.step(state, &input[pos..]);\n\n if let Some(bytes_ago) = accepted {\n\n // We need to use saturating_sub here because Nfa::determinize_for_shortest_match\n\n // makes it so that bytes_ago can be positive even when start_idx == 0.\n\n return Some(pos.saturating_sub(bytes_ago));\n\n } else if let Some(next_state) = next_state {\n\n state = next_state;\n\n } else {\n\n return None;\n\n }\n", "file_path": "src/backtracking.rs", "rank": 34, "score": 19.946558005569067 }, { "content": "pub struct VmInsts {\n\n pub byte_sets: Vec<bool>,\n\n pub branch_table: Vec<u32>,\n\n pub insts: Vec<Inst>,\n\n}\n\n\n\nimpl Instructions for VmInsts {\n\n #[inline(always)]\n\n fn step(&self, state: usize, input: &[u8]) -> (Option<usize>, 
Option<usize>) {\n\n use program::Inst::*;\n\n match self.insts[state] {\n\n Acc(a) => {\n\n return (Some(state + 1), Some(a));\n\n },\n\n Byte(b) => {\n\n if b == input[0] {\n\n return (Some(state + 1), None);\n\n }\n\n },\n\n ByteSet(bs_idx) => {\n", "file_path": "src/program.rs", "rank": 35, "score": 15.97588250324366 }, { "content": "\n\nimpl<I: Instructions + 'static> Engine for BacktrackingEngine<I> {\n\n fn shortest_match(&self, s: &str) -> Option<(usize, usize)> {\n\n let input = s.as_bytes();\n\n if self.prog.num_states() == 0 {\n\n return None;\n\n } else if self.prog.is_anchored {\n\n return self.shortest_match_from(input, 0, 0).map(|x| (0, x));\n\n }\n\n\n\n let mut searcher = self.prefix.make_searcher(input);\n\n self.shortest_match_from_searcher(input, &mut *searcher)\n\n }\n\n\n\n fn clone_box(&self) -> Box<Engine> {\n\n Box::new(self.clone())\n\n }\n\n}\n", "file_path": "src/backtracking.rs", "rank": 36, "score": 15.328847881569281 }, { "content": " if val != usize::MAX {\n\n try!(f.write_fmt(format_args!(\"{} -> {}, \", idx, val)));\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n\n\nimpl Instructions for TableInsts {\n\n #[inline(always)]\n\n fn step(&self, state: usize, input: &[u8]) -> (Option<usize>, Option<usize>) {\n\n let accept = self.accept[state];\n\n let next_state = self.table[state * 256 + input[0] as usize];\n\n\n\n let accept = if accept != usize::MAX { Some(accept) } else { None };\n\n let next_state = if next_state != u32::MAX { Some(next_state as usize) } else { None };\n\n\n\n (next_state, accept)\n\n }\n\n\n\n fn num_states(&self) -> usize {\n\n self.accept.len()\n\n }\n\n}\n\n\n", "file_path": "src/program.rs", "rank": 37, "score": 13.40518360508337 }, { "content": "// Copyright 2015 Joe Neeman.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse aho_corasick::Automaton;\n\nuse Engine;\n\nuse prefix::{Prefix, PrefixSearcher};\n\nuse program::{Instructions, Program};\n\n\n\n#[derive(Clone, Debug)]\n\npub struct BacktrackingEngine<Insts: Instructions> {\n\n prog: Program<Insts>,\n\n prefix: Prefix,\n\n}\n\n\n\nimpl<Insts: Instructions> BacktrackingEngine<Insts> {\n", "file_path": "src/backtracking.rs", "rank": 38, "score": 12.658456699783402 }, { "content": " }\n\n\n\n if let Some(bytes_ago) = self.prog.check_eoi(state) {\n\n Some(input.len().saturating_sub(bytes_ago))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn shortest_match_from_searcher(&self, input: &[u8], search: &mut PrefixSearcher)\n\n -> Option<(usize, usize)> {\n\n while let Some(res) = search.search() {\n\n if let Some(end) = self.shortest_match_from(input, res.end_pos, res.end_state) {\n\n return Some((res.start_pos, end));\n\n }\n\n }\n\n\n\n None\n\n }\n\n}\n", "file_path": "src/backtracking.rs", "rank": 39, "score": 12.415380356917687 }, { "content": " }\n\n }\n\n\n\n /// If we can start only at the beginning of the input, return the start state.\n\n pub fn anchored(&self) -> Option<usize> {\n\n match self {\n\n &InitStates::Anchored(s) => Some(s),\n\n _ => None,\n\n }\n\n }\n\n\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Inst {\n\n Byte(u8),\n\n ByteSet(usize),\n\n Acc(usize),\n\n Branch(usize),\n\n}\n\n\n", "file_path": "src/program.rs", "rank": 40, "score": 12.403013259745428 }, { "content": "impl Debug for VmInsts {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {\n\n try!(f.write_fmt(format_args!(\"VmInsts ({} instructions):\\n\", self.insts.len())));\n\n\n\n for (idx, inst) in self.insts.iter().enumerate() {\n\n try!(f.write_fmt(format_args!(\"\\tInst {}: {:?}\\n\", idx, inst)));\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\npub type TableStateIdx = u32;\n\n\n\n/// A DFA program implemented as a lookup 
table.\n\n#[derive(Clone)]\n\npub struct TableInsts {\n\n /// A `256 x num_instructions`-long table.\n\n pub table: Vec<TableStateIdx>,\n\n /// If `accept[st]` is not `usize::MAX`, then it gives some data to return if we match the\n\n /// input when we're in state `st`.\n", "file_path": "src/program.rs", "rank": 41, "score": 11.35359865190117 }, { "content": " pub accept: Vec<usize>,\n\n}\n\n\n\nimpl Debug for TableInsts {\n\n fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {\n\n try!(f.write_fmt(format_args!(\"TableInsts ({} instructions):\\n\", self.accept.len())));\n\n\n\n for idx in 0..self.accept.len() {\n\n try!(f.write_fmt(format_args!(\"State {}:\\n\", idx)));\n\n try!(f.debug_map()\n\n .entries((0usize..255)\n\n .map(|c| (c, self.table[idx * 256 + c]))\n\n .filter(|x| x.1 != u32::MAX))\n\n .finish());\n\n try!(f.write_str(\"\\n\"));\n\n }\n\n\n\n try!(f.write_str(\"Accept: \"));\n\n for idx in 0..self.accept.len() {\n\n let val = self.accept[idx];\n", "file_path": "src/program.rs", "rank": 42, "score": 11.206878363682632 }, { "content": " }\n\n\n\n fn num_states(&self) -> usize {\n\n self.instructions.num_states()\n\n }\n\n}\n\n\n\nimpl<Insts: Instructions> Program<Insts> {\n\n /// If the program should accept at the end of input in state `state`, returns the data\n\n /// associated with the match.\n\n pub fn check_eoi(&self, state: usize) -> Option<usize> {\n\n if self.accept_at_eoi[state] != usize::MAX {\n\n Some(self.accept_at_eoi[state])\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, PartialEq)]\n", "file_path": "src/program.rs", "rank": 43, "score": 10.658885820748601 }, { "content": " if self.byte_sets[bs_idx + input[0] as usize] {\n\n return (Some(state + 1), None);\n\n }\n\n },\n\n Branch(table_idx) => {\n\n let next_state = self.branch_table[table_idx + input[0] as usize];\n\n if next_state != u32::MAX {\n\n return (Some(next_state as usize), None);\n\n }\n\n },\n\n }\n\n (None, None)\n\n }\n\n\n\n fn num_states(&self) -> 
usize {\n\n self.insts.len()\n\n }\n\n}\n\n\n\n\n", "file_path": "src/program.rs", "rank": 44, "score": 10.240649887585448 }, { "content": "// Copyright 2015 Joe Neeman.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\nuse std::fmt::{Debug, Formatter, Error as FmtError};\n\nuse std::{u32, usize};\n\n\n", "file_path": "src/program.rs", "rank": 45, "score": 4.3392096474203585 }, { "content": "extern crate aho_corasick;\n\nextern crate memchr;\n\nextern crate memmem;\n\n\n\n#[cfg(test)]\n\n#[macro_use] extern crate matches;\n\n\n\nuse std::fmt::Debug;\n\n\n", "file_path": "src/lib.rs", "rank": 46, "score": 4.079740463558281 }, { "content": "dfa-runner\n\n==========\n\n\n\nThis is a crate for executing deterministic finite automata. So far it's a slight clean-up of things extracted from [regex-dfa](https://github.com/jneem/regex-dfa). Eventually, that code will be removed from `regex-dfa`, which will use the implementation in this crate instead.\n", "file_path": "README.md", "rank": 47, "score": 1.5512117460667592 } ]
Rust
src/graph/stage/source/refreshable.rs
dmrolfs/proctor
9b2fac5e80e4a8874906a85302af7b34b4433f46
use std::fmt::{self, Debug}; use std::future::Future; use async_trait::async_trait; use cast_trait_object::dyn_upcast; use tokio::sync::mpsc; use crate::graph::shape::SourceShape; use crate::graph::{stage, Outlet, Port, Stage, PORT_DATA}; use crate::{AppData, ProctorResult, SharedString}; pub struct RefreshableSource<Ctrl, Out, A, F> where A: Fn(Option<Ctrl>) -> F, F: Future<Output = Option<Out>>, { name: SharedString, action: A, rx_control: mpsc::Receiver<Ctrl>, outlet: Outlet<Out>, } impl<Ctrl, Out, A, F> RefreshableSource<Ctrl, Out, A, F> where A: Fn(Option<Ctrl>) -> F, F: Future<Output = Option<Out>>, { pub fn new<S: Into<SharedString>>(name: S, action: A, rx_control: mpsc::Receiver<Ctrl>) -> Self { let name = name.into(); let outlet = Outlet::new(name.clone(), PORT_DATA); Self { name, action, rx_control, outlet } } } impl<Ctrl, Out, A, F> SourceShape for RefreshableSource<Ctrl, Out, A, F> where A: Fn(Option<Ctrl>) -> F, F: Future<Output = Option<Out>>, { type Out = Out; #[inline] fn outlet(&self) -> Outlet<Self::Out> { self.outlet.clone() } } #[dyn_upcast] #[async_trait] impl<Ctrl, Out, A, F> Stage for RefreshableSource<Ctrl, Out, A, F> where Ctrl: AppData + Copy + Into<i32>, Out: AppData, A: Fn(Option<Ctrl>) -> F + Send + Sync + 'static, F: Future<Output = Option<Out>> + Send + 'static, { #[inline] fn name(&self) -> SharedString { self.name.clone() } #[tracing::instrument(level = "info", skip(self))] async fn check(&self) -> ProctorResult<()> { self.outlet.check_attachment().await?; Ok(()) } #[tracing::instrument(level = "info", name = "run refreshable source", skip(self))] async fn run(&mut self) -> ProctorResult<()> { let mut done = false; let op = &self.action; let operation = op(None); tokio::pin!(operation); let outlet = &self.outlet; let rx = &mut self.rx_control; loop { let _timer = stage::start_stage_eval_time(self.name.as_ref()); tokio::select! 
{ result = &mut operation, if !done => { let op_span = tracing::info_span!("evaluate operation", ?result); let _op_span_guard = op_span.enter(); done = true; if let Some(r) = result { tracing::info!("Completed with result = {:?}", r); let _ = outlet.send(r).await; break; } } Some(control_signal) = rx.recv() => { let ctrl_span = tracing::info_span!("control check", ?control_signal); let _ctrl_span_guard = ctrl_span.enter(); if control_signal.into() % 2 == 0 { tracing::info!("setting operation with control signal.."); operation.set(op(Some(control_signal))); done = false; } } } } Ok(()) } async fn close(mut self: Box<Self>) -> ProctorResult<()> { tracing::info!("closing refreshable source outlet."); self.outlet.close().await; Ok(()) } } impl<Ctrl, Out, A, F> Debug for RefreshableSource<Ctrl, Out, A, F> where A: Fn(Option<Ctrl>) -> F, F: Future<Output = Option<Out>>, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RefreshableSource") .field("name", &self.name) .field("outlet", &self.outlet) .finish() } }
use std::fmt::{self, Debug}; use std::future::Future; use async_trait::async_trait; use cast_trait_object::dyn_upcast; use tokio::sync::mpsc; use crate::graph::shape::SourceShape; use crate::graph::{stage, Outlet, Port, Stage, PORT_DATA}; use crate::{AppData, ProctorResult, SharedString}; pub struct RefreshableSource<Ctrl, Out, A, F> where A: Fn(Option<Ctrl>) -> F, F: Future<Output = Option<Out>>, { name: SharedString, action: A, rx_control: mpsc::Receiver<Ctrl>, outlet: Outlet<Out>, } impl<Ctrl, Out, A, F> RefreshableSource<Ctrl, Out, A, F> where A: Fn(Option<Ctrl>) -> F, F: Future<Output = Option<Out>>, { pub fn new<S: Into<SharedString>>(name: S, action: A, rx_control: mpsc::Receiver<Ctrl>) -> Self { let name = name.into(); let outlet = Outlet::new(name.clone(), PORT_DATA); Self { name, action, rx_control, outlet } } } impl<Ctrl, Out, A, F> SourceShape for RefreshableSource<Ctrl, Out, A, F> where A: Fn(Option<Ctrl>) -> F, F: Future<Output = Option<Out>>, { type Out = Out; #[inline] fn outlet(&self) -> Outlet<Self::Out> { self.outlet.clone() } } #[dyn_upcast] #[async_trait] impl<Ctrl, Out, A, F> Stage for RefreshableSource<Ctrl, Out, A, F> where Ctrl: AppData + Copy + Into<i32>, Out: AppData, A: Fn(Option<Ctrl>) -> F + Send + Sync + 'static, F: Future<Output = Option<Out>> + Send + 'static, { #[inline] fn name(&self) -> SharedString { self.name.clone() } #[tracing::instrument(level = "info", skip(self))] async fn check(&self) -> ProctorResult<()> { self.outlet.check_attachment().await?; Ok(()) } #[tracing::instrument(level = "info", name = "run refreshable source", skip(self))]
async fn close(mut self: Box<Self>) -> ProctorResult<()> { tracing::info!("closing refreshable source outlet."); self.outlet.close().await; Ok(()) } } impl<Ctrl, Out, A, F> Debug for RefreshableSource<Ctrl, Out, A, F> where A: Fn(Option<Ctrl>) -> F, F: Future<Output = Option<Out>>, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RefreshableSource") .field("name", &self.name) .field("outlet", &self.outlet) .finish() } }
async fn run(&mut self) -> ProctorResult<()> { let mut done = false; let op = &self.action; let operation = op(None); tokio::pin!(operation); let outlet = &self.outlet; let rx = &mut self.rx_control; loop { let _timer = stage::start_stage_eval_time(self.name.as_ref()); tokio::select! { result = &mut operation, if !done => { let op_span = tracing::info_span!("evaluate operation", ?result); let _op_span_guard = op_span.enter(); done = true; if let Some(r) = result { tracing::info!("Completed with result = {:?}", r); let _ = outlet.send(r).await; break; } } Some(control_signal) = rx.recv() => { let ctrl_span = tracing::info_span!("control check", ?control_signal); let _ctrl_span_guard = ctrl_span.enter(); if control_signal.into() % 2 == 0 { tracing::info!("setting operation with control signal.."); operation.set(op(Some(control_signal))); done = false; } } } } Ok(()) }
function_block-full_function
[ { "content": "#[dyn_upcast]\n\n#[async_trait]\n\npub trait Stage: fmt::Debug + Send + Sync {\n\n fn name(&self) -> SharedString;\n\n async fn check(&self) -> ProctorResult<()>;\n\n async fn run(&mut self) -> ProctorResult<()>;\n\n async fn close(self: Box<Self>) -> ProctorResult<()>;\n\n}\n\n\n", "file_path": "src/graph/stage.rs", "rank": 0, "score": 288695.6698596866 }, { "content": "pub trait AppData: Debug + Clone + Send + Sync + 'static {}\n\n\n\n/// AppData is automatically derived for types compatible with graph stage processing. If needed,\n\n/// the AppData trait may also be included in the #[derive] specification.\n\nimpl<T: Debug + Clone + Send + Sync + 'static> AppData for T {}\n", "file_path": "src/app_data.rs", "rank": 1, "score": 281060.747723805 }, { "content": "#[async_trait]\n\npub trait Planning: Debug + Send + Sync {\n\n type Observation: AppData + Clone;\n\n type Decision: AppData + Clone;\n\n type Out: AppData + Clone;\n\n\n\n fn set_outlet(&mut self, outlet: Outlet<Self::Out>);\n\n fn add_observation(&mut self, observation: Self::Observation);\n\n async fn handle_decision(&mut self, decision: Self::Decision) -> Result<Option<Self::Out>, PlanError>;\n\n async fn close(mut self) -> Result<(), PlanError>;\n\n}\n\n\n\npub struct Plan<P: Planning> {\n\n name: SharedString,\n\n planning: P,\n\n inlet: Inlet<P::Observation>,\n\n decision_inlet: Inlet<P::Decision>,\n\n outlet: Outlet<P::Out>,\n\n pub tx_monitor: broadcast::Sender<Arc<Event<P>>>,\n\n}\n\n\n", "file_path": "src/phases/plan.rs", "rank": 2, "score": 232829.827538976 }, { "content": "pub trait QueryPolicy: Debug + Send + Sync {\n\n type Item: ToPolar + Clone;\n\n type Context: ToPolar + Clone;\n\n type Args: ToPolarList;\n\n type TemplateData: Debug + Serialize + DeserializeOwned;\n\n\n\n fn zero_context(&self) -> Option<Self::Context> {\n\n None\n\n }\n\n\n\n #[tracing::instrument(level = \"info\", skip(engine))]\n\n fn load_policy_engine(&mut self, engine: &mut oso::Oso) -> Result<(), 
PolicyError> {\n\n engine.clear_rules()?;\n\n let source_paths = self.render_policy_sources()?;\n\n engine.load_files(source_paths)?;\n\n Ok(())\n\n }\n\n\n\n #[tracing::instrument(level = \"info\")]\n\n fn render_policy_sources(&self) -> Result<Vec<PolicySourcePath>, PolicyError> {\n", "file_path": "src/elements/policy_filter/policy.rs", "rank": 3, "score": 220692.665917982 }, { "content": "pub trait SourceStage<Out>: Stage + SourceShape<Out = Out> + 'static {}\n\nimpl<Out, T: 'static + Stage + SourceShape<Out = Out>> SourceStage<Out> for T {}\n\n\n", "file_path": "src/graph/stage.rs", "rank": 4, "score": 218630.79758683342 }, { "content": "/// Register a subscriber as global default to process span data.\n\n///\n\n/// It should be only called once!\n\npub fn init_subscriber(subscriber: impl Subscriber + Sync + Send) {\n\n LogTracer::init().expect(\"Failed to set logger\");\n\n set_global_default(subscriber).expect(\"Failed to set subscriber\");\n\n}\n", "file_path": "src/tracing.rs", "rank": 5, "score": 212606.09201291605 }, { "content": "/// Compose multiple layers into a `tracing`'s subscriber.\n\n///\n\n/// # Implementation Notes\n\n///\n\n/// We are using `impl Subscriber` as return type to avoid having to spell out the actual type of\n\n/// the returned subscriber, which is indeed quite complex.\n\n/// We need to explicitly call out that returned subscriber is `Send` and `Sync` to make it\n\n/// possible to pass it to `init_subscriber` later on.\n\npub fn get_subscriber<S0, S1, W>(name: S0, env_filter: S1, sink: W) -> impl Subscriber + Sync + Send\n\nwhere\n\n S0: Into<String>,\n\n S1: AsRef<str>,\n\n W: for<'a> MakeWriter<'a> + Send + Sync + 'static,\n\n{\n\n let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(env_filter));\n\n\n\n let (flame_subscriber, _guard) = FlameLayer::with_file(\"./tracing.folded\").unwrap();\n\n\n\n let formatting_layer = BunyanFormattingLayer::new(name.into(), sink);\n\n\n\n Registry::default()\n\n 
.with(env_filter)\n\n .with(flame_subscriber)\n\n .with(JsonStorageLayer)\n\n .with(formatting_layer)\n\n}\n\n\n", "file_path": "src/tracing.rs", "rank": 6, "score": 200930.25181927084 }, { "content": "#[inline]\n\nfn track_ingress(stage: &str, port_name: &str) {\n\n STAGE_INGRESS_COUNTS.with_label_values(&[stage, port_name]).inc()\n\n}\n\n\n", "file_path": "src/graph/port.rs", "rank": 7, "score": 193489.87537900207 }, { "content": "#[inline]\n\nfn track_egress(stage: &str, port_name: &str) {\n\n STAGE_EGRESS_COUNTS.with_label_values(&[stage, port_name]).inc()\n\n}\n\n\n\npub const PORT_DATA: &str = \"data\";\n\npub const PORT_CONTEXT: &str = \"context\";\n\n\n", "file_path": "src/graph/port.rs", "rank": 8, "score": 193489.87537900207 }, { "content": "pub trait ContinueTicking: Send {\n\n fn next(&mut self) -> bool;\n\n}\n\n\n\n#[derive(fmt::Debug, Clone, Copy, PartialEq)]\n\npub enum Constraint {\n\n None,\n\n ByCount {\n\n count: usize,\n\n limit: usize,\n\n },\n\n ByTime {\n\n stop: Option<tokio::time::Instant>,\n\n limit: Duration,\n\n },\n\n}\n\n\n\nimpl ContinueTicking for Constraint {\n\n fn next(&mut self) -> bool {\n\n match self {\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 9, "score": 192380.54668854704 }, { "content": "#[inline]\n\npub fn start_stage_eval_time(stage: &str) -> HistogramTimer {\n\n STAGE_EVAL_TIME.with_label_values(&[stage]).start_timer()\n\n}\n\n\n\n/// Behavior driving graph stage lifecycle.\n\n///\n\n/// macro dyn_upcast enables the upcast conversion of concrete stages into the base Stage type when\n\n/// placed in a graph. 
See https://github.com/Lej77/cast_trait_object README for background.\n", "file_path": "src/graph/stage.rs", "rank": 10, "score": 176878.49867679906 }, { "content": "fn type_name_of_val<T>(_val: &T) -> &'static str {\n\n std::any::type_name::<T>()\n\n}\n\n\n\nimpl<T, C, A, P, D> stage::WithApi for PolicyFilter<T, C, A, P, D>\n\nwhere\n\n P: QueryPolicy<Item = T, Context = C, Args = A, TemplateData = D>,\n\n{\n\n type Sender = PolicyFilterApi<C, D>;\n\n\n\n #[inline]\n\n fn tx_api(&self) -> Self::Sender {\n\n self.tx_api.clone()\n\n }\n\n}\n\n\n\nimpl<T, C, A, P, D> stage::WithMonitor for PolicyFilter<T, C, A, P, D>\n\nwhere\n\n P: QueryPolicy<Item = T, Context = C, Args = A, TemplateData = D>,\n\n{\n", "file_path": "src/elements/policy_filter.rs", "rank": 11, "score": 173118.89166164148 }, { "content": "#[tracing::instrument(level = \"info\", skip(name))]\n\npub fn make_telemetry_cvs_source<T, S>(name: S, setting: &SourceSetting) -> Result<TelemetrySource, CollectionError>\n\nwhere\n\n T: Serialize + DeserializeOwned + Debug,\n\n S: Into<String>,\n\n{\n\n if let SourceSetting::Csv { path } = setting {\n\n let name = name.into();\n\n let mut telemetry_name = format!(\"telemetry_{}\", name.as_str());\n\n\n\n if let Some(file_name) = path.file_name() {\n\n match file_name.to_str() {\n\n None => (),\n\n Some(file_name) => telemetry_name.push_str(format!(\"_{}\", file_name).as_str()),\n\n }\n\n }\n\n\n\n let csv_span = tracing::info_span!(\"sourcing CSV\", %telemetry_name, ?path);\n\n let _csv_span_guard = csv_span.enter();\n\n\n\n let mut records: Vec<Telemetry> = vec![];\n", "file_path": "src/phases/collection/source.rs", "rank": 12, "score": 172783.3341800697 }, { "content": "pub trait SinkStage<In>: Stage + SinkShape<In = In> + 'static {}\n\nimpl<In, T: 'static + Stage + SinkShape<In = In>> SinkStage<In> for T {}\n\n\n", "file_path": "src/graph/stage.rs", "rank": 13, "score": 172099.86057883984 }, { "content": "pub trait ThroughStage<In, Out>: Stage + 
ThroughShape<In = In, Out = Out> + 'static {}\n\nimpl<In, Out, T: 'static + Stage + ThroughShape<In = In, Out = Out>> ThroughStage<In, Out> for T {}\n\n\n\npub static STAGE_EVAL_TIME: Lazy<HistogramVec> = Lazy::new(|| {\n\n HistogramVec::new(\n\n HistogramOpts::new(\n\n \"stage_eval_time\",\n\n \"Time spent in a stage's event evaluation cycle in seconds\",\n\n ),\n\n &[\"stage\"],\n\n )\n\n .expect(\"failed creating stage_eval_time metric\")\n\n});\n\n\n", "file_path": "src/graph/stage.rs", "rank": 14, "score": 169983.71254816494 }, { "content": "pub trait FromTelemetryStage<Out>: Stage + ThroughShape<In = Telemetry, Out = Out> + 'static {}\n\n\n\nimpl<Out, T> FromTelemetryStage<Out> for T where T: Stage + ThroughShape<In = Telemetry, Out = Out> + 'static {}\n\n\n\n#[tracing::instrument(level = \"info\", skip(name))]\n\npub async fn make_from_telemetry<Out>(name: impl Into<String>, log_conversion_failure: bool) -> FromTelemetryShape<Out>\n\nwhere\n\n Out: AppData + DeserializeOwned,\n\n{\n\n let name: SharedString = SharedString::Owned(name.into());\n\n let stage_name = name.clone();\n\n let from_telemetry =\n\n stage::FilterMap::<_, Telemetry, Out>::new(format!(\"{}_from_telemetry\", name), move |telemetry| {\n\n let span = tracing::info_span!(\"converting telemetry into data item\", ?telemetry);\n\n let _ = span.enter();\n\n\n\n match telemetry.try_into() {\n\n Ok(converted) => {\n\n tracing::trace!(?converted, \"data item derived from telemetry.\");\n\n Some(converted)\n", "file_path": "src/elements/from_telemetry.rs", "rank": 15, "score": 161233.04167512682 }, { "content": "#[inline]\n\npub fn track_errors(stage: &str, error: &ProctorError) {\n\n GRAPH_ERRORS.with_label_values(&[stage, error.label().as_ref()]).inc()\n\n}\n\n\n\n/// A Graph represents a runnable stream processing graph.\n\n///\n\n/// A Graph has one or `Source` nodes, zero or more `Through` nodes and one or more `Sink` nodes.\n\n/// Each node is connected to each other via their respective 
`Inlet` and `Outlet` `Ports`.\n\n///\n\n/// In order to use a `Graph`, its nodes must be registered and connected. Then the `Graph` may be\n\n/// ran, which will spawn asynchronous tasks for each node (via `tokio::spawn`). Once run, the\n\n/// underlying graph nodes will executed until they source(s) complete or the graph is aborted.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use proctor::graph::stage::{self, Stage};\n\n/// use proctor::graph::{self, Connect, Graph};\n\n/// use proctor::graph::{SinkShape, SourceShape, ThroughShape};\n\n///\n", "file_path": "src/graph.rs", "rank": 16, "score": 161223.0359910722 }, { "content": "#[async_trait]\n\npub trait Port {\n\n fn stage(&self) -> &str;\n\n fn name(&self) -> &str;\n\n fn full_name(&self) -> String {\n\n format!(\"{}::{}\", self.stage(), self.name())\n\n }\n\n\n\n /// Closes this half of a channel without dropping it.\n\n /// This prevents any further messages from being sent on the port while still enabling the\n\n /// receiver to drain messages that are buffered. Any outstanding Permit values will still be\n\n /// able to send messages.\n\n /// To guarantee that no messages are dropped, after calling close(), recv() must be called\n\n /// until None is returned. 
If there are outstanding Permit values, the recv method will not\n\n /// return None until those are released\n\n async fn close(&mut self);\n\n}\n\n\n", "file_path": "src/graph/port.rs", "rank": 17, "score": 144881.49240481632 }, { "content": "pub fn serialize_to_str<T, S>(that: T, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n T: AsRef<str>,\n\n S: Serializer,\n\n{\n\n serializer.serialize_str(that.as_ref())\n\n}\n\n\n", "file_path": "src/serde/mod.rs", "rank": 18, "score": 129063.618689792 }, { "content": "#[tracing::instrument(level = \"debug\", skip(serializer))]\n\npub fn serialize<S>(date: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n // todo changing DateTime<Utc> serde to match From<TelemetryValue>\n\n // let datetime_table = format!(\"{}\", date.format(FORMAT));\n\n let datetime_table = table_from_datetime(date);\n\n let mut map = serializer.serialize_map(Some(datetime_table.len()))?;\n\n for (k, v) in datetime_table {\n\n map.serialize_entry(&k, &v)?;\n\n }\n\n map.end()\n\n}\n\n\n", "file_path": "src/serde/date.rs", "rank": 19, "score": 126952.02535875878 }, { "content": "pub fn serialize_duration_secs<S: Serializer>(that: &Duration, serializer: S) -> Result<S::Ok, S::Error> {\n\n serializer.serialize_u64(that.as_secs())\n\n}\n\n\n", "file_path": "src/serde/mod.rs", "rank": 20, "score": 126947.11713825444 }, { "content": "#[tracing::instrument(level = \"debug\", skip(serializer))]\n\npub fn serialize_format<S>(date: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let datetime_rep = format!(\"{}\", date.format(FORMAT));\n\n serializer.serialize_str(datetime_rep.as_str())\n\n}\n\n\n", "file_path": "src/serde/date.rs", "rank": 21, "score": 124930.81998113821 }, { "content": "#[tracing::instrument(level = \"info\", skip())]\n\nfn policy_source_path_for(name: &str, policy: Either<PathBuf, &str>) -> Result<PolicySourcePath, PolicyError> {\n\n match policy 
{\n\n Either::Left(path) => Ok(PolicySourcePath::File(path)),\n\n Either::Right(rep) => {\n\n let tempdir = std::env::current_dir()?;\n\n\n\n let mut tmp = tempfile::Builder::new()\n\n .prefix(format!(\"policy_{}_\", name).as_str())\n\n .rand_bytes(4)\n\n .suffix(\".polar\")\n\n .tempfile_in(tempdir)?;\n\n\n\n tracing::info!(\"rendered {} policy file for loading: {:?}\", name, tmp);\n\n\n\n write!(tmp.as_file_mut(), \"{}\", rep)?;\n\n\n\n Ok(PolicySourcePath::String(tmp))\n\n }\n\n }\n\n}\n", "file_path": "src/elements/policy_filter/policy.rs", "rank": 22, "score": 123027.53582505476 }, { "content": "// }\n\n//\n\n// impl<Ctrl, A, Out, F> RefreshableSourceStream<Ctrl, A, Out, F>\n\n// where\n\n// Ctrl: Copy + Display + Debug + Into<i32>,\n\n// A: Fn(Option<Ctrl>) -> F,\n\n// Out: Debug + Send + Sized + 'static,\n\n// F: Future<Output = Option<Out>>,\n\n// {\n\n// pub fn new( action: A, rx_control: mpsc::Receiver<Ctrl>, ) -> Self {\n\n// Self {\n\n// action,\n\n// rx_control,\n\n// }\n\n// }\n\n//\n\n// #[tracing::instrument(level=\"info\", skip(self))]\n\n// pub fn spawn(&mut self) -> impl Stream<Item = Out> {\n\n// let mut done = false;\n\n//\n", "file_path": "src/graph/stage/source/refreshable_stream.rs", "rank": 30, "score": 118878.7129308574 }, { "content": "// use std::fmt::{Debug, Display};\n\n// use std::future::Future;\n\n// use std::time::Duration;\n\n// use tokio::sync::mpsc;\n\n// use tokio::time::sleep;\n\n// use async_stream::stream;\n\n// use futures_core::stream::Stream;\n\n// use tracing_futures::Instrument;\n\n// use futures_util::stream::StreamExt;\n\n//\n\n//\n\n// pub struct RefreshableSourceStream<Ctrl, A, Out, F>\n\n// where\n\n// Ctrl: Copy + Display + Debug + Into<i32>,\n\n// A: Fn(Option<Ctrl>) -> F,\n\n// Out: Debug + Send + Sized + 'static,\n\n// F: Future<Output = Option<Out>>,\n\n// {\n\n// action: A,\n\n// rx_control: mpsc::Receiver<Ctrl>,\n", "file_path": "src/graph/stage/source/refreshable_stream.rs", "rank": 31, "score": 
118874.20230664036 }, { "content": "// let op = &self.action;\n\n// let operation = op(None);\n\n// tokio::pin!(operation);\n\n//\n\n// let result = async {\n\n// let r2 = loop {\n\n// tokio::select! {\n\n// result = &mut operation, if !done => {\n\n// let op_span = tracing::info_span!(\"operation evaluation\", ?result, done);\n\n// let _op_span_guard = op_span.enter();\n\n//\n\n// done = true;\n\n//\n\n// if let Some(r) = result {\n\n// tracing::info!(\"Completed with result = {:?}\", r);\n\n// break r;\n\n// }\n\n// }\n\n//\n\n// Some(arg) = self.rx_control.recv() => {\n", "file_path": "src/graph/stage/source/refreshable_stream.rs", "rank": 32, "score": 118831.68519563421 }, { "content": "// sleep(Duration::from_secs(1))\n\n// .instrument(tracing::info_span!(\"control check\", %arg))\n\n// .await;\n\n//\n\n// if arg.into() % 2 == 0 {\n\n// tracing::info!(\"setting operation with {}\", arg);\n\n// operation.set(op(Some(arg)));\n\n// done = false;\n\n// }\n\n// }\n\n// }\n\n// };\n\n// r2.await\n\n// };\n\n//\n\n// stream! 
{ yield result; }\n\n// }\n\n// }", "file_path": "src/graph/stage/source/refreshable_stream.rs", "rank": 33, "score": 118824.43230969465 }, { "content": "#[tracing::instrument(level = \"debug\", skip(serializer))]\n\npub fn serialize_optional_datetime_map<S>(date: &Option<DateTime<Utc>>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let s = date.as_ref().map(table_from_datetime);\n\n\n\n match s {\n\n None => serializer.serialize_none(),\n\n Some(s) => serializer.serialize_some(&s),\n\n }\n\n}\n\n\n", "file_path": "src/serde/date.rs", "rank": 34, "score": 117710.7890765369 }, { "content": "#[tracing::instrument(level = \"debug\", skip(serializer))]\n\npub fn serialize_optional_datetime_format<S>(date: &Option<DateTime<Utc>>, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n let s = date.map(|d| format!(\"{}\", d.format(FORMAT)));\n\n\n\n match s {\n\n None => serializer.serialize_none(),\n\n Some(s) => serializer.serialize_some(&s),\n\n }\n\n}\n\n\n", "file_path": "src/serde/date.rs", "rank": 35, "score": 117710.7890765369 }, { "content": "#[tracing::instrument(level = \"info\", skip(templates))]\n\npub fn render_template_policy<'a, T, D>(templates: T, name: &str, data: Option<&D>) -> Result<String, PolicyError>\n\nwhere\n\n T: IntoIterator<Item = &'a PolicySource>,\n\n D: Serialize + Debug,\n\n{\n\n tracing::info!(\"rendering policy string as template with data.\");\n\n\n\n // I tried to facilitate registry caching in policy, but handlebars' lifetime parameter\n\n // (underlying the PolicyRegistry) hampers the ergonomics of policy definition.\n\n // Not a performance impact since policy loading only happens on bootstrap or during\n\n // a corresponding, intermittent command.\n\n let mut registry = PolicyRegistry::new();\n\n for s in templates {\n\n let policy_template: String = s.try_into()?;\n\n registry.register_template_string(s.name().as_ref(), policy_template)?;\n\n }\n\n tracing::debug!(?registry, 
\"policy templates registered with handlebars registry\");\n\n let policy = registry.render(name, &data)?;\n\n tracing::info!(rendered_policy=%policy, \"rendered {} policy from template and data.\", name);\n\n Ok(policy)\n\n}\n\n\n", "file_path": "src/elements/policy_filter/policy.rs", "rank": 36, "score": 114519.96615828678 }, { "content": "#[inline]\n\nfn start_policy_timer(stage: &str) -> HistogramTimer {\n\n POLICY_FILTER_EVAL_TIME.with_label_values(&[stage]).start_timer()\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\npub enum PolicyResult {\n\n Passed,\n\n Blocked,\n\n Failed,\n\n}\n\n\n\nimpl fmt::Display for PolicyResult {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let label = match *self {\n\n Self::Passed => \"passed\",\n\n Self::Blocked => \"blocked\",\n\n Self::Failed => \"failed\",\n\n };\n\n write!(f, \"{}\", label)\n\n }\n", "file_path": "src/elements/policy_filter.rs", "rank": 37, "score": 111977.5920474698 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct Data {\n\n #[serde(\n\n default,\n\n rename = \"task.last_failure\",\n\n serialize_with = \"proctor::serde::date::serialize_optional_datetime_format\",\n\n deserialize_with = \"proctor::serde::date::deserialize_optional_datetime\"\n\n )]\n\n pub last_failure: Option<DateTime<Utc>>,\n\n\n\n #[serde(rename = \"cluster.is_deploying\")]\n\n pub is_deploying: bool,\n\n\n\n #[serde(\n\n rename = \"cluster.last_deployment\",\n\n serialize_with = \"proctor::serde::date::serialize_format\",\n\n deserialize_with = \"proctor::serde::date::deserialize\"\n\n )]\n\n pub last_deployment: DateTime<Utc>,\n\n}\n\n\n", "file_path": "tests/elements/test_from_telemetry_stage.rs", "rank": 38, "score": 108199.96480168709 }, { "content": "pub trait WithMonitor {\n\n type Receiver;\n\n fn rx_monitor(&self) -> Self::Receiver;\n\n}\n", "file_path": "src/graph/stage.rs", "rank": 39, "score": 106315.87573609412 }, { "content": "pub trait WithApi {\n\n type 
Sender;\n\n fn tx_api(&self) -> Self::Sender;\n\n}\n\n\n", "file_path": "src/graph/stage.rs", "rank": 40, "score": 106315.87573609412 }, { "content": "#[tracing::instrument(level = \"info\")]\n\npub fn register_proctor_metrics(registry: &Registry) -> Result<(), ProctorError> {\n\n registry.register(Box::new(graph::GRAPH_ERRORS.clone()))?;\n\n registry.register(Box::new(graph::STAGE_INGRESS_COUNTS.clone()))?;\n\n registry.register(Box::new(graph::STAGE_EGRESS_COUNTS.clone()))?;\n\n registry.register(Box::new(graph::stage::STAGE_EVAL_TIME.clone()))?;\n\n registry.register(Box::new(policy_filter::POLICY_FILTER_EVAL_TIME.clone()))?;\n\n registry.register(Box::new(clearinghouse::SUBSCRIPTIONS_GAUGE.clone()))?;\n\n registry.register(Box::new(clearinghouse::PUBLICATIONS.clone()))?;\n\n Ok(())\n\n}\n", "file_path": "src/metrics.rs", "rank": 41, "score": 105412.77149160887 }, { "content": "#[proc_macro_derive(ProctorContext)]\n\npub fn proctor_context_derive(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse(input).unwrap();\n\n impl_proctor_context_macro(&ast)\n\n}\n\n\n", "file_path": "proctor_derive/src/lib.rs", "rank": 42, "score": 104606.62922403516 }, { "content": "#[proc_macro_derive(AppData)]\n\npub fn app_data_derive(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse(input).unwrap();\n\n impl_app_data_macro(&ast)\n\n}\n\n\n", "file_path": "proctor_derive/src/lib.rs", "rank": 43, "score": 104606.62922403516 }, { "content": "pub trait SourceShape {\n\n type Out;\n\n fn outlet(&self) -> Outlet<Self::Out>;\n\n}\n\n\n", "file_path": "src/graph/shape.rs", "rank": 44, "score": 104410.90061282615 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct Data {\n\n #[serde(\n\n rename = \"task.last_failure\",\n\n default,\n\n serialize_with = \"proctor::serde::date::serialize_optional_datetime_map\",\n\n deserialize_with = \"proctor::serde::date::deserialize_optional_datetime\"\n\n )]\n\n pub last_failure: 
Option<DateTime<Utc>>,\n\n #[serde(rename = \"cluster.is_deploying\")]\n\n pub is_deploying: bool,\n\n #[serde(rename = \"cluster.last_deployment\", with = \"proctor::serde\")]\n\n pub latest_deployment: DateTime<Utc>,\n\n}\n\n\n\nimpl Default for Data {\n\n fn default() -> Self {\n\n Self {\n\n last_failure: None,\n\n is_deploying: true,\n\n latest_deployment: Utc.datetime_from_str(\"1970-08-30 11:32:09\", \"%Y-%m-%d %H:%M:%S\").unwrap(),\n", "file_path": "tests/elements/test_make_telemetry_cvs_source.rs", "rank": 45, "score": 104174.40845685055 }, { "content": "#[async_trait]\n\npub trait Connect<T> {\n\n async fn connect(self);\n\n}\n\n\n\n#[async_trait]\n\nimpl<T: AppData> Connect<T> for (Outlet<T>, Inlet<T>) {\n\n async fn connect(mut self) {\n\n let outlet = self.0;\n\n let inlet = self.1;\n\n connect_out_to_in(outlet, inlet).await\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<T: AppData> Connect<T> for (&Outlet<T>, &Inlet<T>) {\n\n async fn connect(mut self) {\n\n let outlet = self.0.clone();\n\n let inlet = self.1.clone();\n\n connect_out_to_in(outlet, inlet).await\n\n }\n", "file_path": "src/graph/port.rs", "rank": 46, "score": 102705.66557830694 }, { "content": "#[test]\n\nfn test_data_serde() {\n\n let data = Data {\n\n last_failure: Some(NOW.clone()),\n\n is_deploying: true,\n\n last_deployment: NOW.clone(),\n\n };\n\n\n\n assert_tokens(\n\n &data,\n\n &vec![\n\n Token::Struct { name: \"Data\", len: 3 },\n\n Token::Str(\"task.last_failure\"),\n\n Token::Some,\n\n Token::Str(&NOW_REP),\n\n Token::Str(\"cluster.is_deploying\"),\n\n Token::Bool(true),\n\n Token::Str(\"cluster.last_deployment\"),\n\n Token::Str(&NOW_REP),\n\n Token::StructEnd,\n\n ],\n", "file_path": "tests/elements/test_from_telemetry_stage.rs", "rank": 47, "score": 102480.14766164872 }, { "content": "#[derive(Debug, Clone, PartialEq)]\n\nstruct Data {\n\n pub last_failure: Option<DateTime<Utc>>,\n\n pub is_deploying: bool,\n\n pub latest_deployment: DateTime<Utc>,\n\n}\n\n\n\nimpl Default for 
Data {\n\n fn default() -> Self {\n\n Self {\n\n last_failure: None,\n\n is_deploying: true,\n\n latest_deployment: Utc.datetime_from_str(\"1970-08-30 11:32:09\", \"%Y-%m-%d %H:%M:%S\").unwrap(),\n\n }\n\n }\n\n}\n\n\n\n#[tokio::test(flavor = \"multi_thread\", worker_threads = 4)]\n\nasync fn test_make_telemetry_rest_api_source() -> Result<()> {\n\n once_cell::sync::Lazy::force(&proctor::tracing::TEST_TRACING);\n\n // fixtures::init_tracing(\"test_make_telemetry_rest_api_source\");\n", "file_path": "tests/elements/test_make_telemetry_rest_api_source.rs", "rank": 48, "score": 102053.43324732283 }, { "content": "pub trait FanInShape2: SourceShape {\n\n type In0;\n\n type In1;\n\n\n\n fn inlet_0(&self) -> Inlet<Self::In0>;\n\n fn inlet_1(&self) -> Inlet<Self::In1>;\n\n}\n\n\n\npub struct InletsShape<T>(pub Arc<Mutex<Vec<Inlet<T>>>>);\n\n\n\nimpl<T: Send> InletsShape<T> {\n\n pub fn new(inlets: Vec<Inlet<T>>) -> Self {\n\n Self(Arc::new(Mutex::new(inlets)))\n\n }\n\n\n\n pub async fn len(&self) -> usize {\n\n self.0.lock().await.len()\n\n }\n\n\n\n pub async fn get(&self, index: usize) -> Option<Inlet<T>> {\n", "file_path": "src/graph/shape.rs", "rank": 49, "score": 97586.69351966887 }, { "content": "pub trait UniformFanInShape: SourceShape {\n\n type In;\n\n // todo use once associated type defaults are stable\n\n // type InletShape = Arc<Mutex<Inlet<Self::In>>>;\n\n // type InletsShape = Arc<Mutex<Vec<Self::InletShape>>>;\n\n\n\n fn inlets(&self) -> InletsShape<Self::In>;\n\n}\n\n\n\npub type OutletsShape<T> = Vec<Outlet<T>>;\n\n\n", "file_path": "src/graph/shape.rs", "rank": 50, "score": 95469.92763432123 }, { "content": "fn type_error_for(expected: TelemetryType, actual: TelemetryValue) -> TelemetryError {\n\n TelemetryError::TypeError { expected, actual: Some(format!(\"{:?}\", actual)) }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\npub struct First;\n\n\n\nimpl TelemetryCombinator for First {\n\n fn combine(&self, items: Vec<TelemetryValue>) -> 
Result<Option<TelemetryValue>, TelemetryError> {\n\n Ok(items.into_iter().next())\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\npub struct Max;\n\n\n\nimpl TelemetryCombinator for Max {\n\n fn combine(&self, items: Vec<TelemetryValue>) -> Result<Option<TelemetryValue>, TelemetryError> {\n\n self.do_combine(items)\n\n }\n", "file_path": "src/elements/telemetry/combine.rs", "rank": 51, "score": 94376.18746049871 }, { "content": "pub trait ThroughShape: SourceShape + SinkShape {}\n\nimpl<T: SourceShape + SinkShape> ThroughShape for T {}\n\n\n", "file_path": "src/graph/shape.rs", "rank": 52, "score": 93629.04574403609 }, { "content": "fn zero_to_three() -> impl Stream<Item = u32> {\n\n stream! {\n\n for i in 0..3 {\n\n yield i;\n\n }\n\n }\n\n}\n", "file_path": "examples/async_stream.rs", "rank": 53, "score": 93072.38716541501 }, { "content": "#[tracing::instrument(level = \"debug\", skip(deserializer))]\n\npub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>\n\nwhere\n\n D: de::Deserializer<'de>,\n\n{\n\n // deserializer.deserialize_str(DateTimeFromIso8601Rfc3339FormatVisitor)\n\n deserializer.deserialize_any(DateTimeVisitor)\n\n}\n\n\n\n// pub fn deserialize_format<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>\n\n// where\n\n// D: de::Deserializer<'de>,\n\n// {\n\n// deserializer.deserialize_str(DateTimeFromIso8601Rfc3339FormatVisitor)\n\n// }\n\n\n", "file_path": "src/serde/date.rs", "rank": 54, "score": 89362.18753875792 }, { "content": "pub fn deserialize_from_str<'de, S, D>(deserializer: D) -> Result<S, D::Error>\n\nwhere\n\n S: FromStr,\n\n S::Err: fmt::Display,\n\n D: Deserializer<'de>,\n\n{\n\n let s: String = Deserialize::deserialize(deserializer)?;\n\n S::from_str(&s).map_err(de::Error::custom)\n\n}\n\n\n", "file_path": "src/serde/mod.rs", "rank": 55, "score": 89357.27931825358 }, { "content": "impl<Out: AppData> Stage for CompositeSource<Out> {\n\n #[inline]\n\n fn name(&self) -> SharedString {\n\n 
self.name.clone()\n\n }\n\n\n\n #[tracing::instrument(level = \"info\", skip(self))]\n\n async fn check(&self) -> ProctorResult<()> {\n\n self.outlet.check_attachment().await?;\n\n Ok(())\n\n }\n\n\n\n #[tracing::instrument(level = \"info\", name = \"run composite source\", skip(self))]\n\n async fn run(&mut self) -> ProctorResult<()> {\n\n match self.graph.take() {\n\n None => Ok(()),\n\n Some(g) => g.run().await,\n\n }\n\n }\n\n\n\n async fn close(mut self: Box<Self>) -> ProctorResult<()> {\n\n tracing::trace!(\"closing composite graph and outlet.\");\n\n self.outlet.close().await;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/graph/stage/source/composite_source.rs", "rank": 56, "score": 89216.99591588804 }, { "content": "use std::fmt::{self, Debug};\n\n\n\nuse async_trait::async_trait;\n\nuse cast_trait_object::dyn_upcast;\n\nuse tokio::sync::{mpsc, oneshot};\n\n\n\nuse crate::graph::stage::{self, Stage};\n\nuse crate::graph::{Outlet, Port, SourceShape, PORT_DATA};\n\nuse crate::{Ack, AppData, ProctorResult, SharedString};\n\n\n\npub type ActorSourceApi<T> = mpsc::UnboundedSender<ActorSourceCmd<T>>;\n\n\n\n#[derive(Debug)]\n\npub enum ActorSourceCmd<T> {\n\n Push { item: T, tx: oneshot::Sender<Ack> },\n\n Stop(oneshot::Sender<Ack>),\n\n}\n\n\n\nimpl<T> ActorSourceCmd<T> {\n\n #[inline]\n", "file_path": "src/graph/stage/source/actor_source.rs", "rank": 57, "score": 89214.8473104808 }, { "content": "impl<T> ActorSource<T> {\n\n pub fn new(name: impl Into<SharedString>) -> Self {\n\n let name = name.into();\n\n let outlet = Outlet::new(name.clone(), PORT_DATA);\n\n let (tx_api, rx_api) = mpsc::unbounded_channel();\n\n Self { name, outlet, tx_api, rx_api }\n\n }\n\n}\n\n\n\nimpl<T> SourceShape for ActorSource<T> {\n\n type Out = T;\n\n\n\n fn outlet(&self) -> Outlet<Self::Out> {\n\n self.outlet.clone()\n\n }\n\n}\n\n\n\n#[dyn_upcast]\n\n#[async_trait]\n\nimpl<T: AppData> Stage for ActorSource<T> {\n", "file_path": "src/graph/stage/source/actor_source.rs", "rank": 58, 
"score": 89212.87939231539 }, { "content": " fn name(&self) -> SharedString {\n\n self.name.clone()\n\n }\n\n\n\n #[tracing::instrument(level = \"info\", skip(self))]\n\n async fn check(&self) -> ProctorResult<()> {\n\n self.outlet.check_attachment().await?;\n\n Ok(())\n\n }\n\n\n\n #[tracing::instrument(level = \"info\", name = \"run actor source\", skip(self))]\n\n async fn run(&mut self) -> ProctorResult<()> {\n\n while let Some(command) = self.rx_api.recv().await {\n\n let _timer = stage::start_stage_eval_time(self.name.as_ref());\n\n\n\n tracing::info!(?command, \"handling command\");\n\n match command {\n\n ActorSourceCmd::Push { item, tx } => {\n\n let send_span = tracing::info_span!(\"sending item\", ?item);\n\n let _ = send_span.enter();\n", "file_path": "src/graph/stage/source/actor_source.rs", "rank": 59, "score": 89212.54333639894 }, { "content": " (&graph_outlet, &from_graph).connect().await;\n\n let composite_outlet = Outlet::new(name.clone(), PORT_DATA);\n\n let bridge = stage::Identity::new(format!(\"{}_bridge\", name), from_graph, composite_outlet.clone());\n\n\n\n graph.push_back(Box::new(bridge)).await;\n\n (graph, composite_outlet)\n\n }\n\n}\n\n\n\nimpl<Out> SourceShape for CompositeSource<Out> {\n\n type Out = Out;\n\n\n\n #[inline]\n\n fn outlet(&self) -> Outlet<Self::Out> {\n\n self.outlet.clone()\n\n }\n\n}\n\n\n\n#[dyn_upcast]\n\n#[async_trait]\n", "file_path": "src/graph/stage/source/composite_source.rs", "rank": 60, "score": 89211.90889613239 }, { "content": "/// ```\n\n#[derive(Debug)]\n\npub struct CompositeSource<Out> {\n\n name: SharedString,\n\n graph: Option<Graph>,\n\n outlet: Outlet<Out>,\n\n}\n\n\n\nimpl<Out: AppData> CompositeSource<Out> {\n\n pub async fn new(name: impl Into<SharedString>, graph: Graph, graph_outlet: Outlet<Out>) -> Self {\n\n let name = name.into();\n\n let (graph, outlet) = Self::extend_graph(name.clone(), graph, graph_outlet).await;\n\n Self { name, graph: Some(graph), outlet }\n\n }\n\n\n\n async fn 
extend_graph(\n\n name: impl Into<SharedString>, mut graph: Graph, graph_outlet: Outlet<Out>,\n\n ) -> (Graph, Outlet<Out>) {\n\n let name = name.into();\n\n let from_graph = Inlet::new(name.clone(), \"from_graph\");\n", "file_path": "src/graph/stage/source/composite_source.rs", "rank": 61, "score": 89210.29536698211 }, { "content": "use std::fmt::Debug;\n\n\n\nuse async_trait::async_trait;\n\nuse cast_trait_object::dyn_upcast;\n\n\n\nuse crate::graph::shape::SourceShape;\n\nuse crate::graph::{stage, Connect, Graph, Inlet, Outlet, Port, Stage, PORT_DATA};\n\nuse crate::{AppData, ProctorResult, SharedString};\n\n\n\n/// Source shape that encapsulates externally created stages, supporting graph stage composition.\n\n///\n\n/// Examples\n\n///\n\n/// ```rust\n\n/// #[macro_use]\n\n/// extern crate proctor_derive;\n\n///\n\n/// use std::collections::{BTreeMap, HashMap};\n\n/// use std::iter::FromIterator;\n\n/// use std::sync::Arc;\n", "file_path": "src/graph/stage/source/composite_source.rs", "rank": 62, "score": 89206.87431687508 }, { "content": " self.outlet().send(item).await?;\n\n let _ignore_failure = tx.send(());\n\n }\n\n\n\n ActorSourceCmd::Stop(tx) => {\n\n tracing::info!(\"stopping actor source.\");\n\n let _ignore_failure = tx.send(());\n\n break;\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n async fn close(mut self: Box<Self>) -> ProctorResult<()> {\n\n tracing::info!(\"closing actor source outlet.\");\n\n self.outlet.close().await;\n\n Ok(())\n\n }\n", "file_path": "src/graph/stage/source/actor_source.rs", "rank": 63, "score": 89206.36587816587 }, { "content": "}\n\n\n\nimpl<T> stage::WithApi for ActorSource<T> {\n\n type Sender = ActorSourceApi<T>;\n\n\n\n fn tx_api(&self) -> Self::Sender {\n\n self.tx_api.clone()\n\n }\n\n}\n\n\n\nimpl<T> Debug for ActorSource<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"ActorSource\")\n\n .field(\"name\", &self.name)\n\n .field(\"outlet\", &self.outlet)\n\n .finish()\n\n 
}\n\n}\n\n\n\n// /////////////////////////////////////////////////////\n", "file_path": "src/graph/stage/source/actor_source.rs", "rank": 64, "score": 89204.57106898547 }, { "content": " pub fn push(item: T) -> (Self, oneshot::Receiver<Ack>) {\n\n let (tx, rx) = oneshot::channel();\n\n (Self::Push { item, tx }, rx)\n\n }\n\n\n\n #[inline]\n\n pub fn stop() -> (Self, oneshot::Receiver<Ack>) {\n\n let (tx, rx) = oneshot::channel();\n\n (Self::Stop(tx), rx)\n\n }\n\n}\n\n\n\n/// Actor-based protocol to source items into a graph flow.\n\npub struct ActorSource<T> {\n\n name: SharedString,\n\n outlet: Outlet<T>,\n\n tx_api: ActorSourceApi<T>,\n\n rx_api: mpsc::UnboundedReceiver<ActorSourceCmd<T>>,\n\n}\n\n\n", "file_path": "src/graph/stage/source/actor_source.rs", "rank": 65, "score": 89199.84363570166 }, { "content": "/// use std::time::Duration;\n\n///\n\n/// use futures::future::FutureExt;\n\n/// use proctor::elements::telemetry::ToTelemetry;\n\n/// use proctor::elements::Telemetry;\n\n/// use proctor::graph::stage::{self, tick, Stage};\n\n/// use proctor::graph::{Connect, Graph, SinkShape, SourceShape};\n\n/// use proctor::tracing::{get_subscriber, init_subscriber};\n\n/// use proctor::AppData;\n\n/// use reqwest::header::HeaderMap;\n\n/// use serde::Deserialize;\n\n/// use tokio::sync::Mutex;\n\n///\n\n/// #[derive(Debug, Clone, Deserialize)]\n\n/// pub struct HttpBinResponse {\n\n/// pub args: HashMap<String, String>,\n\n/// pub headers: HashMap<String, String>,\n\n/// pub origin: String,\n\n/// pub url: String,\n\n/// }\n", "file_path": "src/graph/stage/source/composite_source.rs", "rank": 66, "score": 89197.98424870706 }, { "content": " tx_api.send(cmd).expect(\"failed to send cmd\");\n\n ack.await.expect(\"command rejected\");\n\n let actual = rx.recv().await;\n\n assert_eq!(actual, None);\n\n })\n\n }\n\n\n\n #[test]\n\n fn test_stop_push_api() {\n\n once_cell::sync::Lazy::force(&crate::tracing::TEST_TRACING);\n\n let main_span = 
tracing::info_span!(\"test_stop_push_api\");\n\n let _ = main_span.enter();\n\n\n\n let (tx, mut rx) = mpsc::channel(8);\n\n let mut src = ActorSource::new(\"test_source\");\n\n let tx_api = src.tx_api();\n\n\n\n block_on(async move {\n\n src.outlet().attach(\"test_tx\", tx).await;\n\n\n", "file_path": "src/graph/stage/source/actor_source.rs", "rank": 67, "score": 89197.83579660956 }, { "content": "// // Unit Tests ///////////////////////////////////////\n\n//\n\n#[cfg(test)]\n\nmod tests {\n\n use tokio::sync::mpsc;\n\n use tokio_test::block_on;\n\n\n\n use super::*;\n\n use crate::graph::stage::WithApi;\n\n\n\n #[test]\n\n fn test_push_stop_api() {\n\n once_cell::sync::Lazy::force(&crate::tracing::TEST_TRACING);\n\n let main_span = tracing::info_span!(\"test_push_stop_api\");\n\n let _ = main_span.enter();\n\n\n\n let (tx, mut rx) = mpsc::channel(8);\n\n let mut src = ActorSource::new(\"test_source\");\n\n let tx_api = src.tx_api();\n\n\n", "file_path": "src/graph/stage/source/actor_source.rs", "rank": 68, "score": 89197.61229119642 }, { "content": " block_on(async move {\n\n src.outlet().attach(\"test_tx\", tx).await;\n\n\n\n tokio::spawn(async move {\n\n src.run().await.expect(\"failed to run actor source\");\n\n });\n\n\n\n let (cmd, ack) = ActorSourceCmd::push(13_i32);\n\n tx_api.send(cmd).expect(\"failed to send cmd\");\n\n ack.await.expect(\"command rejected\");\n\n let actual = rx.recv().await;\n\n assert_eq!(actual, Some(13_i32));\n\n\n\n let (cmd, ack) = ActorSourceCmd::push(17_i32);\n\n tx_api.send(cmd).expect(\"failed to send cmd\");\n\n ack.await.expect(\"command rejected\");\n\n let actual = rx.recv().await;\n\n assert_eq!(actual, Some(17_i32));\n\n\n\n let (cmd, ack) = ActorSourceCmd::stop();\n", "file_path": "src/graph/stage/source/actor_source.rs", "rank": 69, "score": 89194.90220725001 }, { "content": "///\n\n/// let composite_outlet = generator.outlet().clone();\n\n///\n\n/// (tick.outlet(), generator.inlet()).connect().await;\n\n///\n\n/// let 
mut cg = Graph::default();\n\n/// cg.push_back(Box::new(tick)).await;\n\n/// cg.push_back(Box::new(generator)).await;\n\n/// let mut composite = stage::CompositeSource::new(\"composite_source\", cg, composite_outlet).await;\n\n///\n\n/// let mut fold = stage::Fold::<_, Telemetry, _>::new(\"gather latest\", Telemetry::new(), |mut acc, mg| {\n\n/// acc.extend(mg);\n\n/// acc\n\n/// });\n\n/// let rx_gather = fold.take_final_rx().unwrap();\n\n///\n\n/// (composite.outlet(), fold.inlet()).connect().await;\n\n///\n\n/// let mut g = Graph::default();\n\n/// g.push_back(Box::new(composite)).await;\n", "file_path": "src/graph/stage/source/composite_source.rs", "rank": 70, "score": 89192.43449668729 }, { "content": " tokio::spawn(async move {\n\n src.run().await.expect(\"failed to run actor source\");\n\n });\n\n\n\n let (cmd, ack) = ActorSourceCmd::stop();\n\n tx_api.send(cmd).expect(\"failed to send cmd\");\n\n ack.await.expect(\"command rejected\");\n\n let actual = rx.recv().await;\n\n assert_eq!(actual, None);\n\n\n\n let (cmd, _ack) = ActorSourceCmd::push(13_i32);\n\n let actual = tx_api.send(cmd);\n\n assert!(actual.is_err());\n\n })\n\n }\n\n\n\n #[ignore]\n\n #[test]\n\n fn test_push_w_fail() {\n\n todo!()\n\n }\n\n}\n", "file_path": "src/graph/stage/source/actor_source.rs", "rank": 71, "score": 89191.40521363929 }, { "content": "///\n\n/// #[tokio::main]\n\n/// async fn main() -> anyhow::Result<()> {\n\n/// let subscriber = get_subscriber(\"eth_scan\", \"trace\", std::io::stdout);\n\n/// init_subscriber(subscriber);\n\n///\n\n/// let main_span = tracing::info_span!(\"main\");\n\n/// let _main_span_guard = main_span.enter();\n\n///\n\n/// // dmr: this is a hard fought example of how to modify an counter within an async closure.\n\n/// // dmr: important part is two-layer closure.\n\n/// // dmr: https://www.fpcomplete.com/blog/captures-closures-async/\n\n/// let count = Arc::new(Mutex::new(0_usize));\n\n///\n\n/// let mut tick = stage::Tick::with_constraint(\n\n/// 
\"tick\",\n\n/// Duration::from_nanos(0),\n\n/// Duration::from_millis(50),\n\n/// (),\n\n/// tick::Constraint::by_count(3),\n", "file_path": "src/graph/stage/source/composite_source.rs", "rank": 72, "score": 89188.60045777209 }, { "content": "/// default_headers.insert(\n\n/// \"x-api-key\",\n\n/// \"fe37af1e07mshd1763d86e5f2a8cp1714cfjsnb6145a35e7ca\".parse().unwrap(),\n\n/// );\n\n/// let client = reqwest::Client::builder().default_headers(default_headers).build()?;\n\n/// let resp = client\n\n/// .get(url)\n\n/// .send()\n\n/// .await?\n\n/// .json::<HttpBinResponse>()\n\n/// .await\n\n/// .map_err::<anyhow::Error, _>(|err| err.into())?;\n\n///\n\n/// let result: anyhow::Result<Telemetry> = Ok(to_telemetry_data(resp).await);\n\n/// result\n\n/// }\n\n/// .map(|r| r.unwrap())\n\n/// };\n\n///\n\n/// let mut generator = stage::TriggeredGenerator::new(\"generator\", gen);\n", "file_path": "src/graph/stage/source/composite_source.rs", "rank": 73, "score": 89187.18161985658 }, { "content": "/// g.push_back(Box::new(fold)).await;\n\n/// g.run().await?;\n\n///\n\n/// let actual = rx_gather.await.expect(\"fold didn't release\");\n\n/// assert_eq!(\n\n/// actual,\n\n/// maplit::hashmap! 
{\n\n/// \"args.1.f\" => \"foo\".to_telemetry(),\n\n/// \"args.1.b\" => \"bar\".to_telemetry(),\n\n/// \"args.2.f\" => \"foo\".to_telemetry(),\n\n/// \"args.2.b\" => \"bar\".to_telemetry(),\n\n/// \"args.3.f\" => \"foo\".to_telemetry(),\n\n/// \"args.3.b\" => \"bar\".to_telemetry(),\n\n/// }\n\n/// .into_iter()\n\n/// .collect()\n\n/// );\n\n///\n\n/// Ok(())\n\n/// }\n", "file_path": "src/graph/stage/source/composite_source.rs", "rank": 74, "score": 89184.46040155349 }, { "content": "/// );\n\n///\n\n/// let gen = move |_| {\n\n/// let cc = Arc::clone(&count);\n\n///\n\n/// let to_telemetry_data = move |r: HttpBinResponse| async move {\n\n/// let mine = cc.clone();\n\n/// let mut cnt = mine.lock().await;\n\n/// *cnt += 1;\n\n/// let mut data = BTreeMap::new();\n\n/// for (k, v) in &r.args {\n\n/// let tv = v.as_str().to_telemetry();\n\n/// data.insert(format!(\"args.{}.{}\", cnt, k), tv);\n\n/// }\n\n/// Telemetry::try_from(&data).unwrap()\n\n/// };\n\n///\n\n/// async move {\n\n/// let url = \"https://httpbin.org/get?f=foo&b=bar\";\n\n/// let mut default_headers = HeaderMap::new();\n", "file_path": "src/graph/stage/source/composite_source.rs", "rank": 75, "score": 89182.16930937066 }, { "content": "pub fn deserialize_duration_secs<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Duration, D::Error> {\n\n let secs: u64 = Deserialize::deserialize(deserializer)?;\n\n Ok(Duration::from_secs(secs))\n\n}\n", "file_path": "src/serde/mod.rs", "rank": 76, "score": 85154.70221139095 }, { "content": "#[tracing::instrument(level = \"debug\", skip(deserializer))]\n\npub fn deserialize_optional_datetime<'de, D>(deserializer: D) -> Result<Option<DateTime<Utc>>, D::Error>\n\nwhere\n\n D: de::Deserializer<'de>,\n\n{\n\n deserializer.deserialize_option(OptionalDateTimeMapVisitor)\n\n}\n\n\n\n// pub fn deserialize_optional_datetime_format<'de, D>(deserializer: D) ->\n\n// Result<Option<DateTime<Utc>>, D::Error> where\n\n// D: de::Deserializer<'de>,\n\n// {\n\n// 
deserializer.deserialize_option(OptionalDateTimeFormatVisitor)\n\n// }\n\n\n", "file_path": "src/serde/date.rs", "rank": 77, "score": 83814.54112315894 }, { "content": " T: AppData,\n\n I: Iterator<Item = T> + Send + Sync + 'static,\n\n{\n\n #[inline]\n\n fn name(&self) -> SharedString {\n\n self.name.clone()\n\n }\n\n\n\n #[tracing::instrument(level = \"info\", skip(self))]\n\n async fn check(&self) -> ProctorResult<()> {\n\n self.outlet.check_attachment().await?;\n\n Ok(())\n\n }\n\n\n\n #[tracing::instrument(level = \"info\", name = \"run sequence source\", skip(self))]\n\n async fn run(&mut self) -> ProctorResult<()> {\n\n if let Some(items) = self.items.take() {\n\n for (count, item) in items.enumerate() {\n\n let _timer = stage::start_stage_eval_time(self.name.as_ref());\n\n tracing::trace!(?item, %count, \"sending item\");\n", "file_path": "src/graph/stage/source/sequence.rs", "rank": 78, "score": 81173.86382494785 }, { "content": "use std::fmt::{self, Debug};\n\nuse std::time::Duration;\n\n\n\nuse async_stream::stream;\n\nuse async_trait::async_trait;\n\nuse cast_trait_object::dyn_upcast;\n\nuse futures_util::stream::StreamExt;\n\nuse tokio::sync::{mpsc, oneshot};\n\n\n\nuse crate::error::StageError;\n\nuse crate::graph::shape::SourceShape;\n\nuse crate::graph::{stage, Outlet, Port, Stage, PORT_DATA};\n\nuse crate::{AppData, ProctorResult, SharedString};\n\n\n\npub type TickApi = mpsc::UnboundedSender<TickMsg>;\n\n\n\n#[derive(Debug)]\n\npub enum TickMsg {\n\n Stop {\n\n tx: oneshot::Sender<Result<(), StageError>>,\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 79, "score": 81163.73235679937 }, { "content": " }\n\n\n\n #[tracing::instrument(level = \"info\", skip(self))]\n\n async fn close(mut self: Box<Self>) -> ProctorResult<()> {\n\n tracing::info!(\"closing tick source outlet.\");\n\n self.outlet.close().await;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<T> stage::WithApi for Tick<T> {\n\n type Sender = TickApi;\n\n\n\n #[inline]\n\n fn tx_api(&self) 
-> Self::Sender {\n\n self.tx_api.clone()\n\n }\n\n}\n\n\n\nimpl<T> Debug for Tick<T> {\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 80, "score": 81162.6648375054 }, { "content": "impl<T> SourceShape for Tick<T> {\n\n type Out = T;\n\n\n\n #[inline]\n\n fn outlet(&self) -> Outlet<Self::Out> {\n\n self.outlet.clone()\n\n }\n\n}\n\n\n\n#[dyn_upcast]\n\n#[async_trait]\n\nimpl<T> Stage for Tick<T>\n\nwhere\n\n T: AppData + Clone + Unpin + Sync,\n\n{\n\n #[inline]\n\n fn name(&self) -> SharedString {\n\n self.name.clone()\n\n }\n\n\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 81, "score": 81161.30219104882 }, { "content": " outlet: Outlet<T>,\n\n}\n\n\n\nimpl<T, I> Sequence<T, I> {\n\n pub fn new<I0, S>(name: S, data: I0) -> Self\n\n where\n\n I0: IntoIterator<Item = T, IntoIter = I>,\n\n S: Into<SharedString>,\n\n {\n\n let name = name.into();\n\n let outlet = Outlet::new(name.clone(), PORT_DATA);\n\n let items = data.into_iter();\n\n Self { name, items: Some(items), outlet }\n\n }\n\n}\n\n\n\n#[dyn_upcast]\n\n#[async_trait]\n\nimpl<T, I> Stage for Sequence<T, I>\n\nwhere\n", "file_path": "src/graph/stage/source/sequence.rs", "rank": 82, "score": 81160.91614704154 }, { "content": "use std::fmt;\n\n\n\nuse async_trait::async_trait;\n\nuse cast_trait_object::dyn_upcast;\n\n\n\nuse crate::graph::shape::SourceShape;\n\nuse crate::graph::{stage, Outlet, Port, Stage, PORT_DATA};\n\nuse crate::{AppData, ProctorResult, SharedString};\n\n\n\n/// Helper to create Source from Iterable. Example usage: Slice::new(vec![1,2,3]).\n\n///\n\n/// Starts a new Source from the given Iterable. 
This is like starting from an Iterator, but every\n\n/// Subscriber directly attached to the `Outlet` of this source will see an individual flow of\n\n/// elements (always starting from the beginning) regardless of when they subscribed.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use proctor::graph::stage::{self, Stage};\n\n/// use proctor::graph::Connect;\n", "file_path": "src/graph/stage/source/sequence.rs", "rank": 83, "score": 81158.94286871886 }, { "content": " self.outlet.send(item).await?\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n async fn close(mut self: Box<Self>) -> ProctorResult<()> {\n\n tracing::trace!(\"closing sequence-source outlet.\");\n\n self.outlet.close().await;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<T, I> SourceShape for Sequence<T, I> {\n\n type Out = T;\n\n\n\n #[inline]\n\n fn outlet(&self) -> Outlet<Self::Out> {\n\n self.outlet.clone()\n", "file_path": "src/graph/stage/source/sequence.rs", "rank": 84, "score": 81158.61454804691 }, { "content": " #[tracing::instrument(level = \"info\", skip(self))]\n\n async fn check(&self) -> ProctorResult<()> {\n\n self.outlet.check_attachment().await?;\n\n Ok(())\n\n }\n\n\n\n #[tracing::instrument(level = \"info\", name = \"run tick source\", skip(self))]\n\n async fn run(&mut self) -> ProctorResult<()> {\n\n let start = tokio::time::Instant::now() + self.initial_delay;\n\n let interval = tokio::time::interval_at(start, self.interval);\n\n tokio::pin!(interval);\n\n let guard: Box<dyn ContinueTicking> = Box::new(self.constraint);\n\n tokio::pin!(guard);\n\n let tick = &self.tick;\n\n let tick = || tick.clone();\n\n\n\n let ticks = stream! 
{\n\n while guard.next() {\n\n interval.tick().await;\n\n yield tick();\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 85, "score": 81155.25452750703 }, { "content": " }\n\n}\n\n\n\nimpl<T, I> fmt::Debug for Sequence<T, I> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"Sequence\")\n\n .field(\"name\", &self.name)\n\n .field(\"outlet\", &self.outlet)\n\n .finish()\n\n }\n\n}\n\n/////////////////////////////////////////////////////\n\n// Unit Tests ///////////////////////////////////////\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::sync::{Arc, Mutex};\n\n\n\n use tokio::sync::mpsc;\n\n use tokio_test::block_on;\n", "file_path": "src/graph/stage/source/sequence.rs", "rank": 86, "score": 81154.71613928708 }, { "content": " /// Tick can be set to stop after a predefined duration.\n\n #[inline]\n\n pub fn by_time(limit: Duration) -> Constraint {\n\n Constraint::ByTime { stop: None, limit }\n\n }\n\n}\n\n\n\n/// Elements are emitted periodically with the specified interval. 
The tick element will be\n\n/// sent to downstream consumers via its outlet.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use std::time::Duration;\n\n///\n\n/// use proctor::graph::stage::tick;\n\n/// use proctor::graph::stage::{self, Stage};\n\n/// use proctor::graph::SourceShape;\n\n/// use tokio::sync::mpsc;\n\n///\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 87, "score": 81154.61049527595 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"Tick\")\n\n .field(\"name\", &self.name)\n\n .field(\"initial_delay\", &self.initial_delay)\n\n .field(\"interval\", &self.interval)\n\n .field(\"outlet\", &self.outlet)\n\n .finish()\n\n }\n\n}\n\n\n\n// /////////////////////////////////////////////////////\n\n// // Unit Tests ///////////////////////////////////////\n\n//\n\n#[cfg(test)]\n\nmod tests {\n\n use std::time::Duration;\n\n\n\n use tokio::sync::{mpsc, oneshot};\n\n use tokio_test::block_on;\n\n\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 88, "score": 81152.37203065386 }, { "content": " name: S, initial_delay: Duration, interval: Duration, tick: T, constraint: Constraint,\n\n ) -> Self {\n\n assert!(interval > Duration::new(0, 0), \"`interval` must be non-zero.\");\n\n let name = name.into();\n\n let outlet = Outlet::new(name.clone(), PORT_DATA);\n\n let (tx_api, rx_api) = mpsc::unbounded_channel();\n\n\n\n Self {\n\n name,\n\n initial_delay,\n\n interval,\n\n tick,\n\n constraint,\n\n outlet,\n\n tx_api,\n\n rx_api,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 89, "score": 81149.75127857403 }, { "content": "/// use proctor::graph::{SinkShape, SourceShape};\n\n///\n\n/// #[tokio::main]\n\n/// async fn main() {\n\n/// let mut src = stage::Sequence::new(\n\n/// \"my_data\",\n\n/// vec![\n\n/// \"I am serious.\".to_string(),\n\n/// \"And don't call me\".to_string(),\n\n/// \"Shirley!\".to_string(),\n\n/// ],\n\n/// );\n\n///\n\n/// let mut sink = 
stage::Fold::new(\"concatenate\", \"\".to_string(), |acc, s: String| {\n\n/// let result = if !acc.is_empty() { acc + \" \" } else { acc };\n\n/// result + s.as_str()\n\n/// });\n\n/// let mut rx_quote = sink.take_final_rx().unwrap();\n\n///\n\n/// (src.outlet(), sink.inlet()).connect().await;\n", "file_path": "src/graph/stage/source/sequence.rs", "rank": 90, "score": 81149.23691483158 }, { "content": "pub use self::actor_source::*;\n\npub use self::composite_source::CompositeSource;\n\npub use self::refreshable::RefreshableSource;\n\npub use self::sequence::Sequence;\n\npub use self::tick::Tick;\n\npub use self::triggered_generator::TriggeredGenerator;\n\n\n\nmod actor_source;\n\nmod composite_source;\n\nmod refreshable;\n\nmod sequence;\n\npub mod tick;\n\nmod triggered_generator;\n", "file_path": "src/graph/stage/source/mod.rs", "rank": 91, "score": 81149.20466981795 }, { "content": " }\n\n };\n\n\n\n tokio::pin!(ticks);\n\n\n\n let rx_api = &mut self.rx_api;\n\n let outlet = &self.outlet;\n\n\n\n loop {\n\n let _timer = stage::start_stage_eval_time(self.name.as_ref());\n\n\n\n tokio::select! 
{\n\n next_tick = ticks.next() => match next_tick {\n\n Some(tick) => {\n\n tracing::info!(?tick, \"sending tick...\");\n\n outlet.send(tick).await?;\n\n },\n\n\n\n None => {\n\n tracing::warn!(\"ticking stopped -- breaking...\");\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 92, "score": 81148.31107182398 }, { "content": "/// assert_eq!(None, rx.recv().await);\n\n/// }\n\n/// ```\n\npub struct Tick<T> {\n\n name: SharedString,\n\n pub initial_delay: Duration,\n\n pub interval: Duration,\n\n tick: T,\n\n constraint: Constraint,\n\n outlet: Outlet<T>,\n\n tx_api: TickApi,\n\n rx_api: mpsc::UnboundedReceiver<TickMsg>,\n\n}\n\n\n\nimpl<T> Tick<T> {\n\n pub fn new<S: Into<SharedString>>(name: S, initial_delay: Duration, interval: Duration, tick: T) -> Self {\n\n Self::with_constraint(name, initial_delay, interval, tick, Constraint::None)\n\n }\n\n\n\n pub fn with_constraint<S: Into<SharedString>>(\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 93, "score": 81148.0328494589 }, { "content": " tick.run().await.expect(\"failed to run tick source\");\n\n });\n\n\n\n tokio::time::sleep(Duration::from_millis(100)).await;\n\n let (stop_tx, stop_rx) = oneshot::channel();\n\n let foo: anyhow::Result<()> = tx_api.send(TickMsg::Stop { tx: stop_tx }).map_err(|err| err.into());\n\n let _ = foo?;\n\n tracing::info!(\"Stop sent to Tick source.\");\n\n\n\n stop_rx.await??;\n\n tracing::info!(\"Stop Ack received.\");\n\n\n\n for _ in 1..=2 {\n\n assert_eq!(Some(17), rx.recv().await);\n\n }\n\n assert_eq!(None, rx.recv().await);\n\n\n\n Ok(())\n\n })\n\n }\n\n}\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 94, "score": 81146.1480915328 }, { "content": "\n\n use super::*;\n\n\n\n #[test]\n\n fn test_basic_usage() {\n\n let my_data = vec![2, 3, 5, 7, 11, 13, 17, 19];\n\n let actual = Arc::new(Mutex::new(Vec::<i32>::with_capacity(my_data.len())));\n\n\n\n let (tx, mut rx) = mpsc::channel(8);\n\n\n\n let mut src = Sequence::new(\"my_data\", 
my_data);\n\n\n\n let recv_actual = Arc::clone(&actual);\n\n block_on(async move {\n\n src.outlet.attach(\"test_tx\", tx).await;\n\n let src_handle = tokio::spawn(async move { src.run().await });\n\n\n\n let actual_handle = tokio::spawn(async move {\n\n while let Some(d) = rx.recv().await {\n\n let recv_a = recv_actual.lock();\n", "file_path": "src/graph/stage/source/sequence.rs", "rank": 95, "score": 81145.82696205963 }, { "content": "///\n\n/// let sink_handle = tokio::spawn(async move {\n\n/// sink.run().await;\n\n/// });\n\n/// let src_handle = tokio::spawn(async move {\n\n/// src.run().await;\n\n/// });\n\n///\n\n/// src_handle.await.unwrap();\n\n/// sink_handle.await.unwrap();\n\n///\n\n/// match rx_quote.try_recv() {\n\n/// Ok(quote) => assert_eq!(\"I am serious. And don't call me Shirley!\", quote),\n\n/// Err(err) => panic!(\"quote not yet assembled: {}\", err),\n\n/// }\n\n/// }\n\n/// ```\n\npub struct Sequence<T, I> {\n\n name: SharedString,\n\n items: Option<I>,\n", "file_path": "src/graph/stage/source/sequence.rs", "rank": 96, "score": 81145.80039340517 }, { "content": " },\n\n}\n\n\n\nimpl TickMsg {\n\n #[inline]\n\n pub fn stop() -> (Self, oneshot::Receiver<Result<(), StageError>>) {\n\n let (tx, rx) = oneshot::channel();\n\n (Self::Stop { tx }, rx)\n\n }\n\n}\n\n\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 97, "score": 81144.27550581434 }, { "content": " }\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl Constraint {\n\n /// By default, Tick has no constraint and will produce ticks ongoing until it is stopped by\n\n /// either dropping the Tick source or sending it the [TickMsg::Stop] message.\n\n #[inline]\n\n pub fn none() -> Constraint {\n\n Constraint::None\n\n }\n\n\n\n /// Tick can be set to stop after a predefined count of ticks.\n\n #[inline]\n\n pub fn by_count(limit: usize) -> Constraint {\n\n Constraint::ByCount { count: 0, limit }\n\n }\n\n\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 98, "score": 81143.38482743179 }, { 
"content": " break;\n\n }\n\n },\n\n\n\n Some(msg) = rx_api.recv() => match msg {\n\n TickMsg::Stop { tx } => {\n\n tracing::info!(\"handling request to stop ticking.\");\n\n let _ = tx.send(Ok(()));\n\n break;\n\n }\n\n },\n\n\n\n else => {\n\n tracing::trace!(\"done ticking - completing tick source.\");\n\n break;\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n", "file_path": "src/graph/stage/source/tick.rs", "rank": 99, "score": 81143.23601608195 } ]
Rust
logpack-derive/src/encode_derive.rs
da-x/logpack
4c95c05a415a94d41ff562cde38d77b3e55516c1
use std::collections::HashSet; use proc_macro2::{TokenStream as Tokens, Span}; use syn::{Data, DeriveInput, Fields, DataEnum, Ident}; use quote::quote; pub fn derive(input: &DeriveInput) -> Tokens { let name = &input.ident; let generics = super::add_trait_bounds( input.generics.clone(), &HashSet::new(), &[quote!{ logpack::Encoder }], ); let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let encoder_fields = match &input.data { Data::Enum(data) => encoder_for_enum(name, data, false), Data::Struct(variant_data) => encoder_for_struct(&variant_data.fields, false), Data::Union{..} => { panic!() } }; let sizer_fields = match &input.data { Data::Enum(data) => encoder_for_enum(name, &data, true), Data::Struct(variant_data) => encoder_for_struct(&variant_data.fields, true), Data::Union{..} => { panic!() } }; let result = quote! { impl #impl_generics logpack::Encoder for #name #ty_generics #where_clause { fn logpack_encode(&self, _buf: &mut logpack::buffers::BufEncoder) -> Result<(), (usize, usize)> { #encoder_fields; Ok(()) } fn logpack_sizer(&self) -> usize { #sizer_fields } } }; result } fn encoder_for_struct(fields: &Fields, sized: bool) -> Tokens { match fields { Fields::Named(ref fields) => { let fields : Vec<_> = fields.named.iter().collect(); encoder_for_struct_kind(Some(&fields[..]), true, sized) }, Fields::Unnamed(ref fields) => { let fields : Vec<_> = fields.unnamed.iter().collect(); encoder_for_struct_kind(Some(&fields[..]), false, sized) }, Fields::Unit => { encoder_for_struct_kind(None, false, sized) }, } } fn encoder_for_enum_struct<'a>(name: &Ident, ident: &Ident, fields: Vec<FieldExt<'a>>, prefix: Tokens, named: bool, sizer: bool, header_size: usize) -> Tokens { let one_ref = fields.iter().map(|v| { let ident = &v.get_match_ident(); quote! 
{ ref #ident } }); let fields_match = match named { false => quote!(( #(#one_ref),* )), true => quote!({ #(#one_ref),* }), }; let body = if sizer { let body_impls = fields.iter().map(|v| { let ident = &v.get_match_ident(); quote! { size += #ident.logpack_sizer(); } }); quote!(let mut size: usize = #header_size; #(#body_impls);*; size ) } else { let body_impls = fields.iter().map(|v| { let ident = &v.get_match_ident(); quote! { #ident.logpack_encode(_buf)? } }); quote!(#(#body_impls);*; Ok(()) ) }; quote! { &#name::#ident #fields_match => { #prefix #body } } } fn encoder_for_enum(name: &Ident, data_enum: &DataEnum, sizer: bool) -> Tokens { let variants = &data_enum.variants; if variants.len() == 0 { if sizer { quote!(0) } else { quote!() } } else { let mut idx : u32 = 0; let (idx_type, header_size) = if variants.len() < 0x100 { ("u8", 1) } else if variants.len() < 0x10000 { ("u16", 2) } else { ("u32", 4) }; let idx_type = Ident::new(idx_type, Span::call_site()); let impls = variants.iter().map(|v| { let ident = &v.ident; let prefix = if sizer { quote! {} } else { quote! { let idx : #idx_type = #idx as #idx_type; idx.logpack_encode(_buf)?; } }; idx += 1; match v.fields { Fields::Named(ref fields) => { let fields: Vec<_> = fields.named.iter().enumerate().map(|(i, f)| FieldExt::new(f, i, true)).collect(); encoder_for_enum_struct(name, ident, fields, prefix, true, sizer, header_size) }, Fields::Unnamed(ref fields) => { let fields: Vec<_> = fields.unnamed.iter().enumerate().map(|(i, f)| FieldExt::new(f, i, false)).collect(); encoder_for_enum_struct(name, ident, fields, prefix, false, sizer, header_size) }, Fields::Unit => { if sizer { quote! { &#name::#ident => { #header_size } } } else { quote! { &#name::#ident => { #prefix Ok(()) } } } }, } }); if sizer { quote!( match self { #(#impls),* } ) } else { quote!( match self { #(#impls),* }? 
) } } } fn encoder_for_struct_kind(fields: Option<&[&syn::Field]>, named: bool, sizer: bool) -> Tokens { let unit = fields.is_none(); let fields: Vec<_> = fields.unwrap_or(&[]).iter() .enumerate().map(|(i, f)| FieldExt::new(f, i, named)).collect(); if unit { if sizer { quote![ 0 ] } else { quote![ ] } } else { let fields = fields.iter().map(|f| { let field_expr = &f.access_expr(); if sizer { quote!(size += #field_expr.logpack_sizer();) } else { quote!(#field_expr.logpack_encode(_buf)?) } }); if sizer { quote!{ let mut size : usize = 0; #(#fields);*; size } } else { quote!{ #(#fields);* } } } } struct FieldExt<'a> { field: &'a syn::Field, idx: usize, named: bool, } impl<'a> FieldExt<'a> { fn new(field: &'a syn::Field, idx: usize, named: bool) -> FieldExt<'a> { FieldExt { field, idx, named } } fn access_expr(&self) -> Tokens { if self.named { let ident = &self.field.ident; quote! { self.#ident } } else { let idx = syn::Index::from(self.idx); quote! { self.#idx } } } fn get_match_ident(&self) -> Ident { if self.named { self.field.ident.clone().unwrap() } else { Ident::new(&format!("f{}", self.idx), Span::call_site()) } } }
use std::collections::HashSet; use proc_macro2::{TokenStream as Tokens, Span}; use syn::{Data, DeriveInput, Fields, DataEnum, Ident}; use quote::quote; pub fn derive(input: &DeriveInput) -> Tokens { let name = &input.ident; let generics = super::add_trait_bounds( input.generics.clone(), &HashSet::new(), &[quote!{ logpack::Encoder }], ); let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let encoder_fields = match &input.data { Data::Enum(data) => encoder_for_enum(name, data, false), Data::Struct(variant_data) => encoder_for_struct(&variant_data.fields, false), Data::Union{..} => { panic!() } }; let sizer_fields = match &input.data { Data::Enum(data) => encoder_for_enum(name, &data, true), Data::Struct(variant_data) => encoder_for_struct(&variant_data.fields, true), Data::Union{..} => { panic!() } }; let result = quote! { impl #impl_generics logpack::Encoder for #name #ty_generics #where_clause { fn logpack_encode(&self, _buf: &mut logpack::buffers::BufEncoder) -> Result<(), (usize, usize)> { #encoder_fields; Ok(()) } fn logpack_sizer(&self) -> usize { #sizer_fields } } }; result } fn encoder_for_struct(fields: &Fields, sized: bool) -> Tokens { match fields { Fields::Named(ref fields) => { let fields : Vec<_> = fields.named.iter().collect(); encoder_for_struct_kind(Some(&fields[..]), true, sized) }, Fields::Unnamed(ref fields) => { let fields : Vec<_> = fields.unnamed.iter().collect(); encoder_for_struct_kind(Some(&fields[..]), false, sized) }, Fields::Unit => { encoder_for_struct_kind(None, false, sized) }, } } fn encoder_for_enum_struct<'a>(name: &Ident, ident: &Ident, fields: Vec<FieldExt<'a>>, prefix: Tokens, named: bool, sizer: bool, header_size: usize) -> Tokens { let one_ref = fields.iter().map(|v| { let ident = &v.get_match_ident(); quote! 
{ ref #ident } }); let fields_match = match named { false => quote!(( #(#one_ref),* )), true => quote!({ #(#one_ref),* }), }; let body = if sizer { let body_impls = fields.iter().map(|v| { let ident = &v.get_match_ident(); quote! { size += #ident.logpack_sizer(); } }); quote!(let mut size: usize = #header_size; #(#body_impls);*; size ) } else { let body_impls = fields.iter().map(|v| { let ident = &v.get_match_ident(); quote! { #ident.logpack_encode(_buf)? } }); quote!(#(#body_impls);*; Ok(()) ) }; quote! { &#name::#ident #fields_match => { #prefix #body } } } fn encoder_for_enum(name: &Ident, data_enum: &DataEnum, sizer: bool) -> Tokens { let variants = &data_enum.variants; if variants.len() == 0 { if sizer { quote!(0) } else { quote!() } } else { let mut idx : u32 = 0; let (idx_type, header_size) = if variants.len() < 0x100 { ("u8", 1) } else if variants.len() < 0x10000 { ("u16", 2) } else { ("u32", 4) }; let idx_type = Ident::new(idx_type, Span::call_site()); let impls = variants.iter().map(|v| { let ident = &v.iden
fn encoder_for_struct_kind(fields: Option<&[&syn::Field]>, named: bool, sizer: bool) -> Tokens { let unit = fields.is_none(); let fields: Vec<_> = fields.unwrap_or(&[]).iter() .enumerate().map(|(i, f)| FieldExt::new(f, i, named)).collect(); if unit { if sizer { quote![ 0 ] } else { quote![ ] } } else { let fields = fields.iter().map(|f| { let field_expr = &f.access_expr(); if sizer { quote!(size += #field_expr.logpack_sizer();) } else { quote!(#field_expr.logpack_encode(_buf)?) } }); if sizer { quote!{ let mut size : usize = 0; #(#fields);*; size } } else { quote!{ #(#fields);* } } } } struct FieldExt<'a> { field: &'a syn::Field, idx: usize, named: bool, } impl<'a> FieldExt<'a> { fn new(field: &'a syn::Field, idx: usize, named: bool) -> FieldExt<'a> { FieldExt { field, idx, named } } fn access_expr(&self) -> Tokens { if self.named { let ident = &self.field.ident; quote! { self.#ident } } else { let idx = syn::Index::from(self.idx); quote! { self.#idx } } } fn get_match_ident(&self) -> Ident { if self.named { self.field.ident.clone().unwrap() } else { Ident::new(&format!("f{}", self.idx), Span::call_site()) } } }
t; let prefix = if sizer { quote! {} } else { quote! { let idx : #idx_type = #idx as #idx_type; idx.logpack_encode(_buf)?; } }; idx += 1; match v.fields { Fields::Named(ref fields) => { let fields: Vec<_> = fields.named.iter().enumerate().map(|(i, f)| FieldExt::new(f, i, true)).collect(); encoder_for_enum_struct(name, ident, fields, prefix, true, sizer, header_size) }, Fields::Unnamed(ref fields) => { let fields: Vec<_> = fields.unnamed.iter().enumerate().map(|(i, f)| FieldExt::new(f, i, false)).collect(); encoder_for_enum_struct(name, ident, fields, prefix, false, sizer, header_size) }, Fields::Unit => { if sizer { quote! { &#name::#ident => { #header_size } } } else { quote! { &#name::#ident => { #prefix Ok(()) } } } }, } }); if sizer { quote!( match self { #(#impls),* } ) } else { quote!( match self { #(#impls),* }? ) } } }
function_block-function_prefixed
[ { "content": "fn bintype_for_enum(data_enum: &DataEnum) -> Tokens {\n\n let impls = data_enum.variants.iter().map(|v| {\n\n let fields = bintype_for_struct(&v.fields);\n\n let ident = &v.ident;\n\n quote! { (stringify!(#ident), #fields) }\n\n });\n\n\n\n quote!(vec![#(#impls),*])\n\n}\n", "file_path": "logpack-derive/src/type_derive.rs", "rank": 4, "score": 95069.40413083519 }, { "content": "pub fn derive(input: &DeriveInput) -> Tokens {\n\n let name = &input.ident;\n\n let generics = super::add_trait_bounds(\n\n input.generics.clone(),\n\n &HashSet::new(),\n\n &[quote!{ logpack::Logpack }],\n\n );\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n let fields = match &input.data {\n\n Data::Enum(data) => {\n\n let fields = bintype_for_enum(data);\n\n quote! { Some(logpack::Named::Enum( #fields )) }\n\n }\n\n Data::Struct(data) => {\n\n let fields = bintype_for_struct(&data.fields);\n\n quote! { Some(logpack::Named::Struct( #fields )) }\n\n }\n\n Data::Union{..} => {\n\n panic!()\n", "file_path": "logpack-derive/src/type_derive.rs", "rank": 5, "score": 84621.5786259209 }, { "content": "#[proc_macro_derive(Logpack, attributes(Logpack))]\n\npub fn derive(input: TokenStream) -> TokenStream {\n\n let input: DeriveInput = syn::parse(input).unwrap();\n\n let name = &input.ident;\n\n\n\n let a = type_derive::derive(&input);\n\n let b = encode_derive::derive(&input);\n\n let res = quote!(#a #b);\n\n\n\n if let Some((_, value)) =\n\n std::env::vars().find(|(key, _)| key.as_str() == \"LOGPACK_DERIVE_SAVE_DIR\")\n\n {\n\n let dir = std::path::Path::new(value.as_str());\n\n tokens_to_rustfmt_file(&dir.join(format!(\"derive_logpack_{}.rs\", name)), &res);\n\n }\n\n\n\n res.into()\n\n}\n\n\n", "file_path": "logpack-derive/src/lib.rs", "rank": 7, "score": 80016.13178579476 }, { "content": "pub fn encoded_string_len(value: &str) -> usize\n\n{\n\n let bytes = value.as_bytes();\n\n let size = bytes.len();\n\n\n\n if size < 0x40 { return 1 + size 
};\n\n if size < 0x4000 { return 2 + size };\n\n if size < 0x4000_0000 { return 4 + size };\n\n if size < 0x4000_0000_0000_0000 { return 8 + size };\n\n\n\n panic!(\"string length {}\", size);\n\n}\n\n\n", "file_path": "logpack/src/encoder.rs", "rank": 8, "score": 74400.75386349508 }, { "content": "pub fn encode_stored_string(value: &str, buf: &mut buffers::BufEncoder)\n\n -> Result<(), (usize, usize)>\n\n{\n\n let bytes = value.as_bytes();\n\n let size = bytes.len();\n\n\n\n // TODO: fix little-endian assumption\n\n\n\n if size < 0x40 {\n\n (0u8 | ((size as u8) << 2) ).logpack_encode(buf)?;\n\n } else if size < 0x4000 {\n\n (1u16 | ((size as u16) << 2) ).logpack_encode(buf)?;\n\n } else if size < 0x4000_0000 {\n\n (2u32 | ((size as u32) << 2) ).logpack_encode(buf)?;\n\n } else if size < 0x4000_0000_0000_0000 {\n\n (3u64 | ((size as u64) << 2) ).logpack_encode(buf)?;\n\n } else {\n\n panic!(\"string length {}\", size);\n\n }\n\n\n", "file_path": "logpack/src/encoder.rs", "rank": 9, "score": 72560.21392802912 }, { "content": "fn bintype_for_struct(fields: &Fields) -> Tokens {\n\n match fields {\n\n Fields::Named(ref fields) => {\n\n let fields : Vec<_> = fields.named.iter().map(|f| {\n\n let f_name = &f.ident;\n\n let ty = &f.ty;\n\n quote!((stringify!(#f_name),\n\n logpack::LogpackWrapper::<#ty>::logpack_describe(st)))\n\n }).collect();\n\n quote![ logpack::Struct::Named(vec![ #(#fields),* ]) ]\n\n },\n\n Fields::Unnamed(ref fields) => {\n\n let fields : Vec<_> = fields.unnamed.iter().map(|f| {\n\n let ty = &f.ty;\n\n quote!(logpack::LogpackWrapper::<#ty>::logpack_describe(st))\n\n }).collect();\n\n quote![ logpack::Struct::Tuple(vec![ #(#fields),* ]) ]\n\n },\n\n Fields::Unit => {\n\n quote![ logpack::Struct::Unit ]\n\n },\n\n }\n\n}\n\n\n", "file_path": "logpack-derive/src/type_derive.rs", "rank": 10, "score": 71752.30654694921 }, { "content": "fn test<E>(st: &mut logpack::SeenTypes,\n\n tm: &mut logpack::NameMap,\n\n e: &E)\n\n where E: logpack::Logpack + 
logpack::Encoder +\n\n serde::de::DeserializeOwned + Eq + PartialEq + Debug\n\n{\n\n use logpack::*;\n\n\n\n let type_desc = Logpack::logpack_describe_by_value(e, st);\n\n let type_ser = to_string(&type_desc).expect(\"Serialization failed\");\n\n let mut bytes : [u8; 1024] = [0; 1024];\n\n let mut enc_buf = logpack::BufEncoder::new(&mut bytes);\n\n\n\n e.logpack_encode(&mut enc_buf).unwrap();\n\n let sizer_result = e.logpack_sizer();\n\n\n\n let encoded = enc_buf.get_content();\n\n let deser_type = from_str(type_ser.as_str()).unwrap();\n\n let deser_type = tm.feed(deser_type).unwrap();\n\n\n", "file_path": "test/src/main.rs", "rank": 11, "score": 55779.588808177985 }, { "content": "fn test_ser_only<E>(st: &mut logpack::SeenTypes,\n\n tm: &mut logpack::NameMap,\n\n e: &E)\n\n where E: logpack::Logpack + logpack::Encoder + Debug\n\n{\n\n use logpack::*;\n\n\n\n let type_desc = Logpack::logpack_describe_by_value(e, st);\n\n let type_ser = to_string(&type_desc).expect(\"Serialization failed\");\n\n let mut bytes : [u8; 1024] = [0; 1024];\n\n let mut enc_buf = logpack::BufEncoder::new(&mut bytes);\n\n\n\n e.logpack_encode(&mut enc_buf).unwrap();\n\n let sizer_result = e.logpack_sizer();\n\n\n\n let encoded = enc_buf.get_content();\n\n let deser_type = from_str(type_ser.as_str()).unwrap();\n\n let deser_type = tm.feed(deser_type).unwrap();\n\n\n\n let repr_output = {\n", "file_path": "test/src/main.rs", "rank": 12, "score": 54320.457336943786 }, { "content": "fn tokens_to_rustfmt_file(filename: &std::path::Path, expanded: &Tokens) {\n\n let mut file = std::fs::File::create(&filename).unwrap();\n\n use std::io::Write;\n\n file.write_all(format!(\"{}\", expanded).as_bytes()).unwrap();\n\n Command::new(\"rustfmt\")\n\n .args(&[filename])\n\n .output()\n\n .expect(\"failed to execute process\");\n\n}\n\n\n", "file_path": "logpack-derive/src/lib.rs", "rank": 13, "score": 47344.64652240075 }, { "content": "pub trait Callbacks {\n\n type SubType : Callbacks;\n\n\n\n fn 
handle_u8(&mut self, _: u8);\n\n fn handle_u16(&mut self, _: u16);\n\n fn handle_u32(&mut self, _: u32);\n\n fn handle_u64(&mut self, _: u64);\n\n fn handle_i8(&mut self, _: i32);\n\n fn handle_i16(&mut self, _: i32);\n\n fn handle_i32(&mut self, _: i32);\n\n fn handle_i64(&mut self, _: i64);\n\n fn handle_bool(&mut self, _: bool);\n\n fn handle_string(&mut self, _: &str);\n\n fn handle_unit(&mut self);\n\n fn handle_phantom(&mut self);\n\n fn handle_raw_ptr(&mut self, _: u64);\n\n\n\n fn begin_enum(&mut self, typename_id: &TypeNameId, option_name: &String) -> &mut Self::SubType;\n\n fn end_enum(&mut self, typename_id: &TypeNameId);\n\n\n", "file_path": "logpack/src/decoder.rs", "rank": 14, "score": 37936.63828398475 }, { "content": "pub trait Logpack {\n\n fn logpack_describe(seen: &mut SeenTypes) -> RefDesc;\n\n fn logpack_describe_by_value(&self, seen: &mut SeenTypes) -> RefDesc {\n\n Self::logpack_describe(seen)\n\n }\n\n}\n\n\n\nmacro_rules! simple {\n\n ($a:tt, $b:ident) => {\n\n impl Logpack for $a {\n\n fn logpack_describe(_: &mut SeenTypes) -> RefDesc {\n\n Description::$b\n\n }\n\n }\n\n }\n\n}\n\n\n\nsimple!(usize, U64);\n\nsimple!(u64, U64);\n\nsimple!(u32, U32);\n", "file_path": "logpack/src/lib.rs", "rank": 15, "score": 37936.63828398475 }, { "content": "pub trait Encoder {\n\n fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)>;\n\n fn logpack_sizer(&self) -> usize;\n\n}\n\n\n\nmacro_rules! 
simple {\n\n ($a:tt) => {\n\n impl Encoder for $a {\n\n #[inline(always)]\n\n fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n\n buf.put(self)\n\n }\n\n #[inline(always)]\n\n fn logpack_sizer(&self) -> usize {\n\n size_of::<Self>()\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "logpack/src/encoder.rs", "rank": 16, "score": 37936.63828398475 }, { "content": "fn main()\n\n{\n\n let mut st = logpack::SeenTypes::new();\n\n let mut tm = logpack::NameMap::new();\n\n\n\n test(&mut st, &mut tm, &SimpleEnum::WithUnit);\n\n test(&mut st, &mut tm, &SimpleEnum::TupleField(30));\n\n test(&mut st, &mut tm, &SimpleEnum::NamedField { some_str: String::from(\"test\") });\n\n test(&mut st, &mut tm, &SimpleStructNamed { some_str: String::from(\"bla\") });\n\n test(&mut st, &mut tm, &Some(SimpleStructNamed { some_str: String::from(\"bla\") }));\n\n test(&mut st, &mut tm, &SimpleStructTuple(123, String::from(\"bla\")));\n\n test(&mut st, &mut tm, &SimpleStructUnit);\n\n test(&mut st, &mut tm, &SimpleEnum::OtherUnit(SimpleStructUnit));\n\n test(&mut st, &mut tm, &Some(10u32));\n\n test(&mut st, &mut tm, &Some((10u32, (4u8, 12u32))));\n\n\n\n let sr = StaticRecord {\n\n file : \"file.rs\",\n\n line : 123,\n\n function : \"func\",\n\n module: \"mod\",\n\n };\n\n\n\n test_ser_only(&mut st, &mut tm, &sr);\n\n}\n", "file_path": "test/src/main.rs", "rank": 17, "score": 37233.43941456187 }, { "content": "fn add_trait_bounds(\n\n mut generics: Generics,\n\n skip_set: &HashSet<String>,\n\n trait_names: &[Tokens],\n\n) -> Generics {\n\n for param in &mut generics.params {\n\n if let GenericParam::Type(ref mut type_param) = *param {\n\n if let Some(_) = skip_set.get(&type_param.ident.to_string()) {\n\n continue;\n\n }\n\n for trait_name in trait_names {\n\n let bound = syn::parse(quote! { #trait_name }.into()).unwrap();\n\n type_param.bounds.push(bound);\n\n }\n\n let bound = syn::parse(quote! 
{ 'static }.into()).unwrap();\n\n type_param.bounds.push(bound);\n\n }\n\n }\n\n generics\n\n}\n", "file_path": "logpack-derive/src/lib.rs", "rank": 18, "score": 33762.64137259316 }, { "content": " ($(($type:ident, $num:tt)),*) => {\n\n impl<$($type),*> Encoder for ($($type),*)\n\n where $($type : Encoder),*\n\n {\n\n fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n\n $(\n\n $type::logpack_encode(&self.$num, buf)?;\n\n )*\n\n\n\n Ok(())\n\n }\n\n fn logpack_sizer(&self) -> usize {\n\n let mut size = 0;\n\n $( size += $type::logpack_sizer(&self.$num); )*\n\n size\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "logpack/src/encoder.rs", "rank": 25, "score": 18.06276323633163 }, { "content": " Ok(())\n\n }\n\n\n\n fn decode_struct<C>(&mut self, typename_id: Option<&TypeNameId>, struct_desc: &Struct<TypeNameId>, callbacks: &mut C) -> Result<(), Error>\n\n where C: Callbacks\n\n {\n\n use Struct::*;\n\n\n\n match struct_desc {\n\n &Unit => {\n\n callbacks.struct_unit(typename_id);\n\n }\n\n &Named(ref v) => {\n\n let ctx = callbacks.begin_struct_named(typename_id);\n\n let mut idx = 0;\n\n for &(ref key, ref value) in v.iter() {\n\n let ctx = ctx.begin_named_field( idx, key);\n\n self.decode(value, ctx)?;\n\n ctx.end_named_field();\n\n idx += 1;\n", "file_path": "logpack/src/decoder.rs", "rank": 26, "score": 17.395077584364344 }, { "content": " self.slice.len()\n\n }\n\n\n\n pub fn remaining(&self) -> usize {\n\n self.slice.len() - self.position\n\n }\n\n\n\n pub fn get_slice(&mut self, size: usize) -> Result<&[u8], (usize, usize)>\n\n {\n\n let remaining = self.remaining();\n\n if remaining < size {\n\n return Err((remaining, size));\n\n }\n\n\n\n let value = &self.slice[self.position .. 
self.position + size];\n\n self.position += size;\n\n Ok(value)\n\n }\n\n\n\n pub fn get_remaining_slice(&mut self) -> Result<&[u8], (usize, usize)>\n", "file_path": "logpack/src/buffers.rs", "rank": 27, "score": 17.236876333199465 }, { "content": "simple!(usize);\n\nsimple!(u64);\n\nsimple!(u32);\n\nsimple!(u16);\n\nsimple!(u8);\n\nsimple!(isize);\n\nsimple!(i64);\n\nsimple!(i32);\n\nsimple!(i16);\n\nsimple!(i8);\n\nsimple!(bool);\n\n\n\nimpl Encoder for () {\n\n #[inline(always)]\n\n fn logpack_encode(&self, _buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n\n Ok(())\n\n }\n\n #[inline(always)]\n\n fn logpack_sizer(&self) -> usize { 0 }\n\n}\n\n\n", "file_path": "logpack/src/encoder.rs", "rank": 28, "score": 17.148769616696228 }, { "content": " Ok(())\n\n }\n\n\n\n pub fn append_bytes(&mut self, slice: &[u8]) -> Result<(), (usize, usize)> {\n\n let size = slice.len();\n\n let remaining = self.remaining();\n\n if remaining < size {\n\n return Err((remaining, size));\n\n }\n\n\n\n let dest = &mut self.slice[self.position];\n\n\n\n unsafe {\n\n ::std::ptr::copy(&slice[0], dest as *mut u8, size);\n\n }\n\n\n\n self.position += size;\n\n Ok(())\n\n\n\n }\n", "file_path": "logpack/src/buffers.rs", "rank": 29, "score": 16.409762965985365 }, { "content": " &None => 1,\n\n &Some(ref val) => {\n\n 1 + val.logpack_sizer()\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n\nimpl<T, E> Encoder for Result<T, E>\n\n where T: Encoder, E: Encoder\n\n{\n\n #[inline(always)]\n\n fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n\n match self {\n\n &Ok(ref val) => {\n\n (0u8).logpack_encode(buf)?;\n\n val.logpack_encode(buf)\n\n }\n\n &Err(ref val) => {\n", "file_path": "logpack/src/encoder.rs", "rank": 30, "score": 16.345254924763093 }, { "content": " return Err((remaining, size));\n\n }\n\n\n\n let position = self.position;\n\n self.position += size;\n\n\n\n Ok((&mut self.slice[position] as *mut u8) as *mut T)\n\n }\n\n\n\n #[inline(always)]\n\n 
pub unsafe fn reserve_space_by_size(&mut self, size: usize) -> Result<*mut u8, (usize, usize)>\n\n {\n\n let remaining = self.remaining();\n\n if remaining < size {\n\n return Err((remaining, size));\n\n }\n\n\n\n let position = self.position;\n\n self.position += size;\n\n\n", "file_path": "logpack/src/buffers.rs", "rank": 31, "score": 16.008779065002987 }, { "content": "pub enum Error {\n\n MissingType(TypeNameId),\n\n UTF8Error(Utf8Error),\n\n GetError((usize, usize)),\n\n InvalidIndex(usize, usize),\n\n InvalidSome(u8),\n\n InvalidResult(u8),\n\n}\n\n\n\nmacro_rules! simple {\n\n ($self:ident, $callbacks:ident, $func:ident) => {\n\n {\n\n let val = $self.buffer.get::<_>().map_err(Error::GetError)?;\n\n $callbacks.$func(val);\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, 'b> Decoder<'a, 'b> {\n", "file_path": "logpack/src/decoder.rs", "rank": 32, "score": 15.717335095321033 }, { "content": "use logpack::decoder::Callbacks;\n\nuse logpack::decoder::TypeNameId;\n\nuse std::fmt::Write;\n\n\n\npub mod ansi;\n\n\n\npub struct Repr<'a> {\n\n output: &'a mut String,\n\n enum_names: bool,\n\n}\n\n\n\nimpl<'a> Repr<'a> {\n\n pub fn new(output: &'a mut String) -> Self {\n\n let enum_names = false;\n\n Self { output, enum_names }\n\n }\n\n\n\n pub fn with_enum_names(self) -> Self {\n\n Self { enum_names: true, ..self }\n\n }\n", "file_path": "logpack-ron/src/lib.rs", "rank": 33, "score": 15.696420060573598 }, { "content": "\n\n pub fn make_name_for_id(&mut self, name: &'static str, type_id: TypeId) -> (bool, TypeNameId) {\n\n if let Some(value) = self.by_ids.get(&type_id) {\n\n return (false, *value);\n\n }\n\n\n\n if let Some(value) = self.names.get_mut(name) {\n\n *value += 1;\n\n let v = (name, *value);\n\n self.by_ids.insert(type_id, v);\n\n return (true, v);\n\n }\n\n\n\n let v = (name, 0);\n\n self.names.insert(name, 0);\n\n self.by_ids.insert(type_id, v);\n\n (true, v)\n\n 
}\n\n}\n\n\n\n//////////////////////////////////////////////////////////////////////////\n\n//\n\n// Logpack and impl\n\n\n\npub type RefDesc = Description<TypeNameId, FieldName>;\n\n\n", "file_path": "logpack/src/lib.rs", "rank": 34, "score": 15.642424561563281 }, { "content": " pub fn new(types: &'a NameMap, buffer: BufDecoder<'b>) -> Self {\n\n Self { types, buffer }\n\n }\n\n\n\n pub fn into_decoder(self) -> BufDecoder<'b> {\n\n let Self { buffer, .. } = self;\n\n\n\n buffer\n\n }\n\n\n\n pub fn decode<C>(&mut self, desc: &ResolvedDesc, callbacks: &mut C) -> Result<(), Error>\n\n where C: Callbacks\n\n {\n\n use Description::*;\n\n match desc {\n\n &U8 => simple!(self, callbacks, handle_u8),\n\n &U16 => simple!(self, callbacks, handle_u16),\n\n &U32 => simple!(self, callbacks, handle_u32),\n\n &U64 => simple!(self, callbacks, handle_u64),\n\n &I8 => simple!(self, callbacks, handle_i8),\n", "file_path": "logpack/src/decoder.rs", "rank": 35, "score": 15.436108054574117 }, { "content": "}\n\n\n\nimpl<T> Encoder for Option<T>\n\n where T: Encoder\n\n{\n\n #[inline(always)]\n\n fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n\n match self {\n\n &None => {\n\n (0u8).logpack_encode(buf)\n\n }\n\n &Some(ref val) => {\n\n (1u8).logpack_encode(buf)?;\n\n val.logpack_encode(buf)\n\n }\n\n }\n\n }\n\n #[inline(always)]\n\n fn logpack_sizer(&self) -> usize {\n\n match self {\n", "file_path": "logpack/src/encoder.rs", "rank": 36, "score": 15.293726918649671 }, { "content": " }\n\n\n\n pub fn decode_by_name<C>(&mut self, typename_id: &TypeNameId, callbacks: &mut C) -> Result<(), Error>\n\n where C: Callbacks\n\n {\n\n let named = match self.types.map.get(typename_id) {\n\n None => return Err(Error::MissingType(typename_id.clone())),\n\n Some(x) => x,\n\n };\n\n\n\n self.decode_by_name_direct(typename_id, named, callbacks)\n\n }\n\n\n\n fn decode_array<C>(&mut self, size: usize, sub: &Box<ResolvedDesc>, callbacks: &mut C) -> Result<(), 
Error>\n\n where C: Callbacks\n\n {\n\n let ctx = callbacks.begin_array(size);\n\n let mut idx = 0;\n\n for _ in 0 .. size {\n\n ctx.begin_array_item(idx);\n", "file_path": "logpack/src/decoder.rs", "rank": 37, "score": 15.290416459695283 }, { "content": "\n\npub type FeedResult<T> = Result<T, FeedError>;\n\n\n\nimpl NameMap {\n\n pub fn new() -> Self {\n\n Self {\n\n map : HashMap::new(),\n\n }\n\n }\n\n\n\n pub fn get_map(&self) -> &HashMap<TypeNameId, Named<TypeNameId>> {\n\n &self.map\n\n }\n\n\n\n pub fn feed(&mut self, description: ResolvedDesc) -> FeedResult<ResolvedDesc>\n\n {\n\n use Description::*;\n\n\n\n Ok(match description {\n\n Option(o) => Option(Box::new(self.feed(*o)?)),\n", "file_path": "logpack/src/decoder.rs", "rank": 38, "score": 14.951723063239157 }, { "content": "use std::mem::size_of;\n\n\n\n//////////////////////////////////////////////////////////////////////////\n\n// BufEncoder\n\n//\n\n\n\npub struct BufEncoder<'a> {\n\n slice: &'a mut [u8],\n\n position: usize,\n\n}\n\n\n\nimpl<'a> BufEncoder<'a> {\n\n #[inline(always)]\n\n pub fn new(slice: &'a mut [u8]) -> Self {\n\n Self { slice: slice, position: 0 }\n\n }\n\n}\n\n\n\nimpl<'a> BufEncoder<'a> {\n\n #[inline(always)]\n", "file_path": "logpack/src/buffers.rs", "rank": 39, "score": 14.899343992887173 }, { "content": "use std::collections::HashSet;\n\n\n\nuse proc_macro2::{TokenStream as Tokens};\n\nuse syn::{Data, DeriveInput, Fields, DataEnum};\n\nuse quote::quote;\n\n\n", "file_path": "logpack-derive/src/type_derive.rs", "rank": 40, "score": 14.766649201317884 }, { "content": " Ok(&mut self.slice[position] as *mut u8)\n\n }\n\n\n\n #[inline(always)]\n\n pub fn put<T>(&mut self, item: &T) -> Result<(), (usize, usize)>\n\n where T: Sized + Copy\n\n {\n\n let size = size_of::<T>();\n\n let remaining = self.remaining();\n\n if remaining < size {\n\n return Err((remaining, size));\n\n }\n\n\n\n let dest = &mut self.slice[self.position];\n\n\n\n unsafe {\n\n 
::std::ptr::write_unaligned((dest as *mut u8) as *mut T, item.clone());\n\n }\n\n\n\n self.position += size;\n", "file_path": "logpack/src/buffers.rs", "rank": 41, "score": 14.752691900689234 }, { "content": " fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n\n let size : u64 = self.len() as u64;\n\n size.logpack_encode(buf)?;\n\n\n\n for i in 0..size {\n\n self[i as usize].logpack_encode(buf)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n #[inline(always)]\n\n fn logpack_sizer(&self) -> usize {\n\n let mut size = 0;\n\n for i in 0..size {\n\n size += self[i as usize].logpack_sizer();\n\n }\n\n size\n\n }\n\n}\n", "file_path": "logpack/src/encoder.rs", "rank": 42, "score": 14.617458336508216 }, { "content": "use logpack::decoder::Callbacks;\n\nuse logpack::decoder::TypeNameId;\n\n\n\nuse ansi_term::ANSIString;\n\nuse ansi_term::Colour::RGB;\n\nuse ansi_term::Colour;\n\n\n\npub struct Repr<'a> {\n\n output: &'a mut Vec<ANSIString<'static>>,\n\n enum_names: bool,\n\n}\n\n\n\nimpl<'a> Repr<'a> {\n\n pub fn new(output: &'a mut Vec<ANSIString<'static>>) -> Self {\n\n let enum_names = false;\n\n Self { output, enum_names }\n\n }\n\n\n\n pub fn with_enum_names(self) -> Self {\n\n Self { enum_names: true, ..self }\n", "file_path": "logpack-ron/src/ansi.rs", "rank": 43, "score": 14.453188024458363 }, { "content": "\n\nuse std::time::Duration;\n\n\n\nimpl Encoder for Duration\n\n{\n\n #[inline(always)]\n\n fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n\n let secs: u64 = self.as_secs();\n\n secs.logpack_encode(buf)?;\n\n let nanos: u32 = self.subsec_nanos();\n\n nanos.logpack_encode(buf)?;\n\n Ok(())\n\n }\n\n\n\n #[inline(always)]\n\n fn logpack_sizer(&self) -> usize {\n\n let secs: u64 = 0;\n\n let nanos: u32 = 0;\n\n secs.logpack_sizer() + nanos.logpack_sizer()\n\n }\n", "file_path": "logpack/src/encoder.rs", "rank": 44, "score": 14.398420753169352 }, { "content": " fn option_none(&mut self);\n\n\n\n 
fn option_some(&mut self) -> &mut Self::SubType;\n\n fn option_end(&mut self);\n\n\n\n fn result_ok(&mut self) -> &mut Self::SubType;\n\n fn result_err(&mut self) -> &mut Self::SubType;\n\n fn result_end(&mut self);\n\n\n\n fn struct_unit(&mut self, typename_id: Option<&TypeNameId>);\n\n\n\n fn begin_struct_named(&mut self, typename_id: Option<&TypeNameId>) -> &mut Self::SubType;\n\n fn begin_named_field(&mut self, field_idx: u16, field_name: &String) -> &mut Self::SubType;\n\n fn end_named_field(&mut self);\n\n fn end_struct_named(&mut self);\n\n\n\n fn begin_struct_tuple(&mut self, typename_id: Option<&TypeNameId>) -> &mut Self::SubType;\n\n fn begin_tuple_field(&mut self, field_idx: u16) -> &mut Self::SubType;\n\n fn end_tuple_field(&mut self);\n\n fn end_struct_tuple(&mut self);\n", "file_path": "logpack/src/decoder.rs", "rank": 45, "score": 14.29679449525593 }, { "content": " pub fn len(&self) -> usize {\n\n self.slice.len()\n\n }\n\n\n\n #[inline(always)]\n\n pub fn remaining(&self) -> usize {\n\n self.slice.len() - self.position\n\n }\n\n\n\n pub fn get_content(&self) -> &[u8] {\n\n &self.slice[0 .. 
self.position]\n\n }\n\n\n\n #[inline(always)]\n\n pub unsafe fn reserve_space<T: Sized + Copy>(&mut self) -> Result<(*mut T), (usize, usize)>\n\n where T: Sized + Copy\n\n {\n\n let size = size_of::<T>();\n\n let remaining = self.remaining();\n\n if remaining < size {\n", "file_path": "logpack/src/buffers.rs", "rank": 46, "score": 14.079119773151998 }, { "content": " (1u8).logpack_encode(buf)?;\n\n val.logpack_encode(buf)\n\n }\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n fn logpack_sizer(&self) -> usize {\n\n match self {\n\n &Ok(ref val) => {\n\n 1 + val.logpack_sizer()\n\n }\n\n &Err(ref val) => {\n\n 1 + val.logpack_sizer()\n\n }\n\n }\n\n }\n\n}\n\n\n\n//////////////////////////////////////////////////////////////////////\n", "file_path": "logpack/src/encoder.rs", "rank": 47, "score": 14.014697032565323 }, { "content": " }\n\n ctx.end_struct_named();\n\n }\n\n &Tuple(ref v) => {\n\n let ctx = callbacks.begin_struct_tuple(typename_id);\n\n let mut idx = 0;\n\n for ref value in v.iter() {\n\n let ctx = ctx.begin_tuple_field(idx);\n\n self.decode(value, ctx)?;\n\n ctx.end_tuple_field();\n\n idx += 1;\n\n }\n\n ctx.end_struct_tuple();\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "logpack/src/decoder.rs", "rank": 48, "score": 13.834831326380074 }, { "content": "\n\n ((f3 as u64) << 30) | ((f2 as u64) << 14) | ((f1 as u64) << 6) | ((f0 >> 2) as u64)\n\n }\n\n _ => panic!(),\n\n };\n\n\n\n let u8slice = self.buffer.get_slice(len as usize).map_err(Error::GetError)?;\n\n let strslice = str::from_utf8(u8slice).map_err(Error::UTF8Error)?;\n\n callbacks.handle_string(strslice);\n\n\n\n Ok(())\n\n }\n\n\n\n fn decode_by_name_direct<C>(&mut self, typename_id: &TypeNameId, named: &Named<TypeNameId>, callbacks: &mut C) -> Result<(), Error>\n\n where C: Callbacks\n\n {\n\n use Named::*;\n\n\n\n match named {\n\n &Enum(ref optvec) => {\n", "file_path": "logpack/src/decoder.rs", "rank": 49, "score": 13.75874117796993 }, { "content": " q@String => q,\n\n\n\n q@ByName(_, 
None) => q,\n\n ByName(name, Some(named)) => {\n\n let v = self.feed_named(named)?;\n\n self.map.insert(name.clone(), v);\n\n ByName(name, None)\n\n },\n\n })\n\n }\n\n\n\n pub fn feed_named(&mut self, named: Named<TypeNameId>) -> FeedResult<Named<TypeNameId>>\n\n {\n\n use Named::*;\n\n\n\n Ok(match named {\n\n Enum(vec) => Enum({\n\n let items: ::std::result::Result<Vec<_>, _> =\n\n vec.into_iter().map(|(name, x)| Ok((name, self.feed_struct(x)?))).collect();\n\n items?\n", "file_path": "logpack/src/decoder.rs", "rank": 50, "score": 13.632335571657512 }, { "content": "\n\n fn begin_tuple(&mut self, size: usize) -> &mut Self::SubType;\n\n fn begin_tuple_item(&mut self, field_idx: u16);\n\n fn end_tuple_item(&mut self);\n\n fn end_tuple(&mut self);\n\n\n\n fn begin_array(&mut self, size: usize) -> &mut Self::SubType;\n\n fn begin_array_item(&mut self, field_idx: u16);\n\n fn end_array_item(&mut self);\n\n fn end_array(&mut self);\n\n\n\n fn begin_slice(&mut self, size: usize) -> &mut Self::SubType;\n\n fn begin_slice_item(&mut self, field_idx: u16);\n\n fn end_slice_item(&mut self);\n\n fn end_slice(&mut self);\n\n}\n\n\n\nuse std::str::{Utf8Error, self};\n\n\n\n#[derive(Debug)]\n", "file_path": "logpack/src/decoder.rs", "rank": 51, "score": 13.476789478082177 }, { "content": " {\n\n let remaining = self.remaining();\n\n self.get_slice(remaining)\n\n }\n\n\n\n pub fn get<T>(&mut self) -> Result<T, (usize, usize)>\n\n where T: Sized + Copy\n\n {\n\n let size = size_of::<T>();\n\n let remaining = self.remaining();\n\n if remaining < size {\n\n return Err((remaining, size));\n\n }\n\n\n\n let source = &self.slice[self.position];\n\n\n\n let value = unsafe {\n\n ::std::ptr::read((source as *const u8) as *const T)\n\n };\n\n\n\n self.position += size;\n\n\n\n Ok(value)\n\n }\n\n}\n\n\n\n\n", "file_path": "logpack/src/buffers.rs", "rank": 53, "score": 13.416669773109994 }, { "content": " }),\n\n Struct(struct_) => Struct(self.feed_struct(struct_)?),\n\n })\n\n }\n\n\n\n 
pub fn feed_struct(&mut self, struct_: Struct<TypeNameId>) -> FeedResult<Struct<TypeNameId>>\n\n {\n\n use Struct::*;\n\n\n\n Ok(match struct_ {\n\n Unit => Unit,\n\n Tuple(vec) => Tuple({\n\n let items: ::std::result::Result<Vec<_>, _> =\n\n vec.into_iter().map(|x|self.feed(x)).collect();\n\n items?\n\n }),\n\n Named(vec) => Named({\n\n let items: ::std::result::Result<Vec<_>, _> =\n\n vec.into_iter().map(|(name, x)| Ok((name, self.feed(x)?))).collect();\n\n items?\n", "file_path": "logpack/src/decoder.rs", "rank": 54, "score": 13.340973416543212 }, { "content": " }\n\n fn logpack_sizer(&self) -> usize {\n\n let mut size = 0;\n\n for i in 0..$len {\n\n size += self[i].logpack_sizer();\n\n }\n\n size\n\n }\n\n }\n\n )+\n\n }\n\n}\n\n\n\narray_impls!(00\n\n 01 02 03 04 05 06 07 08 09 10\n\n 11 12 13 14 15 16 17 18 19 20\n\n 21 22 23 24 25 26 27 28 29 30\n\n 31 32);\n\n\n\nmacro_rules! tuple {\n", "file_path": "logpack/src/encoder.rs", "rank": 55, "score": 13.274300726583306 }, { "content": " unsafe {\n\n let space = buf.reserve_space_by_size(size)?;\n\n ::std::ptr::copy_nonoverlapping(bytes.as_ptr(), space, size);\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl<'a> Encoder for &'a str {\n\n #[inline(always)]\n\n fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n\n encode_stored_string(self, buf)\n\n }\n\n #[inline(always)]\n\n fn logpack_sizer(&self) -> usize {\n\n encoded_string_len(self)\n\n }\n\n}\n\n\n\nimpl Encoder for String {\n\n #[inline(always)]\n", "file_path": "logpack/src/encoder.rs", "rank": 56, "score": 13.02936846423887 }, { "content": " &I16 => simple!(self, callbacks, handle_i16),\n\n &I32 => simple!(self, callbacks, handle_i32),\n\n &I64 => simple!(self, callbacks, handle_i64),\n\n &Bool => simple!(self, callbacks, handle_bool),\n\n &RawPtr => simple!(self, callbacks, handle_raw_ptr),\n\n &Unit => { callbacks.handle_unit(); Ok(()) }\n\n &PhantomData => { callbacks.handle_phantom(); Ok(()) }\n\n &ByName(ref typename_id, 
None) => {\n\n self.decode_by_name(typename_id, callbacks)\n\n }\n\n &ByName(ref typename_id, Some(ref desc)) => {\n\n self.decode_by_name_direct(typename_id, desc, callbacks)\n\n }\n\n &String => self.decode_string(callbacks),\n\n &Option(ref sub) => self.decode_option(sub, callbacks),\n\n &Result(ref sub, ref sub2) => self.decode_result(sub, sub2, callbacks),\n\n &Array(size, ref sub) => self.decode_array(size, sub, callbacks),\n\n &Slice(ref sub) => self.decode_slice(sub, callbacks),\n\n &Tuple(ref subs) => self.decode_tuple(subs, callbacks),\n\n }\n", "file_path": "logpack/src/decoder.rs", "rank": 57, "score": 12.848895953392665 }, { "content": " Slice(o) => Slice(Box::new(self.feed(*o)?)),\n\n Array(size, o) => Array(size, Box::new(self.feed(*o)?)),\n\n Result(t, f) => Result(Box::new(self.feed(*t)?), Box::new(self.feed(*f)?)),\n\n Tuple(vec) => Tuple({\n\n let items: ::std::result::Result<Vec<_>, _> = vec.into_iter().map(|x| self.feed(x)).collect();\n\n items?\n\n }),\n\n\n\n q@U64 => q,\n\n q@U32 => q,\n\n q@U16 => q,\n\n q@U8 => q,\n\n q@I64 => q,\n\n q@I32 => q,\n\n q@I16 => q,\n\n q@I8 => q,\n\n q@Unit => q,\n\n q@PhantomData => q,\n\n q@Bool => q,\n\n q@RawPtr => q,\n", "file_path": "logpack/src/decoder.rs", "rank": 58, "score": 12.72427145550655 }, { "content": " }\n\n };\n\n\n\n let result = quote! 
{\n\n impl #impl_generics logpack::Logpack for #name #ty_generics #where_clause {\n\n fn logpack_describe(st: &mut logpack::SeenTypes) ->\n\n logpack::Description<logpack::TypeNameId, logpack::FieldName>\n\n {\n\n use std::any::TypeId;\n\n let self_id = TypeId::of::<Self>();\n\n let (first_seen, typename_id) = st.make_name_for_id(stringify!(#name), self_id);\n\n let may_recurse = if first_seen { #fields } else { None };\n\n\n\n logpack::Description::ByName(typename_id, may_recurse)\n\n }\n\n }\n\n };\n\n\n\n result\n\n}\n\n\n", "file_path": "logpack-derive/src/type_derive.rs", "rank": 59, "score": 12.319148311126778 }, { "content": " let len = optvec.len();\n\n let idx = if len < 0x100 {\n\n self.buffer.get::<u8>().map_err(Error::GetError)? as usize\n\n } else if len < 0x10000 {\n\n self.buffer.get::<u16>().map_err(Error::GetError)? as usize\n\n } else {\n\n self.buffer.get::<u32>().map_err(Error::GetError)? as usize\n\n };\n\n if idx >= len {\n\n return Err(Error::InvalidIndex(idx, len));\n\n }\n\n let ctx = callbacks.begin_enum(typename_id, &optvec[idx].0);\n\n self.decode_struct(None, &optvec[idx].1, ctx)?;\n\n ctx.end_enum(typename_id);\n\n }\n\n &Struct(ref desc) => {\n\n self.decode_struct(Some(typename_id), desc, callbacks)?;\n\n }\n\n }\n\n\n", "file_path": "logpack/src/decoder.rs", "rank": 60, "score": 12.234897516227354 }, { "content": " }\n\n }\n\n\n\n fn begin_struct_named(&mut self, typename_id: Option<&TypeNameId>) -> &mut Self::SubType {\n\n if let Some(typename_id) = typename_id {\n\n *self.output += typename_id.0.as_str();\n\n }\n\n *self.output += \"(\";\n\n self\n\n }\n\n fn begin_named_field(&mut self, field_idx: u16, field_name: &String) -> &mut Self::SubType {\n\n if field_idx != 0 {\n\n *self.output += \", \";\n\n }\n\n *self.output += field_name.as_str();\n\n *self.output += \": \";\n\n self\n\n }\n\n fn end_named_field(&mut self) {\n\n }\n", "file_path": "logpack-ron/src/lib.rs", "rank": 61, "score": 12.144843581116493 }, { "content": " fn 
end_struct_named(&mut self) {\n\n *self.output += \")\";\n\n }\n\n\n\n fn begin_struct_tuple(&mut self, typename_id: Option<&TypeNameId>) -> &mut Self::SubType {\n\n if let Some(typename_id) = typename_id {\n\n *self.output += typename_id.0.as_str();\n\n }\n\n *self.output += \"(\";\n\n self\n\n }\n\n fn begin_tuple_field(&mut self, field_idx: u16) -> &mut Self::SubType {\n\n if field_idx != 0 {\n\n *self.output += \", \";\n\n }\n\n self\n\n }\n\n fn end_tuple_field(&mut self) {\n\n }\n\n fn end_struct_tuple(&mut self) {\n", "file_path": "logpack-ron/src/lib.rs", "rank": 62, "score": 12.106218761649359 }, { "content": " *self.output += \")\";\n\n }\n\n\n\n fn begin_tuple(&mut self, _size: usize) -> &mut Self::SubType {\n\n *self.output += \"(\";\n\n self\n\n }\n\n fn begin_tuple_item(&mut self, field_idx: u16) {\n\n if field_idx != 0 {\n\n *self.output += \", \";\n\n }\n\n }\n\n fn end_tuple_item(&mut self) {\n\n }\n\n fn end_tuple(&mut self) {\n\n *self.output += \")\";\n\n }\n\n\n\n fn begin_array(&mut self, _size: usize) -> &mut Self::SubType {\n\n *self.output += \"[\";\n", "file_path": "logpack-ron/src/lib.rs", "rank": 63, "score": 11.992310902915726 }, { "content": "//////////////////////////////////////////////////////////////////////////\n\n//\n\n// SeenTypes\n\n\n\npub type TypeName = &'static str;\n\npub type FieldName = &'static str;\n\npub type TypeNameId = (TypeName, u16);\n\n\n\npub struct SeenTypes {\n\n by_ids: HashMap<TypeId, (TypeName, u16)>,\n\n names: HashMap<TypeName, u16>,\n\n}\n\n\n\nimpl SeenTypes {\n\n pub fn new() -> Self {\n\n Self {\n\n by_ids: HashMap::new(),\n\n names: HashMap::new(),\n\n }\n\n }\n", "file_path": "logpack/src/lib.rs", "rank": 64, "score": 11.978266245826202 }, { "content": "\n\n\n\nimpl<T> Encoder for Box<T>\n\n where T: Encoder\n\n{\n\n #[inline(always)]\n\n fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n\n (**self).logpack_encode(buf)\n\n }\n\n\n\n #[inline(always)]\n\n fn 
logpack_sizer(&self) -> usize {\n\n (**self).logpack_sizer()\n\n }\n\n}\n\n\n\nimpl<T> Encoder for *mut T\n\n{\n\n #[inline(always)]\n\n fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n", "file_path": "logpack/src/encoder.rs", "rank": 65, "score": 11.933354090096337 }, { "content": " fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n\n encode_stored_string(self.as_str(), buf)\n\n }\n\n #[inline(always)]\n\n fn logpack_sizer(&self) -> usize {\n\n encoded_string_len(self.as_str())\n\n }\n\n}\n\n\n\nmacro_rules! array_impls {\n\n ($($len:tt)+) => {\n\n $(\n\n impl<T> Encoder for [T; $len]\n\n where T: Encoder,\n\n {\n\n fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n\n for i in 0..$len {\n\n self[i].logpack_encode(buf)?\n\n }\n\n Ok(())\n", "file_path": "logpack/src/encoder.rs", "rank": 67, "score": 11.73022089578797 }, { "content": " self\n\n }\n\n fn begin_array_item(&mut self, field_idx: u16) {\n\n if field_idx != 0 {\n\n *self.output += \", \";\n\n }\n\n }\n\n fn end_array_item(&mut self) {\n\n }\n\n fn end_array(&mut self) {\n\n *self.output += \"]\";\n\n }\n\n\n\n fn begin_slice(&mut self, _size: usize) -> &mut Self::SubType {\n\n *self.output += \"[\";\n\n self\n\n }\n\n fn begin_slice_item(&mut self, field_idx: u16) {\n\n if field_idx != 0 {\n\n *self.output += \", \";\n\n }\n\n }\n\n fn end_slice_item(&mut self) {\n\n }\n\n fn end_slice(&mut self) {\n\n *self.output += \"]\";\n\n }\n\n}\n", "file_path": "logpack-ron/src/lib.rs", "rank": 68, "score": 11.580558181948698 }, { "content": " self.decode(sub, ctx)?;\n\n ctx.end_array_item();\n\n idx += 1;\n\n }\n\n ctx.end_array();\n\n Ok(())\n\n }\n\n\n\n fn decode_slice<C>(&mut self, sub: &Box<ResolvedDesc>, callbacks: &mut C) -> Result<(), Error>\n\n where C: Callbacks\n\n {\n\n let size = self.buffer.get::<u64>().map_err(Error::GetError)? 
as usize;\n\n let ctx = callbacks.begin_slice(size);\n\n let mut idx = 0;\n\n for _ in 0 .. size {\n\n ctx.begin_slice_item(idx);\n\n self.decode(sub, ctx)?;\n\n ctx.end_slice_item();\n\n idx += 1;\n\n }\n", "file_path": "logpack/src/decoder.rs", "rank": 69, "score": 11.506168231519647 }, { "content": " }\n\n\n\n fn begin_struct_tuple(&mut self, typename_id: Option<&TypeNameId>) -> &mut Self::SubType {\n\n if let Some(typename_id) = typename_id {\n\n self.output.push(VALNAME.paint(typename_id.0.clone()));\n\n }\n\n self.output.push(PUNCT.paint(\"(\".to_string()));\n\n self\n\n }\n\n\n\n fn begin_tuple_field(&mut self, field_idx: u16) -> &mut Self::SubType {\n\n if field_idx != 0 {\n\n self.output.push(PUNCT.paint(\", \".to_string()));\n\n }\n\n self\n\n }\n\n\n\n fn end_tuple_field(&mut self) {\n\n }\n\n\n", "file_path": "logpack-ron/src/ansi.rs", "rank": 70, "score": 11.413465463142057 }, { "content": " }\n\n}\n\n\n\nimpl<R: Read> BufRead for HexReader<R> {\n\n fn fill_buf(&mut self) -> io::Result<&[u8]> {\n\n if self.buf_pos >= self.buf.len() {\n\n let mut nread: usize = 0;\n\n let mut tmp: [u8; HR_BYTES_PER_LINE] = [0; HR_BYTES_PER_LINE];\n\n loop {\n\n nread += match self.inner.read(&mut tmp[nread..]) {\n\n Ok(0) if nread == 0 => return Ok(&[]),\n\n Ok(0) => break,\n\n Ok(n) => n,\n\n Err(e) => return Err(e),\n\n };\n\n if nread >= HR_BYTES_PER_LINE { break }\n\n }\n\n self.buf.clear();\n\n self.render_bytes(&tmp[..nread]).expect(\"TODO:\");\n\n self.buf_pos = 0;\n\n }\n\n Ok(self.buf[self.buf_pos..].as_bytes())\n\n }\n\n\n\n fn consume(&mut self, count: usize) {\n\n self.buf_pos = cmp::min(self.buf_pos + count, self.buf.len());\n\n }\n\n}\n", "file_path": "test/hexdump/src/lib.rs", "rank": 71, "score": 11.399956622691159 }, { "content": "}\n\n\n\nimpl<'a> Callbacks for Repr<'a> {\n\n type SubType = Repr<'a>;\n\n\n\n fn handle_u8(&mut self, val: u8) {\n\n write!(self.output, \"{}\", val).unwrap();\n\n }\n\n\n\n fn handle_u16(&mut self, val: u16) {\n\n 
write!(self.output, \"{}\", val).unwrap();\n\n }\n\n\n\n fn handle_u32(&mut self, val: u32) {\n\n write!(self.output, \"{}\", val).unwrap();\n\n }\n\n\n\n fn handle_u64(&mut self, val: u64) {\n\n write!(self.output, \"{}\", val).unwrap();\n\n }\n", "file_path": "logpack-ron/src/lib.rs", "rank": 72, "score": 11.202822812321141 }, { "content": " write!(&mut self.buf, \"{}\", *b as char)?;\n\n continue;\n\n }\n\n write!(&mut self.buf, \".\")?;\n\n }\n\n write!(&mut self.buf, \"|\")?;\n\n write!(&mut self.buf, \"\\n\")?;\n\n self.line_count += 1;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<T: Read> Read for HexReader<T> {\n\n fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {\n\n let nread = {\n\n let mut rem = self.fill_buf()?;\n\n rem.read(buf)?\n\n };\n\n self.consume(nread);\n\n Ok(nread)\n", "file_path": "test/hexdump/src/lib.rs", "rank": 73, "score": 11.170831143317727 }, { "content": "}\n\n\n\ncfg_if! {\n\n if #[cfg(unix)] {\n\n use std::time::Instant;\n\n\n\n #[cfg(not(any(target_os = \"macos\", target_os = \"ios\")))]\n\n impl Encoder for Instant\n\n {\n\n #[inline(always)]\n\n fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n\n use libc::timespec;\n\n let timespec = unsafe {\n\n ::std::mem::transmute::<_, &timespec>(&self)\n\n };\n\n let secs: u64 = timespec.tv_sec as u64;\n\n secs.logpack_encode(buf)?;\n\n let nanos: u32 = timespec.tv_nsec as u32;\n\n nanos.logpack_encode(buf)?;\n\n Ok(())\n", "file_path": "logpack/src/encoder.rs", "rank": 74, "score": 11.052725848529489 }, { "content": " self.output.push(VALNAME.paint(typename_id.0.clone()));\n\n }\n\n self.output.push(PUNCT.paint(\"(\".to_string()));\n\n self\n\n }\n\n\n\n fn begin_named_field(&mut self, field_idx: u16, field_name: &String) -> &mut Self::SubType {\n\n if field_idx != 0 {\n\n self.output.push(PUNCT.paint(\", \".to_string()));\n\n }\n\n self.output.push(FIELDNAME.paint(field_name.clone()));\n\n self.output.push(PUNCT.paint(\": \".to_string()));\n\n 
self\n\n }\n\n\n\n fn end_named_field(&mut self) {\n\n }\n\n\n\n fn end_struct_named(&mut self) {\n\n self.output.push(PUNCT.paint(\")\".to_string()));\n", "file_path": "logpack-ron/src/ansi.rs", "rank": 75, "score": 11.009653390094515 }, { "content": " *self.output += \")\";\n\n }\n\n\n\n fn result_ok(&mut self) -> &mut Self::SubType {\n\n *self.output += \"Ok(\";\n\n self\n\n }\n\n\n\n fn result_err(&mut self) -> &mut Self::SubType {\n\n *self.output += \"Err(\";\n\n self\n\n }\n\n\n\n fn result_end(&mut self) {\n\n *self.output += \")\";\n\n }\n\n\n\n fn struct_unit(&mut self, typename_id: Option<&TypeNameId>) {\n\n if let Some(typename_id) = typename_id {\n\n *self.output += typename_id.0.as_str();\n", "file_path": "logpack-ron/src/lib.rs", "rank": 76, "score": 10.986437321041793 }, { "content": "}\n\n\n\n//////////////////////////////////////////////////////////////////////////\n\n// BufDecoder\n\n//\n\n\n\n#[derive(Clone)]\n\npub struct BufDecoder<'a> {\n\n slice: &'a [u8],\n\n position: usize,\n\n}\n\n\n\nimpl<'a> BufDecoder<'a> {\n\n pub fn new(slice: &'a [u8]) -> Self {\n\n Self { slice: slice, position: 0 }\n\n }\n\n}\n\n\n\nimpl<'a> BufDecoder<'a> {\n\n pub fn len(&self) -> usize {\n", "file_path": "logpack/src/buffers.rs", "rank": 77, "score": 10.891285994743308 }, { "content": "use super::Description;\n\nuse super::Named;\n\nuse super::Struct;\n\n\n\nuse std::collections::{HashMap};\n\nuse super::buffers::BufDecoder;\n\n\n\npub type TypeName = String;\n\npub type TypeNameId = (TypeName, u16);\n\npub type ResolvedDesc = Description<TypeNameId>;\n\n\n\n#[derive(Clone)]\n\npub struct NameMap {\n\n map: HashMap<TypeNameId, Named<TypeNameId>>\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum FeedError {\n\n Dups,\n\n}\n", "file_path": "logpack/src/decoder.rs", "rank": 78, "score": 10.750364476267077 }, { "content": " (*self as u64).logpack_encode(buf)\n\n }\n\n\n\n #[inline(always)]\n\n fn logpack_sizer(&self) -> usize {\n\n (*self as u64).logpack_sizer()\n\n 
}\n\n}\n\n\n\nimpl<T> Encoder for *const T\n\n{\n\n #[inline(always)]\n\n fn logpack_encode(&self, buf: &mut buffers::BufEncoder) -> Result<(), (usize, usize)> {\n\n (*self as u64).logpack_encode(buf)\n\n }\n\n\n\n #[inline(always)]\n\n fn logpack_sizer(&self) -> usize {\n\n (*self as u64).logpack_sizer()\n\n }\n", "file_path": "logpack/src/encoder.rs", "rank": 79, "score": 10.651555431777801 }, { "content": "impl<T, S> Logpack for Result<T, S>\n\n where T: Logpack, S: Logpack\n\n{\n\n fn logpack_describe(seen: &mut SeenTypes) -> RefDesc {\n\n Description::Result(Box::new(T::logpack_describe(seen)),\n\n Box::new(S::logpack_describe(seen)))\n\n }\n\n}\n\n\n\nimpl<T> Logpack for PhantomData<T>\n\n where T: Logpack\n\n{\n\n fn logpack_describe(_: &mut SeenTypes) -> RefDesc {\n\n Description::PhantomData\n\n }\n\n}\n\n\n\nimpl<T> Logpack for [T; 0] {\n\n fn logpack_describe(_: &mut SeenTypes) -> RefDesc {\n\n Description::Unit\n", "file_path": "logpack/src/lib.rs", "rank": 80, "score": 10.636690323610113 }, { "content": "\n\n fn begin_array(&mut self, _size: usize) -> &mut Self::SubType {\n\n self.output.push(PUNCT.paint(\"[\".to_string()));\n\n self\n\n }\n\n fn begin_array_item(&mut self, field_idx: u16) {\n\n if field_idx != 0 {\n\n self.output.push(PUNCT.paint(\", \".to_string()));\n\n }\n\n }\n\n fn end_array_item(&mut self) {\n\n }\n\n fn end_array(&mut self) {\n\n self.output.push(PUNCT.paint(\"]\".to_string()));\n\n }\n\n\n\n fn begin_slice(&mut self, _size: usize) -> &mut Self::SubType {\n\n self.output.push(PUNCT.paint(\"[\".to_string()));\n\n self\n\n }\n", "file_path": "logpack-ron/src/ansi.rs", "rank": 81, "score": 10.624787015859042 }, { "content": " let f0 = self.buffer.get::<u8>().map_err(Error::GetError)?;\n\n\n\n match f0 {\n\n 0 => {\n\n let ctx = callbacks.result_ok();\n\n self.decode(desc, ctx)?;\n\n ctx.result_end();\n\n },\n\n 1 => {\n\n let ctx = callbacks.result_err();\n\n self.decode(desc2, ctx)?;\n\n ctx.result_end();\n\n },\n\n n => return 
Err(Error::InvalidResult(n)),\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn decode_string<C>(&mut self, callbacks: &mut C) -> Result<(), Error>\n", "file_path": "logpack/src/decoder.rs", "rank": 83, "score": 10.405771820350633 }, { "content": "simple!(u16, U16);\n\nsimple!(u8, U8);\n\nsimple!(isize, I64);\n\nsimple!(i64, I64);\n\nsimple!(i32, I32);\n\nsimple!(i16, I16);\n\nsimple!(i8, I8);\n\nsimple!((), Unit);\n\nsimple!(bool, Bool);\n\nsimple!(str, String);\n\nsimple!(String, String);\n\n\n\nimpl<T> Logpack for Option<T>\n\n where T: Logpack\n\n{\n\n fn logpack_describe(seen: &mut SeenTypes) -> RefDesc{\n\n Description::Option(Box::new(T::logpack_describe(seen)))\n\n }\n\n}\n\n\n", "file_path": "logpack/src/lib.rs", "rank": 84, "score": 10.326207121907974 }, { "content": " }\n\n}\n\n\n\n//////////////////////////////////////////////////////////////////////////\n\n\n\nmacro_rules! std_type_to_tuple {\n\n ($name:ident: $($fields:ident),+) => {\n\n impl Logpack for $name\n\n {\n\n fn logpack_describe(seen: &mut SeenTypes) -> RefDesc {\n\n let (first_seen, typename_id) = seen.make_name_for_id(stringify!($name),\n\n TypeId::of::<Self>());\n\n let may_recurse = if first_seen {\n\n Some(Named::Struct(Struct::Tuple(vec![\n\n $( $fields::logpack_describe(seen) ),*\n\n ])))\n\n } else {\n\n None\n\n };\n\n\n", "file_path": "logpack/src/lib.rs", "rank": 85, "score": 9.863798844692326 }, { "content": "use serde_derive::{Serialize, Deserialize};\n\n\n\npub mod decoder;\n\npub mod encoder;\n\npub mod buffers;\n\n\n\npub use encoder::Encoder;\n\npub use decoder::Decoder;\n\npub use decoder::NameMap;\n\npub use buffers::BufEncoder;\n\npub use buffers::BufDecoder;\n\npub use decoder::ResolvedDesc;\n\n\n\nuse std::collections::HashMap;\n\nuse std::marker::PhantomData;\n\nuse std::any::TypeId;\n\n\n\n//////////////////////////////////////////////////////////////////////////\n\n//\n\n// Type description\n", "file_path": "logpack/src/lib.rs", "rank": 86, "score": 9.78048214035033 }, { 
"content": "\n\n fn handle_bool(&mut self, val: bool) {\n\n write!(self.output, \"{}\", val).unwrap();\n\n }\n\n\n\n fn handle_string(&mut self, val: &str) {\n\n write!(self.output, \"{:?}\", val).unwrap();\n\n }\n\n\n\n fn handle_unit(&mut self) {\n\n *self.output += &\"()\";\n\n }\n\n\n\n fn handle_phantom(&mut self) {\n\n *self.output += &\"PhantomData\";\n\n }\n\n\n\n fn begin_enum(&mut self, typename_id: &TypeNameId, option_name: &String) -> &mut Self::SubType {\n\n if self.enum_names {\n\n *self.output += typename_id.0.as_str();\n", "file_path": "logpack-ron/src/lib.rs", "rank": 87, "score": 9.590418852981983 }, { "content": " fn end_struct_tuple(&mut self) {\n\n self.output.push(PUNCT.paint(\")\".to_string()));\n\n }\n\n\n\n fn begin_tuple(&mut self, _size: usize) -> &mut Self::SubType {\n\n self.output.push(PUNCT.paint(\"(\".to_string()));\n\n self\n\n }\n\n fn begin_tuple_item(&mut self, field_idx: u16) {\n\n if field_idx != 0 {\n\n self.output.push(PUNCT.paint(\", \".to_string()));\n\n }\n\n }\n\n\n\n fn end_tuple_item(&mut self) {\n\n }\n\n\n\n fn end_tuple(&mut self) {\n\n self.output.push(PUNCT.paint(\")\".to_string()));\n\n }\n", "file_path": "logpack-ron/src/ansi.rs", "rank": 88, "score": 9.586239382469575 }, { "content": " ($($desc:tt)+) => {\n\n impl $($desc)+ {\n\n #[inline]\n\n fn logpack_describe(seen: &mut SeenTypes) -> RefDesc {\n\n T::logpack_describe(seen)\n\n }\n\n }\n\n };\n\n}\n\n\n\nderef_impl!(<'a, T: ?Sized> Logpack for &'a T where T: Logpack);\n\nderef_impl!(<'a, T: ?Sized> Logpack for &'a mut T where T: Logpack);\n\n\n\nimpl<T> Logpack for Box<T> where T: Logpack\n\n{\n\n fn logpack_describe(seen: &mut SeenTypes) -> RefDesc {\n\n T::logpack_describe(seen)\n\n }\n\n}\n\n\n", "file_path": "logpack/src/lib.rs", "rank": 89, "score": 9.195761186166159 }, { "content": " ctx.end_slice();\n\n Ok(())\n\n }\n\n\n\n fn decode_tuple<C>(&mut self, subs: &Vec<ResolvedDesc>, callbacks: &mut C) -> Result<(), Error>\n\n where C: Callbacks\n\n {\n\n 
let ctx = callbacks.begin_tuple(subs.len());\n\n let mut idx = 0;\n\n for v in subs.iter() {\n\n ctx.begin_tuple_item(idx);\n\n self.decode(v, ctx)?;\n\n ctx.end_tuple_item();\n\n idx += 1;\n\n }\n\n ctx.end_tuple();\n\n Ok(())\n\n }\n\n\n\n fn decode_option<C>(&mut self, desc: &ResolvedDesc, callbacks: &mut C) -> Result<(), Error>\n", "file_path": "logpack/src/decoder.rs", "rank": 90, "score": 9.175719429849222 }, { "content": "pub struct LogpackWrapper<T>(T);\n\n\n\nimpl<T> Logpack for LogpackWrapper<T> where T: Logpack\n\n{\n\n fn logpack_describe(seen: &mut SeenTypes) -> RefDesc {\n\n T::logpack_describe(seen)\n\n }\n\n}\n\n\n\nimpl<T> Logpack for *const T\n\n{\n\n fn logpack_describe(_seen: &mut SeenTypes) -> RefDesc{\n\n Description::RawPtr\n\n }\n\n}\n\n\n\nimpl<T> Logpack for *mut T\n\n{\n\n fn logpack_describe(_seen: &mut SeenTypes) -> RefDesc{\n\n Description::RawPtr\n", "file_path": "logpack/src/lib.rs", "rank": 91, "score": 8.687638916421275 }, { "content": "\n\n println!(\"\");\n\n println!(\"Value to encode (Debug repr): {:?}\", *e);\n\n println!(\"Serialized Type in 'ron' (None = type already seen): {}\", type_ser);\n\n let r = HexReader::new(encoded);\n\n for line in r.lines() {\n\n println!(\"Binary value in hex: {}\", line.unwrap());\n\n }\n\n println!(\"Packlog Deser Output: {}\", repr_output);\n\n println!(\"Packlog Deser ANSI output: {}\", ANSIStrings(repr_ansi_output.as_slice()));\n\n println!(\"Size in bytes of Packlog binary: {:?}\", sizer_result);\n\n\n\n assert_eq!(encoded.len(), sizer_result);\n\n}\n\n\n\n#[derive(Logpack, Debug)]\n\npub struct StaticRecord {\n\n pub file: &'static str,\n\n pub line: u32,\n\n pub function: &'static str,\n\n pub module: &'static str,\n\n}\n\n\n", "file_path": "test/src/main.rs", "rank": 92, "score": 8.614284153530825 }, { "content": "use logpack_derive::Logpack;\n\nuse serde_derive::Deserialize;\n\n\n\nuse hexdump::HexReader;\n\nuse ansi_term::{ANSIString, ANSIStrings};\n\nuse ron::ser::{to_string};\n\nuse 
ron::de::{from_str};\n\nuse std::fmt::Debug;\n\nuse std::io::BufRead;\n\n\n\n#[derive(Logpack, Debug, Eq, PartialEq, Deserialize)]\n\npub struct GenericType<T> {\n\n test: T,\n\n field: u32,\n\n}\n\n\n\n#[derive(Logpack, Debug, Eq, PartialEq, Deserialize)]\n\npub enum SimpleEnum {\n\n WithUnit,\n\n TupleField(u32),\n", "file_path": "test/src/main.rs", "rank": 93, "score": 8.608028998736994 }, { "content": " where C: Callbacks\n\n {\n\n let f0 = self.buffer.get::<u8>().map_err(Error::GetError)?;\n\n\n\n match f0 {\n\n 0 => callbacks.option_none(),\n\n 1 => {\n\n let ctx = callbacks.option_some();\n\n self.decode(desc, ctx)?;\n\n ctx.option_end();\n\n },\n\n n => return Err(Error::InvalidSome(n)),\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn decode_result<C>(&mut self, desc: &ResolvedDesc, desc2: &ResolvedDesc, callbacks: &mut C) -> Result<(), Error>\n\n where C: Callbacks\n\n {\n", "file_path": "logpack/src/decoder.rs", "rank": 94, "score": 8.58760804923414 }, { "content": " }\n\n\n\n #[inline(always)]\n\n fn logpack_sizer(&self) -> usize {\n\n let secs: u64 = 0;\n\n let nanos: u32 = 0;\n\n secs.logpack_sizer() + nanos.logpack_sizer()\n\n }\n\n }\n\n }\n\n}\n", "file_path": "logpack/src/encoder.rs", "rank": 95, "score": 8.366803309593685 }, { "content": "\n\n#[derive(Serialize, Deserialize, Clone, PartialEq, Debug)]\n\npub enum Struct<T, S=String> {\n\n Unit,\n\n Tuple(Vec<Description<T, S>>),\n\n Named(Vec<(S, Description<T, S>)>),\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, PartialEq, Debug)]\n\npub enum Named<T, S=String> {\n\n Enum(Vec<(S, Struct<T, S>)>),\n\n Struct(Struct<T, S>),\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, PartialEq, Debug)]\n\npub enum Description<T, S=String> {\n\n U64,\n\n U32,\n\n U16,\n\n U8,\n", "file_path": "logpack/src/lib.rs", "rank": 96, "score": 8.261447696876624 }, { "content": " Description::ByName(typename_id, may_recurse)\n\n }\n\n }\n\n };\n\n}\n\n\n\nuse std::time::Duration;\n\nuse 
std::time::Instant;\n\n\n\nstd_type_to_tuple!(Duration: u64, u32);\n\nstd_type_to_tuple!(Instant: u64, u32);\n", "file_path": "logpack/src/lib.rs", "rank": 97, "score": 8.253423481651533 }, { "content": " I64,\n\n I32,\n\n I16,\n\n I8,\n\n Unit,\n\n PhantomData,\n\n Bool,\n\n String,\n\n RawPtr,\n\n\n\n Option(Box<Description<T, S>>),\n\n Result(Box<Description<T, S>>, Box<Description<T, S>>),\n\n\n\n Array(usize, Box<Description<T, S>>),\n\n Slice(Box<Description<T, S>>),\n\n Tuple(Vec<Description<T, S>>),\n\n\n\n ByName(T, Option<Named<T, S>>),\n\n}\n\n\n", "file_path": "logpack/src/lib.rs", "rank": 98, "score": 8.216151506651252 }, { "content": "// Based on stuff from: https://www.snip2code.com/Snippet/1473242/Rust-Hexdump\n\n\n\nuse std::io::{self, Read, BufRead};\n\nuse std::cmp;\n\nuse std::fmt::{self, Write};\n\n\n\nconst HR_BYTES_PER_LINE: usize = 16;\n\n\n\npub struct HexReader<T> {\n\n inner: T,\n\n buf: String,\n\n buf_pos: usize,\n\n line_count: usize,\n\n}\n\n\n\nimpl<T: Read> HexReader<T> {\n\n pub fn new(inner: T) -> HexReader<T> {\n\n HexReader {\n\n inner: inner,\n\n buf: String::new(),\n", "file_path": "test/hexdump/src/lib.rs", "rank": 99, "score": 8.103770349025469 } ]
Rust
src/main.rs
icefoxen/otter
518de550ea792ce1c16f7cf353b8dd97bcd4ff57
extern crate pencil; #[macro_use] extern crate log; extern crate env_logger; extern crate hoedown; extern crate git2; use std::collections::BTreeMap; use std::fs; use std::io; use std::io::Read; use pencil::helpers; use pencil::{Pencil, Request, Response, PencilResult, PencilError}; use pencil::http_errors; use git2::Repository; use hoedown::Render; static PAGE_PATH: &'static str = "pages/"; fn page_path(page: &str) -> String { let mut pagepath = PAGE_PATH.to_string(); pagepath += page; pagepath += ".md"; pagepath } use std::convert::From; fn git_to_http_error(_err: git2::Error) -> PencilError { let err = http_errors::InternalServerError; PencilError::PenHTTPError(err) } fn load_page_file(pagename: &str) -> Result<String, PencilError> { let r = Repository::init(PAGE_PATH).map_err(git_to_http_error); let pagepath = page_path(pagename); match fs::File::open(pagepath) { Ok(mut file) => { let mut s = String::new(); let _ = file.read_to_string(&mut s).unwrap(); Ok(s) } Err(e) => { let status = match e.kind() { io::ErrorKind::NotFound => http_errors::NotFound, io::ErrorKind::PermissionDenied => http_errors::Forbidden, _ => http_errors::InternalServerError, }; let err = PencilError::PenHTTPError(status); return Err(err) } } } fn index_redirect(_request: &mut Request) -> PencilResult { helpers::redirect("/index", 308) } fn page_get(request: &mut Request) -> PencilResult { let page = request.view_args.get("page").unwrap(); let contents = load_page_file(page)?; let md = hoedown::Markdown::from(contents.as_bytes()); let mut html = hoedown::Html::new(hoedown::renderer::html::Flags::empty(), 0); let buffer = html.render(&md); let rendered_markdown = buffer.to_str().unwrap(); let mut ctx = BTreeMap::new(); ctx.insert("pagename".to_string(), page.to_string()); ctx.insert("page".to_string(), rendered_markdown.to_string()); request.app.render_template("page.html", &ctx) } fn page_edit_get(request: &mut Request) -> PencilResult { let page = request.view_args.get("page").unwrap(); 
let contents = load_page_file(page)?; let mut ctx = BTreeMap::new(); ctx.insert("title".to_string(), page.to_string()); ctx.insert("page".to_string(), contents.to_string()); request.app.render_template("edit.html", &ctx) } fn page_edit_post(request: &mut Request) -> PencilResult { println!("Edit posted thing"); let newpage = request.form().get("submission").unwrap(); let response = format!("Posted editing page: {}", newpage); Ok(Response::from(response)) } fn setup_app() -> Pencil { let mut app = Pencil::new("."); app.set_debug(true); app.enable_static_file_handling(); app.register_template("page.html"); app.register_template("edit.html"); app.get("/", "index", index_redirect); app.get("/<page:string>", "page_get", page_get); app.get("/edit/<page:string>", "page_edit_get", page_edit_get); app.post("/edit/<page:string>", "page_edit_post", page_edit_post); app } static ADDRESS: &'static str = "localhost:5000"; fn main() { let app = setup_app(); app.run(ADDRESS); } mod test { /* fn start_test_server() -> Child { let child = Command::new("cargo") .arg("run") .spawn() .unwrap(); child } fn curl(url: &str) -> Child { let child = Command::new("curl") .arg(url) .spawn() .unwrap(); child } */ /* #[test] fn it_works() { let mut c = start_test_server(); //c.wait().unwrap(); let mut curl = curl("http://localhost:5000/start"); curl.wait().unwrap(); // Goodness, no TERM signal? How violent. c.kill().unwrap(); } */ }
extern crate pencil; #[macro_use] extern crate log; extern crate env_logger; extern crate hoedown; extern crate git2; use std::collections::BTreeMap; use std::fs; use std::io; use std::io::Read; use pencil::helpers; use pencil::{Pencil, Request, Response, PencilResult, PencilError}; use pencil::http_errors; use git2::Repository; use hoedown::Render; static PAGE_PATH: &'static str = "pages/"; fn page_path(page: &str) -> String { let mut pagepath = PAGE_PATH.to_string(); pagepath += page; pagepath += ".md"; pagepath } use std::convert::From; fn git_to_http_error(_err: git2::Error) -> PencilError { let err = http_errors::InternalServerError; PencilError::PenHTTPError(err) } fn load_page_file(pagename: &str) -> Result<String, PencilError> { let r = Repository::init(PAGE_PATH).map_err(git_to_http_error); let pagepath = page_path(pagename); match fs::File::open(pagepath) { Ok(mut file) => { let mut s = String::new(); let _ = file.read_to_string(&mut s).unwrap(); Ok(s) } Err(e) => { let status = match e.kind() { io::ErrorKind::NotFound => http_errors::NotFound, io::ErrorKind::PermissionDenied => http_errors::Forbidden, _ => http_errors::InternalServerError, }; let err = PencilError::PenHTTPError(status); return Err(err) } } } fn index_redirect(_request: &mut Request) -> PencilResult { helpers::redirect("/index", 308) } fn page_get(request: &mut Request) -> PencilResult {
::from(contents.as_bytes()); let mut html = hoedown::Html::new(hoedown::renderer::html::Flags::empty(), 0); let buffer = html.render(&md); let rendered_markdown = buffer.to_str().unwrap(); let mut ctx = BTreeMap::new(); ctx.insert("pagename".to_string(), page.to_string()); ctx.insert("page".to_string(), rendered_markdown.to_string()); request.app.render_template("page.html", &ctx) } fn page_edit_get(request: &mut Request) -> PencilResult { let page = request.view_args.get("page").unwrap(); let contents = load_page_file(page)?; let mut ctx = BTreeMap::new(); ctx.insert("title".to_string(), page.to_string()); ctx.insert("page".to_string(), contents.to_string()); request.app.render_template("edit.html", &ctx) } fn page_edit_post(request: &mut Request) -> PencilResult { println!("Edit posted thing"); let newpage = request.form().get("submission").unwrap(); let response = format!("Posted editing page: {}", newpage); Ok(Response::from(response)) } fn setup_app() -> Pencil { let mut app = Pencil::new("."); app.set_debug(true); app.enable_static_file_handling(); app.register_template("page.html"); app.register_template("edit.html"); app.get("/", "index", index_redirect); app.get("/<page:string>", "page_get", page_get); app.get("/edit/<page:string>", "page_edit_get", page_edit_get); app.post("/edit/<page:string>", "page_edit_post", page_edit_post); app } static ADDRESS: &'static str = "localhost:5000"; fn main() { let app = setup_app(); app.run(ADDRESS); } mod test { /* fn start_test_server() -> Child { let child = Command::new("cargo") .arg("run") .spawn() .unwrap(); child } fn curl(url: &str) -> Child { let child = Command::new("curl") .arg(url) .spawn() .unwrap(); child } */ /* #[test] fn it_works() { let mut c = start_test_server(); //c.wait().unwrap(); let mut curl = curl("http://localhost:5000/start"); curl.wait().unwrap(); // Goodness, no TERM signal? How violent. c.kill().unwrap(); } */ }
let page = request.view_args.get("page").unwrap(); let contents = load_page_file(page)?; let md = hoedown::Markdown
function_block-random_span
[ { "content": "pub fn clone(repo_url: &str, into_directory: &str) -> Result<String, Error> {\n\n let output = try!(Command::new(\"git\")\n\n .arg(\"clone\")\n\n .arg(repo_url)\n\n .arg(into_directory)\n\n .output());\n\n\n\n match output.status.success() {\n\n true => Ok(String::from_utf8_lossy(&output.stdout).into_owned()),\n\n false => {\n\n Err(Error::new(ErrorKind::Other,\n\n format!(\"{}\", String::from_utf8_lossy(&output.stderr))))\n\n }\n\n }\n\n}\n", "file_path": "src/repo.rs", "rank": 7, "score": 55115.01548735621 }, { "content": "# Otter wiki\n\n\n", "file_path": "pages/index.md", "rank": 9, "score": 19269.308208237755 }, { "content": "# Hello world!\n\n\n\nThis is a test markdown page\n", "file_path": "pages/start.md", "rank": 10, "score": 19269.274683394142 }, { "content": "# Installing\n\n\n\nInstall dependencies\n\n\n\n```\n\napt install cmake\n\n```\n\n\n\n# Running\n\n\n\nRun with:\n\n\n\n```\n\nenv RUST_LOG=otter=info,logger=info cargo run\n\n```\n\n\n\n# TODO\n\n\n\n## Functionality\n\n\n\n* Make wikilinks work properly\n\n* Figure out how to display changelogs/diffs sanely (patch, unidiff crates can parse)\n\n* Figure out sessions, user accounts.\n\n* Figure out backlinks\n\n* Play with Hoedown Markdown extensions/options\n\n* Syntax highlighting, somehow???\n\n\n\n## Utility\n\n\n\n* Handle HTTP errors nicely.\n\n* Sanitize inputs, make 'em safe, however that's done. (ammonia crate?)\n\n* Make better logging.\n\n* Tests!\n\n\n\n## Presentation\n\n\n\n* Make nice templates :/\n\n* Documentation\n\n\n\n## Other\n\n\n\n* Mongle/worry about license (MIT if all the dependent crates support it)\n\n* Play with Pullmark?\n", "file_path": "README.md", "rank": 12, "score": 10586.280453405401 }, { "content": "use std::process::Command;\n\nuse std::io::{Error, ErrorKind};\n\n\n\n\n", "file_path": "src/repo.rs", "rank": 15, "score": 1.3189512954585874 } ]
Rust
sw/linalg/src/im4.rs
yupferris/xenowing
0762908cba96bdd695c9af07f494bf34736b3288
use crate::fixed::*; use crate::iv4::*; use trig::*; use core::ops::{Mul, MulAssign}; #[derive(Clone, Copy)] pub struct Im4<const FRACT_BITS: u32> { pub columns: [Iv4<FRACT_BITS>; 4], } impl<const FRACT_BITS: u32> Im4<FRACT_BITS> { pub fn identity() -> Self { Self { columns: [ Iv4::new(1.0, 0.0, 0.0, 0.0), Iv4::new(0.0, 1.0, 0.0, 0.0), Iv4::new(0.0, 0.0, 1.0, 0.0), Iv4::new(0.0, 0.0, 0.0, 1.0), ], } } pub fn translation( x: impl Into<Fixed<FRACT_BITS>>, y: impl Into<Fixed<FRACT_BITS>>, z: impl Into<Fixed<FRACT_BITS>>, ) -> Self { Self { columns: [ Iv4::new(1.0, 0.0, 0.0, 0.0), Iv4::new(0.0, 1.0, 0.0, 0.0), Iv4::new(0.0, 0.0, 1.0, 0.0), Iv4::new(x, y, z, 1.0), ], } } pub fn rotation_x(radians: f32) -> Self { let s = sin(radians); let c = cos(radians); Self { columns: [ Iv4::new(1.0, 0.0, 0.0, 0.0), Iv4::new(0.0, c, s, 0.0), Iv4::new(0.0, -s, c, 0.0), Iv4::new(0.0, 0.0, 0.0, 1.0), ] } } pub fn rotation_y(radians: f32) -> Self { let s = sin(radians); let c = cos(radians); Self { columns: [ Iv4::new(c, 0.0, -s, 0.0), Iv4::new(0.0, 1.0, 0.0, 0.0), Iv4::new(s, 0.0, c, 0.0), Iv4::new(0.0, 0.0, 0.0, 1.0), ] } } pub fn rotation_z(radians: f32) -> Self { let s = sin(radians); let c = cos(radians); Self { columns: [ Iv4::new(c, s, 0.0, 0.0), Iv4::new(-s, c, 0.0, 0.0), Iv4::new(0.0, 0.0, 1.0, 0.0), Iv4::new(0.0, 0.0, 0.0, 1.0), ] } } pub fn scale( x: impl Into<Fixed<FRACT_BITS>>, y: impl Into<Fixed<FRACT_BITS>>, z: impl Into<Fixed<FRACT_BITS>>, ) -> Self { Self { columns: [ Iv4::new(x, 0.0, 0.0, 0.0), Iv4::new(0.0, y, 0.0, 0.0), Iv4::new(0.0, 0.0, z, 0.0), Iv4::new(0.0, 0.0, 0.0, 1.0), ] } } pub fn ortho(left: f32, right: f32, bottom: f32, top: f32, z_near: f32, z_far: f32) -> Self { let tx = -(right + left) / (right - left); let ty = -(top + bottom) / (top - bottom); let tz = -(z_far + z_near) / (z_far - z_near); Self { columns: [ Iv4::new(2.0 / (right - left), 0.0, 0.0, 0.0), Iv4::new(0.0, 2.0 / (top - bottom), 0.0, 0.0), Iv4::new(0.0, 0.0, -2.0 / (z_far - z_near), 0.0), 
Iv4::new(tx, ty, tz, 1.0), ] } } pub fn perspective(fov_degrees: f32, aspect: f32, z_near: f32, z_far: f32) -> Self { let fov_radians = fov_degrees.to_radians(); let top = z_near * tan(fov_radians / 2.0); let right = top * aspect; let z_range = z_far - z_near; Self { columns: [ Iv4::new(z_near / right, 0.0, 0.0, 0.0), Iv4::new(0.0, z_near / top, 0.0, 0.0), Iv4::new(0.0, 0.0, -(z_near + z_far) / z_range, -1.0), Iv4::new(0.0, 0.0, -2.0 * z_near * z_far / z_range, 0.0), ] } } fn rows(&self) -> [Iv4<FRACT_BITS>; 4] { [ Iv4::new(self.columns[0].x, self.columns[1].x, self.columns[2].x, self.columns[3].x), Iv4::new(self.columns[0].y, self.columns[1].y, self.columns[2].y, self.columns[3].y), Iv4::new(self.columns[0].z, self.columns[1].z, self.columns[2].z, self.columns[3].z), Iv4::new(self.columns[0].w, self.columns[1].w, self.columns[2].w, self.columns[3].w), ] } } impl<const FRACT_BITS: u32> Mul for Im4<FRACT_BITS> { type Output = Self; fn mul(self, other: Self) -> Self { &self * &other } } impl<'a, const FRACT_BITS: u32> Mul<&'a Self> for Im4<FRACT_BITS> { type Output = Self; fn mul(self, other: &'a Self) -> Self { &self * other } } impl<'a, const FRACT_BITS: u32> Mul<Im4<FRACT_BITS>> for &'a Im4<FRACT_BITS> { type Output = Im4<FRACT_BITS>; fn mul(self, other: Im4<FRACT_BITS>) -> Im4<FRACT_BITS> { self * &other } } impl<'a, 'b, const FRACT_BITS: u32> Mul<&'a Im4<FRACT_BITS>> for &'b Im4<FRACT_BITS> { type Output = Im4<FRACT_BITS>; fn mul(self, other: &'a Im4<FRACT_BITS>) -> Im4<FRACT_BITS> { let rows = self.rows(); Im4 { columns: [ Iv4::new( rows[0].dot(other.columns[0]), rows[1].dot(other.columns[0]), rows[2].dot(other.columns[0]), rows[3].dot(other.columns[0]), ), Iv4::new( rows[0].dot(other.columns[1]), rows[1].dot(other.columns[1]), rows[2].dot(other.columns[1]), rows[3].dot(other.columns[1]), ), Iv4::new( rows[0].dot(other.columns[2]), rows[1].dot(other.columns[2]), rows[2].dot(other.columns[2]), rows[3].dot(other.columns[2]), ), Iv4::new( 
rows[0].dot(other.columns[3]), rows[1].dot(other.columns[3]), rows[2].dot(other.columns[3]), rows[3].dot(other.columns[3]), ), ], } } } impl<const FRACT_BITS: u32> Mul<Iv4<FRACT_BITS>> for Im4<FRACT_BITS> { type Output = Iv4<FRACT_BITS>; fn mul(self, other: Iv4<FRACT_BITS>) -> Iv4<FRACT_BITS> { let rows = self.rows(); Iv4::new( rows[0].dot(other), rows[1].dot(other), rows[2].dot(other), rows[3].dot(other), ) } } impl<const FRACT_BITS: u32> MulAssign for Im4<FRACT_BITS> { fn mul_assign(&mut self, other: Self) { *self = *self * other } }
use crate::fixed::*; use crate::iv4::*; use trig::*; use core::ops::{Mul, MulAssign}; #[derive(Clone, Copy)] pub struct Im4<const FRACT_BITS: u32> { pub columns: [Iv4<FRACT_BITS>; 4], } impl<const FRACT_BITS: u32> Im4<FRACT_BITS> { pub fn identity() -> Self { Self { columns: [ Iv4::new(1.0, 0.0, 0.0, 0.0), Iv4::new(0.0, 1.0, 0.0, 0.0), Iv4::new(0.0, 0.0, 1.0, 0.0), Iv4::new(0.0, 0.0, 0.0, 1.0), ], } } pub fn translation( x: impl Into<Fixed<FRACT_BITS>>, y: impl Into<Fixed<FRACT_BITS>>, z: impl Into<Fixed<FRACT_BITS>>, ) -> Self { Self { columns: [ Iv4::new(1.0, 0.0, 0.0, 0.0), Iv4::new(0.0, 1.0, 0.0, 0.0), Iv4::new(0.0, 0.0, 1.0, 0.0), Iv4::new(x, y, z, 1.0), ], } } pub fn rotation_x(radians: f32) -> Self { let s = sin(radians); let c = cos(radians); Self { columns: [ Iv4::new(1.0, 0.0, 0.0, 0.0), Iv4::new(0.0, c, s, 0.0), Iv4::new(0.0, -s, c, 0.0), Iv4::new(0.0, 0.0, 0.0, 1.0), ] } } pub fn rotation_y(radians: f32) -> Self { let s = sin(radians); let c = cos(radians); Self { columns: [ Iv4::new(c, 0.0, -s, 0.0), Iv4::new(0.0, 1.0, 0.0, 0.0), Iv4::new(s, 0.0, c, 0.0), Iv4::new(0.0, 0.0, 0.0, 1.0), ] } } pub fn rotation_z(radians: f32) -> Self { let s = sin(radians); let c = cos(radians); Self { columns: [ Iv4::new(c, s, 0.0, 0.0), Iv4::new(-s, c, 0.0, 0.0), Iv4::new(0.0, 0.0, 1.0, 0.0), Iv4::new(0.0, 0.0, 0.0, 1.0), ] } } pub fn scale( x: impl Into<Fixed<FRACT_BITS>>, y: impl Into<Fixed<FRACT_BITS>>, z: impl Into<Fixed<FRACT_BITS>>, ) -> Self { Self { columns: [ Iv4::new(x, 0.0, 0.0, 0.0), Iv4::new(0.0, y, 0.0, 0.0), Iv4::new(0.0, 0.0, z, 0.0), Iv4::new(0.0, 0.0, 0.0, 1.0), ] } } pub fn ortho(left: f32, right: f32, bottom: f32, top: f32, z_near: f32, z_far: f32) -> Self { let tx = -(right + left) / (right - left); let ty = -(top + bottom) / (top - bottom); let tz = -(z_far + z_near) / (z_far - z_near); Self { columns:
( rows[0].dot(other.columns[0]), rows[1].dot(other.columns[0]), rows[2].dot(other.columns[0]), rows[3].dot(other.columns[0]), ), Iv4::new( rows[0].dot(other.columns[1]), rows[1].dot(other.columns[1]), rows[2].dot(other.columns[1]), rows[3].dot(other.columns[1]), ), Iv4::new( rows[0].dot(other.columns[2]), rows[1].dot(other.columns[2]), rows[2].dot(other.columns[2]), rows[3].dot(other.columns[2]), ), Iv4::new( rows[0].dot(other.columns[3]), rows[1].dot(other.columns[3]), rows[2].dot(other.columns[3]), rows[3].dot(other.columns[3]), ), ], } } } impl<const FRACT_BITS: u32> Mul<Iv4<FRACT_BITS>> for Im4<FRACT_BITS> { type Output = Iv4<FRACT_BITS>; fn mul(self, other: Iv4<FRACT_BITS>) -> Iv4<FRACT_BITS> { let rows = self.rows(); Iv4::new( rows[0].dot(other), rows[1].dot(other), rows[2].dot(other), rows[3].dot(other), ) } } impl<const FRACT_BITS: u32> MulAssign for Im4<FRACT_BITS> { fn mul_assign(&mut self, other: Self) { *self = *self * other } }
[ Iv4::new(2.0 / (right - left), 0.0, 0.0, 0.0), Iv4::new(0.0, 2.0 / (top - bottom), 0.0, 0.0), Iv4::new(0.0, 0.0, -2.0 / (z_far - z_near), 0.0), Iv4::new(tx, ty, tz, 1.0), ] } } pub fn perspective(fov_degrees: f32, aspect: f32, z_near: f32, z_far: f32) -> Self { let fov_radians = fov_degrees.to_radians(); let top = z_near * tan(fov_radians / 2.0); let right = top * aspect; let z_range = z_far - z_near; Self { columns: [ Iv4::new(z_near / right, 0.0, 0.0, 0.0), Iv4::new(0.0, z_near / top, 0.0, 0.0), Iv4::new(0.0, 0.0, -(z_near + z_far) / z_range, -1.0), Iv4::new(0.0, 0.0, -2.0 * z_near * z_far / z_range, 0.0), ] } } fn rows(&self) -> [Iv4<FRACT_BITS>; 4] { [ Iv4::new(self.columns[0].x, self.columns[1].x, self.columns[2].x, self.columns[3].x), Iv4::new(self.columns[0].y, self.columns[1].y, self.columns[2].y, self.columns[3].y), Iv4::new(self.columns[0].z, self.columns[1].z, self.columns[2].z, self.columns[3].z), Iv4::new(self.columns[0].w, self.columns[1].w, self.columns[2].w, self.columns[3].w), ] } } impl<const FRACT_BITS: u32> Mul for Im4<FRACT_BITS> { type Output = Self; fn mul(self, other: Self) -> Self { &self * &other } } impl<'a, const FRACT_BITS: u32> Mul<&'a Self> for Im4<FRACT_BITS> { type Output = Self; fn mul(self, other: &'a Self) -> Self { &self * other } } impl<'a, const FRACT_BITS: u32> Mul<Im4<FRACT_BITS>> for &'a Im4<FRACT_BITS> { type Output = Im4<FRACT_BITS>; fn mul(self, other: Im4<FRACT_BITS>) -> Im4<FRACT_BITS> { self * &other } } impl<'a, 'b, const FRACT_BITS: u32> Mul<&'a Im4<FRACT_BITS>> for &'b Im4<FRACT_BITS> { type Output = Im4<FRACT_BITS>; fn mul(self, other: &'a Im4<FRACT_BITS>) -> Im4<FRACT_BITS> { let rows = self.rows(); Im4 { columns: [ Iv4::new
random
[ { "content": "pub fn sin(x: f32) -> f32 {\n\n let phase_scale = 1.0 / core::f32::consts::TAU;\n\n let phase = x * phase_scale;\n\n let phase = phase - unsafe { intrinsics::floorf32(phase) };\n\n let phase_with_offset = phase + 1.0;\n\n let bits = phase_with_offset.to_bits();\n\n const NUM_SIGNIFICAND_BITS: usize = 23;\n\n let significand = bits & ((1 << NUM_SIGNIFICAND_BITS) - 1);\n\n let index = (significand >> (NUM_SIGNIFICAND_BITS - NUM_ENTRIES_BITS)) as usize;\n\n f32::from_bits(SIN_TAB[index])\n\n}\n\n\n", "file_path": "sw/trig/src/lib.rs", "rank": 0, "score": 240633.693540689 }, { "content": "pub fn tan(x: f32) -> f32 {\n\n sin(x) / cos(x)\n\n}\n", "file_path": "sw/trig/src/lib.rs", "rank": 1, "score": 240633.69354068898 }, { "content": "pub fn cos(x: f32) -> f32 {\n\n sin(x + core::f32::consts::PI / 2.0)\n\n}\n\n\n", "file_path": "sw/trig/src/lib.rs", "rank": 2, "score": 240633.693540689 }, { "content": "pub fn write_u32_le(x: u32) {\n\n write_u8((x >> 0) as _);\n\n write_u8((x >> 8) as _);\n\n write_u8((x >> 16) as _);\n\n write_u8((x >> 24) as _);\n\n}\n\n\n", "file_path": "sw/xw/src/uart.rs", "rank": 3, "score": 199859.47529429663 }, { "content": "pub fn read_u32_le() -> u32 {\n\n let mut ret = 0;\n\n ret |= (read_u8() as u32) << 0;\n\n ret |= (read_u8() as u32) << 8;\n\n ret |= (read_u8() as u32) << 16;\n\n ret |= (read_u8() as u32) << 24;\n\n ret\n\n}\n\n\n", "file_path": "sw/xw/src/uart.rs", "rank": 4, "score": 182073.32874582784 }, { "content": "pub fn putc(c: char) {\n\n write_u8(COMMAND_PUTC);\n\n // TODO: This shouldn't actually be safe... 
:)\n\n write_u8(c as _);\n\n}\n\n\n", "file_path": "sw/xw/src/stdio.rs", "rank": 5, "score": 136026.67175782457 }, { "content": "pub fn sleep_cycles(c: u64) {\n\n let t = cycles();\n\n while cycles() - t < c {\n\n // Do nothing\n\n }\n\n}\n", "file_path": "sw/xw/src/marv.rs", "rank": 6, "score": 133791.9041645598 }, { "content": "pub fn write_u8(x: u8) {\n\n unsafe {\n\n while (ptr::read_volatile(&(*REGS).tx_status) & 1) == 0 {\n\n // Do nothing\n\n }\n\n\n\n ptr::write_volatile(&mut (*REGS).tx_write, x);\n\n }\n\n}\n\n\n", "file_path": "sw/xw/src/uart.rs", "rank": 7, "score": 133465.942490771 }, { "content": "pub fn write_u64_le(x: u64) {\n\n write_u32_le((x >> 0) as _);\n\n write_u32_le((x >> 32) as _);\n\n}\n\n\n", "file_path": "sw/xw/src/uart.rs", "rank": 8, "score": 131363.74546681874 }, { "content": "pub fn write_u128_le(x: u128) {\n\n write_u64_le((x >> 0) as _);\n\n write_u64_le((x >> 64) as _);\n\n}\n", "file_path": "sw/xw/src/uart.rs", "rank": 9, "score": 131363.74546681874 }, { "content": "pub fn init() {\n\n unsafe {\n\n ALLOCATOR.init();\n\n }\n\n}\n\n\n", "file_path": "sw/xw/src/heap.rs", "rank": 10, "score": 122085.54229819356 }, { "content": "pub fn cycles() -> u64 {\n\n extern \"C\" {\n\n fn _cycles() -> u64;\n\n }\n\n\n\n unsafe {\n\n _cycles()\n\n }\n\n}\n\n\n", "file_path": "sw/xw/src/marv.rs", "rank": 11, "score": 116190.21472534156 }, { "content": "pub fn stdout() -> Stdout {\n\n Stdout\n\n}\n", "file_path": "sw/xw/src/stdio.rs", "rank": 12, "score": 116190.21472534156 }, { "content": "pub fn read_u8() -> u8 {\n\n unsafe {\n\n while (ptr::read_volatile(&(*REGS).rx_status) & 1) == 0 {\n\n // Do nothing\n\n }\n\n\n\n ptr::read_volatile(&(*REGS).rx_read)\n\n }\n\n}\n\n\n", "file_path": "sw/xw/src/uart.rs", "rank": 13, "score": 113955.44713207679 }, { "content": "pub fn puts(s: &str) {\n\n puts_nn(s);\n\n putc('\\n');\n\n}\n\n\n", "file_path": "sw/xw/src/stdio.rs", "rank": 14, "score": 113168.29728032422 }, { "content": "pub fn read_u64_le() -> 
u64 {\n\n let mut ret = 0;\n\n ret |= (read_u32_le() as u64) << 0;\n\n ret |= (read_u32_le() as u64) << 32;\n\n ret\n\n}\n\n\n", "file_path": "sw/xw/src/uart.rs", "rank": 15, "score": 111853.25010812454 }, { "content": "pub fn read_u128_le() -> u128 {\n\n let mut ret = 0;\n\n ret |= (read_u64_le() as u128) << 0;\n\n ret |= (read_u64_le() as u128) << 64;\n\n ret\n\n}\n\n\n", "file_path": "sw/xw/src/uart.rs", "rank": 16, "score": 111853.25010812454 }, { "content": "pub fn set(leds: u8) {\n\n const LEDS: *mut u8 = 0x01000000 as _;\n\n\n\n unsafe {\n\n ptr::write_volatile(LEDS, leds);\n\n }\n\n}\n", "file_path": "sw/xw/src/leds.rs", "rank": 17, "score": 110933.52968705946 }, { "content": "pub fn puts_nn(s: &str) {\n\n for c in s.chars() {\n\n putc(c);\n\n }\n\n}\n\n\n\npub struct Stdout;\n\n\n\nimpl Write for Stdout {\n\n fn write_str(&mut self, s: &str) -> core::fmt::Result {\n\n puts_nn(s);\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "sw/xw/src/stdio.rs", "rank": 18, "score": 110933.52968705946 }, { "content": "fn main() -> Result<()> {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let dest_path = Path::new(&out_dir).join(\"tab.rs\");\n\n let mut file = File::create(&dest_path).unwrap();\n\n\n\n const NUM_ENTRIES_BITS: usize = 12;\n\n const NUM_ENTRIES: usize = 1 << NUM_ENTRIES_BITS;\n\n writeln!(file, \"pub const NUM_ENTRIES_BITS: usize = {};\", NUM_ENTRIES_BITS)?;\n\n writeln!(file, \"pub const NUM_ENTRIES: usize = {};\", NUM_ENTRIES)?;\n\n writeln!(file, \"pub static SIN_TAB: [u32; NUM_ENTRIES] = [\")?;\n\n for i in 0..NUM_ENTRIES {\n\n let phase = i as f64 / NUM_ENTRIES as f64 * std::f64::consts::TAU;\n\n let entry = (phase.sin() as f32).to_bits();\n\n writeln!(file, \"0x{:08x}, \", entry)?;\n\n }\n\n writeln!(file, \"];\")?;\n\n\n\n Ok(())\n\n}\n", "file_path": "sw/trig/build.rs", "rank": 19, "score": 99426.54654356318 }, { "content": "fn build_trace(test_name: &'static str) -> io::Result<impl Trace> {\n\n let mut path = env::temp_dir();\n\n 
path.push(format!(\"{}.vcd\", test_name));\n\n println!(\"Writing trace to {:?}\", path);\n\n let file = File::create(path)?;\n\n VcdTrace::new(file, 10, TimeScaleUnit::Ns)\n\n}\n\n\n", "file_path": "sim/read-cache/src/tests.rs", "rank": 20, "score": 90531.81912244928 }, { "content": "fn build_trace(test_name: &'static str) -> io::Result<impl Trace> {\n\n let mut path = env::temp_dir();\n\n path.push(format!(\"{}.vcd\", test_name));\n\n println!(\"Writing trace to {:?}\", path);\n\n let file = File::create(path)?;\n\n VcdTrace::new(file, 10, TimeScaleUnit::Ns)\n\n}\n\n\n", "file_path": "sim/read-cache/src/main.rs", "rank": 21, "score": 90531.81912244928 }, { "content": "fn build_trace(test_name: &'static str) -> io::Result<impl Trace> {\n\n let mut path = env::temp_dir();\n\n path.push(format!(\"{}.vcd\", test_name));\n\n println!(\"Writing trace to {:?}\", path);\n\n let file = File::create(path)?;\n\n VcdTrace::new(file, 10, TimeScaleUnit::Ns)\n\n}\n\n\n", "file_path": "sim/peek-buffer/src/main.rs", "rank": 22, "score": 90531.81912244928 }, { "content": "fn build_trace(test_name: &'static str) -> io::Result<impl Trace> {\n\n let mut path = env::temp_dir();\n\n path.push(format!(\"{}.vcd\", test_name));\n\n println!(\"Writing trace to {:?}\", path);\n\n let file = File::create(path)?;\n\n VcdTrace::new(file, 10, TimeScaleUnit::Ns)\n\n}\n\n\n", "file_path": "sim/flow-controlled-pipe/src/main.rs", "rank": 23, "score": 89095.5380367503 }, { "content": "#[repr(C)]\n\nstruct Regs {\n\n tx_status: u8, _padding0: [u8; 15],\n\n tx_write: u8, _padding1: [u8; 15],\n\n\n\n rx_status: u8, _padding2: [u8; 15],\n\n rx_read: u8, _padding3: [u8; 15],\n\n}\n\n\n\nconst REGS: *mut Regs = 0x02000000 as _;\n\n\n", "file_path": "sw/xw/src/uart.rs", "rank": 24, "score": 66551.39425184387 }, { "content": "#[derive(Clone, Default)]\n\nstruct Triangle {\n\n w0_min: u32,\n\n w0_dx: u32,\n\n w0_dy: u32,\n\n w1_min: u32,\n\n w1_dx: u32,\n\n w1_dy: u32,\n\n w2_min: u32,\n\n w2_dx: u32,\n\n 
w2_dy: u32,\n\n r_min: u32,\n\n r_dx: u32,\n\n r_dy: u32,\n\n g_min: u32,\n\n g_dx: u32,\n\n g_dy: u32,\n\n b_min: u32,\n\n b_dx: u32,\n\n b_dy: u32,\n\n a_min: u32,\n", "file_path": "sw/strugl/src/lib.rs", "rank": 25, "score": 66546.50223242954 }, { "content": "// TODO: I got lazy here; match the pattern used in other stages\n\nstruct Mem<'a> {\n\n #[allow(unused)]\n\n pub m: &'a Module<'a>,\n\n\n\n pub enable: &'a Input<'a>,\n\n pub ready: &'a Output<'a>,\n\n\n\n pub bus_enable_in: &'a Input<'a>,\n\n pub bus_ready_in: &'a Input<'a>,\n\n pub bus_addr_in: &'a Input<'a>,\n\n pub bus_write_data_in: &'a Input<'a>,\n\n pub bus_write_byte_enable_in: &'a Input<'a>,\n\n pub bus_write_in: &'a Input<'a>,\n\n pub bus_enable_out: &'a Output<'a>,\n\n pub bus_addr_out: &'a Output<'a>,\n\n pub bus_write_data_out: &'a Output<'a>,\n\n pub bus_write_byte_enable_out: &'a Output<'a>,\n\n pub bus_write_out: &'a Output<'a>,\n\n}\n\n\n", "file_path": "rtl/src/marv.rs", "rank": 26, "score": 65679.14286088354 }, { "content": "struct Writeback<'a> {\n\n #[allow(unused)]\n\n pub m: &'a Module<'a>,\n\n\n\n pub enable: &'a Input<'a>,\n\n pub ready: &'a Output<'a>,\n\n\n\n pub instruction: &'a Input<'a>,\n\n pub bus_addr_low: &'a Input<'a>,\n\n pub bus_read_data: &'a Input<'a>,\n\n pub bus_read_data_valid: &'a Input<'a>,\n\n pub rd_value_write_data: &'a Input<'a>,\n\n pub rd_value_write_enable: &'a Input<'a>,\n\n pub next_pc: &'a Input<'a>,\n\n pub pc_write_data: &'a Output<'a>,\n\n pub pc_write_enable: &'a Output<'a>,\n\n pub instructions_retired_counter_increment_enable: &'a Output<'a>,\n\n pub register_file_write_addr: &'a Output<'a>,\n\n pub register_file_write_data: &'a Output<'a>,\n\n pub register_file_write_enable: &'a Output<'a>,\n", "file_path": "rtl/src/marv.rs", "rank": 27, "score": 65674.67585452476 }, { "content": "struct Issue<'a> {\n\n #[allow(unused)]\n\n m: &'a Module<'a>,\n\n replica_issues: Vec<ReplicaIssue<'a>>,\n\n issue_arb_bus_enable: &'a Input<'a>,\n\n 
issue_arb_bus_addr: &'a Input<'a>,\n\n issue_arb_bus_write: &'a Input<'a>,\n\n issue_arb_bus_write_data: &'a Input<'a>,\n\n issue_arb_bus_write_byte_enable: &'a Input<'a>,\n\n issue_arb_bus_ready: &'a Output<'a>,\n\n issue_arb_bus_primary: Option<&'a Input<'a>>,\n\n primary_fifo_full: Option<&'a Input<'a>>,\n\n primary_fifo_write_enable: Option<&'a Output<'a>>,\n\n primary_fifo_write_data: Option<&'a Output<'a>>,\n\n replica_fifo_full: Option<&'a Input<'a>>,\n\n replica_fifo_write_enable: Option<&'a Output<'a>>,\n\n replica_fifo_write_data: Option<&'a Output<'a>>,\n\n}\n\n\n\nimpl<'a> Issue<'a> {\n", "file_path": "rtl/src/buster.rs", "rank": 28, "score": 65674.67585452476 }, { "content": "struct Instruction<'a> {\n\n pub value: &'a dyn Signal<'a>,\n\n}\n\n\n\nimpl<'a> Instruction<'a> {\n\n fn new(value: &'a dyn Signal<'a>) -> Instruction<'a> {\n\n if value.bit_width() != 32 {\n\n panic!(\"value bit width must be 32\");\n\n }\n\n\n\n Instruction {\n\n value,\n\n }\n\n }\n\n\n\n fn opcode(&self) -> &'a dyn Signal<'a> {\n\n self.value.bits(6, 2) // Bottom two bits are always 0b11 for RV32I, so just ignore them\n\n }\n\n\n\n fn rs1(&self) -> &'a dyn Signal<'a> {\n", "file_path": "rtl/src/marv.rs", "rank": 29, "score": 65674.67585452476 }, { "content": "#[derive(Clone, Copy)]\n\nstruct TransformedVertex {\n\n position: Iv4<FRACT_BITS>,\n\n color: Iv4<FRACT_BITS>,\n\n tex_coord: Iv2<FRACT_BITS>,\n\n}\n\n\n\npub enum TextureFilter {\n\n Nearest,\n\n Bilinear,\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub enum TextureDim {\n\n X16,\n\n X32,\n\n X64,\n\n X128,\n\n}\n\n\n\nimpl TextureDim {\n", "file_path": "sw/strugl/src/lib.rs", "rank": 30, "score": 65276.65947409432 }, { "content": "struct ReturnArbiter<'a> {\n\n #[allow(unused)]\n\n pub m: &'a Module<'a>,\n\n primary_fifo_empty: Option<&'a Input<'a>>,\n\n replica_data_fifo_empty_inputs: Vec<&'a Input<'a>>,\n\n replica_data_fifo_read_data_inputs: Vec<&'a Input<'a>>,\n\n replica_buffer_egress_ready: Option<&'a Input<'a>>,\n\n 
replica_buffer_egress_data: Option<&'a Input<'a>>,\n\n primary_fifo_read_enable: &'a Output<'a>,\n\n replica_buffer_egress_read_enable: &'a Output<'a>,\n\n replica_data_fifo_read_enable_outputs: Vec<&'a Output<'a>>,\n\n primary_fifo_read_data: Option<&'a Input<'a>>,\n\n primary_bus_read_data_outputs: Vec<&'a Output<'a>>,\n\n primary_bus_read_data_valid_outputs: Vec<&'a Output<'a>>,\n\n}\n\n\n\nimpl<'a> ReturnArbiter<'a> {\n\n fn new(\n\n instance_name: impl Into<String>,\n\n num_primaries: u32,\n", "file_path": "rtl/src/buster.rs", "rank": 31, "score": 64311.2307396436 }, { "content": "struct PrimaryIssue<'a> {\n\n bus_enable: &'a Input<'a>,\n\n bus_addr: &'a Input<'a>,\n\n bus_write: &'a Input<'a>,\n\n bus_write_data: &'a Input<'a>,\n\n bus_write_byte_enable: &'a Input<'a>,\n\n bus_ready: &'a Output<'a>,\n\n}\n\n\n", "file_path": "rtl/src/buster.rs", "rank": 32, "score": 64311.2307396436 }, { "content": "struct IssueArbiter<'a> {\n\n #[allow(unused)]\n\n m: &'a Module<'a>,\n\n primary_issues: Vec<PrimaryIssue<'a>>,\n\n issue_bus_enable: &'a Output<'a>,\n\n issue_bus_addr: &'a Output<'a>,\n\n issue_bus_write: &'a Output<'a>,\n\n issue_bus_write_data: &'a Output<'a>,\n\n issue_bus_write_byte_enable: &'a Output<'a>,\n\n issue_bus_ready: &'a Input<'a>,\n\n issue_bus_primary: Option<&'a Output<'a>>,\n\n}\n\n\n\nimpl<'a> IssueArbiter<'a> {\n\n fn new(\n\n instance_name: impl Into<String>,\n\n num_primaries: u32,\n\n addr_bit_width: u32,\n\n data_bit_width: u32,\n\n data_byte_width: u32,\n", "file_path": "rtl/src/buster.rs", "rank": 33, "score": 64311.2307396436 }, { "content": "struct ReplicaIssue<'a> {\n\n bus_enable: &'a Output<'a>,\n\n bus_addr: &'a Output<'a>,\n\n bus_write: &'a Output<'a>,\n\n bus_write_data: &'a Output<'a>,\n\n bus_write_byte_enable: &'a Output<'a>,\n\n bus_ready: &'a Input<'a>,\n\n}\n\n\n", "file_path": "rtl/src/buster.rs", "rank": 34, "score": 64311.2307396436 }, { "content": "#[derive(Clone, Copy)]\n\nstruct Vertex {\n\n position: Vec2,\n\n 
color: Vec4,\n\n tex_coord: Vec2,\n\n}\n\n\n", "file_path": "sw/misc/xw-blaster/src/main.rs", "rank": 35, "score": 64082.38044292286 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=src/_cycles.s\");\n\n println!(\"cargo:rerun-if-changed=src/entry.s\");\n\n}\n", "file_path": "sw/xw/build.rs", "rank": 36, "score": 63393.08937075022 }, { "content": "fn main() {\n\n let out_dir = env::var(\"OUT_DIR\").expect(\"No out dir\");\n\n let dest_path = Path::new(&out_dir);\n\n\n\n fs::write(&dest_path.join(\"link.ld\"), include_bytes!(\"link.ld\")).expect(\"Could not write file\");\n\n\n\n println!(\"cargo:rustc-link-search={}\", dest_path.display());\n\n\n\n println!(\"cargo:rerun-if-changed=link.ld\");\n\n}\n", "file_path": "sw/program/build.rs", "rank": 37, "score": 63393.08937075022 }, { "content": "struct SimDevice {\n\n host_command_rx: Receiver<u8>,\n\n host_response_tx: Sender<u8>,\n\n}\n\n\n\nimpl SimDevice {\n\n fn new() -> SimDevice {\n\n let (host_command_tx, host_command_rx) = channel();\n\n let (host_response_tx, host_response_rx) = channel();\n\n\n\n // TODO: This is leaky, but I guess it doesn't matter :)\n\n thread::spawn(move|| {\n\n let mut leds = 0b000;\n\n\n\n let mut is_sending_byte = false;\n\n\n\n let mut top = Top::new();\n\n\n\n let mut is_first_cycle = true;\n\n loop {\n", "file_path": "sw/misc/xw-blaster/src/main.rs", "rank": 38, "score": 62956.29371212372 }, { "content": "struct SerialDevice {\n\n port: Box<dyn SerialPort>,\n\n}\n\n\n\nimpl SerialDevice {\n\n fn new(port_name: String) -> Result<SerialDevice, Error> {\n\n let baud_rate: u32 = 460800;\n\n\n\n let mut settings: SerialPortSettings = Default::default();\n\n settings.baud_rate = baud_rate.into();\n\n\n\n let port = serialport::open_with_settings(&port_name, &settings)?;\n\n let actual_baud_rate = port.baud_rate()?;\n\n if actual_baud_rate != baud_rate {\n\n return Err(format!(\"Unable to achieve specified baud rate: got {}, expected {}\", actual_baud_rate, 
baud_rate).into());\n\n }\n\n\n\n Ok(SerialDevice {\n\n port,\n\n })\n", "file_path": "sw/misc/xw-blaster/src/main.rs", "rank": 39, "score": 62956.29371212372 }, { "content": "#[no_mangle]\n\nfn main() -> ! {\n\n let mut c = Context::new(NativeDevice::new());\n\n\n\n writeln!(stdio::stdout(), \"ready for commands\").unwrap();\n\n\n\n loop {\n\n // TODO: Proper command\n\n uart::write_u8(0x02);\n\n\n\n loop {\n\n // TODO: This obviously won't work once NativeDevice is a proper singleton, but it's fine for now!\n\n let mut device = NativeDevice::new();\n\n\n\n // TODO: Proper command\n\n match uart::read_u8() {\n\n 0x00 => {\n\n // Write word\n\n let addr = uart::read_u32_le();\n\n let data = uart::read_u32_le();\n\n device.write_reg(addr, data);\n", "file_path": "sw/program/src/main.rs", "rank": 40, "score": 62039.61261987957 }, { "content": "fn main() {\n\n let program_rom_file_name = env::args().nth(1).expect(\"No program ROM file name specified\");\n\n let program_elf_file_name = env::args().nth(2).expect(\"No program elf file name specified\");\n\n let signature_file_name = env::args().nth(3).expect(\"No signature file name specified\");\n\n\n\n let program_rom = {\n\n let mut ret = fs::read(program_rom_file_name).expect(\"Couldn't read program ROM file\");\n\n // Zero-pad ROM, since all ROM reads are interpreted as 32-bit reads in sim\n\n while (ret.len() % 4) != 0 {\n\n ret.push(0);\n\n }\n\n ret\n\n };\n\n\n\n let mut mem = vec![0; 0x20000 / 4];\n\n\n\n let mut marv = Marv::new();\n\n\n\n for i in 0..100000000 {\n\n //println!(\"*** CYCLE {} ***\", i);\n", "file_path": "sim/marv/src/main.rs", "rank": 41, "score": 62039.61261987957 }, { "content": "fn main() {\n\n let out_dir = env::var(\"OUT_DIR\").expect(\"No out dir\");\n\n let dest_path = Path::new(&out_dir);\n\n\n\n fs::write(&dest_path.join(\"link.ld\"), include_bytes!(\"link.ld\")).expect(\"Could not write file\");\n\n\n\n println!(\"cargo:rustc-link-search={}\", dest_path.display());\n\n\n\n 
println!(\"cargo:rerun-if-changed=link.ld\");\n\n}\n", "file_path": "sw/boot-rom/build.rs", "rank": 42, "score": 62039.61261987957 }, { "content": "fn main() {\n\n let seed = env::args().skip(1).nth(0).expect(\"seed not specified\").parse().expect(\"Couldn't parse seed\");\n\n let num_elements = env::args().skip(1).nth(1).expect(\"num_elements not specified\").parse().expect(\"Couldn't parse num_elements\");\n\n\n\n println!(\"Testing FIFO with seed = {} and num_elements = {}\", seed, num_elements);\n\n\n\n let data = (0..num_elements).collect::<Vec<_>>();\n\n let mut data_write_ptr = 0;\n\n\n\n let mut read_data = Vec::new();\n\n\n\n let mut m = Fifo::new();\n\n m.reset();\n\n\n\n let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(seed);\n\n\n\n let mut last_read_successful = false;\n\n let mut num_cycles = 0;\n\n\n\n loop {\n", "file_path": "sim/fifo/src/main.rs", "rank": 43, "score": 62039.61261987957 }, { "content": "pub trait Allocator {\n\n fn init(&mut self, heap_start: usize, heap_end: usize);\n\n\n\n fn alloc(&mut self, layout: Layout) -> *mut u8;\n\n fn dealloc(&mut self, ptr: *mut u8, layout: Layout);\n\n}\n", "file_path": "sw/xw/src/heap/allocator.rs", "rank": 44, "score": 62007.43753140583 }, { "content": "struct DepthTestPipe<'a> {\n\n #[allow(unused)]\n\n pub m: &'a Module<'a>,\n\n\n\n // Inputs\n\n pub in_valid: &'a Input<'a>,\n\n pub in_tile_addr: &'a Input<'a>,\n\n\n\n pub in_r: &'a Input<'a>,\n\n pub in_g: &'a Input<'a>,\n\n pub in_b: &'a Input<'a>,\n\n pub in_a: &'a Input<'a>,\n\n\n\n pub in_w_inverse: &'a Input<'a>,\n\n\n\n pub in_z: &'a Input<'a>,\n\n\n\n pub in_s: &'a Input<'a>,\n\n pub in_t: &'a Input<'a>,\n\n\n", "file_path": "rtl/src/color_thrust.rs", "rank": 45, "score": 61842.28877978175 }, { "content": "struct InnerPipe<'a> {\n\n #[allow(unused)]\n\n pub m: &'a Module<'a>,\n\n pub in_a: &'a Input<'a>,\n\n pub in_valid: &'a Input<'a>,\n\n pub out_b: &'a Output<'a>,\n\n pub out_c: &'a Output<'a>,\n\n pub out_valid: &'a Output<'a>,\n\n 
pub d: &'a Input<'a>,\n\n pub e: &'a Output<'a>,\n\n}\n\n\n\nimpl<'a> InnerPipe<'a> {\n\n fn new(instance_name: impl Into<String>, num_pipe_stages: u32, data_bit_width: u32, p: &'a impl ModuleParent<'a>) -> InnerPipe<'a> {\n\n let m = p.module(instance_name, \"InnerPipe\");\n\n\n\n // Pipeline\n\n let in_a = m.input(\"in_a\", data_bit_width);\n\n let in_valid = m.input(\"in_valid\", 1);\n\n let mut a: &'a dyn Signal<'a> = in_a.into();\n", "file_path": "sim/flow-controlled-pipe/build.rs", "rank": 46, "score": 61842.28877978175 }, { "content": "#[repr(C)]\n\nstruct FreeBlockHeader {\n\n next: Option<NonNull<FreeBlockHeader>>,\n\n}\n\n\n\nconst_assert!(mem::size_of::<FreeBlockHeader>() <= SIZE_CLASS_BLOCK_SIZES[0]);\n\nconst_assert!(mem::align_of::<FreeBlockHeader>() <= SIZE_CLASS_BLOCK_SIZES[0]);\n\n\n\npub struct SegregatedListAllocator<T: Allocator> {\n\n free_heads: [Option<NonNull<FreeBlockHeader>>; NUM_SIZE_CLASSES],\n\n fallback_allocator: T,\n\n}\n\n\n\nimpl<T: Allocator> SegregatedListAllocator<T> {\n\n pub const fn new(fallback_allocator: T) -> SegregatedListAllocator<T> {\n\n SegregatedListAllocator {\n\n free_heads: [None; NUM_SIZE_CLASSES],\n\n fallback_allocator,\n\n }\n\n }\n\n}\n", "file_path": "sw/xw/src/heap/segregated_list_allocator.rs", "rank": 47, "score": 60912.45121609992 }, { "content": "#[derive(Debug)]\n\n#[repr(C)]\n\nstruct BlockHeader {\n\n storage: usize,\n\n}\n\n\n\nimpl BlockHeader {\n\n fn new(block_end: usize, is_prev_allocated: bool) -> BlockHeader {\n\n BlockHeader {\n\n storage: block_end | ((is_prev_allocated as usize) << 1),\n\n }\n\n }\n\n\n\n // Explicit lifetimes to work around the compiler inferring that `self` must outlive the return value\n\n fn additional_header<'a, 'b>(&'a self) -> &'b mut FreeBlockAdditionalHeader {\n\n let additional_header_ptr = (self.block_start() + mem::size_of::<Self>()) as *mut FreeBlockAdditionalHeader;\n\n unsafe { &mut *additional_header_ptr }\n\n }\n\n\n\n fn block_start(&self) -> usize {\n\n 
self as *const _ as _\n\n }\n", "file_path": "sw/xw/src/heap/explicit_free_list_allocator.rs", "rank": 48, "score": 60912.30357574276 }, { "content": "#[no_mangle]\n\nfn main() -> ! {\n\n writeln!(stdio::stdout(), \"xw online\").unwrap();\n\n\n\n extern \"C\" {\n\n static mut _sprogram: u8;\n\n static _max_program_size: u8;\n\n }\n\n\n\n // TODO: Proper command\n\n uart::write_u8(0x01);\n\n // TODO: Proper filename\n\n let filename = \"../../program/target/program.bin\";\n\n for b in filename.bytes() {\n\n uart::write_u8(b);\n\n }\n\n uart::write_u8(0);\n\n let program_size = uart::read_u32_le();\n\n // TODO: Is there a better way to get this symbol value?\n\n let max_program_size = unsafe { &_max_program_size as *const _ as u32 };\n\n if program_size > max_program_size {\n", "file_path": "sw/boot-rom/src/main.rs", "rank": 49, "score": 60774.26895452086 }, { "content": "struct Fifo<T> {\n\n inner: VecDeque<T>,\n\n depth: usize,\n\n}\n\n\n\nimpl<T> Fifo<T> {\n\n fn new(depth: usize) -> Fifo<T> {\n\n Fifo {\n\n inner: VecDeque::new(),\n\n depth: depth,\n\n }\n\n }\n\n\n\n fn is_empty(&self) -> bool {\n\n self.inner.len() == 0\n\n }\n\n\n\n fn is_full(&self) -> bool {\n\n self.inner.len() == self.depth\n\n }\n", "file_path": "sim-old/ddr3-simulator/src/lib.rs", "rank": 50, "score": 60721.022219337785 }, { "content": "fn main() -> Result<()> {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let dest_path = Path::new(&out_dir).join(\"modules.rs\");\n\n let file = File::create(&dest_path).unwrap();\n\n\n\n let c = Context::new();\n\n\n\n let fifo = Fifo::new(\"fifo\", 4, 32, &c);\n\n sim::generate(fifo.m, sim::GenerationOptions::default(), file)\n\n}\n", "file_path": "sim/fifo/build.rs", "rank": 51, "score": 60148.70902535414 }, { "content": "fn main() -> Result<()> {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let dest_path = Path::new(&out_dir).join(\"modules.rs\");\n\n let file = File::create(&dest_path).unwrap();\n\n\n\n let c = 
Context::new();\n\n\n\n let marv = Marv::new(\"marv\", &c);\n\n sim::generate(marv.m, sim::GenerationOptions::default(), file)\n\n}\n", "file_path": "sim/marv/build.rs", "rank": 52, "score": 60148.70902535414 }, { "content": "fn main() -> Result<()> {\n\n let c = Context::new();\n\n\n\n let _xenowing = Xenowing::new(\"xenowing\", &c);\n\n let _lfsr = Lfsr::new(\"lfsr\", &c);\n\n let _uart = Uart::new(\"uart\", &c);\n\n\n\n // TODO: Generate verilog for above modules\n\n\n\n Ok(())\n\n}\n", "file_path": "rtl/src/main.rs", "rank": 53, "score": 60148.70902535414 }, { "content": "fn main() -> Result<()> {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let dest_path = Path::new(&out_dir).join(\"modules.rs\");\n\n let mut file = File::create(&dest_path).unwrap();\n\n\n\n let c = Context::new();\n\n\n\n sim::generate(Crossbar::new(\"buster_1x2\", 1, 2, 17, 1, 32, 2, &c).m, sim::GenerationOptions {\n\n override_module_name: Some(\"Buster1x2\".into()),\n\n ..sim::GenerationOptions::default()\n\n }, &mut file)?;\n\n sim::generate(Crossbar::new(\"buster_2x1\", 2, 1, 16, 0, 32, 2, &c).m, sim::GenerationOptions {\n\n override_module_name: Some(\"Buster2x1\".into()),\n\n ..sim::GenerationOptions::default()\n\n }, &mut file)?;\n\n sim::generate(Crossbar::new(\"buster_2x2\", 2, 2, 17, 1, 128, 4, &c).m, sim::GenerationOptions {\n\n override_module_name: Some(\"Buster2x2\".into()),\n\n ..sim::GenerationOptions::default()\n\n }, &mut file)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "sim/buster/build.rs", "rank": 54, "score": 60148.70902535414 }, { "content": "#[derive(Debug)]\n\n#[repr(C)]\n\nstruct FreeBlockFooter {\n\n header: NonNull<BlockHeader>,\n\n}\n\n\n\nconst MIN_FREE_BLOCK_SIZE\n\n : usize\n\n = mem::size_of::<BlockHeader>()\n\n + mem::size_of::<FreeBlockAdditionalHeader>()\n\n + mem::size_of::<FreeBlockFooter>()\n\n ;\n\n\n\n// This is unused at runtime, but is required by const asserts\n\n#[allow(dead_code)]\n\nconst MIN_ALLOCATED_BLOCK_SIZE\n\n : usize\n\n = 
mem::size_of::<BlockHeader>()\n\n + mem::size_of::<NonNull<BlockHeader>>()\n\n ;\n\n\n\nconst MIN_BLOCK_SIZE: usize = MIN_FREE_BLOCK_SIZE;\n", "file_path": "sw/xw/src/heap/explicit_free_list_allocator.rs", "rank": 55, "score": 59973.98292895213 }, { "content": "pub trait Device {\n\n fn write_reg(&mut self, addr: u32, data: u32);\n\n fn read_reg(&mut self, addr: u32) -> u32;\n\n fn write_color_buffer_word(&mut self, addr: u32, data: u128);\n\n fn read_color_buffer_word(&mut self, addr: u32) -> u128;\n\n fn write_depth_buffer_word(&mut self, addr: u32, data: u128);\n\n fn read_depth_buffer_word(&mut self, addr: u32) -> u128;\n\n fn write_tex_buffer_word(&mut self, addr: u32, data: u128);\n\n}\n\n\n\nimpl<D: Device + ?Sized> Device for &mut D {\n\n #[inline]\n\n fn write_reg(&mut self, addr: u32, data: u32) {\n\n (**self).write_reg(addr, data);\n\n }\n\n\n\n #[inline]\n\n fn read_reg(&mut self, addr: u32) -> u32 {\n\n (**self).read_reg(addr)\n\n }\n", "file_path": "color-thrust/color-thrust-interface/src/device.rs", "rank": 56, "score": 59830.58120771579 }, { "content": "#[allow(unused)]\n\nstruct ReadCacheDelayedReturnPath<'a> {\n\n pub m: &'a Module<'a>,\n\n pub invalidate: &'a Input<'a>,\n\n pub client_port: ReplicaPort<'a>,\n\n pub system_port: PrimaryPort<'a>,\n\n}\n\n\n\nimpl<'a> ReadCacheDelayedReturnPath<'a> {\n\n fn new(\n\n instance_name: impl Into<String>,\n\n data_bit_width: u32,\n\n addr_bit_width: u32,\n\n cache_addr_bit_width: u32,\n\n p: &'a impl ModuleParent<'a>,\n\n ) -> ReadCacheDelayedReturnPath<'a> {\n\n let m = p.module(instance_name, \"ReadCacheDelayedReturnPath\");\n\n\n\n let read_cache = ReadCache::new(\"read_cache\", data_bit_width, addr_bit_width, cache_addr_bit_width, m);\n\n\n\n let invalidate = m.input(\"invalidate\", 1);\n", "file_path": "sim/read-cache/build.rs", "rank": 57, "score": 59666.271276319596 }, { "content": "fn main() {\n\n let device_type = env::args().skip(1).nth(0).expect(\"No device type argument provided\");\n\n\n\n 
let mut device: Box<dyn Device> = match device_type.as_str() {\n\n \"model\" => box model_device::ModelDevice::new(),\n\n \"sim\" => box sim_device::SimDevice::new(),\n\n _ => panic!(\"Invalid device type argument\")\n\n };\n\n\n\n let mut window = Window::new(\"strugl\", WIDTH, HEIGHT, WindowOptions {\n\n scale: Scale::X4,\n\n scale_mode: ScaleMode::AspectRatioStretch,\n\n ..WindowOptions::default()\n\n }).unwrap();\n\n\n\n let mut c = Context::new(&mut *device);\n\n\n\n let tex = image::open(\"myface.png\").unwrap();\n\n let texture_dim = match tex.width() {\n\n 16 => TextureDim::X16,\n", "file_path": "sw/misc/strugl-test/src/main.rs", "rank": 58, "score": 59588.721487226016 }, { "content": "#[derive(Debug)]\n\n#[repr(C)]\n\nstruct FreeBlockAdditionalHeader {\n\n free_prev: Option<NonNull<BlockHeader>>,\n\n free_next: Option<NonNull<BlockHeader>>,\n\n}\n\n\n", "file_path": "sw/xw/src/heap/explicit_free_list_allocator.rs", "rank": 59, "score": 59086.77688762115 }, { "content": "fn main() -> Result<()> {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let dest_path = Path::new(&out_dir).join(\"modules.rs\");\n\n let mut file = File::create(&dest_path).unwrap();\n\n\n\n let c = Context::new();\n\n\n\n // TODO: Expose these to test driver somehow so we don't have to duplicate them\n\n let data_bit_width = 32;\n\n let addr_bit_width = 4;\n\n let cache_addr_bit_width = 2;\n\n\n\n sim::generate(ReadCache::new(\n\n \"read_cache\",\n\n data_bit_width, addr_bit_width,\n\n cache_addr_bit_width,\n\n &c,\n\n ).m, sim::GenerationOptions {\n\n tracing: true,\n\n ..sim::GenerationOptions::default()\n", "file_path": "sim/read-cache/build.rs", "rank": 60, "score": 58883.36535999544 }, { "content": "fn main() -> Result<()> {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let dest_path = Path::new(&out_dir).join(\"modules.rs\");\n\n let mut file = File::create(&dest_path).unwrap();\n\n\n\n let c = Context::new();\n\n\n\n let peek_buffer = 
PeekBuffer::new(\"peek_buffer\", 32, &c);\n\n sim::generate(peek_buffer.m, sim::GenerationOptions::default(), &mut file)?;\n\n sim::generate(peek_buffer.m, sim::GenerationOptions {\n\n override_module_name: Some(\"TracingPeekBuffer\".into()),\n\n tracing: true,\n\n ..sim::GenerationOptions::default()\n\n }, file)\n\n}\n", "file_path": "sim/peek-buffer/build.rs", "rank": 61, "score": 58883.36535999544 }, { "content": "fn main() -> Result<()> {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let dest_path = Path::new(&out_dir).join(\"modules.rs\");\n\n let file = File::create(&dest_path).unwrap();\n\n\n\n let c = Context::new();\n\n\n\n const W_INVERSE_FRACT_BITS: u32 = 30;\n\n const RESTORED_W_FRACT_BITS: u32 = 8; // Must be less than W_INVERSE_FRACT_BITS and ST_FRACT_BITS\n\n\n\n let approx_reciprocal = ApproxReciprocal::new(\"approx_reciprocal\", W_INVERSE_FRACT_BITS - RESTORED_W_FRACT_BITS - 3, 4, &c);\n\n sim::generate(approx_reciprocal.m, sim::GenerationOptions::default(), file)\n\n}\n", "file_path": "sim/approx-reciprocal/build.rs", "rank": 62, "score": 58883.36535999544 }, { "content": "struct SystemAllocator<T: Allocator> {\n\n impl_: RefCell<T>,\n\n}\n\n\n\nimpl<T: Allocator> SystemAllocator<T> {\n\n const fn new(impl_: T) -> SystemAllocator<T> {\n\n SystemAllocator {\n\n impl_: RefCell::new(impl_),\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Allocator> SystemAllocator<T> {\n\n fn init(&mut self) {\n\n extern \"C\" {\n\n static _sheap: u8;\n\n static _eheap: u8;\n\n }\n\n\n\n let heap_start = unsafe { &_sheap } as *const _ as usize;\n", "file_path": "sw/xw/src/heap.rs", "rank": 63, "score": 58822.99148204915 }, { "content": "fn main() -> Result<()> {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let dest_path = Path::new(&out_dir).join(\"modules.rs\");\n\n let file = File::create(&dest_path).unwrap();\n\n\n\n let c = Context::new();\n\n\n\n let inner_pipe_num_stages = 4;\n\n let data_bit_width = 32;\n\n\n\n // Outer pipe module\n\n let pipe = 
c.module(\"pipe\", \"Pipe\");\n\n\n\n // Inner pipe\n\n let inner_pipe = InnerPipe::new(\"inner_pipe\", inner_pipe_num_stages, data_bit_width, pipe);\n\n\n\n // Outer pipe\n\n let mut pipe = flow_controlled_pipe::FlowControlledPipe::new(pipe, inner_pipe_num_stages, inner_pipe.in_valid, inner_pipe.out_valid);\n\n pipe.input(\"in_a\", inner_pipe.in_a);\n\n pipe.output(\"out_b\", inner_pipe.out_b);\n\n pipe.output(\"out_c\", inner_pipe.out_c);\n\n pipe.aux_input(\"d\", inner_pipe.d);\n\n pipe.aux_output(\"e\", inner_pipe.e);\n\n\n\n sim::generate(pipe.m, sim::GenerationOptions {\n\n tracing: true,\n\n ..Default::default()\n\n }, file)\n\n}\n\n\n", "file_path": "sim/flow-controlled-pipe/build.rs", "rank": 64, "score": 57697.817892700594 }, { "content": "fn main() -> Result<()> {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let dest_path = Path::new(&out_dir).join(\"modules.rs\");\n\n let file = File::create(&dest_path).unwrap();\n\n\n\n let c = Context::new();\n\n\n\n let m = c.module(\"top\", \"Top\");\n\n\n\n let xenowing = Xenowing::new(\"xenowing\", m);\n\n\n\n m.output(\"leds\", xenowing.leds);\n\n\n\n let clock_freq = 100000000;\n\n let uart_baud_rate = 460800;\n\n\n\n let uart_rx = UartRx::new(\"uart_rx\", clock_freq, uart_baud_rate, m);\n\n uart_rx.rx.drive(xenowing.tx);\n\n m.output(\"uart_tx_data\", uart_rx.data);\n\n m.output(\"uart_tx_data_valid\", uart_rx.data_valid);\n\n\n\n let uart_tx = UartTx::new(\"uart_tx\", clock_freq, uart_baud_rate, m);\n\n xenowing.rx.drive(uart_tx.tx);\n\n m.output(\"uart_rx_ready\", uart_tx.ready);\n\n uart_tx.data.drive(m.input(\"uart_rx_data\", 8));\n\n uart_tx.enable.drive(m.input(\"uart_rx_enable\", 1));\n\n\n\n sim::generate(m, sim::GenerationOptions::default(), file)\n\n}\n", "file_path": "sw/misc/xw-blaster/build.rs", "rank": 65, "score": 57697.817892700594 }, { "content": "fn main() -> Result<()> {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n let dest_path = 
Path::new(&out_dir).join(\"modules.rs\");\n\n let file = File::create(&dest_path).unwrap();\n\n\n\n let p = Context::new();\n\n\n\n let m = p.module(\"top\", \"Top\");\n\n\n\n let color_thrust = ColorThrust::new(\"color_thrust\", m);\n\n\n\n let mem_addr_bit_width = 13;\n\n let mem = ByteRam::new(\"mem\", mem_addr_bit_width, SYSTEM_BUS_ADDR_BITS, m);\n\n\n\n // Interconnect\n\n color_thrust.reg_port.forward(\"reg\", m);\n\n color_thrust.color_buffer_port.forward(\"color_buffer\", m);\n\n color_thrust.depth_buffer_port.forward(\"depth_buffer\", m);\n\n\n\n let mem_crossbar = Crossbar::new(\"mem_crossbar\", 2, 1, SYSTEM_BUS_ADDR_BITS, 0, 128, 5, m);\n\n\n\n mem_crossbar.replica_ports[0].forward(\"mem\", m);\n\n color_thrust.tex_cache_system_port.connect(&mem_crossbar.replica_ports[1]);\n\n\n\n mem_crossbar.primary_ports[0].connect(&mem.client_port);\n\n\n\n sim::generate(m, sim::GenerationOptions::default(), file)\n\n}\n", "file_path": "color-thrust/color-thrust-test-devices/build.rs", "rank": 66, "score": 55537.70959917361 }, { "content": "fn leading_zeros<'a>(x: &'a dyn Signal<'a>, m: &'a Module<'a>) -> &'a dyn Signal<'a> {\n\n let mut ret = m.lit(0u32, 5);\n\n\n\n for i in 0..31 {\n\n ret = if_(x.bit(i), {\n\n m.lit(31 - i, 5)\n\n }).else_({\n\n ret\n\n });\n\n }\n\n\n\n ret\n\n}\n", "file_path": "rtl/src/approx_reciprocal.rs", "rank": 67, "score": 55382.640011938965 }, { "content": "fn main() -> io::Result<()> {\n\n let seed = env::args().skip(1).nth(0).expect(\"seed not specified\").parse().expect(\"Couldn't parse seed\");\n\n let num_elements = env::args().skip(1).nth(1).expect(\"num_elements not specified\").parse().expect(\"Couldn't parse num_elements\");\n\n\n\n println!(\"Testing PeekBuffer with seed = {} and num_elements = {}\", seed, num_elements);\n\n\n\n let data = (0..num_elements).collect::<Vec<_>>();\n\n let mut ingress_data_ptr = 0;\n\n let mut last_ingress_read_successful = false;\n\n\n\n let mut read_data = Vec::new();\n\n\n\n let trace = 
build_trace(\"PeekBuffer__fuzz\")?;\n\n\n\n let mut m = TracingPeekBuffer::new(trace)?;\n\n let mut time_stamp = 0;\n\n\n\n m.reset();\n\n\n\n let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(seed);\n", "file_path": "sim/peek-buffer/src/main.rs", "rank": 68, "score": 54979.13064299865 }, { "content": "#[test]\n\nfn read_all() -> io::Result<()> {\n\n let addr_bit_width = 4;\n\n let num_elements = 1 << addr_bit_width;\n\n let data = (0..num_elements).collect::<Vec<_>>();\n\n\n\n let mut client_read_addr = 0;\n\n let mut client_read_data = Vec::new();\n\n\n\n let mut system_read_addr = None;\n\n\n\n let trace = build_trace(\"ReadCache__read_all\")?;\n\n\n\n let mut m = ReadCache::new(trace)?;\n\n let mut time_stamp = 0;\n\n\n\n m.reset();\n\n\n\n loop {\n\n m.prop();\n\n m.update_trace(time_stamp)?;\n", "file_path": "sim/read-cache/src/tests.rs", "rank": 69, "score": 54979.13064299865 }, { "content": "fn main() -> io::Result<()> {\n\n let seed = env::args().skip(1).nth(0).expect(\"seed not specified\").parse().expect(\"Couldn't parse seed\");\n\n let num_cycles = env::args().skip(2).nth(0).expect(\"num cycles not specified\").parse().expect(\"Couldn't parse num cycles\");\n\n\n\n let data_bit_width = 32;\n\n\n\n let mem_addr_bit_width = 4;\n\n let mem_num_elements = 1 << mem_addr_bit_width;\n\n let mem_data = (0..mem_num_elements).collect::<Vec<_>>();\n\n\n\n let cache_addr_bit_width = 2;\n\n let cache_num_elements = 1 << cache_addr_bit_width;\n\n\n\n println!(\"Testing ReadCache with seed = {}, num cycles = {}, mem size = {} bytes, cache size = {} bytes\", seed, num_cycles, mem_num_elements * data_bit_width / 8, cache_num_elements * data_bit_width / 8);\n\n\n\n let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(seed);\n\n\n\n let trace = build_trace(\"ReadCache__fuzz\")?;\n\n\n\n let mut m = ReadCacheDelayedReturnPath::new(trace)?;\n", "file_path": "sim/read-cache/src/main.rs", "rank": 70, "score": 54979.13064299865 }, { "content": "#[test]\n\nfn 
read_all_with_delays() -> io::Result<()> {\n\n let addr_bit_width = 4;\n\n let num_elements = 1 << addr_bit_width;\n\n let data = (0..num_elements).collect::<Vec<_>>();\n\n\n\n let mut client_read_addr = 0;\n\n let mut client_read_data = Vec::new();\n\n\n\n let mut system_read_addr = None;\n\n\n\n let trace = build_trace(\"ReadCache__read_all_with_delays\")?;\n\n\n\n let mut m = ReadCacheDelayedReturnPath::new(trace)?;\n\n let mut time_stamp = 0;\n\n\n\n m.reset();\n\n\n\n loop {\n\n m.prop();\n\n m.update_trace(time_stamp)?;\n", "file_path": "sim/read-cache/src/tests.rs", "rank": 71, "score": 53932.09115182086 }, { "content": "fn main() -> Result<(), Error> {\n\n let mut device: Box<dyn Device> = if let Some(port_name) = env::args().nth(1) {\n\n println!(\"Creating serial device on port {}\", port_name);\n\n Box::new(SerialDevice::new(port_name)?)\n\n } else {\n\n println!(\"Creating sim device\");\n\n Box::new(SimDevice::new())\n\n };\n\n println!();\n\n\n\n let mut back_buffer = vec![0xffff00ff; PIXELS];\n\n\n\n let mut window = Window::new(\"trim\", WIDTH, HEIGHT, WindowOptions {\n\n scale: Scale::X4,\n\n scale_mode: ScaleMode::AspectRatioStretch,\n\n ..WindowOptions::default()\n\n }).unwrap();\n\n\n\n let tex = image::open(\"tex.png\").unwrap();\n\n\n", "file_path": "sw/misc/xw-blaster/src/main.rs", "rank": 72, "score": 53932.09115182086 }, { "content": "fn main() -> io::Result<()> {\n\n let seed = env::args().skip(1).nth(0).expect(\"seed not specified\").parse().expect(\"Couldn't parse seed\");\n\n let num_elements = env::args().skip(2).nth(0).expect(\"num_elements not specified\").parse().expect(\"Couldn't parse num_elements\");\n\n\n\n println!(\"Testing Pipe with seed = {}, num_elements = {}\", seed, num_elements);\n\n\n\n let data = (0..num_elements).collect::<Vec<_>>();\n\n let mut data_write_ptr = 0;\n\n let mut data_read_ptr = 0;\n\n\n\n let trace = build_trace(\"Pipe__fuzz\")?;\n\n\n\n let mut m = Pipe::new(trace)?;\n\n let mut time_stamp = 0;\n\n\n\n 
m.reset();\n\n\n\n let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(seed);\n\n\n\n loop {\n", "file_path": "sim/flow-controlled-pipe/src/main.rs", "rank": 73, "score": 53932.09115182086 }, { "content": "#[panic_handler]\n\nfn panic_handler(panic_info: &PanicInfo) -> ! {\n\n leds::set(0xff);\n\n\n\n writeln!(stdio::stdout(), \"Panic: {}\", panic_info).ok();\n\n\n\n loop {\n\n marv::sleep_cycles(100000000 / 4);\n\n leds::set(0x00);\n\n marv::sleep_cycles(100000000 / 4);\n\n leds::set(0xff);\n\n }\n\n}\n", "file_path": "sw/xw/src/lib.rs", "rank": 74, "score": 52945.3747513334 }, { "content": "#[test]\n\nfn read_first_addr() -> io::Result<()> {\n\n let trace = build_trace(\"ReadCache__read_first_addr\")?;\n\n\n\n let mut m = ReadCache::new(trace)?;\n\n let mut time_stamp = 0;\n\n\n\n m.reset();\n\n m.system_bus_ready = true;\n\n m.system_bus_read_data = 0;\n\n m.system_bus_read_data_valid = false;\n\n\n\n // Issue read to addr 0 (should cause a cache miss)\n\n m.client_bus_enable = true;\n\n m.client_bus_addr = 0;\n\n loop {\n\n m.prop();\n\n m.update_trace(time_stamp)?;\n\n\n\n let client_bus_ready = m.client_bus_ready;\n\n m.posedge_clk();\n", "file_path": "sim/read-cache/src/tests.rs", "rank": 75, "score": 52945.3747513334 }, { "content": "#[test]\n\nfn read_all_multiple_times_with_delays() -> io::Result<()> {\n\n let addr_bit_width = 4;\n\n let num_elements = 1 << addr_bit_width;\n\n let data = (0..num_elements).collect::<Vec<_>>();\n\n let repeat_times = 4;\n\n let expanded_data = data.clone().into_iter().flat_map(|x| vec![x; repeat_times]).collect::<Vec<_>>();\n\n\n\n let mut client_read_addr = 0;\n\n let mut client_read_repeat_count = 0;\n\n let mut client_read_data = Vec::new();\n\n\n\n let mut system_read_addr = None;\n\n\n\n let trace = build_trace(\"ReadCache__read_all_multiple_times_with_delays\")?;\n\n\n\n let mut m = ReadCacheDelayedReturnPath::new(trace)?;\n\n let mut time_stamp = 0;\n\n\n\n m.reset();\n\n\n", "file_path": 
"sim/read-cache/src/tests.rs", "rank": 76, "score": 52013.914315976064 }, { "content": "fn main() -> Result<(), Error> {\n\n let port_name = env::args().nth(1).expect(\"No COM port name specified\");\n\n let baud_rate: u32 = 460800;\n\n\n\n let mut settings: SerialPortSettings = Default::default();\n\n settings.baud_rate = baud_rate.into();\n\n\n\n let mut port = serialport::open_with_settings(&port_name, &settings)?;\n\n let actual_baud_rate = port.baud_rate()?;\n\n if actual_baud_rate != baud_rate {\n\n panic!(\"Unable to achieve specified baud rate: got {}, expected {}\", actual_baud_rate, baud_rate);\n\n }\n\n let mut buf = vec![0; 1000];\n\n\n\n let start_state = 0xace1u16;\n\n let mut lfsr = start_state;\n\n\n\n let mut total_bytes = 0u64;\n\n let mut total_errors = 0u64;\n\n\n", "file_path": "mimas_a7/test/uart/misc/uart-check/src/main.rs", "rank": 77, "score": 51133.1947790711 }, { "content": "fn main() -> Result<(), Error> {\n\n let port_name = env::args().nth(1).expect(\"No COM port name specified\");\n\n let baud_rate: u32 = 460800;\n\n\n\n let mut settings: SerialPortSettings = Default::default();\n\n settings.baud_rate = baud_rate.into();\n\n\n\n let mut port = serialport::open_with_settings(&port_name, &settings)?;\n\n let actual_baud_rate = port.baud_rate()?;\n\n if actual_baud_rate != baud_rate {\n\n panic!(\"Unable to achieve specified baud rate: got {}, expected {}\", actual_baud_rate, baud_rate);\n\n }\n\n\n\n loop {\n\n let write_cycles = read_u64(&mut port)?;\n\n println!(\"Write cycles: {} (0x{:016x})\", write_cycles, write_cycles);\n\n let read_cycles = read_u64(&mut port)?;\n\n println!(\"Read cycles: {} (0x{:016x})\", read_cycles, read_cycles);\n\n let total_cycles = write_cycles + read_cycles;\n\n println!(\"Total cycles: {} (0x{:016x})\", total_cycles, total_cycles);\n\n println!(\"\");\n\n }\n\n}\n\n\n", "file_path": "mimas_a7/test/ddr3/misc/ddr3-check/src/main.rs", "rank": 78, "score": 51133.1947790711 }, { "content": "fn 
size_class(layout: Layout) -> Option<usize> {\n\n let block_size = layout.size().max(layout.align());\n\n SIZE_CLASS_BLOCK_SIZES.iter().position(|&size| size >= block_size)\n\n}\n", "file_path": "sw/xw/src/heap/segregated_list_allocator.rs", "rank": 79, "score": 47719.4035736104 }, { "content": "// Assumes `align` is a power of 2\n\nfn align_up(addr: usize, align: usize) -> usize {\n\n align_down(addr + align - 1, align)\n\n}\n", "file_path": "sw/xw/src/heap/explicit_free_list_allocator.rs", "rank": 80, "score": 45876.62793644854 }, { "content": "// Assumes `align` is a power of 2\n\nfn align_down(addr: usize, align: usize) -> usize {\n\n addr & !(align - 1)\n\n}\n\n\n", "file_path": "sw/xw/src/heap/explicit_free_list_allocator.rs", "rank": 81, "score": 45876.62793644854 }, { "content": "fn initialize_footer(header: NonNull<BlockHeader>, block_end: usize) {\n\n let footer_ptr = (block_end - mem::size_of::<FreeBlockFooter>()) as _;\n\n let footer = FreeBlockFooter {\n\n header,\n\n };\n\n unsafe {\n\n ptr::write(footer_ptr, footer);\n\n }\n\n}\n\n\n", "file_path": "sw/xw/src/heap/explicit_free_list_allocator.rs", "rank": 82, "score": 43083.24463603382 }, { "content": "use std::env;\n\nuse std::fs::File;\n\nuse std::io::{Result, Write};\n\nuse std::path::Path;\n\n\n", "file_path": "sw/trig/build.rs", "rank": 83, "score": 42117.88656635456 }, { "content": "#![feature(core_intrinsics)]\n\n#![no_std]\n\n\n\nmod tab {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/tab.rs\"));\n\n}\n\n\n\nuse tab::*;\n\n\n\nuse core::intrinsics;\n\n\n", "file_path": "sw/trig/src/lib.rs", "rank": 84, "score": 40650.58407725906 }, { "content": "fn read_u64<R: io::Read>(port: &mut R) -> Result<u64, Error> {\n\n loop {\n\n let mut buf = [0; 8];\n\n match port.read_exact(&mut buf) {\n\n Ok(()) => {\n\n return Ok(u64::from_le_bytes(buf));\n\n }\n\n Err(ref e) if e.kind() == io::ErrorKind::TimedOut => (),\n\n Err(e) => Err(e)?\n\n }\n\n }\n\n}\n", "file_path": 
"mimas_a7/test/ddr3/misc/ddr3-check/src/main.rs", "rank": 85, "score": 38705.123510522244 }, { "content": "fn initialize_free_block(block_start: usize, block_end: usize, is_prev_allocated: bool) -> NonNull<BlockHeader> {\n\n let header_ptr = block_start as _;\n\n let header = BlockHeader::new(block_end, is_prev_allocated);\n\n unsafe {\n\n ptr::write(header_ptr, header);\n\n }\n\n\n\n let additional_header_ptr = (block_start + mem::size_of::<BlockHeader>()) as _;\n\n let additional_header = FreeBlockAdditionalHeader {\n\n free_prev: None,\n\n free_next: None,\n\n };\n\n unsafe {\n\n ptr::write(additional_header_ptr, additional_header);\n\n }\n\n\n\n let ret = unsafe { NonNull::new_unchecked(header_ptr) };\n\n\n\n initialize_footer(ret, block_end);\n\n\n\n ret\n\n}\n\n\n", "file_path": "sw/xw/src/heap/explicit_free_list_allocator.rs", "rank": 86, "score": 37253.24571395743 }, { "content": " V4::new(0.0, y, 0.0, 0.0),\n\n V4::new(0.0, 0.0, z, 0.0),\n\n V4::new(0.0, 0.0, 0.0, 1.0),\n\n ]\n\n }\n\n }\n\n\n\n pub fn ortho(left: f32, right: f32, bottom: f32, top: f32, z_near: f32, z_far: f32) -> M4 {\n\n let tx = -(right + left) / (right - left);\n\n let ty = -(top + bottom) / (top - bottom);\n\n let tz = -(z_far + z_near) / (z_far - z_near);\n\n\n\n M4 {\n\n columns: [\n\n V4::new(2.0 / (right - left), 0.0, 0.0, 0.0),\n\n V4::new(0.0, 2.0 / (top - bottom), 0.0, 0.0),\n\n V4::new(0.0, 0.0, -2.0 / (z_far - z_near), 0.0),\n\n V4::new(tx, ty, tz, 1.0),\n\n ]\n\n }\n", "file_path": "sw/linalg/src/m4.rs", "rank": 88, "score": 41.27897805861214 }, { "content": "use crate::v4::*;\n\n\n\nuse trig::*;\n\n\n\nuse core::ops::Mul;\n\n\n\n#[derive(Clone, Copy)]\n\npub struct M4 {\n\n pub columns: [V4; 4],\n\n}\n\n\n\nimpl M4 {\n\n pub fn identity() -> M4 {\n\n M4 {\n\n columns: [\n\n V4::new(1.0, 0.0, 0.0, 0.0),\n\n V4::new(0.0, 1.0, 0.0, 0.0),\n\n V4::new(0.0, 0.0, 1.0, 0.0),\n\n V4::new(0.0, 0.0, 0.0, 1.0),\n\n ],\n", "file_path": "sw/linalg/src/m4.rs", "rank": 91, "score": 
27.401763191187495 }, { "content": "use crate::fixed::*;\n\n\n\nuse core::ops::{Add, AddAssign, Div, Mul, Sub};\n\n\n\n#[derive(Clone, Copy)]\n\npub struct Iv2<const FRACT_BITS: u32> {\n\n pub x: Fixed<FRACT_BITS>,\n\n pub y: Fixed<FRACT_BITS>,\n\n}\n\n\n\nimpl<const FRACT_BITS: u32> Iv2<FRACT_BITS> {\n\n pub fn new(\n\n x: impl Into<Fixed<FRACT_BITS>>,\n\n y: impl Into<Fixed<FRACT_BITS>>,\n\n ) -> Self {\n\n Self {\n\n x: x.into(),\n\n y: y.into(),\n\n }\n\n }\n", "file_path": "sw/linalg/src/iv2.rs", "rank": 93, "score": 23.95440261150097 }, { "content": "use crate::iv4::*;\n\n\n\nuse core::intrinsics;\n\nuse core::ops::{Add, AddAssign, Div, DivAssign, Mul, Sub};\n\n\n\n#[derive(Clone, Copy)]\n\npub struct V4 {\n\n pub x: f32,\n\n pub y: f32,\n\n pub z: f32,\n\n pub w: f32,\n\n}\n\n\n\nimpl V4 {\n\n pub fn new(x: impl Into<f32>, y: impl Into<f32>, z: impl Into<f32>, w: impl Into<f32>) -> V4 {\n\n V4 {\n\n x: x.into(),\n\n y: y.into(),\n\n z: z.into(),\n\n w: w.into(),\n", "file_path": "sw/linalg/src/v4.rs", "rank": 94, "score": 23.63947980792523 }, { "content": "use crate::fixed::*;\n\n\n\nuse core::ops::{Add, AddAssign, Div, Mul, Sub};\n\n\n\n#[derive(Clone, Copy)]\n\npub struct Iv3<const FRACT_BITS: u32> {\n\n pub x: Fixed<FRACT_BITS>,\n\n pub y: Fixed<FRACT_BITS>,\n\n pub z: Fixed<FRACT_BITS>,\n\n}\n\n\n\nimpl<const FRACT_BITS: u32> Iv3<FRACT_BITS> {\n\n pub fn new(\n\n x: impl Into<Fixed<FRACT_BITS>>,\n\n y: impl Into<Fixed<FRACT_BITS>>,\n\n z: impl Into<Fixed<FRACT_BITS>>,\n\n ) -> Self {\n\n Self {\n\n x: x.into(),\n\n y: y.into(),\n", "file_path": "sw/linalg/src/iv3.rs", "rank": 95, "score": 23.51650295194258 }, { "content": "use crate::fixed::*;\n\n\n\nuse core::ops::{Add, AddAssign, Mul, Sub};\n\n\n\n#[derive(Clone, Copy)]\n\npub struct Iv4<const FRACT_BITS: u32> {\n\n pub x: Fixed<FRACT_BITS>,\n\n pub y: Fixed<FRACT_BITS>,\n\n pub z: Fixed<FRACT_BITS>,\n\n pub w: Fixed<FRACT_BITS>,\n\n}\n\n\n\nimpl<const FRACT_BITS: u32> Iv4<FRACT_BITS> {\n\n pub fn 
new(\n\n x: impl Into<Fixed<FRACT_BITS>>,\n\n y: impl Into<Fixed<FRACT_BITS>>,\n\n z: impl Into<Fixed<FRACT_BITS>>,\n\n w: impl Into<Fixed<FRACT_BITS>>,\n\n ) -> Self {\n\n Self {\n", "file_path": "sw/linalg/src/iv4.rs", "rank": 96, "score": 23.157050242604715 }, { "content": "use core::ops::{Add, AddAssign, Div, Mul, Neg, Sub};\n\n\n\n#[derive(Clone, Copy, Eq, PartialEq, PartialOrd, Ord)]\n\npub struct Fixed<const FRACT_BITS: u32>(i32);\n\n\n\nimpl<const FRACT_BITS: u32> Fixed<FRACT_BITS> {\n\n pub fn ceil(self) -> Self {\n\n if FRACT_BITS > 0 {\n\n Self(self.0 + (1 << (FRACT_BITS - 1))).floor()\n\n } else {\n\n self\n\n }\n\n }\n\n\n\n pub fn floor(self) -> Self {\n\n if FRACT_BITS > 0 {\n\n Self(self.0 & !((1 << FRACT_BITS) - 1))\n\n } else {\n\n self\n\n }\n", "file_path": "sw/linalg/src/fixed.rs", "rank": 97, "score": 21.491454491892107 }, { "content": "use core::intrinsics;\n\nuse core::ops::{Add, AddAssign, Div, DivAssign, Mul, Sub};\n\n\n\n#[derive(Clone, Copy)]\n\npub struct V3 {\n\n pub x: f32,\n\n pub y: f32,\n\n pub z: f32,\n\n}\n\n\n\nimpl V3 {\n\n pub fn new(x: f32, y: f32, z: f32) -> V3 {\n\n V3 {\n\n x,\n\n y,\n\n z,\n\n }\n\n }\n\n\n\n pub fn splat(value: f32) -> V3 {\n", "file_path": "sw/linalg/src/v3.rs", "rank": 98, "score": 21.262407220424265 }, { "content": "use core::intrinsics;\n\nuse core::ops::{Add, AddAssign, Div, DivAssign, Mul, Sub};\n\n\n\n#[derive(Clone, Copy)]\n\npub struct V2 {\n\n pub x: f32,\n\n pub y: f32,\n\n}\n\n\n\nimpl V2 {\n\n pub fn new(x: f32, y: f32) -> V2 {\n\n V2 {\n\n x,\n\n y,\n\n }\n\n }\n\n\n\n pub fn splat(value: f32) -> V2 {\n\n V2 {\n\n x: value,\n", "file_path": "sw/linalg/src/v2.rs", "rank": 99, "score": 20.932437754800183 } ]
Rust
memscanner/src/signature/mod.rs
garlond/memscanner
dcc322a6c83133dcc494472933b6be410ca46583
mod parser; use super::MemReader; use failure::{format_err, Error}; #[derive(Clone, Debug, PartialEq, Eq)] enum Match { Any, Position, Literal(u8), } #[derive(Clone, Debug, PartialEq, Eq)] enum Op { Asm(Vec<Match>), Ptr(i32), } #[derive(Clone, Debug, PartialEq, Eq)] pub struct Signature { ops: Vec<Op>, } impl Signature { pub fn new(ops: &Vec<String>) -> Result<Signature, Error> { let mut sig = Signature { ops: vec![] }; for op_str in ops { let (_, op) = parser::parse_op(op_str).map_err(|_| format_err!("Can't parse op: {}", op_str))?; sig.ops.push(op); } Ok(sig) } pub fn resolve(&self, mem: &dyn MemReader, start_addr: u64, end_addr: u64) -> Option<u64> { let mut addr = start_addr; for op in &self.ops { addr = match &op { Op::Asm(p) => resolve_asm(mem, start_addr, end_addr, &p)?, Op::Ptr(o) => resolve_ptr(mem, addr, *o)?, }; } Some(addr) } } fn offset_addr(addr: u64, offset: i32) -> u64 { if offset < 0 { addr - (-offset) as u64 } else { addr + offset as u64 } } fn check_pattern( mem: &dyn MemReader, start_addr: u64, end_addr: u64, pattern: &[Match], ) -> Option<u64> { if end_addr - start_addr < pattern.len() as u64 { println!("Not enough room for pattern"); return None; } let mut mem_contents = vec![0x0; pattern.len()]; let mem_read = mem.read(&mut mem_contents, start_addr, pattern.len()); if mem_read != pattern.len() { println!("incomplete read"); return None; } let mut offset: Option<u64> = None; for i in 0..pattern.len() { match &pattern[i] { Match::Position => { if offset == None { offset = Some(i as u64); } } Match::Any => {} Match::Literal(val) => { if mem_contents[i] != *val { return None; } } }; } match offset { None => Some(pattern.len() as u64), Some(_) => offset, } } fn resolve_match( mem: &dyn MemReader, start_addr: u64, end_addr: u64, pattern: &[Match], ) -> Option<u64> { let mem_len = end_addr - start_addr; for i in 0..=(mem_len as usize - pattern.len()) { if let Some(offset) = check_pattern(mem, start_addr + i as u64, end_addr, pattern) { return 
Some(start_addr + offset + i as u64); } } None } fn resolve_asm( mem: &dyn MemReader, start_addr: u64, end_addr: u64, pattern: &[Match], ) -> Option<u64> { let match_addr = resolve_match(mem, start_addr, end_addr, pattern)?; let offset = mem.read_i32(match_addr)?; let addr = offset_addr(match_addr, offset) + 4; Some(addr) } fn resolve_ptr(mem: &dyn MemReader, addr: u64, offset: i32) -> Option<u64> { let addr = (addr as i64 + offset as i64) as u64; let addr = mem.read_u64(addr)?; Some(addr) } #[cfg(test)] mod tests { use super::super::test::TestMemReader; use super::*; #[test] fn single_lea() { #[rustfmt::skip] let mem = TestMemReader { mem: vec![ 0xff, 0xff, 0xff, 0xff, 0x00, 0x11, 0x22, 0x33, 0x04, 0x00, 0x00, 0x00, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, ], start_addr: 0x1000, }; let sig = Signature::new(&vec!["asm(00112233^^^^^^^^********)".to_string()]).unwrap(); println!("{:?}", sig); let offset = sig .resolve(&mem, mem.start_addr, mem.start_addr + mem.mem.len() as u64) .unwrap(); assert_eq!(offset, 0x1010); assert_eq!(mem.read_u64(offset).unwrap(), 0xffeeddccbbaa9988); } }
mod parser; use super::MemReader; use failure::{format_err, Error}; #[derive(Clone, Debug, PartialEq, Eq)] enum Match { Any, Position, Literal(u8), } #[derive(Clone, Debug, PartialEq, Eq)] enum Op { Asm(Vec<Match>), Ptr(i32), } #[derive(Clone, Debug, PartialEq, Eq)] pub struct Signature { ops: Vec<Op>, } impl Signature { pub fn new(ops: &Vec<String>) -> Result<Signature, Error> { let mut sig = Signature { ops: vec![] }; for op_str in ops { let (_, op) = parser::parse_op(op_str).map_err(|_| format_err!("Can't parse op: {}", op_str))?; sig.ops.push(op); } Ok(sig) } pub fn resolve(&self, mem: &dyn MemReader, start_addr: u64, end_addr: u64) -> Option<u64> { let mut addr = start_addr; for op in &self.ops { addr = match &op { Op::Asm(p) => resolve_asm(mem, start_addr, end_addr, &p)?, Op::Ptr(o) => resolve_ptr(mem, addr, *o)?, }; } Some(addr) } } fn offset_addr(addr: u64, offset: i32) -> u64 { if offset < 0 { addr - (-offset) as u64 } else { addr + offset as u64 } } fn check_pattern( mem: &dyn MemReader, start_addr: u64, end_addr: u64, pattern: &[Match], ) -> Option<u64> { if end_addr - start_addr < pattern.len() as u64 { println!("Not enough room for pattern"); return None; } let mut mem_contents = vec![0x0; pattern.len()]; let mem_read = mem.read(&mut mem_contents, start_addr, pattern.len()); if mem_read != pattern.len() { println!("incomplete read"); return None; } let mut offset: Option<u64> = None; for i in 0..pattern.len() { match &pattern[i] { Match::Position => { if offset == None { offset = Some(i as u64); } } Match::Any => {} Match::Literal(val) => { if mem_contents[i] != *val { return None; } } }; } match offset { None => Some(pattern.len() as u64), Some(_) => offset, } } fn resolve_match( mem: &dyn MemReader, start_addr: u64, end_addr: u64, pattern: &[Match], ) -> Option<u64> { let mem_len = end_addr - start_addr; for i in 0..=(mem_len as usize - pattern.len()) { if let Some(offset) = check_pattern(mem, start_addr + i as u64, end_addr, pattern) { return 
Some(start_addr + offset + i as u64); } } None } fn resolve_asm( mem: &dyn MemReader, start_addr: u64, end_addr: u64, pattern: &[Match], ) -> Option<u64> { let match_addr = resolve_match(mem, start_addr, end_addr, pattern)?; let offset = mem.read_i32(match_addr)?; let addr = offset_addr(match_addr, offset) + 4; Some(addr) } fn resolve_ptr(mem: &dyn MemReader, addr: u64, offset: i32) -> Option<u64> { let addr = (addr as i64 + offset as i64) as u64; let addr = mem.read_u64(addr)?; Some(addr) } #[cfg(test)] mod tests { use super::super::test::TestMemReader; use super::*; #[test]
}
fn single_lea() { #[rustfmt::skip] let mem = TestMemReader { mem: vec![ 0xff, 0xff, 0xff, 0xff, 0x00, 0x11, 0x22, 0x33, 0x04, 0x00, 0x00, 0x00, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, ], start_addr: 0x1000, }; let sig = Signature::new(&vec!["asm(00112233^^^^^^^^********)".to_string()]).unwrap(); println!("{:?}", sig); let offset = sig .resolve(&mem, mem.start_addr, mem.start_addr + mem.mem.len() as u64) .unwrap(); assert_eq!(offset, 0x1010); assert_eq!(mem.read_u64(offset).unwrap(), 0xffeeddccbbaa9988); }
function_block-full_function
[ { "content": "fn parse_position(input: &str) -> IResult<&str, Match> {\n\n value(Match::Position, tag(\"^^\"))(input)\n\n}\n\n\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 6, "score": 111181.45690729145 }, { "content": "fn parse_i32(input: &str) -> IResult<&str, i32> {\n\n map_res(match_signed_integer, |s: &str| s.parse::<i32>())(input)\n\n}\n\n\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 7, "score": 100836.02648634149 }, { "content": "fn parse_match(input: &str) -> IResult<&str, Match> {\n\n alt((parse_any, parse_position, parse_literal))(input)\n\n}\n\n\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 8, "score": 99945.94473002697 }, { "content": "fn literal_from_hex(input: &str) -> Result<Match, std::num::ParseIntError> {\n\n let val = u8::from_str_radix(input, 16)?;\n\n Ok(Match::Literal(val))\n\n}\n\n\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 9, "score": 97782.63663719376 }, { "content": "pub fn new_mem_cache(config: &ArrayConfig) -> TestMemReader {\n\n let mut data = Vec::with_capacity(config.element_size as usize);\n\n data.resize_with(config.element_size as usize, Default::default);\n\n TestMemReader {\n\n mem: data,\n\n start_addr: 0x0,\n\n }\n\n}\n\n\n", "file_path": "memscanner/src/macro_helpers.rs", "rank": 11, "score": 95500.23569684051 }, { "content": "fn parse_any(input: &str) -> IResult<&str, Match> {\n\n value(Match::Any, tag(\"**\"))(input)\n\n}\n\n\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 12, "score": 95072.11752301478 }, { "content": "pub fn read_enum<T: Sized + Default + FromPrimitive>(\n\n e: &mut T,\n\n mem: &dyn MemReader,\n\n addr: u64,\n\n) -> Result<(), Error> {\n\n match size_of_val(e) {\n\n 1 => {\n\n let v = mem\n\n .read_u8(addr)\n\n .ok_or(format_err!(\"Can't read at %0x{:x}\", addr))?;\n\n *e = T::from_u8(v).unwrap_or(Default::default());\n\n }\n\n 2 => {\n\n let v = mem\n\n .read_u16(addr)\n\n .ok_or(format_err!(\"Can't read at 
%0x{:x}\", addr))?;\n\n *e = T::from_u16(v).unwrap_or(Default::default());\n\n }\n\n 4 => {\n\n let v = mem\n", "file_path": "memscanner/src/macro_helpers.rs", "rank": 13, "score": 94057.00940825194 }, { "content": "fn parse_lea(input: &str) -> IResult<&str, Op> {\n\n let (input, _) = tag(\"asm(\")(input)?;\n\n let (input, pattern) = many1(parse_match)(input)?;\n\n let (input, _) = tag(\")\")(input)?;\n\n\n\n Ok((input, Op::Asm(pattern)))\n\n}\n\n\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 14, "score": 93316.7917073768 }, { "content": "fn parse_ptr(input: &str) -> IResult<&str, Op> {\n\n let (input, _) = tag(\"ptr(\")(input)?;\n\n let (input, offset) = parse_i32(input)?;\n\n let (input, _) = tag(\")\")(input)?;\n\n\n\n Ok((input, Op::Ptr(offset)))\n\n}\n\n\n\npub(super) fn parse_op(input: &str) -> IResult<&str, Op> {\n\n alt((parse_lea, parse_ptr))(input)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use failure::Error;\n\n\n\n #[test]\n\n fn match_from_hext_test() -> Result<(), Error> {\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 15, "score": 93316.7917073768 }, { "content": "fn parse_literal(input: &str) -> IResult<&str, Match> {\n\n map_res(take(2usize), literal_from_hex)(input)\n\n}\n\n\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 16, "score": 92539.12365302318 }, { "content": "pub fn update_mem_cache(\n\n mem: &dyn MemReader,\n\n cached_mem: &mut TestMemReader,\n\n base_addr: u64,\n\n len: u64,\n\n) -> Result<(), Error> {\n\n let read_len = mem.read(&mut cached_mem.mem, base_addr, len as usize);\n\n if read_len != len as usize {\n\n return Err(format_err!(\"could not read {} bytes\", len));\n\n }\n\n cached_mem.start_addr = base_addr;\n\n Ok(())\n\n}\n\n\n", "file_path": "memscanner/src/macro_helpers.rs", "rank": 17, "score": 89312.81470917 }, { "content": "pub fn get_array_base_addr(\n\n config: &ArrayConfig,\n\n base_addr: u64,\n\n index: usize,\n\n mem: &dyn MemReader,\n\n) -> 
Result<u64, Error> {\n\n Ok(match config.uses_pointer_table.unwrap_or(false) {\n\n false => base_addr + index as u64 * config.element_size,\n\n true => mem\n\n .read_u64(base_addr + index as u64 * 8)\n\n .ok_or(format_err! {\"Can't load pointer table index {}\", index})?,\n\n })\n\n}\n\n\n", "file_path": "memscanner/src/macro_helpers.rs", "rank": 18, "score": 87125.58054761571 }, { "content": "fn scannable_enum_impl(ctx: &mut Context, ast: &syn::DeriveInput) -> Option<TokenStream> {\n\n match &ast.data {\n\n syn::Data::Enum(_) => (),\n\n _ => {\n\n ctx.error_spanned_by(ast, \"#[derive(ScannableEnum)] is only supported on enums.\");\n\n return None;\n\n }\n\n };\n\n let name = &ast.ident;\n\n let code = quote! {\n\n impl memscanner::ScannableValue<#name> for #name {\n\n fn scan_val(&mut self, mem: &dyn memscanner::MemReader, addr: u64)\n\n -> Result<(), Error> {\n\n memscanner::macro_helpers::read_enum(self, mem, addr)\n\n }\n\n }\n\n };\n\n Some(code)\n\n}\n", "file_path": "memscanner_derive/src/lib.rs", "rank": 19, "score": 79339.57006657161 }, { "content": "#[rustfmt::skip]\n\nfn match_signed_integer(input: &str) -> IResult<&str, &str> {\n\n recognize(pair(\n\n opt(\n\n alt((\n\n tag(\"-\"),\n\n tag(\"+\")\n\n ))\n\n ),\n\n digit1\n\n ))(input)\n\n}\n\n\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 20, "score": 76887.44487423102 }, { "content": "fn scannable_impl(ctx: &mut Context, ast: &syn::DeriveInput) -> Option<TokenStream> {\n\n let data = match &ast.data {\n\n syn::Data::Struct(ds) => ds,\n\n _ => {\n\n ctx.error_spanned_by(ast, \"#[derive(Scannable)] is only supported on structs.\");\n\n return None;\n\n }\n\n };\n\n let name = &ast.ident;\n\n let name_str = syn::LitStr::new(&format!(\"{}\", name), ast.ident.span());\n\n\n\n let mut offset_code = quote! {};\n\n let mut read_code = quote! 
{use memscanner::ScannableValue;};\n\n for f in data.fields.iter() {\n\n let ident = f.ident.as_ref().unwrap();\n\n\n\n // Construct new identifiers.\n\n let ident_str = syn::LitStr::new(&format!(\"{}\", ident), f.ty.span());\n\n let offset = format_ident!(\"{}_offset\", ident);\n\n\n", "file_path": "memscanner_derive/src/lib.rs", "rank": 21, "score": 65425.39212618436 }, { "content": "#[proc_macro_derive(ScannableEnum)]\n\npub fn scannable_enum_macro(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let mut ctx = Context::new();\n\n // Parse the input tokens into a syntax tree\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n\n\n let code = scannable_enum_impl(&mut ctx, &ast);\n\n\n\n match ctx.check() {\n\n Ok(_) => match code {\n\n Some(c) => {\n\n //eprintln!(\"code: {}\", c);\n\n proc_macro::TokenStream::from(c)\n\n }\n\n None => {\n\n return quote! {compile_error!(\"Unknown error with #[derive(Scannable)]\")}.into()\n\n }\n\n },\n\n Err(e) => Context::convert_to_compile_errors(e).into(),\n\n }\n\n}\n", "file_path": "memscanner_derive/src/lib.rs", "rank": 22, "score": 64810.311475602975 }, { "content": "/// The `MemReader` trait allows for reading bytes form a memory source.\n\npub trait MemReader {\n\n /// Read bytes `len` bytes at `addr` from the `MemReader` and write them\n\n /// to `buf`. 
\n\n ///\n\n /// Returns: number of bytes actually read.\n\n fn read(&self, buf: &mut [u8], addr: u64, len: usize) -> usize;\n\n\n\n fn read_u8(&self, addr: u64) -> Option<u8> {\n\n let mut val: Vec<u8> = vec![0; 1];\n\n let read_bytes = self.read(&mut val, addr, 1);\n\n if read_bytes != 1 {\n\n return None;\n\n }\n\n Some(val[0])\n\n }\n\n\n\n fn read_string(&self, addr: u64) -> Option<String> {\n\n let string_limit = 32;\n\n let mut bytes: Vec<u8> = Vec::new();\n\n\n", "file_path": "memscanner/src/lib.rs", "rank": 23, "score": 61584.52912601361 }, { "content": "#[proc_macro_derive(Scannable)]\n\npub fn scannable_macro(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let mut ctx = Context::new();\n\n // Parse the input tokens into a syntax tree\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n\n\n let code = scannable_impl(&mut ctx, &ast);\n\n\n\n match ctx.check() {\n\n Ok(_) => match code {\n\n Some(c) => {\n\n //eprintln!(\"code: {}\", c);\n\n proc_macro::TokenStream::from(c)\n\n }\n\n None => {\n\n return quote! 
{compile_error!(\"Unknown error with #[derive(Scannable)]\")}.into()\n\n }\n\n },\n\n Err(e) => Context::convert_to_compile_errors(e).into(),\n\n }\n\n}\n\n\n", "file_path": "memscanner_derive/src/lib.rs", "rank": 24, "score": 50429.36324295899 }, { "content": " assert_eq!(parse_match(\"**\"), Ok((\"\", Match::Any)));\n\n assert_eq!(parse_match(\"^^\"), Ok((\"\", Match::Position)));\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn parse_i32_test() -> Result<(), Error> {\n\n let ints = vec![i32::min_value(), i32::max_value(), 0];\n\n for i in ints {\n\n assert_eq!(parse_i32(&format!(\"{}\", i)), Ok((\"\", i)));\n\n }\n\n\n\n assert_eq!(\n\n parse_i32(\"a1\"),\n\n Err(nom::Err::Error((\"a1\", nom::error::ErrorKind::Digit)))\n\n );\n\n assert_eq!(parse_i32(\"1a\"), Ok((\"a\", 1)));\n\n\n\n Ok(())\n\n }\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 29, "score": 46127.89395908771 }, { "content": "\n\n #[test]\n\n fn parse_ptr_test() -> Result<(), Error> {\n\n assert_eq!(parse_ptr(\"ptr(-1)\"), Ok((\"\", Op::Ptr(-1))));\n\n assert_eq!(parse_ptr(\"ptr(8)\"), Ok((\"\", Op::Ptr(8))));\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn parse_pattern_test() -> Result<(), Error> {\n\n assert_eq!(\n\n parse_op(\"asm(01234567********^^^^^^^^89abcdef)\"),\n\n Ok((\n\n \"\",\n\n Op::Asm(vec![\n\n Match::Literal(0x01),\n\n Match::Literal(0x23),\n\n Match::Literal(0x45),\n\n Match::Literal(0x67),\n\n Match::Any,\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 30, "score": 46127.66432426034 }, { "content": " Err(nom::Err::Error((\"ab\", nom::error::ErrorKind::Tag)))\n\n );\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn parse_position_test() -> Result<(), Error> {\n\n assert_eq!(parse_position(\"^^\"), Ok((\"\", Match::Position)));\n\n assert_eq!(\n\n parse_position(\"**\"),\n\n Err(nom::Err::Error((\"**\", nom::error::ErrorKind::Tag)))\n\n );\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn parse_match_test() -> Result<(), Error> {\n\n assert_eq!(parse_match(\"ab\"), Ok((\"\", 
Match::Literal(0xab))));\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 31, "score": 46125.915433016926 }, { "content": " let m = literal_from_hex(\"00\")?;\n\n assert_eq!(m, Match::Literal(0x00));\n\n\n\n let m = literal_from_hex(\"5a\")?;\n\n assert_eq!(m, Match::Literal(0x5a));\n\n\n\n let m = literal_from_hex(\"a5\")?;\n\n assert_eq!(m, Match::Literal(0xa5));\n\n\n\n let m = literal_from_hex(\"ff\")?;\n\n assert_eq!(m, Match::Literal(0xff));\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn parse_any_test() -> Result<(), Error> {\n\n assert_eq!(parse_any(\"**\"), Ok((\"\", Match::Any)));\n\n assert_eq!(\n\n parse_any(\"ab\"),\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 32, "score": 46121.97824878976 }, { "content": "use nom::{\n\n branch::alt,\n\n bytes::complete::{tag, take},\n\n character::complete::digit1,\n\n combinator::{map_res, opt, recognize, value},\n\n multi::many1,\n\n sequence::pair,\n\n IResult,\n\n};\n\n\n\nuse super::{Match, Op};\n\n\n\n#[rustfmt::skip]\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 33, "score": 46120.501637877845 }, { "content": " Match::Any,\n\n Match::Any,\n\n Match::Any,\n\n Match::Position,\n\n Match::Position,\n\n Match::Position,\n\n Match::Position,\n\n Match::Literal(0x89),\n\n Match::Literal(0xab),\n\n Match::Literal(0xcd),\n\n Match::Literal(0xef),\n\n ])\n\n ))\n\n );\n\n Ok(())\n\n }\n\n}\n", "file_path": "memscanner/src/signature/parser.rs", "rank": 34, "score": 46119.45843557183 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TypeConfigIntermediate {\n\n signature: Vec<String>,\n\n array: Option<ArrayConfig>,\n\n fields: HashMap<String, u64>,\n\n}\n\n\n\n/// A configuration describing how to find a piece of memory and map it to\n\n/// a struct.\n\n#[derive(Clone, Debug)]\n\npub struct TypeConfig {\n\n // TODO: Implement a custom deserializer for that type which parses the strings.\n\n // this will avoid the need for the intermediate type above.\n\n pub 
signature: signature::Signature,\n\n pub array: Option<ArrayConfig>,\n\n pub fields: HashMap<String, u64>,\n\n}\n\n\n\nimpl TypeConfig {\n\n /// Read a json5 config.\n\n pub fn new(reader: &mut impl Read) -> Result<TypeConfig, Error> {\n", "file_path": "memscanner/src/lib.rs", "rank": 35, "score": 43505.28621080489 }, { "content": "/// A struct that can be scanned with `memscanner`.\n\n///\n\n/// This is normally implemented through the `#[derive(Scannable)]` macro.\n\npub trait Scannable\n\nwhere\n\n Self: std::marker::Sized,\n\n{\n\n /// Returns a `Resolver` capable of finding the `Scannable` described by\n\n /// the `config`.\n\n fn get_resolver(config: TypeConfig) -> Result<Box<Resolver<Self>>, Error>;\n\n /// Returns a `Resolver` capable of finding the `Scannable` described by\n\n /// the `config`. This `Scannable` will read into a Vec.\n\n fn get_array_resolver(config: TypeConfig) -> Result<Box<ArrayResolver<Self>>, Error>;\n\n}\n\n\n", "file_path": "memscanner/src/lib.rs", "rank": 36, "score": 40199.59273017684 }, { "content": "/// A value that can be scanned as a member of a `Scannable` struct.\n\npub trait ScannableValue<T> {\n\n /// Scans the value at `addr` using `mem` to read its value.\n\n fn scan_val(&mut self, mem: &dyn MemReader, addr: u64) -> Result<(), Error>;\n\n}\n\n\n\n// A macro to generate implementations of ScannableValue for types that have\n\n// direct MemReader readers.\n\nmacro_rules! scannable_value_impl {\n\n ($type: ty, $func_name: tt) => {\n\n impl ScannableValue<$type> for $type {\n\n fn scan_val(&mut self, mem: &dyn MemReader, addr: u64) -> Result<(), Error> {\n\n use failure::format_err;\n\n *self = mem\n\n .$func_name(addr)\n\n .ok_or(format_err!(\"can't read value\"))?;\n\n Ok(())\n\n }\n\n }\n\n };\n\n}\n", "file_path": "memscanner/src/lib.rs", "rank": 37, "score": 36952.83021638907 }, { "content": "use super::MemReader;\n\n\n\n/// A `MemReader` implementation that is backed by a buffer. 
Useful for\n\n/// writing tests.\n\npub struct TestMemReader {\n\n pub mem: Vec<u8>,\n\n pub start_addr: u64,\n\n}\n\n\n\nimpl MemReader for TestMemReader {\n\n fn read(&self, buf: &mut [u8], addr: u64, len: usize) -> usize {\n\n let index = (addr - self.start_addr) as usize;\n\n let read_len = if index + len > self.mem.len() {\n\n self.mem.len() - index\n\n } else {\n\n len\n\n };\n\n\n\n buf.copy_from_slice(&self.mem[index..(index + read_len)]);\n\n\n\n read_len\n\n }\n\n}\n", "file_path": "memscanner/src/test.rs", "rank": 38, "score": 24700.521024733316 }, { "content": "#[cfg(windows)]\n\npub mod win;\n\n\n\n#[cfg(windows)]\n\npub use win::Process;\n", "file_path": "memscanner/src/process/mod.rs", "rank": 39, "score": 23798.27771307223 }, { "content": " Ok(())\n\n }\n\n\n\n #[test]\n\n fn enum_test() -> Result<(), Error> {\n\n let config = get_enum_test_type_config();\n\n let mem = get_test_mem_reader();\n\n\n\n let resolver = EnumTestObject::get_resolver(config)?;\n\n let scanner = resolver(&mem, mem.start_addr, mem.start_addr + mem.mem.len() as u64)?;\n\n\n\n let mut obj: EnumTestObject = Default::default();\n\n scanner(&mut obj, &mem)?;\n\n assert_eq!(obj.e, TestEnum::Value88);\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "memscanner_test/src/lib.rs", "rank": 40, "score": 23564.914145473696 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use memscanner::test::TestMemReader;\n\n use memscanner::{Scannable, ScannableEnum, Signature, TypeConfig};\n\n\n\n use failure::{format_err, Error};\n\n use num_derive::FromPrimitive;\n\n\n\n #[derive(Debug, Default, Scannable)]\n\n struct TestObject {\n\n value1: u8,\n\n value2: u32,\n\n }\n\n\n\n #[derive(Debug, Default, Scannable)]\n\n struct StringTestObject {\n\n s: String,\n\n }\n\n\n\n #[repr(u8)]\n", "file_path": "memscanner_test/src/lib.rs", "rank": 41, "score": 23563.6913203873 }, { "content": " Ok(())\n\n }\n\n\n\n #[test]\n\n fn array_test() -> Result<(), Error> {\n\n let config = get_array_test_type_config();\n\n 
let mem = get_array_test_mem_reader();\n\n\n\n println!(\"{:?}\", &config);\n\n let resolver = TestObject::get_array_resolver(config)?;\n\n let scanner = resolver(&mem, mem.start_addr, mem.start_addr + mem.mem.len() as u64)?;\n\n\n\n let mut obj = Vec::new();\n\n scanner(&mut obj, &mem)?;\n\n println!(\"{:?}\", &obj);\n\n assert_eq!(obj[0].value1, 0x88);\n\n assert_eq!(obj[0].value2, 0xffeeddcc);\n\n assert_eq!(obj[1].value1, 0x00);\n\n assert_eq!(obj[1].value2, 0x77665544);\n\n\n", "file_path": "memscanner_test/src/lib.rs", "rank": 42, "score": 23563.042356629463 }, { "content": " #[test]\n\n fn type_config_test() {\n\n let config = get_test_type_config();\n\n assert_eq!(\n\n config.signature,\n\n Signature::new(&vec![\"asm(00112233^^^^^^^^********)\".to_string()]).unwrap()\n\n );\n\n assert_eq!(\n\n config.fields,\n\n [(\"value1\".to_string(), 0u64), (\"value2\".to_string(), 4u64)]\n\n .iter()\n\n .cloned()\n\n .collect()\n\n );\n\n }\n\n\n\n #[test]\n\n fn object_test() -> Result<(), Error> {\n\n let config = get_test_type_config();\n\n let mem = get_test_mem_reader();\n", "file_path": "memscanner_test/src/lib.rs", "rank": 43, "score": 23563.037637346377 }, { "content": " #[derive(Debug, FromPrimitive, PartialEq, ScannableEnum)]\n\n enum TestEnum {\n\n Unknown = 0,\n\n Value1 = 1,\n\n Value88 = 0x88,\n\n }\n\n\n\n impl Default for TestEnum {\n\n fn default() -> Self {\n\n TestEnum::Unknown\n\n }\n\n }\n\n\n\n #[derive(Debug, Default, Scannable)]\n\n struct EnumTestObject {\n\n e: TestEnum,\n\n }\n\n\n\n fn get_test_mem_reader() -> TestMemReader {\n\n #[rustfmt::skip]\n", "file_path": "memscanner_test/src/lib.rs", "rank": 44, "score": 23562.70004389221 }, { "content": "\n\n let resolver = TestObject::get_resolver(config)?;\n\n let scanner = resolver(&mem, mem.start_addr, mem.start_addr + mem.mem.len() as u64)?;\n\n\n\n let mut obj: TestObject = Default::default();\n\n scanner(&mut obj, &mem)?;\n\n assert_eq!(obj.value1, 0x88);\n\n assert_eq!(obj.value2, 
0xffeeddcc);\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn string_test() -> Result<(), Error> {\n\n let config = get_string_test_type_config();\n\n let mem = get_string_test_mem_reader();\n\n\n\n let resolver = StringTestObject::get_resolver(config)?;\n\n let scanner = resolver(&mem, mem.start_addr, mem.start_addr + mem.mem.len() as u64)?;\n\n\n", "file_path": "memscanner_test/src/lib.rs", "rank": 45, "score": 23562.591854708564 }, { "content": " let mut obj: StringTestObject = Default::default();\n\n scanner(&mut obj, &mem)?;\n\n assert_eq!(obj.s, \"Memscanner is best scanner!\");\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn string_limit_test() -> Result<(), Error> {\n\n let config = get_string_test_type_config();\n\n let mem = get_string_limit_test_mem_reader();\n\n\n\n let resolver = StringTestObject::get_resolver(config)?;\n\n let scanner = resolver(&mem, mem.start_addr, mem.start_addr + mem.mem.len() as u64)?;\n\n\n\n let mut obj: StringTestObject = Default::default();\n\n scanner(&mut obj, &mem)?;\n\n assert_eq!(obj.s.len(), 32);\n\n assert_eq!(obj.s, \"Memscanner is best scanner! 
Mem\");\n\n\n", "file_path": "memscanner_test/src/lib.rs", "rank": 46, "score": 23562.140107676238 }, { "content": " {\n\n signature: [\\\"asm(00112233^^^^^^^^********)\\\"],\n\n fields: {\n\n s: 0x0,\n\n }\n\n }\"\n\n .as_bytes();\n\n TypeConfig::new(&mut text).unwrap()\n\n }\n\n\n\n fn get_enum_test_type_config() -> TypeConfig {\n\n let mut text = \"\n\n {\n\n signature: [\\\"asm(00112233^^^^^^^^********)\\\"],\n\n fields: {\n\n e: 0x0,\n\n }\n\n }\"\n\n .as_bytes();\n\n TypeConfig::new(&mut text).unwrap()\n", "file_path": "memscanner_test/src/lib.rs", "rank": 47, "score": 23557.451131456783 }, { "content": " start_addr: 0x1000,\n\n };\n\n r\n\n }\n\n\n\n fn get_test_type_config() -> TypeConfig {\n\n let mut text = \"\n\n {\n\n signature: [\\\"asm(00112233^^^^^^^^********)\\\"],\n\n fields: {\n\n value1: 0x0,\n\n value2: 0x4,\n\n }\n\n }\"\n\n .as_bytes();\n\n TypeConfig::new(&mut text).unwrap()\n\n }\n\n\n\n fn get_string_test_type_config() -> TypeConfig {\n\n let mut text = \"\n", "file_path": "memscanner_test/src/lib.rs", "rank": 48, "score": 23556.942990946696 }, { "content": " }\n\n\n\n fn get_array_test_type_config() -> TypeConfig {\n\n let mut text = \"\n\n {\n\n signature: [\\\"asm(00112233^^^^^^^^********)\\\"],\n\n array: {\n\n element_size: 8,\n\n element_count: 2,\n\n uses_pointer_table: true,\n\n },\n\n fields: {\n\n value1: 0x0,\n\n value2: 0x4,\n\n }\n\n }\"\n\n .as_bytes();\n\n TypeConfig::new(&mut text).unwrap()\n\n }\n\n\n", "file_path": "memscanner_test/src/lib.rs", "rank": 49, "score": 23556.247292787346 }, { "content": " let r = TestMemReader {\n\n mem: vec![\n\n 0xff, 0xff, 0xff, 0xff, 0x00, 0x11, 0x22, 0x33,\n\n 0x04, 0x00, 0x00, 0x00, 0x44, 0x55, 0x66, 0x77,\n\n 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff,\n\n ],\n\n start_addr: 0x1000,\n\n };\n\n r\n\n }\n\n\n\n fn get_string_limit_test_mem_reader() -> TestMemReader {\n\n #[rustfmt::skip]\n\n let r = TestMemReader {\n\n mem: vec![\n\n 0xff, 0xff, 0xff, 0xff, 0x00, 0x11, 0x22, 0x33,\n\n 
0x04, 0x00, 0x00, 0x00, 0x44, 0x55, 0x66, 0x77,\n\n 0x4D, 0x65, 0x6D, 0x73, 0x63, 0x61, 0x6E, 0x6E,\n\n 0x65, 0x72, 0x20, 0x69, 0x73, 0x20, 0x62, 0x65,\n\n 0x73, 0x74, 0x20, 0x73, 0x63, 0x61, 0x6E, 0x6E,\n", "file_path": "memscanner_test/src/lib.rs", "rank": 50, "score": 23555.625033176646 }, { "content": " 0x73, 0x63, 0x61, 0x6E, 0x6E, 0x65, 0x72, 0x20,\n\n 0x69, 0x73, 0x20, 0x62, 0x65, 0x73, 0x74, 0x20,\n\n 0x73, 0x63, 0x61, 0x6E, 0x6E, 0x65, 0x72, 0x21,\n\n ],\n\n start_addr: 0x1000,\n\n };\n\n r\n\n }\n\n\n\n fn get_array_test_mem_reader() -> TestMemReader {\n\n #[rustfmt::skip]\n\n let r = TestMemReader {\n\n mem: vec![\n\n 0xff, 0xff, 0xff, 0xff, 0x00, 0x11, 0x22, 0x33, // 0x1000\n\n 0x04, 0x00, 0x00, 0x00, 0x44, 0x55, 0x66, 0x77, // 0x1008\n\n 0x28, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 0x1010\n\n 0x20, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // 0x1018\n\n 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, // 0x1020\n\n 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, // 0x1028\n\n ],\n", "file_path": "memscanner_test/src/lib.rs", "rank": 51, "score": 23554.263455377783 }, { "content": " 0x65, 0x72, 0x21, 0x20, 0x20, 0x4D, 0x65, 0x6D,\n\n 0x73, 0x63, 0x61, 0x6E, 0x6E, 0x65, 0x72, 0x20,\n\n 0x69, 0x73, 0x20, 0x62, 0x65, 0x73, 0x74, 0x20,\n\n 0x73, 0x63, 0x61, 0x6E, 0x6E, 0x65, 0x72, 0x21,\n\n ],\n\n start_addr: 0x1000,\n\n };\n\n r\n\n }\n\n\n\n fn get_string_test_mem_reader() -> TestMemReader {\n\n #[rustfmt::skip]\n\n let r = TestMemReader {\n\n mem: vec![\n\n 0xff, 0xff, 0xff, 0xff, 0x00, 0x11, 0x22, 0x33,\n\n 0x04, 0x00, 0x00, 0x00, 0x44, 0x55, 0x66, 0x77,\n\n 0x4D, 0x65, 0x6D, 0x73, 0x63, 0x61, 0x6E, 0x6E,\n\n 0x65, 0x72, 0x20, 0x69, 0x73, 0x20, 0x62, 0x65,\n\n 0x73, 0x74, 0x20, 0x73, 0x63, 0x61, 0x6E, 0x6E,\n\n 0x65, 0x72, 0x21, 0x00, 0x20, 0x4D, 0x65, 0x6D,\n", "file_path": "memscanner_test/src/lib.rs", "rank": 52, "score": 23553.941325645643 }, { "content": "pub mod macro_helpers;\n\npub mod process;\n\npub mod signature;\n\npub mod 
test;\n\n\n\nuse failure::Error;\n\nuse json5;\n\nuse serde::Deserialize;\n\nuse std::collections::HashMap;\n\nuse std::convert::TryInto;\n\nuse std::io::Read;\n\n\n\npub use memscanner_derive::{Scannable, ScannableEnum};\n\npub use signature::Signature;\n\n\n\nmacro_rules! read_type_impl {\n\n ($type: ty, $func_name: tt) => {\n\n fn $func_name (&self, addr: u64) -> Option<$type> {\n\n let len = std::mem::size_of::<$type>();\n\n let mut buf: Vec<u8> = vec![0; len];\n", "file_path": "memscanner/src/lib.rs", "rank": 53, "score": 22.677569636175967 }, { "content": "///\n\n/// Returns a `Scanner` for reading the `Scannable`\n\npub type Resolver<T> = dyn Fn(&dyn MemReader, u64, u64) -> Result<Box<Scanner<T>>, Error>;\n\n\n\npub type ArrayResolver<T> = dyn Fn(&dyn MemReader, u64, u64) -> Result<Box<ArrayScanner<T>>, Error>;\n\n/// A function capable of reading a `Scannable` from a `MemReader`\n\n///\n\n/// # Arguments\n\n/// * `obj` - Scannable to update.\n\n/// * `mem_reader` - The `MemReader` to use to resolve the `Scannable`.\n\npub type Scanner<T> = dyn Fn(&mut T, &dyn MemReader) -> Result<(), Error>;\n\n\n\npub type ArrayScanner<T> = dyn Fn(&mut Vec<T>, &dyn MemReader) -> Result<(), Error>;\n", "file_path": "memscanner/src/lib.rs", "rank": 54, "score": 20.93840100761348 }, { "content": "\n\n let scanner = move |vec: &mut Vec<#name>, mem: &dyn memscanner::MemReader|\n\n -> Result<(), failure::Error> {\n\n use std::ops::IndexMut;\n\n use memscanner::MemReader;\n\n use memscanner::test::TestMemReader;\n\n use memscanner::macro_helpers::*;\n\n\n\n // This requires that the type implement Default.\n\n vec.resize_with(array_config.element_count as usize,\n\n Default::default);\n\n\n\n let mut cached_mem = new_mem_cache(&array_config);\n\n\n\n for i in 0..(array_config.element_count as usize){\n\n let obj = vec.index_mut(i);\n\n let base_addr = get_array_base_addr(\n\n &array_config,\n\n base_addr,\n\n i,\n", "file_path": "memscanner_derive/src/lib.rs", "rank": 55, 
"score": 17.980479121874648 }, { "content": " for i in 0..string_limit {\n\n let b = self.read_u8(addr + i)?;\n\n if b == 0x0 {\n\n break;\n\n }\n\n bytes.push(b);\n\n }\n\n\n\n Some(String::from_utf8_lossy(&bytes).to_string())\n\n }\n\n\n\n read_type_impl!(u16, read_u16);\n\n read_type_impl!(i16, read_i16);\n\n read_type_impl!(u32, read_u32);\n\n read_type_impl!(i32, read_i32);\n\n read_type_impl!(u64, read_u64);\n\n read_type_impl!(i64, read_i64);\n\n\n\n read_float_impl!(f32, u32, read_f32);\n\n read_float_impl!(f64, u64, read_f64);\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize)]\n\npub struct ArrayConfig {\n\n pub element_size: u64,\n\n pub element_count: u64,\n\n pub uses_pointer_table: Option<bool>,\n\n}\n\n\n", "file_path": "memscanner/src/lib.rs", "rank": 56, "score": 16.92606746934683 }, { "content": " self.base_contents = buf;\n\n Ok(())\n\n }\n\n\n\n /// Unloads the cached copy, if any, of the process' BaseModule.\n\n pub fn unload_base(&mut self) {\n\n self.base_contents = Box::new(Vec::new());\n\n }\n\n}\n\n\n\nimpl Drop for Process {\n\n fn drop(&mut self) {\n\n unsafe {\n\n CloseHandle(self.handle);\n\n }\n\n }\n\n}\n\n\n\nimpl MemReader for Process {\n\n fn read(&self, buf: &mut [u8], addr: u64, len: usize) -> usize {\n", "file_path": "memscanner/src/process/win.rs", "rank": 57, "score": 15.270026875835468 }, { "content": "use super::test::TestMemReader;\n\nuse super::ArrayConfig;\n\nuse super::MemReader;\n\nuse failure::{format_err, Error};\n\nuse num_traits::FromPrimitive;\n\n\n\nuse std::mem::size_of_val;\n\n\n", "file_path": "memscanner/src/macro_helpers.rs", "rank": 58, "score": 14.779303188022208 }, { "content": "\n\n fn get_resolver(config: memscanner::TypeConfig) -> Result<Box<memscanner::Resolver<Self>>, failure::Error> {\n\n #offset_code\n\n\n\n let resolver = move |mem: &dyn memscanner::MemReader,\n\n start_addr: u64,\n\n end_addr: u64|\n\n -> Result<Box<memscanner::Scanner<Self>>, failure::Error> {\n\n let base_addr = config\n\n 
.signature\n\n .resolve(mem, start_addr, end_addr)\n\n .ok_or(format_err! {\"Can't resolve base address\"})?;\n\n\n\n let scanner = move |obj: &mut Self, mem: &dyn memscanner::MemReader| -> Result<(), failure::Error> {\n\n #read_code\n\n Ok(())\n\n };\n\n Ok(Box::new(scanner))\n\n };\n\n Ok(Box::new(resolver))\n", "file_path": "memscanner_derive/src/lib.rs", "rank": 59, "score": 14.22795085932804 }, { "content": " let read_bytes = self.read(&mut buf, addr, len);\n\n if read_bytes != len {\n\n return None;\n\n }\n\n Some(<$type>::from_ne_bytes((&buf as &[u8]).try_into().ok()?))\n\n }\n\n }\n\n}\n\n\n\nmacro_rules! read_float_impl {\n\n ($type: ty, $int_type: ty, $func_name: tt) => {\n\n fn $func_name (&self, addr: u64) -> Option<$type> {\n\n let len = std::mem::size_of::<$type>();\n\n let mut buf: Vec<u8> = vec![0; len];\n\n let read_bytes = self.read(&mut buf, addr, len);\n\n if read_bytes != len {\n\n return None;\n\n }\n\n Some(<$type>::from_bits(<$int_type>::from_ne_bytes((&buf as &[u8]).try_into().ok()?)))\n\n }\n\n }\n\n}\n\n\n\n/// The `MemReader` trait allows for reading bytes form a memory source.\n", "file_path": "memscanner/src/lib.rs", "rank": 60, "score": 14.204991201480748 }, { "content": " }\n\n None\n\n }\n\n }\n\n\n\n pub fn read_memory(&self, buf: &mut [u8], addr: LPVOID, len: usize) -> usize {\n\n let read_len = if buf.len() < len { buf.len() } else { len };\n\n\n\n let mut bytes_read: usize = 0;\n\n\n\n let success = unsafe {\n\n memoryapi::ReadProcessMemory(\n\n self.handle,\n\n addr,\n\n buf.as_mut_ptr() as LPVOID,\n\n read_len as SIZE_T,\n\n &mut bytes_read as *mut SIZE_T,\n\n )\n\n };\n\n if success == TRUE {\n", "file_path": "memscanner/src/process/win.rs", "rank": 61, "score": 12.81987299207272 }, { "content": " let mut buffer = String::new();\n\n reader.read_to_string(&mut buffer)?;\n\n\n\n let inter: TypeConfigIntermediate = json5::from_str(&buffer)?;\n\n let sig = signature::Signature::new(&inter.signature)?;\n\n\n\n Ok(TypeConfig {\n\n 
signature: sig,\n\n array: inter.array,\n\n fields: inter.fields,\n\n })\n\n }\n\n}\n\n\n\n/// A function capable of resolving the location of a `Scannable`.\n\n///\n\n/// # Arguments\n\n/// * `mem_reader` - The `MemReader` to use to resolve the `Scannable`.\n\n/// * `start_addr` - Address to start the resolution at.\n\n/// * `end_addr` - Address to end the resolution at. (non-inclusive)\n", "file_path": "memscanner/src/lib.rs", "rank": 62, "score": 12.38161093042318 }, { "content": " }\n\n\n\n fn get_array_resolver(config: memscanner::TypeConfig)\n\n -> Result<Box<memscanner::ArrayResolver<#name>>, failure::Error> {\n\n use failure::format_err;\n\n let array_config = config.array\n\n .as_ref()\n\n .ok_or(format_err!(\"Can't create resolver for Vec<{}>: no array config.\", #name_str))?.clone();\n\n\n\n #offset_code\n\n\n\n let resolver = move |mem: &dyn memscanner::MemReader,\n\n start_addr: u64,\n\n end_addr: u64|\n\n -> Result<Box<memscanner::ArrayScanner<Self>>, failure::Error> {\n\n let base_addr = config\n\n .signature\n\n .resolve(mem, start_addr, end_addr)\n\n .ok_or(format_err! 
{\"Can't resolve base address\"})?;\n\n let array_config = array_config.clone();\n", "file_path": "memscanner_derive/src/lib.rs", "rank": 63, "score": 12.36617346172887 }, { "content": "\n\nscannable_value_impl!(String, read_string);\n\n\n\nscannable_value_impl!(u8, read_u8);\n\nscannable_value_impl!(u16, read_u16);\n\nscannable_value_impl!(i16, read_i16);\n\nscannable_value_impl!(u32, read_u32);\n\nscannable_value_impl!(i32, read_i32);\n\nscannable_value_impl!(u64, read_u64);\n\nscannable_value_impl!(i64, read_i64);\n\n\n\nscannable_value_impl!(f32, read_f32);\n\nscannable_value_impl!(f64, read_f64);\n", "file_path": "memscanner/src/lib.rs", "rank": 64, "score": 12.174056325162187 }, { "content": " // enforced.\n\n errors: Option<Vec<syn::Error>>,\n\n}\n\n\n\nimpl Context {\n\n /// Create a new context object.\n\n ///\n\n /// This object contains no errors, but will still trigger a panic if it is not `check`ed.\n\n pub fn new() -> Self {\n\n Context {\n\n errors: Some(Vec::new()),\n\n }\n\n }\n\n\n\n /// Add an error to the context object with a tokenenizable object.\n\n ///\n\n /// The object is used for spanning in error messages.\n\n pub fn error_spanned_by<A: ToTokens, T: Display>(&mut self, obj: A, msg: T) {\n\n self.errors\n\n .as_mut()\n", "file_path": "memscanner_derive/src/context.rs", "rank": 65, "score": 12.010246333453553 }, { "content": " bytes_read\n\n } else {\n\n 0\n\n }\n\n }\n\n\n\n /// Load a cached copy of the process' BaseModule.\n\n ///\n\n /// This allows for much faster resolving of `Signature`s\n\n pub fn load_base(&mut self) -> Result<(), Error> {\n\n let mut buf = Box::new(vec![0; self.base_size]);\n\n\n\n let read_size = self.read_memory(&mut buf, self.base_addr, self.base_size);\n\n if read_size != self.base_size {\n\n return Err(format_err!(\n\n \"only read {} bytes of {}.\",\n\n read_size,\n\n self.base_size\n\n ));\n\n }\n", "file_path": "memscanner/src/process/win.rs", "rank": 66, "score": 11.822517891637142 }, { "content": "use 
super::super::MemReader;\n\nuse failure::{format_err, Error};\n\nuse std::ffi::CStr;\n\nuse std::mem::size_of;\n\nuse winapi::shared::basetsd::SIZE_T;\n\nuse winapi::shared::minwindef::{DWORD, FALSE, HMODULE, LPVOID, MAX_PATH, TRUE};\n\nuse winapi::shared::ntdef::{HANDLE, NULL};\n\nuse winapi::um::handleapi::CloseHandle;\n\nuse winapi::um::memoryapi;\n\nuse winapi::um::processthreadsapi;\n\nuse winapi::um::psapi;\n\nuse winapi::um::winnt::{PROCESS_QUERY_INFORMATION, PROCESS_VM_READ};\n\n\n\npub struct Process {\n\n handle: HANDLE,\n\n name: String,\n\n\n\n pub base_addr: LPVOID,\n\n pub base_size: usize,\n\n entry_point: LPVOID,\n", "file_path": "memscanner/src/process/win.rs", "rank": 67, "score": 11.718667446983211 }, { "content": " let mut procs: Vec<DWORD> = vec![0; 1024];\n\n let mut cb_needed: DWORD = 0;\n\n\n\n psapi::EnumProcesses(\n\n procs.as_mut_ptr(),\n\n (procs.len() * size_of::<DWORD>()) as u32,\n\n &mut cb_needed as *mut DWORD,\n\n );\n\n\n\n let num_procs = cb_needed as usize / size_of::<DWORD>();\n\n\n\n for i in 0..num_procs {\n\n let process = match Process::open_by_pid(procs[i]) {\n\n Some(p) => p,\n\n None => continue,\n\n };\n\n\n\n if process.name == name {\n\n return Some(process);\n\n }\n", "file_path": "memscanner/src/process/win.rs", "rank": 68, "score": 10.521522822131374 }, { "content": " .unwrap()\n\n // Curb monomorphization from generating too many identical methods.\n\n .push(syn::Error::new_spanned(obj.into_token_stream(), msg));\n\n }\n\n\n\n /// Consume this object, producing a formatted error string if there are errors.\n\n pub fn check(mut self) -> Result<(), Vec<syn::Error>> {\n\n let errors = self.errors.take().unwrap();\n\n match errors.len() {\n\n 0 => Ok(()),\n\n _ => Err(errors),\n\n }\n\n }\n\n\n\n pub fn convert_to_compile_errors(errors: Vec<syn::Error>) -> proc_macro2::TokenStream {\n\n let compile_errors = errors.iter().map(syn::Error::to_compile_error);\n\n quote!(#(#compile_errors)*)\n\n }\n\n}\n\n\n\nimpl Drop for 
Context {\n\n fn drop(&mut self) {\n\n if !thread::panicking() && self.errors.is_some() {\n\n panic!(\"forgot to check for errors\");\n\n }\n\n }\n\n}\n", "file_path": "memscanner_derive/src/context.rs", "rank": 69, "score": 10.291197515692593 }, { "content": " // The code that looks up the field's offset in the config and saves it.\n\n offset_code.extend(quote! {\n\n let #offset = config\n\n .fields\n\n .get(#ident_str)\n\n .ok_or(format_err!(\"{} field offset not found\", #ident_str))?\n\n .clone();\n\n });\n\n\n\n // The code that reads the field's value and stores it in the object.\n\n read_code.extend(quote! {\n\n obj.#ident.scan_val(mem, #offset + base_addr)\n\n .map_err(|e| format_err!(\"can't read {}: {}\", #ident_str, e))?;\n\n });\n\n }\n\n\n\n // Resolver and Scanner are implemented as closures so that the we can\n\n // store the config and offsets without leading new types.\n\n let code = quote! {\n\n impl memscanner::Scannable for #name {\n", "file_path": "memscanner_derive/src/lib.rs", "rank": 70, "score": 9.390462148681621 }, { "content": " .read_u32(addr)\n\n .ok_or(format_err!(\"Can't read at %0x{:x}\", addr))?;\n\n *e = T::from_u32(v).unwrap_or(Default::default());\n\n }\n\n 8 => {\n\n let v = mem\n\n .read_u64(addr)\n\n .ok_or(format_err!(\"Can't read at %0x{:x}\", addr))?;\n\n *e = T::from_u64(v).unwrap_or(Default::default());\n\n }\n\n s => return Err(format_err!(\"Unsupported enums of size {}.\", s)),\n\n };\n\n Ok(())\n\n}\n", "file_path": "memscanner/src/macro_helpers.rs", "rank": 71, "score": 9.19867145829204 }, { "content": " let base_addr = self.base_addr as u64;\n\n if addr >= base_addr {\n\n let start_index = (addr - base_addr) as usize;\n\n let end_index = start_index + len - 1;\n\n if end_index < self.base_contents.len() {\n\n buf.copy_from_slice(&self.base_contents[start_index..=end_index]);\n\n return len;\n\n }\n\n }\n\n self.read_memory(buf, addr as LPVOID, len)\n\n }\n\n}\n", "file_path": "memscanner/src/process/win.rs", "rank": 
72, "score": 7.6569196647568605 }, { "content": " let success = psapi::GetModuleInformation(\n\n proc_handle,\n\n module,\n\n &mut info as *mut psapi::MODULEINFO,\n\n size_of::<psapi::MODULEINFO>() as DWORD,\n\n );\n\n if success == FALSE {\n\n return None;\n\n }\n\n\n\n proc.base_addr = info.lpBaseOfDll;\n\n proc.base_size = info.SizeOfImage as usize;\n\n proc.entry_point = info.EntryPoint;\n\n\n\n Some(proc)\n\n }\n\n }\n\n\n\n pub fn open_by_name(name: &str) -> Option<Process> {\n\n unsafe {\n", "file_path": "memscanner/src/process/win.rs", "rank": 73, "score": 7.365934165352259 }, { "content": "// This is derived from serede's context object. The Refcell is removed\n\n// in favor of passing mutable references.\n\n//\n\n// Serde code licenced under dual MIT/Apache-2 license. See:\n\n// https://github.com/serde-rs/serde\n\n\n\nuse proc_macro2;\n\nuse quote::{quote, ToTokens};\n\nuse std::fmt::Display;\n\nuse std::thread;\n\nuse syn;\n\n\n\n/// A type to collect errors together and format them.\n\n///\n\n/// Dropping this object will cause a panic. It must be consumed using `check`.\n\n///\n\n/// References can be shared since this type uses run-time exclusive mut checking.\n\n#[derive(Default)]\n\npub struct Context {\n\n // The contents will be set to `None` during checking. 
This is so that checking can be\n", "file_path": "memscanner_derive/src/context.rs", "rank": 74, "score": 7.086015978504279 }, { "content": " base_contents: Box::new(Vec::new()),\n\n };\n\n\n\n if success == FALSE {\n\n return None;\n\n }\n\n\n\n // Read process name\n\n let mut raw_name: Vec<i8> = vec![0; MAX_PATH];\n\n psapi::GetModuleBaseNameA(\n\n proc_handle,\n\n module,\n\n raw_name.as_mut_ptr(),\n\n raw_name.len() as DWORD,\n\n );\n\n let name = CStr::from_ptr(raw_name.as_ptr()).to_string_lossy();\n\n proc.name = name.into_owned();\n\n\n\n let mut info: psapi::MODULEINFO = Default::default();\n\n\n", "file_path": "memscanner/src/process/win.rs", "rank": 75, "score": 7.0512954892497905 }, { "content": " return None;\n\n }\n\n\n\n //\n\n let mut module: HMODULE = std::ptr::null_mut();\n\n let mut cb_needed: DWORD = 0;\n\n let success = psapi::EnumProcessModules(\n\n proc_handle,\n\n &mut module as *mut HMODULE,\n\n size_of::<HMODULE>() as DWORD,\n\n &mut cb_needed as *mut DWORD,\n\n );\n\n\n\n // From here on, handle will automatically close.\n\n let mut proc = Process {\n\n handle: proc_handle,\n\n name: \"\".to_string(),\n\n base_addr: NULL,\n\n base_size: 0,\n\n entry_point: NULL,\n", "file_path": "memscanner/src/process/win.rs", "rank": 76, "score": 7.038057861439894 }, { "content": "\n\n base_contents: Box<Vec<u8>>,\n\n}\n\n\n\nimpl Process {\n\n pub fn open_by_pid(pid: DWORD) -> Option<Process> {\n\n // This procedure is adopted from:\n\n // https://docs.microsoft.com/en-us/windows/win32/psapi/enumerating-all-processes\n\n if pid == 0 {\n\n return None;\n\n }\n\n\n\n unsafe {\n\n let proc_handle = processthreadsapi::OpenProcess(\n\n PROCESS_QUERY_INFORMATION | PROCESS_VM_READ,\n\n FALSE,\n\n pid,\n\n );\n\n\n\n if proc_handle == NULL {\n", "file_path": "memscanner/src/process/win.rs", "rank": 77, "score": 7.024986417423678 }, { "content": " mem)?;\n\n\n\n // Pointer tables can have null entries. 
Set those to the default value.\n\n if base_addr == 0x0 {\n\n *obj = Default::default();\n\n continue;\n\n }\n\n\n\n update_mem_cache(mem, &mut cached_mem, base_addr, array_config.element_size)\n\n .map_err(|e| format_err!(\"{} of {}: \", i, #name_str))?;\n\n\n\n #read_code\n\n }\n\n Ok(())\n\n };\n\n Ok(Box::new(scanner))\n\n };\n\n Ok(Box::new(resolver))\n\n }\n\n }\n\n };\n\n\n\n Some(code)\n\n}\n\n\n", "file_path": "memscanner_derive/src/lib.rs", "rank": 78, "score": 6.9904155141761235 }, { "content": "extern crate proc_macro;\n\nmod context;\n\n\n\nuse context::Context;\n\nuse proc_macro2::TokenStream;\n\nuse quote::{format_ident, quote};\n\nuse syn::spanned::Spanned;\n\nuse syn::{parse_macro_input, DeriveInput};\n\n\n", "file_path": "memscanner_derive/src/lib.rs", "rank": 79, "score": 4.944471097693741 } ]
Rust
src/utils.rs
ruoshui-git/mks66-w11_animation
2324fb210b1b88c44e712d7a5a9790b975223069
use std::fs::File; use std::path::Path; use indicatif::ProgressStyle; use crate::{ canvas::Canvas, light::{self, LightProps}, }; pub(crate) fn create_file(filepath: &str) -> File { let path = Path::new(filepath); let display = path.display(); match File::create(&path) { Err(why) => panic!("Could not create {}: {}", display, why), Ok(file) => file, } } pub(crate) fn polar_to_xy(mag: f64, angle_degrees: f64) -> (f64, f64) { let (dy, dx) = angle_degrees.to_radians().sin_cos(); (dx * mag, dy * mag) } pub(crate) fn compute_bezier3_coef(p0: f64, p1: f64, p2: f64, p3: f64) -> (f64, f64, f64, f64) { ( -p0 + 3.0 * (p1 - p2) + p3, 3.0 * p0 - 6.0 * p1 + 3.0 * p2, 3.0 * (-p0 + p1), p0, ) } pub(crate) fn compute_hermite3_coef(p0: f64, p1: f64, r0: f64, r1: f64) -> (f64, f64, f64, f64) { ( 2.0 * (p0 - p1) + r0 + r1, 3.0 * (-p0 + p1) - 2.0 * r0 - r1, r0, p0, ) } use crate::{Matrix, PPMImg}; use std::{fs, process::Command}; use super::RGB; pub(crate) fn display_ppm(img: &PPMImg) { let tmpfile_name = "tmp.ppm"; img.write_binary(tmpfile_name) .expect("Error writing to file"); let mut cmd = Command::new(if cfg!(windows) { "imdisplay" } else { "display" }); let mut display = cmd .arg(tmpfile_name) .spawn() .unwrap(); let _result = display.wait().unwrap(); fs::remove_file(tmpfile_name).expect("Error removing tmp file"); } pub(crate) fn display_edge_matrix(m: &Matrix, ndc: bool, color: RGB) { let mut img = PPMImg::new(500, 500, 225); if ndc { img.render_ndc_edges_n1to1(m, color); } else { img.render_edge_matrix(m, color); } display_ppm(&img); } pub(crate) fn display_polygon_matrix(m: &Matrix, ndc: bool) { let mut img = PPMImg::with_bg(500, 500, 225, RGB::BLACK); if ndc { unimplemented!("Displaying polygon matrix in ndc is not implemented."); } else { img.render_polygon_matrix(m, &LightProps::DEFAULT_PROPS, &light::default_lights()); } display_ppm(&img); } pub fn mapper(instart: f64, inend: f64, outstart: f64, outend: f64) -> impl Fn(f64) -> f64 { let slope = (outend - outstart) / (inend 
- instart); move |x| outstart + slope * (x - instart) } pub fn shark_spinner_style() -> ProgressStyle { ProgressStyle::default_spinner() .template("\t[{elapsed}] {spinner:.green} {wide_msg}") .tick_strings(&[ "▐|\\____________▌", "▐_|\\___________▌", "▐__|\\__________▌", "▐___|\\_________▌", "▐____|\\________▌", "▐_____|\\_______▌", "▐______|\\______▌", "▐_______|\\_____▌", "▐________|\\____▌", "▐_________|\\___▌", "▐__________|\\__▌", "▐___________|\\_▌", "▐____________|\\▌", "▐____________/|▌", "▐___________/|_▌", "▐__________/|__▌", "▐_________/|___▌", "▐________/|____▌", "▐_______/|_____▌", "▐______/|______▌", "▐_____/|_______▌", "▐____/|________▌", "▐___/|_________▌", "▐__/|__________▌", "▐_/|___________▌", "▐/|____________▌", ]) }
use std::fs::File; use std::path::Path; use indicatif::ProgressStyle; use crate::{ canvas::Canvas, light::{self, LightProps}, }; pub(crate) fn create_file(filepath: &str) -> File { let path = Path::new(filepath); let display = path.display(); match File::create(&path) { Err(why) => panic!("Could not create {}: {}", display, why), Ok(file) => file, } } pub(crate) fn polar_to_xy(mag: f64, angle_degrees: f64) -> (f64, f64) { let (dy, dx) = angle_degrees.to_radians().sin_cos(); (dx * mag, dy * mag) } pub(crate) fn compute_bezier3_coef(p0: f64, p1: f64, p2: f64, p3: f64) -> (f64, f64, f64, f64) { ( -p0 + 3.0 * (p1 - p2) + p3, 3.0 * p0 - 6.0 * p1 + 3.0 * p2, 3.0 * (-p0 + p1), p0, ) } pub(crate) fn compute_hermite3_coef(p0: f64, p1: f64, r0: f64, r1: f64) -> (f64, f64, f64, f64) { ( 2.0 * (p0 - p1) + r0 + r1, 3.0 * (-p0 + p1) - 2.0 * r0 - r1, r0, p0, ) } use crate::{Matrix, PPMImg}; use std::{fs, process::Command}; use super::RGB; pub(crate) fn display_ppm(img: &PPMImg) { let tmpfile_name = "tmp.ppm"; img.write_binary(tmpfile_name) .expect("Error writing to file"); let mut cmd = Command::new(if cfg!(windows) { "imdisplay" } else { "display" }); let mut display = cmd .arg(tmpfile_name) .spawn() .unwrap(); let _result = display.wait().unwrap(); fs::remove_file(tmpfile_name).expect("Error removing tmp file"); } pub(crate) fn display_edge_matrix(m: &Matrix, ndc: bool, color: RGB) { let mut img = PPMImg::new(500, 500, 225); if ndc {
]) }
img.render_ndc_edges_n1to1(m, color); } else { img.render_edge_matrix(m, color); } display_ppm(&img); } pub(crate) fn display_polygon_matrix(m: &Matrix, ndc: bool) { let mut img = PPMImg::with_bg(500, 500, 225, RGB::BLACK); if ndc { unimplemented!("Displaying polygon matrix in ndc is not implemented."); } else { img.render_polygon_matrix(m, &LightProps::DEFAULT_PROPS, &light::default_lights()); } display_ppm(&img); } pub fn mapper(instart: f64, inend: f64, outstart: f64, outend: f64) -> impl Fn(f64) -> f64 { let slope = (outend - outstart) / (inend - instart); move |x| outstart + slope * (x - instart) } pub fn shark_spinner_style() -> ProgressStyle { ProgressStyle::default_spinner() .template("\t[{elapsed}] {spinner:.green} {wide_msg}") .tick_strings(&[ "▐|\\____________▌", "▐_|\\___________▌", "▐__|\\__________▌", "▐___|\\_________▌", "▐____|\\________▌", "▐_____|\\_______▌", "▐______|\\______▌", "▐_______|\\_____▌", "▐________|\\____▌", "▐_________|\\___▌", "▐__________|\\__▌", "▐___________|\\_▌", "▐____________|\\▌", "▐____________/|▌", "▐___________/|_▌", "▐__________/|__▌", "▐_________/|___▌", "▐________/|____▌", "▐_______/|_____▌", "▐______/|______▌", "▐_____/|_______▌", "▐____/|________▌", "▐___/|_________▌", "▐__/|__________▌", "▐_/|___________▌", "▐/|____________▌",
random
[ { "content": "// generate transformation matrices\n\n/// Generate a translation matrix with (dx, dy, dz)\n\npub fn mv(dx: f64, dy: f64, dz: f64) -> Matrix {\n\n let mut m = Matrix::ident(4);\n\n\n\n m.set(3, 0, dx);\n\n m.set(3, 1, dy);\n\n m.set(3, 2, dz);\n\n m\n\n}\n\n\n", "file_path": "src/matrix/transform.rs", "rank": 0, "score": 196971.27843721444 }, { "content": "/// Generate a scale matrix with (sx, sy, sz)\n\npub fn scale(sx: f64, sy: f64, sz: f64) -> Matrix {\n\n let mut m = Matrix::ident(4);\n\n m.set(0, 0, sx);\n\n m.set(1, 1, sy);\n\n m.set(2, 2, sz);\n\n m\n\n}\n\n\n\n#[rustfmt::skip]\n\n pub fn rotatex(angle_deg: f64) -> Matrix {\n\n // let mut m = Matrix::ident(4);\n\n // m.set(1, 1, angle_deg.to_radians().cos());\n\n // m.set(2, 2, angle_deg.to_radians().cos());\n\n // m.set(1, 2, -angle_deg.to_radians().sin());\n\n // m.set(2, 1, angle_deg.to_radians().sin());\n\n // m\n\n let a = angle_deg.to_radians();\n\n Matrix::new(\n\n 4,\n\n 4,\n", "file_path": "src/matrix/transform.rs", "rank": 1, "score": 126157.61102810893 }, { "content": "#[rustfmt::skip]\n\npub fn perspective(fov_rad: f64, aspect: f64, near: f64, far: f64) -> Matrix {\n\n\n\n \n\n let f = 1. / (fov_rad / 2.).tan();\n\n let range_inv = 1. 
/ (near - far);\n\n // Matrix::new(4, 4, vec![\n\n // f / aspect, 0., 0., 0.,\n\n // 0., f, 0., 0.,\n\n // 0., 0., (near + far) * range_inv, -1.,\n\n // 0., 0., near * far * range_inv * 2., 0.,\n\n // ]);\n\n Matrix::new(4, 4, vec![\n\n f / aspect, 0., 0., 0.,\n\n 0., f, 0., 0.,\n\n 0., 0., (near + far) * range_inv, near * far * range_inv * 2.,\n\n 0., 0., -1., 0.,\n\n ]);\n\n\n\n todo!(\"Impl clipping\");\n\n}\n\n\n\n/// Construct an orthographic projection matrix\n", "file_path": "src/matrix/projections.rs", "rank": 2, "score": 123017.61760873106 }, { "content": "#[rustfmt::skip]\n\npub fn orthographic(left: f64, right: f64, bottom: f64, top: f64, near: f64, far: f64) -> Matrix {\n\n // Each of the parameters represents the plane of the bounding box\n\n let lr = 1. / (left - right);\n\n let bt = 1. / (bottom - top);\n\n let nf = 1. / (near - far);\n\n\n\n let row4col1 = (left + right) * lr;\n\n let row4col2 = (top + bottom) * bt;\n\n let row4col3 = (far + near) * nf;\n\n Matrix::new(4, 4, vec![\n\n -2. * lr, 0., 0., 0.,\n\n 0., -2. * bt, 0., 0.,\n\n 0., 0., 2. 
* nf, 0.,\n\n row4col1, row4col2, row4col3, 1.,\n\n ])\n\n}\n\n\n\nimpl Matrix {\n\n //\n\n /// This should be used only after perspective divide and before rendered onto the canvas\n", "file_path": "src/matrix/projections.rs", "rank": 3, "score": 121286.38694053322 }, { "content": "#[rustfmt::skip]\n\npub fn rotatez(angle_deg: f64) -> Matrix {\n\n let a = angle_deg.to_radians();\n\n Matrix::new(4, 4, vec![\n\n a.cos(), a.sin(), 0., 0.,\n\n -a.sin(), a.cos(), 0., 0.,\n\n 0., 0., 1., 0.,\n\n 0., 0., 0., 1.,\n\n ])\n\n // m.set(0, 0, angle_deg.to_radians().cos());\n\n // m.set(1, 1, angle_deg.to_radians().cos());\n\n // m.set(1, 0, angle_deg.to_radians().sin());\n\n // m.set(0, 1, -angle_deg.to_radians().sin());\n\n}\n\n\n\nimpl Matrix {\n\n /// Correct edges after projection by dividing all values of point by w\n\n pub fn perspective_divide(&mut self) {\n\n for point in self.mut_iter_by_row() {\n\n let (x, y, z, w) = (point[0], point[1], point[2], point[3]);\n\n point[0] = x / w;\n\n point[1] = y / w;\n\n point[2] = z / w;\n\n point[3] = 1.;\n\n }\n\n }\n\n}\n", "file_path": "src/matrix/transform.rs", "rank": 4, "score": 117213.18675925981 }, { "content": "fn fmin3(a: f64, b: f64, c: f64, prec: i32) -> f64 {\n\n fmin2(fmin2(a, b, prec), c, prec)\n\n}\n\n\n\n// impl From<Vec3> for RGB {\n\n\n\n// }\n", "file_path": "src/colors.rs", "rank": 5, "score": 114887.74881563312 }, { "content": "fn fmin2(a: f64, b: f64, prec: i32) -> f64 {\n\n let fprec = prec as f64;\n\n cmp::min((a * fprec).round() as i32, (b * fprec).round() as i32) as f64 / fprec\n\n}\n\n\n", "file_path": "src/colors.rs", "rank": 6, "score": 113812.36122630726 }, { "content": "fn fmax2(a: f64, b: f64, prec: i32) -> f64 {\n\n let fprec = prec as f64;\n\n cmp::max((a * fprec).round() as i32, (b * fprec).round() as i32) as f64 / fprec\n\n}\n\n\n", "file_path": "src/colors.rs", "rank": 7, "score": 113812.36122630726 }, { "content": "fn parse_display(i: &str) -> IResult<&str, Misc> {\n\n let (i, _) = 
ws(tag(\"display\"))(i)?;\n\n Ok((i, Misc::Display))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 9, "score": 97925.11156907493 }, { "content": "fn parse_cmd(i: &str) -> IResult<&str, Command> {\n\n let (i, cmd) = alt((\n\n parse_push,\n\n parse_pop,\n\n parse_tr_cmd,\n\n parse_shape_cmd,\n\n parse_animate_cmd,\n\n parse_lighting_cmd,\n\n parse_misc_cmb,\n\n ))(i)?;\n\n Ok((i, cmd))\n\n}\n\n\n\n/// Parses a single line\n\n///\n\n/// Returns None in as data if only comment is preset\n\npub(crate) fn parse_line(i: &str) -> IResult<&str, Option<Command>> {\n\n all_consuming(terminated(opt(parse_cmd), opt(ws(parse_comment))))(i)\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 10, "score": 97855.27825640587 }, { "content": "fn parse_save_file(i: &str) -> IResult<&str, Misc> {\n\n let (i, (_, filename)) = pair(ws(tag(\"save\")), ws(symbol))(i)?;\n\n Ok((i, Misc::Save(filename.to_owned())))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 11, "score": 95211.24120142612 }, { "content": "fn parse_animate_cmd(i: &str) -> IResult<&str, Command> {\n\n let (i, animate) = alt((\n\n parse_basename,\n\n parse_set_knob,\n\n parse_save_knobs,\n\n parse_tween,\n\n parse_num_frames,\n\n parse_vary,\n\n parse_set_all_knobs,\n\n ))(i)?;\n\n Ok((i, Command::AnimateCmd(animate)))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 12, "score": 95144.28777261195 }, { "content": "fn parse_lighting_cmd(i: &str) -> IResult<&str, Command> {\n\n let (i, lighting) = alt((parse_light, parse_ambient, parse_constants, parse_shading))(i)?;\n\n Ok((i, Command::LightingCmd(lighting)))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 13, "score": 95144.28777261195 }, { "content": "fn parse_shape_cmd(i: &str) -> IResult<&str, Command> {\n\n let (i, shape) = alt((\n\n parse_sphere,\n\n parse_torus,\n\n parse_box,\n\n parse_line_shape,\n\n parse_mesh,\n\n ))(i)?;\n\n Ok((i, Command::ShapeCmd(shape)))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 14, "score": 95144.28777261195 }, { 
"content": "fn parse_tr_cmd(input: &str) -> IResult<&str, Command> {\n\n let (input, tr) = alt((parse_move, parse_rotate, parse_scale))(input)?;\n\n Ok((input, Command::TransformCmd(tr)))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 15, "score": 92628.35795000683 }, { "content": "fn parse_comment(i: &str) -> IResult<&str, (&str, &str)> {\n\n pair(tag(\"//\"), is_not(\"\\n\\r\"))(i)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum Transform {\n\n Move {\n\n values: Point,\n\n knob: Option<Symbol>,\n\n },\n\n Scale {\n\n values: Point,\n\n knob: Option<Symbol>,\n\n },\n\n Rotate {\n\n axis: char,\n\n degrees: f64,\n\n knob: Option<Symbol>,\n\n },\n\n}\n", "file_path": "src/mdl/ast.rs", "rank": 16, "score": 85733.72223833745 }, { "content": "/// Parsing a symbol that starts with a letter and may contain underscores, letters and numbers\n\nfn symbol(input: &str) -> IResult<&str, &str> {\n\n recognize(pair(\n\n alpha1,\n\n many0(alt((alphanumeric1, tag(\"-\"), tag(\".\"), tag(\"_\")))),\n\n ))(input)\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 17, "score": 83328.18799587553 }, { "content": "fn uint(i: &str) -> IResult<&str, u32> {\n\n map_res(take_while1(|c: char| c.is_digit(10)), u32::from_str)(i)\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 18, "score": 77504.079746124 }, { "content": "/// Make a new matrix stack\n\nfn new_stack() -> Vec<Matrix> {\n\n vec![Matrix::ident(4)]\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::PPMImg;\n\n\n\n use super::*;\n\n use crate::utils;\n\n #[test]\n\n fn test_line() {\n\n let mut img = PPMImg::new(500, 500, 255);\n\n img.draw_line((0., 0., 0.), (100., 100., 100.), RGB::WHITE);\n\n\n\n utils::display_ppm(&img);\n\n }\n\n}\n", "file_path": "src/drawer.rs", "rank": 19, "score": 77034.53923349535 }, { "content": "fn parse_shading(i: &str) -> IResult<&str, Lighting> {\n\n let (i, _) = ws(tag(\"shading\"))(i)?;\n\n let (i, mode) = ws(alt((\n\n value(ShadingMode::Wireframe, tag(\"wireframe\")),\n\n 
value(ShadingMode::Flat, tag(\"flat\")),\n\n value(ShadingMode::Gouraud, tag(\"gouraud\")),\n\n value(ShadingMode::Phong, tag(\"phong\")),\n\n value(ShadingMode::Raytrace, tag(\"raytrace\")),\n\n )))(i)?;\n\n Ok((i, Lighting::Shading(mode)))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 20, "score": 75575.25250260254 }, { "content": "fn parse_focal(i: &str) -> IResult<&str, Misc> {\n\n let (i, (_, value)) = pair(ws(tag(\"focal\")), ws(double))(i)?;\n\n Ok((i, Misc::Focal(value)))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 21, "score": 75575.25250260254 }, { "content": "fn parse_cam(i: &str) -> IResult<&str, Misc> {\n\n let (i, (_, eye, aim)) = tuple((tag(\"camera\"), triple_float, triple_float))(i)?;\n\n Ok((i, Misc::Camera { eye, aim }))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 22, "score": 75575.25250260254 }, { "content": "fn triple_float(i: &str) -> IResult<&str, Point> {\n\n map(tuple((ws(double), ws(double), ws(double))), |(x, y, z)| {\n\n Point(x, y, z)\n\n })(i)\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 23, "score": 75575.25250260254 }, { "content": "fn parse_box(i: &str) -> IResult<&str, Shape> {\n\n let (i, _) = ws(tag(\"box\"))(i)?;\n\n let (i, c) = opt_symbol(i)?;\n\n let (i, p0) = triple_float(i)?;\n\n let (i, dims) = triple_float(i)?;\n\n let (i, cor) = opt_symbol(i)?;\n\n Ok((\n\n i,\n\n Shape::Box {\n\n constants: c,\n\n corner: p0,\n\n height: dims.0,\n\n width: dims.1,\n\n depth: dims.2,\n\n coord: cor,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 24, "score": 75575.25250260254 }, { "content": "fn parse_vary(i: &str) -> IResult<&str, Animate> {\n\n let (i, (_, knob, start_frame, end_frame, start_val, end_val)) = tuple((\n\n ws(tag(\"vary\")),\n\n ws(symbol),\n\n ws(uint),\n\n ws(uint),\n\n ws(double),\n\n ws(double),\n\n ))(i)?;\n\n Ok((\n\n i,\n\n Animate::Vary(VaryInfo {\n\n knob: Symbol(knob.to_owned()),\n\n start_frame,\n\n end_frame,\n\n start_val,\n\n end_val,\n\n }),\n\n 
))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 25, "score": 75575.25250260254 }, { "content": "fn parse_tween(i: &str) -> IResult<&str, Animate> {\n\n let (i, _) = ws(tag(\"tween\"))(i)?;\n\n let (i, start_frame) = ws(uint)(i)?;\n\n let (i, end_frame) = ws(uint)(i)?;\n\n let (i, knoblist0) = ws(symbol)(i)?;\n\n let (i, knoblist1) = ws(symbol)(i)?;\n\n Ok((\n\n i,\n\n Animate::Tween {\n\n start_frame,\n\n end_frame,\n\n knoblist0: Symbol(knoblist0.to_owned()),\n\n knoblist1: Symbol(knoblist1.to_owned()),\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 26, "score": 75575.25250260254 }, { "content": "fn parse_basename(i: &str) -> IResult<&str, Animate> {\n\n let (i, _) = ws(tag(\"basename\"))(i)?;\n\n let (i, name) = ws(symbol)(i)?;\n\n Ok((i, Animate::Basename(name.to_owned())))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 27, "score": 75575.25250260254 }, { "content": "fn parse_constants(i: &str) -> IResult<&str, Lighting> {\n\n let (i, _) = ws(tag(\"constants\"))(i)?;\n\n let (i, (name, kr, kg, kb, ir, ig, ib)): (\n\n &str,\n\n (\n\n &str,\n\n Point,\n\n Point,\n\n Point,\n\n Option<f64>,\n\n Option<f64>,\n\n Option<f64>,\n\n ),\n\n ) = tuple((\n\n ws(symbol),\n\n triple_float,\n\n triple_float,\n\n triple_float,\n\n opt(ws(double)),\n\n opt(ws(double)),\n", "file_path": "src/mdl/ast.rs", "rank": 28, "score": 75575.25250260254 }, { "content": "fn parse_light(i: &str) -> IResult<&str, Lighting> {\n\n let (i, (_, name, color_triple, location)) =\n\n tuple((ws(tag(\"light\")), ws(symbol), triple_float, triple_float))(i)?;\n\n Ok((\n\n i,\n\n Lighting::Light {\n\n name: Symbol(name.to_owned()),\n\n color: color_triple.into(),\n\n location,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 29, "score": 75575.25250260254 }, { "content": "fn parse_ambient(i: &str) -> IResult<&str, Lighting> {\n\n let (i, (_, triple)) = pair(ws(tag(\"ambient\")), ws(triple_float))(i)?;\n\n Ok((i, 
Lighting::Ambient(triple.into())))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 30, "score": 75575.25250260254 }, { "content": "fn parse_mesh(i: &str) -> IResult<&str, Shape> {\n\n let (i, _) = ws(tag(\"mesh\"))(i)?;\n\n let (i, c) = opt_symbol(i)?;\n\n let (i, filename) = ws(preceded(tag(\":\"), symbol))(i)?;\n\n let (i, coord) = opt_symbol(i)?;\n\n Ok((\n\n i,\n\n Shape::Mesh {\n\n constants: c,\n\n filename: filename.to_owned(),\n\n coord,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 31, "score": 75575.25250260254 }, { "content": "fn parse_torus(input: &str) -> IResult<&str, Shape> {\n\n let (input, _) = ws(tag(\"torus\"))(input)?;\n\n let (input, constants) = opt_symbol(input)?;\n\n let (input, center) = triple_float(input)?;\n\n let (input, r0) = ws(double)(input)?;\n\n let (input, r1) = ws(double)(input)?;\n\n let (input, coord) = opt_symbol(input)?;\n\n Ok((\n\n input,\n\n Shape::Torus {\n\n constants,\n\n center,\n\n r0,\n\n r1,\n\n coord,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 32, "score": 73783.07689386154 }, { "content": "fn parse_pop(input: &str) -> IResult<&str, Command> {\n\n let (input, _) = ws(tag(\"pop\"))(input)?;\n\n Ok((input, Command::Pop))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 33, "score": 73783.07689386154 }, { "content": "fn parse_set_knob(i: &str) -> IResult<&str, Animate> {\n\n let (i, _) = ws(tag(\"set\"))(i)?;\n\n let (i, name) = ws(symbol)(i)?;\n\n let (i, value) = ws(double)(i)?;\n\n Ok((\n\n i,\n\n Animate::SetKnob {\n\n name: Symbol(name.to_owned()),\n\n value,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 34, "score": 73783.07689386154 }, { "content": "fn parse_rotate(input: &str) -> IResult<&str, Transform> {\n\n let (input, _) = ws(tag(\"rotate\"))(input)?;\n\n let (input, axis) = ws(one_of(\"xyz\"))(input)?;\n\n let (input, degrees) = ws(double)(input)?;\n\n let (input, knob) = ws(opt_symbol)(input)?;\n\n Ok((\n\n input,\n\n Transform::Rotate 
{\n\n axis,\n\n degrees,\n\n knob,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 35, "score": 73783.07689386154 }, { "content": "fn parse_push(input: &str) -> IResult<&str, Command> {\n\n let (input, _) = ws(tag(\"push\"))(input)?;\n\n Ok((input, Command::Push))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 36, "score": 73783.07689386154 }, { "content": "fn parse_save_knobs(i: &str) -> IResult<&str, Animate> {\n\n let (i, _) = ws(tag(\"save_knobs\"))(i)?;\n\n let (i, knoblist) = ws(symbol)(i)?;\n\n Ok((i, Animate::SaveKnobList(Symbol(knoblist.to_owned()))))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 37, "score": 73783.07689386154 }, { "content": "fn parse_line_shape(i: &str) -> IResult<&str, Shape> {\n\n let (i, _) = ws(tag(\"line\"))(i)?;\n\n let (i, c) = opt_symbol(i)?;\n\n let (i, p0) = triple_float(i)?;\n\n let (i, cor0) = opt_symbol(i)?;\n\n let (i, p1) = triple_float(i)?;\n\n let (i, cor1) = opt_symbol(i)?;\n\n Ok((\n\n i,\n\n Shape::Line {\n\n constants: c,\n\n point0: p0,\n\n coord0: cor0,\n\n point1: p1,\n\n coord1: cor1,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 38, "score": 73783.07689386154 }, { "content": "fn parse_misc_cmb(i: &str) -> IResult<&str, Command> {\n\n let (i, misc) = alt((\n\n parse_save_cor,\n\n parse_cam,\n\n parse_save_file,\n\n parse_gen_rayfiles,\n\n parse_focal,\n\n parse_display,\n\n ))(i)?;\n\n Ok((i, Command::MiscCmd(misc)))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const MISC_CASES: [(&str, Misc); 1] = [(\n\n \"camera 1 2 3 10 20 30 \",\n\n Misc::Camera {\n\n eye: Point(1., 2., 3.),\n", "file_path": "src/mdl/ast.rs", "rank": 39, "score": 73783.07689386154 }, { "content": "fn parse_set_all_knobs(i: &str) -> IResult<&str, Animate> {\n\n let (i, (_, value)) = pair(ws(tag(\"setknobs\")), ws(double))(i)?;\n\n Ok((i, Animate::SetAllKnobs(value)))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 40, "score": 73783.07689386154 }, { "content": "fn 
parse_sphere(input: &str) -> IResult<&str, Shape> {\n\n let (input, _) = ws(tag(\"sphere\"))(input)?;\n\n let (input, constants) = opt_symbol(input)?;\n\n let (input, center) = ws(triple_float)(input)?;\n\n let (input, r) = ws(double)(input)?;\n\n let (input, coord) = opt_symbol(input)?;\n\n Ok((\n\n input,\n\n Shape::Sphere {\n\n constants,\n\n center,\n\n r,\n\n coord,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 41, "score": 73783.07689386154 }, { "content": "fn parse_num_frames(i: &str) -> IResult<&str, Animate> {\n\n let (i, (_, num)) = pair(ws(tag(\"frames\")), ws(uint))(i)?;\n\n Ok((i, Animate::Frames(num)))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 42, "score": 73783.07689386154 }, { "content": "fn parse_gen_rayfiles(i: &str) -> IResult<&str, Misc> {\n\n let (i, _) = ws(tag(\"generate_rayfiles\"))(i)?;\n\n Ok((i, Misc::GenerateRayfiles))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 43, "score": 73783.07689386154 }, { "content": "fn parse_save_cor(i: &str) -> IResult<&str, Misc> {\n\n let (i, (_, name)) = pair(ws(tag(\"save_coord_system\")), ws(symbol))(i)?;\n\n Ok((i, Misc::SaveCoord(Symbol(name.to_owned()))))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 44, "score": 73783.07689386154 }, { "content": "fn parse_move(input: &str) -> IResult<&str, Transform> {\n\n let (input, _) = ws(tag(\"move\"))(input)?;\n\n let (input, point) = triple_float(input)?;\n\n let (input, knob) = ws(opt_symbol)(input)?;\n\n Ok((\n\n input,\n\n Transform::Move {\n\n values: point,\n\n knob,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 45, "score": 73783.07689386154 }, { "content": "fn parse_scale(input: &str) -> IResult<&str, Transform> {\n\n let (input, _) = ws(tag(\"scale\"))(input)?;\n\n let (input, point) = triple_float(input)?;\n\n let (input, knob) = ws(opt_symbol)(input)?;\n\n Ok((\n\n input,\n\n Transform::Scale {\n\n values: point,\n\n knob,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 
46, "score": 73783.07689386154 }, { "content": "fn opt_symbol(i: &str) -> IResult<&str, Option<Symbol>> {\n\n let (i, s) = opt(ws(symbol))(i)?;\n\n Ok((i, Symbol::from_opt(s)))\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 47, "score": 72772.81285722711 }, { "content": "/// A combinator that takes a parser `inner` and produces a parser that also consumes leading whitespace, returning the output of `inner`.\n\nfn ws<'a, F: 'a, O, E: ParseError<&'a str>>(inner: F) -> impl Fn(&'a str) -> IResult<&'a str, O, E>\n\nwhere\n\n F: Fn(&'a str) -> IResult<&'a str, O, E>,\n\n{\n\n delimited(multispace0, inner, multispace0)\n\n // preceded(space0, inner)\n\n}\n\n\n", "file_path": "src/mdl/ast.rs", "rank": 48, "score": 71258.03374634359 }, { "content": "/// Parse floats from a line and return them in a vec. Panic on error.\n\nfn parse_floats(line: String) -> Vec<f64> {\n\n line.split(' ')\n\n .map(|x| x.parse::<f64>().expect(\"Error parsing numbers\"))\n\n .collect()\n\n}\n\n\n\nimpl DWScript {\n\n pub fn new(filename: &str) -> Self {\n\n DWScript {\n\n filename: filename.to_string(),\n\n drawer: Drawer::new(PPMImg::new(500, 500, 255)),\n\n tmpfile_name: String::from(\"tmp.ppm\"),\n\n }\n\n }\n\n\n\n pub fn exec(&mut self) {\n\n let _f = File::open(&self.filename).expect(\"Error opening file\");\n\n let f = BufReader::new(_f);\n\n let mut lines = f.lines().enumerate();\n\n while let Some((num, line)) = lines.next() {\n", "file_path": "src/mdl/old_parser.rs", "rank": 49, "score": 65705.88168740465 }, { "content": "/// Subprocess (and run) `(magick) convert` with the given `args`\n\npub fn magick(args: Vec<&str>) -> Child {\n\n Command::new(if cfg!(windows) { \"magick\" } else { \"convert\" })\n\n .args(args)\n\n .spawn()\n\n .expect(\"Can't spawn imagemagick\")\n\n}\n\n\n", "file_path": "src/processes.rs", "rank": 50, "score": 63233.11496115381 }, { "content": "/// Subprocess (and run) `(magick) convert` with a piped stdin with the given `args`\n\npub fn 
pipe_to_magick(args: Vec<&str>) -> Child {\n\n Command::new(if cfg!(windows) { \"magick\" } else { \"convert\" })\n\n .args(args)\n\n .stdin(Stdio::piped())\n\n .spawn()\n\n .expect(\"Can't spawn imagemagick\")\n\n}\n\n\n", "file_path": "src/processes.rs", "rank": 51, "score": 61304.09867814517 }, { "content": "/// Wait for `magick` to exit with the appropriate printlns. This is not designed to be composable. It's used usually as the last statement in program.\n\npub fn wait_for_magick(mut magick: Child) -> std::process::ExitStatus {\n\n // println!(\"Waiting for magick to exit...\");\n\n magick.wait().expect(\"Failed to wait on magick\")\n\n // println!(\"magick {}\", exit_status);\n\n}\n", "file_path": "src/processes.rs", "rank": 52, "score": 59506.54470791889 }, { "content": "pub fn compute_color(\n\n props: &LightProps,\n\n lights: &[Light],\n\n surface_normal: Vec3,\n\n view_vec: Vec3,\n\n surface_location: Vec3,\n\n) -> RGB {\n\n let mut color = Vec3(0., 0., 0.);\n\n\n\n let normaln = surface_normal.norm();\n\n let viewn = view_vec.norm();\n\n\n\n for light in lights.iter() {\n\n // lights are additive, so sum up all the effects of light on this surface\n\n color = (color\n\n + match light {\n\n Light::Ambient(ambient) => props.areflect.mul_across(Vec3::from(ambient)),\n\n Light::Point {\n\n color: pt_color,\n\n location: pt_location,\n", "file_path": "src/light.rs", "rank": 53, "score": 59045.06250053017 }, { "content": "fn transform_with_knob(\n\n knobs: &SymTable<f64>,\n\n op_symbol: &Option<Symbol>,\n\n run_with_knob: impl Fn(&f64) -> Matrix,\n\n run_without_knob: impl Fn() -> Matrix,\n\n) -> EngineResult<Matrix> {\n\n Ok(match knobs.find(op_symbol) {\n\n Ok(op_knob) => {\n\n if let Some(knob) = op_knob {\n\n run_with_knob(knob)\n\n } else {\n\n run_without_knob()\n\n }\n\n }\n\n Err(e) => match e {\n\n EngineError::SymbolNotFound { name } => {\n\n eprintln!(\n\n \"knob {} not found, ignoring this knob and applying static transformation\",\n\n name\n\n 
);\n", "file_path": "src/mdl/exec.rs", "rank": 54, "score": 32746.086137408885 }, { "content": "#![allow(dead_code)]\n\n\n\nuse std::cmp;\n\nuse std::convert;\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq, Debug)]\n\npub struct RGB {\n\n pub red: u16,\n\n pub green: u16,\n\n pub blue: u16,\n\n}\n\n\n\n// Constructor and some useful \"constants\"\n\nimpl RGB {\n\n pub const WHITE: RGB = RGB {\n\n red: 255,\n\n green: 255,\n\n blue: 255,\n\n };\n\n\n", "file_path": "src/colors.rs", "rank": 55, "score": 31813.315922114998 }, { "content": "///\n\n/// range: [0, 1]\n\n#[derive(Copy, Clone)]\n\npub struct HSL {\n\n pub h: f64,\n\n pub s: f64,\n\n pub l: f64,\n\n}\n\n\n\nimpl convert::From<HSL> for RGB {\n\n // https://en.wikipedia.org/wiki/HSL_and_HSV#HSL_to_RGB_alternative\n\n fn from(hsl: HSL) -> RGB {\n\n let hue = (hsl.h * 360.0).round() as i32;\n\n let a = hsl.s * hsl.l.min(1.0 - hsl.l);\n\n let f = |n: i32| {\n\n hsl.l\n\n - a * (-1.0f64).max(\n\n ((n + hue / 30) % 12 - 3)\n\n .min(9 - (n + hue / 30) % 12)\n\n .min(1) as f64,\n", "file_path": "src/colors.rs", "rank": 56, "score": 31813.108313137767 }, { "content": " pub const BLACK: RGB = RGB {\n\n red: 0,\n\n green: 0,\n\n blue: 0,\n\n };\n\n\n\n pub fn gray(depth: u16) -> Self {\n\n RGB {\n\n red: depth,\n\n green: depth,\n\n blue: depth,\n\n }\n\n }\n\n\n\n pub fn new(red: u16, green: u16, blue: u16) -> Self {\n\n RGB { red, green, blue }\n\n }\n\n}\n\n\n\n/// Hue, Saturation, Luminosity\n", "file_path": "src/colors.rs", "rank": 57, "score": 31809.847669017807 }, { "content": " )\n\n };\n\n let (red, green, blue) = (\n\n f(0).round() as i32,\n\n f(8).round() as i32,\n\n f(4).round() as i32,\n\n );\n\n assert!(red == 1 || red == 0);\n\n assert!(green == 1 || green == 0);\n\n assert!(blue == 1 || blue == 0);\n\n RGB {\n\n red: (f(0) * 255.0) as u16,\n\n green: (f(8) * 255.0) as u16,\n\n blue: (f(4) * 255.0) as u16,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/colors.rs", "rank": 58, "score": 31808.54878945034 }, { 
"content": " }\n\n\n\n fn write_to_buf<T: Write>(&self, writer: &mut T) -> io::Result<()> {\n\n self.write_bin_to_buf(writer)\n\n }\n\n}\n\n\n\n// this will stay here during trait refactor, since it has assumption about the internal data structure for Img\n\nimpl PPMImg {\n\n /// Fill an area in img with color calculated by `fill`,\n\n /// starting at (x, y) and ending when encounters bound color `bound`.\n\n ///\n\n /// Note: This function uses the fact that PPMImg is stored as a `Vec` with an `index` method.\n\n pub fn bound4_fill_with_fn(\n\n &mut self,\n\n x: i32,\n\n y: i32,\n\n fill: impl Fn(f64, f64) -> RGB,\n\n bound: RGB,\n\n ) {\n", "file_path": "src/img.rs", "rank": 59, "score": 31729.9044592202 }, { "content": "use std::{convert::TryInto, process::ExitStatus};\n\n\n\nuse std::{\n\n fmt::Debug,\n\n io::{self, prelude::Write},\n\n};\n\n// internal use\n\nuse crate::{processes::pipe_to_magick, processes::wait_for_magick, utils, Canvas, RGB};\n\nuse io::BufWriter;\n\n\n\npub struct PPMImg {\n\n height: u32,\n\n width: u32,\n\n depth: u16, // max = 2^16\n\n pub x_wrap: bool,\n\n pub y_wrap: bool,\n\n pub invert_y: bool,\n\n data: Vec<RGB>,\n\n zbuf: Vec<f64>,\n\n}\n", "file_path": "src/img.rs", "rank": 60, "score": 31725.88384991834 }, { "content": " buf.write_all(&(t.blue.to_be_bytes()))?;\n\n }\n\n }\n\n\n\n buf.flush()?;\n\n Ok(())\n\n }\n\n pub fn write_binary(&self, filepath: &str) -> io::Result<()> {\n\n self.write_bin_to_buf(&mut utils::create_file(filepath))\n\n }\n\n pub fn write_ascii(&self, filepath: &str) -> io::Result<()> {\n\n let mut file = BufWriter::new(utils::create_file(filepath));\n\n writeln!(file, \"P3\")?;\n\n writeln!(file, \"{} {} {}\", self.width, self.height, self.depth)?;\n\n for t in self.data.iter() {\n\n writeln!(file, \"{} {} {}\", t.red, t.green, t.blue)?;\n\n }\n\n file.flush()?;\n\n Ok(())\n\n }\n", "file_path": "src/img.rs", "rank": 61, "score": 31725.171462894567 }, { "content": " self.width as i32 - y - 1\n\n } else 
{\n\n y\n\n };\n\n\n\n // now we know that x and y are positive, we can cast without worry\n\n Some((y * self.width as i32 + x).try_into().unwrap())\n\n }\n\n}\n\n\n\nimpl Canvas for PPMImg {\n\n /// Plot a point on this PPMImg at (`x`, `y`, `z`)\n\n ///\n\n /// `z` is used for depth-buffer. Will only plot if `z` is closer to screen (new_z > existing_z).\n\n fn plot(&mut self, x: i32, y: i32, z: f64, color: RGB) {\n\n // make the origin to be lower left corner\n\n let y = self.height as i32 - 1 - y;\n\n if let Some(index) = self.index(x, y) {\n\n if self.zbuf[index] < z {\n\n self.data[index] = color;\n", "file_path": "src/img.rs", "rank": 62, "score": 31724.16451741803 }, { "content": " }\n\n\n\n self.zbuf = vec![f64::NEG_INFINITY; (self.height * self.width).try_into().unwrap()];\n\n }\n\n\n\n fn save(&self, filepath: &str) -> io::Result<ExitStatus> {\n\n // // convert to .png if wanted\n\n // if filepath.ends_with(\".ppm\") {\n\n // self.write_binary(filepath)\n\n // } else {\n\n let mut process = pipe_to_magick(vec![\"ppm:-\", filepath]);\n\n\n\n // This cmd should have a stdnin, so it's ok to unwrap\n\n let mut stdin = process.stdin.take().unwrap();\n\n self.write_bin_to_buf(&mut stdin)?;\n\n\n\n drop(stdin);\n\n\n\n Ok(wait_for_magick(process))\n\n // }\n", "file_path": "src/img.rs", "rank": 63, "score": 31723.85470532118 }, { "content": " self.zbuf[index] = z;\n\n }\n\n }\n\n }\n\n fn width(&self) -> u32 {\n\n self.width\n\n }\n\n fn height(&self) -> u32 {\n\n self.height\n\n }\n\n\n\n fn display(&self) {\n\n utils::display_ppm(&self);\n\n }\n\n\n\n /// Fill image with a certain color\n\n fn clear(&mut self, color: RGB) {\n\n // let bg = self.bg_color;\n\n for d in self.data.iter_mut() {\n\n *d = color;\n", "file_path": "src/img.rs", "rank": 64, "score": 31721.81765524524 }, { "content": " // bg_color,\n\n data: vec![bg_color; (width * height).try_into().unwrap()],\n\n zbuf: vec![f64::NEG_INFINITY; (width * height).try_into().unwrap()],\n\n }\n\n }\n\n\n\n 
pub fn write_bin_to_buf(&self, writer: &mut dyn Write) -> io::Result<()> {\n\n let mut buf = BufWriter::new(writer);\n\n writeln!(buf, \"P6\")?;\n\n writeln!(buf, \"{} {} {}\", self.width, self.height, self.depth)?;\n\n if self.depth < 256 {\n\n for t in self.data.iter() {\n\n buf.write_all(&[t.red as u8])?;\n\n buf.write_all(&[t.green as u8])?;\n\n buf.write_all(&[t.blue as u8])?;\n\n }\n\n } else {\n\n for t in self.data.iter() {\n\n buf.write_all(&(t.red.to_be_bytes()))?;\n\n buf.write_all(&(t.green.to_be_bytes()))?;\n", "file_path": "src/img.rs", "rank": 65, "score": 31721.803715318816 }, { "content": " let mut points = vec![(x, y)];\n\n while let Some((x, y)) = points.pop() {\n\n if let Some(index) = self.index(x, y) {\n\n let color = self.data[index];\n\n if color == bound {\n\n continue;\n\n }\n\n let fcolor = fill(x as f64, y as f64);\n\n if color == fcolor {\n\n continue;\n\n }\n\n self.data[index] = fcolor;\n\n points.push((x + 1, y));\n\n points.push((x, y + 1));\n\n points.push((x - 1, y));\n\n points.push((x, y - 1));\n\n }\n\n assert!(points.len() <= (self.width * self.height).try_into().unwrap());\n\n }\n\n }\n\n}\n", "file_path": "src/img.rs", "rank": 66, "score": 31719.54333437986 }, { "content": "\n\n/// Two images are eq iff their dimensions, depth, and image data are eq\n\nimpl PartialEq for PPMImg {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.height == other.height\n\n && self.width == other.width\n\n && self.depth == other.depth\n\n && self.data == other.data\n\n }\n\n}\n\n\n\nimpl Eq for PPMImg {}\n\n\n\nimpl Debug for PPMImg {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n\n \"PPMImg {{ {} by {}, depth={} }}\",\n\n self.height, self.width, self.depth\n\n )\n", "file_path": "src/img.rs", "rank": 67, "score": 31718.38551404357 }, { "content": " }\n\n}\n\n\n\n// impl constructor and exporter\n\nimpl PPMImg {\n\n /// Createa new PPMImg\n\n /// Default img is filled with black\n\n pub fn 
new(height: u32, width: u32, depth: u16) -> PPMImg {\n\n Self::with_bg(height, width, depth, RGB::gray(0))\n\n }\n\n\n\n pub fn with_bg(height: u32, width: u32, depth: u16, bg_color: RGB) -> PPMImg {\n\n PPMImg {\n\n height,\n\n width,\n\n depth,\n\n x_wrap: false,\n\n y_wrap: false,\n\n invert_y: false,\n\n // fg_color: RGB::gray(depth),\n", "file_path": "src/img.rs", "rank": 68, "score": 31718.279706404195 }, { "content": "}\n\n\n\nimpl PPMImg {\n\n /// Returns Some(index) if index exists. Otherwise None.\n\n fn index(&self, x: i32, y: i32) -> Option<usize> {\n\n let (width, height) = (\n\n self.width.try_into().unwrap(),\n\n self.height.try_into().unwrap(),\n\n );\n\n if (!self.x_wrap && (x < 0 || x >= width)) || (!self.y_wrap && (y < 0 || y >= height)) {\n\n return None;\n\n }\n\n\n\n let x = if x >= width {\n\n x % width\n\n } else if x < 0 {\n\n let r = x % width;\n\n if r != 0 {\n\n r + width\n\n } else {\n", "file_path": "src/img.rs", "rank": 69, "score": 31714.422085198898 }, { "content": " r\n\n }\n\n } else {\n\n x\n\n };\n\n let y = if y >= height {\n\n y % height\n\n } else if y < 0 {\n\n let r = y % height;\n\n if r != 0 {\n\n r + height\n\n } else {\n\n r\n\n }\n\n } else {\n\n y\n\n };\n\n\n\n // invert y based on config\n\n let y = if self.invert_y {\n", "file_path": "src/img.rs", "rank": 70, "score": 31708.39987748351 }, { "content": "/// Advances a line iterator and panic on error\n\nfn getline_or_error(\n\n line: &mut impl Iterator<Item = (usize, io::Result<String>)>,\n\n) -> (usize, String) {\n\n if let Some((num, line)) = line.next() {\n\n let line = line.expect(\"Error while reading line\").trim().to_string();\n\n (num, line)\n\n } else {\n\n panic!(\"Error reading line\");\n\n }\n\n}\n\n\n", "file_path": "src/mdl/old_parser.rs", "rank": 71, "score": 31476.390873182303 }, { "content": "}\n\n\n\n// print Matrix\n\nimpl fmt::Display for Matrix {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n if self.nrows == 0 || self.ncols == 0 
{\n\n write!(f, \"Empty matrix ({} by {})\", self.nrows, self.ncols)?;\n\n } else {\n\n writeln!(f, \"Matrix ({} by {}) {{\", self.nrows, self.ncols)?;\n\n\n\n for col_offset in 0..self.ncols {\n\n write!(f, \" \")?; // indentation\n\n for d in self.data.iter().skip(col_offset).step_by(self.ncols) {\n\n write!(f, \"{arg:.prec$} \", arg = d, prec = 2)?;\n\n }\n\n writeln!(f)?; // line change\n\n }\n\n write!(f, \"}}\")?;\n\n }\n\n Ok(())\n", "file_path": "src/matrix.rs", "rank": 72, "score": 30763.997233886304 }, { "content": " pub fn append_row(&mut self, row: &mut Vec<f64>) {\n\n assert_eq!(\n\n self.ncols,\n\n row.len(),\n\n \"Length of edge and matrix column size don't match\"\n\n );\n\n self.data.append(row);\n\n self.nrows += 1;\n\n }\n\n}\n\n\n\n// row and col iter\n\nimpl Matrix {\n\n /// Iterate over a certain row\n\n pub fn row_iter(&self, r: usize) -> impl Iterator<Item = &f64> {\n\n let start = r * self.ncols;\n\n self.data[start..start + self.ncols].iter()\n\n }\n\n\n\n /// Iterate over a certain column\n", "file_path": "src/matrix.rs", "rank": 73, "score": 30762.706380495994 }, { "content": " pub fn col_iter(&self, c: usize) -> impl Iterator<Item = &f64> {\n\n self.data.iter().skip(c).step_by(self.ncols)\n\n }\n\n\n\n /// Interate over the matrix by row, one row at a time\n\n ///\n\n /// Returns an iterator for the row\n\n pub fn iter_by_row(&self) -> std::slice::Chunks<'_, f64> {\n\n self.data.as_slice().chunks(self.ncols)\n\n }\n\n\n\n /// Returns an mut_iter for iterating row by row\n\n pub fn mut_iter_by_row(&mut self) -> impl Iterator<Item = &mut [f64]> {\n\n self.data.as_mut_slice().chunks_exact_mut(self.ncols)\n\n }\n\n}\n\n\n\n// mul\n\nimpl Matrix {\n\n /// Returns (x, y) of a matrix based on ncols and i\n", "file_path": "src/matrix.rs", "rank": 74, "score": 30762.0583535576 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn matrix_equal(m1: &Matrix, m2: &Matrix) -> bool {\n\n m1.nrows == m2.nrows\n\n && 
m1.ncols == m2.ncols\n\n && m1.data.iter().zip(m2.data.iter()).all(|(a, b)| a == b)\n\n }\n\n\n\n #[test]\n\n #[ignore]\n\n fn print_matrix() {\n\n let m = Matrix::new(\n\n 7,\n\n 5,\n\n vec![\n", "file_path": "src/matrix.rs", "rank": 75, "score": 30761.123990478703 }, { "content": " let a = Matrix::new(1, 3, vec![3.0, 4.0, 2.0]);\n\n let mut b = Matrix::new(\n\n 3,\n\n 4,\n\n vec![13.0, 9.0, 7.0, 15.0, 8.0, 7.0, 4.0, 6.0, 6.0, 4.0, 0.0, 3.0],\n\n );\n\n println!(\"a: {}\", a);\n\n println!(\"b: {}\", b);\n\n println!(\"multiplying...\",);\n\n Matrix::mul_mut_b(&a, &mut b);\n\n println!(\"b: {}\", b);\n\n assert!(matrix_equal(\n\n &b,\n\n &Matrix::new(1, 4, vec![83.0, 63.0, 37.0, 75.0])\n\n ));\n\n }\n\n\n\n #[test]\n\n fn test_new_ident() {\n\n let ident = Matrix::ident(3);\n", "file_path": "src/matrix.rs", "rank": 76, "score": 30760.171983245815 }, { "content": " None\n\n } else {\n\n Some(self.data[self.index(row, col)])\n\n }\n\n }\n\n\n\n pub fn set(&mut self, row: usize, col: usize, data: f64) {\n\n assert!(row < self.nrows && col < self.ncols, \"Index out of bound\");\n\n let i = self.index(row, col);\n\n self.data[i] = data;\n\n }\n\n\n\n pub fn clear(&mut self) {\n\n self.nrows = 0;\n\n self.data.clear();\n\n }\n\n}\n\n\n\n// add row\n\nimpl Matrix {\n", "file_path": "src/matrix.rs", "rank": 77, "score": 30759.82190452762 }, { "content": " 1.0, 2.0, 3.0, 4.0, 5.0, 1.0, 2.0, 3.0, 4.0, 5.0, 1.0, 2.0, 3.0, 4.0, 5.0, 1.0,\n\n 2.0, 3.0, 4.0, 5.0, 1.0, 2.0, 3.0, 4.0, 5.0, 1.0, 2.0, 3.0, 4.0, 5.0, 1.0, 2.0,\n\n 3.0, 4.0, 5.0,\n\n ],\n\n );\n\n println!(\"M: {}\", m);\n\n println!(\"M: {:?}\", m);\n\n }\n\n\n\n #[test]\n\n fn add_edge() {\n\n let mut m = Matrix::new(0, 4, vec![]);\n\n println!(\"m: {}\", m);\n\n println!(\"Adding (1, 2, 4) and (5, 6, 7) to empty matrix\",);\n\n m.append_edge(&mut vec![1.0, 2.0, 4.0]);\n\n m.append_edge(&mut vec![5.0, 6.0, 7.0]);\n\n println!(\"m: {}\", m);\n\n assert!(\n\n matrix_equal(\n\n &m,\n", "file_path": "src/matrix.rs", 
"rank": 78, "score": 30759.678382964772 }, { "content": "// identity\n\nimpl Matrix {\n\n /// Make a new identity matrix with size `size`\n\n pub fn ident(size: usize) -> Self {\n\n let mut m = Matrix::new(size, size, vec![0.0; size * size]);\n\n for i in 0..size {\n\n m.set(i, i, 1.0);\n\n }\n\n m\n\n }\n\n\n\n /// Transforms self into an identity matrix\n\n pub fn make_ident(&mut self) {\n\n let ncols = self.ncols;\n\n for (i, d) in self.data.iter_mut().enumerate() {\n\n let (r, c) = Matrix::index_to_rc(i, ncols);\n\n\n\n *d = if r == c { 1.0 } else { 0.0 }\n\n }\n\n }\n", "file_path": "src/matrix.rs", "rank": 79, "score": 30759.643268186268 }, { "content": "\n\nimpl Mul for Matrix {\n\n type Output = Matrix;\n\n fn mul(self, rhs: Self) -> Self::Output {\n\n self._mul(&rhs)\n\n }\n\n}\n\n\n\nimpl MulAssign for Matrix {\n\n fn mul_assign(&mut self, rhs: Self) {\n\n *self = self._mul(&rhs);\n\n }\n\n}\n\n\n\nimpl MulAssign<&Matrix> for Matrix {\n\n fn mul_assign(&mut self, rhs: &Matrix) {\n\n *self = self._mul(&rhs)\n\n }\n\n}\n\n\n", "file_path": "src/matrix.rs", "rank": 80, "score": 30759.528864433116 }, { "content": " println!(\"m is now {}\", m);\n\n assert!(matrix_equal(&m, &Matrix::ident(5)), \"5 x 5 matrix\");\n\n let mut m = Matrix::new(1, 1, vec![50.0]);\n\n m.make_ident();\n\n assert!(\n\n matrix_equal(&m, &Matrix::ident(1)),\n\n \"1 x 1 matrix edge case\"\n\n );\n\n }\n\n}\n", "file_path": "src/matrix.rs", "rank": 81, "score": 30759.42428694396 }, { "content": " assert!(\n\n matrix_equal(\n\n &ident,\n\n &Matrix::new(3, 3, vec![1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0,])\n\n ),\n\n \"3 x 3 matrix\"\n\n );\n\n\n\n assert!(\n\n matrix_equal(&Matrix::ident(1), &Matrix::new(1, 1, vec![1.0])),\n\n \"1 x 1 matrix edge case\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_inplace_ident() {\n\n let mut m = Matrix::new(5, 5, vec![120.0; 25]);\n\n println!(\"m init: {}\", m);\n\n println!(\"Mutating m...\",);\n\n m.make_ident();\n", "file_path": "src/matrix.rs", 
"rank": 82, "score": 30759.002905218636 }, { "content": " pub fn transposed_mul(&self, other: &Self) -> Self {\n\n assert_eq!(self.nrows, other.ncols, \"nrows of m1 must == ncols of m2\");\n\n let (frows, fcols) = (other.nrows, self.nrows);\n\n let mut fdata = vec![0.0; frows * fcols];\n\n for (i, d) in fdata.iter_mut().enumerate() {\n\n let (r, c) = Self::index_to_rc(i, fcols);\n\n *d = self\n\n .col_iter(c)\n\n .zip(other.row_iter(r))\n\n .fold(0.0, |sum, (a, b)| sum + a * b);\n\n }\n\n Matrix::new(frows, fcols, fdata)\n\n }\n\n\n\n pub fn mul_mut_b(a: &Matrix, b: &mut Matrix) {\n\n *b = a._mul(b);\n\n // println!(\"result: {}\", b);\n\n }\n\n}\n\n\n", "file_path": "src/matrix.rs", "rank": 83, "score": 30758.989029063436 }, { "content": " );\n\n\n\n Matrix {\n\n nrows,\n\n ncols,\n\n data: data.to_owned(),\n\n }\n\n }\n\n\n\n pub fn new(nrows: usize, ncols: usize, data: Vec<f64>) -> Matrix {\n\n assert_eq!(\n\n nrows * ncols,\n\n data.len(),\n\n \"nrows * ncols must == data.len()\"\n\n );\n\n Matrix { nrows, ncols, data }\n\n }\n\n\n\n pub fn get(&self, row: usize, col: usize) -> Option<f64> {\n\n if row > self.nrows || col > self.ncols {\n", "file_path": "src/matrix.rs", "rank": 84, "score": 30758.636398634633 }, { "content": "/// Each row represents a new point\n\npub struct Matrix {\n\n nrows: usize,\n\n ncols: usize,\n\n data: Vec<f64>,\n\n}\n\n\n\n// constructor, get, set\n\nimpl Matrix {\n\n /// Row major index\n\n fn index(&self, row: usize, col: usize) -> usize {\n\n row * self.ncols + col\n\n // col * self.nrows + row\n\n }\n\n\n\n pub fn new_clone_vec(nrows: usize, ncols: usize, data: &[f64]) -> Matrix {\n\n assert_eq!(\n\n nrows * ncols,\n\n data.len(),\n\n \"nrows * ncols must == data.len()\"\n", "file_path": "src/matrix.rs", "rank": 85, "score": 30758.183134691175 }, { "content": " fn index_to_rc(i: usize, ncols: usize) -> (usize, usize) {\n\n (i / ncols, i % ncols)\n\n }\n\n\n\n /// Multiplies self matrix by other matrix\n\n pub fn _mul(&self, 
other: &Self) -> Self {\n\n // self * other -> new\n\n assert_eq!(self.ncols, other.nrows, \"ncols of m1 must == nrows of m2\");\n\n let (frows, fcols) = (self.nrows, other.ncols);\n\n let mut fdata = vec![0.0; frows * fcols];\n\n for (i, d) in fdata.iter_mut().enumerate() {\n\n let (r, c) = Self::index_to_rc(i, fcols);\n\n *d = self\n\n .row_iter(r)\n\n .zip(other.col_iter(c))\n\n .fold(0.0, |sum, (a, b)| sum + a * b);\n\n }\n\n Matrix::new(frows, fcols, fdata)\n\n }\n\n\n", "file_path": "src/matrix.rs", "rank": 86, "score": 30758.11818971842 }, { "content": "#![allow(dead_code)]\n\n//! Generic matrix stuff\n\n\n\nuse std::{\n\n fmt,\n\n ops::{Mul, MulAssign},\n\n};\n\n\n\n// standalone\n\npub mod projections;\n\npub mod transform;\n\n\n\n// impl on Matrix\n\npub mod dim2;\n\npub mod dim3;\n\npub mod parametrics;\n\n// pub mod mstack;\n\n\n\n#[derive(Clone, Debug)]\n\n/// Row major rectangular matrix\n", "file_path": "src/matrix.rs", "rank": 87, "score": 30757.967764944187 }, { "content": "impl Mul for &Matrix {\n\n type Output = Matrix;\n\n fn mul(self, rhs: Self) -> Self::Output {\n\n self._mul(rhs)\n\n }\n\n}\n\n\n\nimpl Mul<Matrix> for &Matrix {\n\n type Output = Matrix;\n\n fn mul(self, rhs: Matrix) -> Self::Output {\n\n self._mul(&rhs)\n\n }\n\n}\n\n\n\nimpl Mul<&Matrix> for Matrix {\n\n type Output = Matrix;\n\n fn mul(self, rhs: &Matrix) -> Self::Output {\n\n self._mul(rhs)\n\n }\n\n}\n", "file_path": "src/matrix.rs", "rank": 88, "score": 30756.76414481225 }, { "content": " &Matrix::new(2, 4, vec![1.0, 2.0, 4.0, 1.0, 5.0, 6.0, 7.0, 1.0,])\n\n ),\n\n \"Matrix not equal\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn multiply_with_method() {\n\n let m1 = Matrix::new(2, 3, vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0]);\n\n let m2 = Matrix::new(3, 2, vec![7.0, 8.0, 9.0, 10.0, 11.0, 12.0]);\n\n let mp = m1._mul(&m2);\n\n println!(\"{} mul by {} = {}\", m1, m2, m1._mul(&m2));\n\n assert!(matrix_equal(\n\n &mp,\n\n &Matrix::new(2, 2, vec![58.0, 64.0, 139.0, 154.0,])\n\n ));\n\n 
}\n\n\n\n #[test]\n\n fn multiple_and_mutate_b() {\n", "file_path": "src/matrix.rs", "rank": 89, "score": 30756.4744428081 }, { "content": " let (ax, bx, cx, dx) = utils::compute_bezier3_coef(p0.0, p1.0, p2.0, p3.0);\n\n let (ay, by, cy, dy) = utils::compute_bezier3_coef(p0.1, p1.1, p2.1, p3.1);\n\n self.add_parametric(\n\n |t: f64| ax * t * t * t + bx * t * t + cx * t + dx,\n\n |t: f64| ay * t * t * t + by * t * t + cy * t + dy,\n\n 0.0,\n\n 0.001,\n\n );\n\n }\n\n\n\n pub fn add_hermite3(&mut self, p0: (f64, f64), p1: (f64, f64), r0: (f64, f64), r1: (f64, f64)) {\n\n let (ax, bx, cx, dx) = utils::compute_hermite3_coef(p0.0, p1.0, r0.0, r1.0);\n\n let (ay, by, cy, dy) = utils::compute_hermite3_coef(p0.1, p1.1, r0.1, r1.1);\n\n self.add_parametric(\n\n |t: f64| ax * t * t * t + bx * t * t + cx * t + dx,\n\n |t: f64| ay * t * t * t + by * t * t + cy * t + dy,\n\n 0.0,\n\n 0.0001,\n\n );\n\n }\n\n}\n", "file_path": "src/matrix/parametrics.rs", "rank": 90, "score": 28999.249428808896 }, { "content": " (x0, y0, z0): (f64, f64, f64),\n\n (x1, y1, z1): (f64, f64, f64),\n\n (x2, y2, z2): (f64, f64, f64),\n\n ) {\n\n self.data\n\n .extend_from_slice(&[x0, y0, z0, 1., x1, y1, z1, 1., x2, y2, z2, 1.]);\n\n self.nrows += 3;\n\n }\n\n}\n\n\n\n// box, sphere, torus\n\nimpl Matrix {\n\n /// Add a 3d rectangular box to the matrix\n\n pub fn add_box(&mut self, (x, y, z): (f64, f64, f64), dx: f64, dy: f64, dz: f64) {\n\n // let (x0, y0, z0) = point;\n\n // define the four points in the front\n\n let p1 = (x, y, z);\n\n let p2 = (x, y - dy, z);\n\n let p3 = (x + dx, y, z);\n\n let p4 = (x + dx, y - dy, z);\n", "file_path": "src/matrix/dim3.rs", "rank": 91, "score": 28989.28904719151 }, { "content": " pub fn ndc_n1to1_to_device(&mut self, width: f64, height: f64) {\n\n let map_width = mapper(-1., 1., 0., width);\n\n let map_height = mapper(-1., 1., 0., height);\n\n\n\n for row in self.mut_iter_by_row() {\n\n row[0] = map_width(-row[0]);\n\n row[1] = map_height(row[1]);\n\n }\n\n 
}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::{matrix::transform, utils::display_edge_matrix, RGB};\n\n\n\n #[test]\n\n fn test_perspective() {\n\n let mut model = Matrix::new_edge_matrix();\n\n model.add_sphere((110., 0., 0.), 75.);\n", "file_path": "src/matrix/projections.rs", "rank": 92, "score": 28989.06791232451 }, { "content": "use super::Matrix;\n\nuse crate::{parametrics::Parametric, utils};\n\nuse std::f64::consts;\n\n\n\n// draw parametric\n\nimpl Matrix {\n\n /// Add a parametric curve\n\n /// # Arguments\n\n /// `x` - Function that takes in `t` from 0 to 1 and produces x\n\n /// `y` - Function that takes in `t` from 0 to 1 and produces y\n\n /// `z` - The z value that the curve will be on\n\n /// `step` - Controls the precision of the curves\n\n pub fn add_parametric<F1, F2>(&mut self, xf: F1, yf: F2, z: f64, step: f64)\n\n where\n\n F1: Fn(f64) -> f64,\n\n F2: Fn(f64) -> f64,\n\n {\n\n let p = Parametric::new(xf, yf);\n\n for points in p.points_iter(step).collect::<Vec<(f64, f64)>>().windows(2) {\n\n let (x0, y0) = points[0];\n", "file_path": "src/matrix/parametrics.rs", "rank": 93, "score": 28984.837809691136 }, { "content": " let (x1, y1) = points[1];\n\n self.append_edge(&[x0, y0, z, x1, y1, z]);\n\n }\n\n }\n\n\n\n /// Add a circle with `center` and `radius`\n\n pub fn add_circle(&mut self, center: (f64, f64, f64), radius: f64) {\n\n let (x, y, z) = center;\n\n self.add_parametric(\n\n |t: f64| radius * (t * 2.0 * consts::PI).cos() + x,\n\n |t: f64| radius * (t * 2.0 * consts::PI).sin() + y,\n\n z,\n\n 0.001,\n\n );\n\n }\n\n\n\n /// Add a cubic Bezier curve\n\n /// # Arguments\n\n /// `p[0-3]` - control points\n\n pub fn add_bezier3(&mut self, p0: (f64, f64), p1: (f64, f64), p2: (f64, f64), p3: (f64, f64)) {\n", "file_path": "src/matrix/parametrics.rs", "rank": 94, "score": 28984.09929549049 }, { "content": "\n\n // define the four points in the back, lined up with front\n\n let p5 = (x, y, z - dz);\n\n let p6 = (x, y 
- dy, z - dz);\n\n let p7 = (x + dx, y, z - dz);\n\n let p8 = (x + dx, y - dy, z - dz);\n\n\n\n // front\n\n self.append_polygon(p1, p2, p3);\n\n self.append_polygon(p3, p2, p4);\n\n\n\n // right\n\n self.append_polygon(p3, p4, p8);\n\n self.append_polygon(p3, p8, p7);\n\n\n\n // back\n\n self.append_polygon(p7, p8, p6);\n\n self.append_polygon(p7, p6, p5);\n\n\n\n // left\n", "file_path": "src/matrix/dim3.rs", "rank": 95, "score": 28984.01614568413 }, { "content": " self.append_polygon(p5, p6, p2);\n\n self.append_polygon(p5, p2, p1);\n\n\n\n // top\n\n self.append_polygon(p7, p1, p3);\n\n self.append_polygon(p7, p5, p1);\n\n\n\n // btm\n\n self.append_polygon(p6, p4, p2);\n\n self.append_polygon(p6, p8, p4);\n\n\n\n /*\n\n // old\n\n // self.append_edge(&[x0, y0, z0, x0 + dx, y0, z0]);\n\n // self.append_edge(&[x0, y0, z0, x0, y0 - dy, z0]);\n\n // self.append_edge(&[x0 + dx, y0, z0, x0 + dx, y0 - dy, z0]);\n\n // self.append_edge(&[x0, y0 - dy, z0, x0 + dx, y0 - dy, z0]);\n\n\n\n // self.append_edge(&[x0, y0, z0 - dz, x0 + dx, y0, z0 - dz]);\n\n // self.append_edge(&[x0, y0, z0 - dz, x0, y0 - dy, z0 - dz]);\n", "file_path": "src/matrix/dim3.rs", "rank": 96, "score": 28983.301333779764 }, { "content": " use crate::{matrix::transform, utils::display_polygon_matrix};\n\n\n\n #[test]\n\n fn draw_sphere() {\n\n let mut m = Matrix::new_polygon_matrix();\n\n m.add_sphere((250., 250., 0.), 40.);\n\n m *= transform::rotatex(40.) 
* transform::rotatey(90.);\n\n println!(\"len of array: {}\", m.data.len());\n\n display_polygon_matrix(&m, false);\n\n }\n\n\n\n #[test]\n\n fn test_no_degenerate_triangles_in_sphere() {\n\n for radius in (0..1000).into_iter().step_by(100) {\n\n let mut m = Matrix::new_polygon_matrix();\n\n m.add_sphere((0., 0., 0.), radius as f64 + 0.3);\n\n\n\n for chunk in m.data.chunks_exact(m.ncols * 3) {\n\n if let [x0, y0, z0, _w0, x1, y1, z1, _w1, x2, y2, z2, _w2] = chunk {\n\n assert_ne!((x0, y0, z0), (x1, y1, z1));\n", "file_path": "src/matrix/dim3.rs", "rank": 97, "score": 28982.867355813778 }, { "content": "use super::Matrix;\n\n///! Note: all matrices here are row-major (transposed compared to what we have from classes)\n\n///! since my engine uses row-major point matrices\n\n\n\n// generate transformation matrices\n\n/// Generate a translation matrix with (dx, dy, dz)\n", "file_path": "src/matrix/transform.rs", "rank": 98, "score": 28981.640908087382 }, { "content": " model.add_sphere((-100., 0., 0.), 75.);\n\n model.add_box((-80., -120., 0.), 75., 75., 75.);\n\n model.add_torus((-30., -335., 0.), 25., 175.);\n\n let t = Matrix::ident(4)\n\n // .mul(&transform::rotatex(30.))\n\n // .mul(&transform::rotatey(-20.))\n\n ._mul(&transform::mv(0., 250., 250.));\n\n let model = model._mul(&t);\n\n\n\n // now apply perspective\n\n let mut model = model._mul(&perspective(90., 1., 1., 500.));\n\n model.perspective_divide();\n\n\n\n let fg_color = RGB::WHITE;\n\n\n\n display_edge_matrix(&model, true, fg_color);\n\n }\n\n}\n", "file_path": "src/matrix/projections.rs", "rank": 99, "score": 28981.581945150698 } ]
Rust
src/cluster/cluster.rs
MilesBreslin/Chunky-Bits
2338a32b9fd8fbae75173c4e667b4b509128dad3
use std::{ convert::TryInto, path::Path, }; use futures::stream::Stream; use serde::{ Deserialize, Serialize, }; use tokio::{ io, io::AsyncRead, }; use crate::{ cluster::{ ClusterNodes, ClusterProfile, ClusterProfiles, Destination, DestinationInner, FileOrDirectory, MetadataFormat, MetadataTypes, Tunables, }, error::{ ClusterError, LocationParseError, MetadataReadError, }, file::{ new_profiler, FileReference, FileWriteBuilder, Location, ProfileReport, ProfileReporter, }, }; #[derive(Clone, Serialize, Deserialize)] pub struct Cluster { #[serde(alias = "destination")] #[serde(alias = "nodes")] #[serde(alias = "node")] pub destinations: ClusterNodes, #[serde(alias = "metadata")] pub metadata: MetadataTypes, pub profiles: ClusterProfiles, #[serde(default)] #[serde(alias = "tunable")] #[serde(alias = "tuning")] pub tunables: Tunables, } impl Cluster { pub async fn from_location( location: impl TryInto<Location, Error = impl Into<LocationParseError>>, ) -> Result<Cluster, MetadataReadError> { MetadataFormat::Yaml.from_location(location).await } pub fn get_file_writer(&self, profile: &ClusterProfile) -> FileWriteBuilder<Destination> { let destination = self.get_destination(profile); FileReference::write_builder() .destination(destination) .chunk_size((1_usize) << profile.get_chunk_size()) .data_chunks(profile.get_data_chunks()) } pub async fn write_file_ref( &self, path: impl AsRef<Path>, file_ref: &FileReference, ) -> Result<(), ClusterError> { self.metadata.write(path, &file_ref).await?; Ok(()) } pub async fn write_file<R>( &self, path: impl AsRef<Path>, reader: &mut R, profile: &ClusterProfile, content_type: Option<String>, ) -> Result<(), ClusterError> where R: AsyncRead + Unpin, { let mut file_ref = self.get_file_writer(profile).write(reader).await?; file_ref.content_type = content_type; self.metadata.write(path, &file_ref).await.unwrap(); Ok(()) } pub async fn write_file_with_report<R>( &self, path: impl AsRef<Path>, reader: &mut R, profile: &ClusterProfile, 
content_type: Option<String>, ) -> (ProfileReport, Result<(), ClusterError>) where R: AsyncRead + Unpin, { let (reporter, destination) = self.get_destination_with_profiler(profile); let result = FileReference::write_builder() .destination(destination) .chunk_size((1_usize) << profile.get_chunk_size()) .data_chunks(profile.get_data_chunks()) .parity_chunks(profile.get_parity_chunks()) .write(reader) .await; match result { Ok(mut file_ref) => { file_ref.content_type = content_type; self.metadata.write(path, &file_ref).await.unwrap(); (reporter.profile().await, Ok(())) }, Err(err) => (reporter.profile().await, Err(err.into())), } } pub async fn get_file_ref( &self, path: impl AsRef<Path>, ) -> Result<FileReference, MetadataReadError> { self.metadata.read(path).await } pub async fn read_file( &self, path: impl AsRef<Path>, ) -> Result<impl AsyncRead + Unpin, MetadataReadError> { let file_ref = self.get_file_ref(path).await?; let reader = file_ref.read_builder_owned().reader_owned(); Ok(reader) } pub fn get_destination(&self, profile: &ClusterProfile) -> Destination { let inner = DestinationInner { nodes: self.destinations.clone(), location_context: self.tunables.as_ref().clone(), profile: profile.clone(), }; Destination(inner.into()) } pub fn get_destination_with_profiler( &self, profile: &ClusterProfile, ) -> (ProfileReporter, Destination) { let (profiler, reporter) = new_profiler(); let location_context = self .tunables .generate_location_context_builder() .profiler(profiler) .build(); ( reporter, Destination( DestinationInner { nodes: self.destinations.clone(), location_context, profile: profile.clone(), } .into(), ), ) } pub fn get_profile<'a>( &self, profile: impl Into<Option<&'a str>>, ) -> Option<&'_ ClusterProfile> { self.profiles.get(profile) } pub async fn list_files( &self, path: &Path, ) -> Result<impl Stream<Item = io::Result<FileOrDirectory>> + 'static, MetadataReadError> { self.metadata.list(path).await } }
use std::{ convert::TryInto, path::Path, }; use futures::stream::Stream; use serde::{ Deserialize, Serialize, }; use tokio::{ io, io::AsyncRead, }; use crate::{ cluster::{ ClusterNodes, ClusterProfile, ClusterProfiles, Destination, DestinationInner, FileOrDirectory, MetadataFormat, MetadataTypes, Tunables, }, error::{ ClusterError, LocationParseError, MetadataReadError, }, file::{ new_profiler, FileReference, FileWriteBuilder, Location, ProfileReport, ProfileReporter, }, }; #[derive(Clone, Serialize, Deserialize)] pub struct Cluster { #[serde(alias = "destination")] #[serde(alias = "nodes")] #[serde(alias = "node")] pub destinations: ClusterNodes, #[serde(alias = "metadata")] pub metadata: MetadataTypes, pub profiles: ClusterProfiles, #[serde(default)] #[serde(alias = "tunable")] #[serde(alias = "tuning")] pub tunables: Tunables, } impl Cluster { pub async
parity_chunks(profile.get_parity_chunks()) .write(reader) .await; match result { Ok(mut file_ref) => { file_ref.content_type = content_type; self.metadata.write(path, &file_ref).await.unwrap(); (reporter.profile().await, Ok(())) }, Err(err) => (reporter.profile().await, Err(err.into())), } } pub async fn get_file_ref( &self, path: impl AsRef<Path>, ) -> Result<FileReference, MetadataReadError> { self.metadata.read(path).await } pub async fn read_file( &self, path: impl AsRef<Path>, ) -> Result<impl AsyncRead + Unpin, MetadataReadError> { let file_ref = self.get_file_ref(path).await?; let reader = file_ref.read_builder_owned().reader_owned(); Ok(reader) } pub fn get_destination(&self, profile: &ClusterProfile) -> Destination { let inner = DestinationInner { nodes: self.destinations.clone(), location_context: self.tunables.as_ref().clone(), profile: profile.clone(), }; Destination(inner.into()) } pub fn get_destination_with_profiler( &self, profile: &ClusterProfile, ) -> (ProfileReporter, Destination) { let (profiler, reporter) = new_profiler(); let location_context = self .tunables .generate_location_context_builder() .profiler(profiler) .build(); ( reporter, Destination( DestinationInner { nodes: self.destinations.clone(), location_context, profile: profile.clone(), } .into(), ), ) } pub fn get_profile<'a>( &self, profile: impl Into<Option<&'a str>>, ) -> Option<&'_ ClusterProfile> { self.profiles.get(profile) } pub async fn list_files( &self, path: &Path, ) -> Result<impl Stream<Item = io::Result<FileOrDirectory>> + 'static, MetadataReadError> { self.metadata.list(path).await } }
fn from_location( location: impl TryInto<Location, Error = impl Into<LocationParseError>>, ) -> Result<Cluster, MetadataReadError> { MetadataFormat::Yaml.from_location(location).await } pub fn get_file_writer(&self, profile: &ClusterProfile) -> FileWriteBuilder<Destination> { let destination = self.get_destination(profile); FileReference::write_builder() .destination(destination) .chunk_size((1_usize) << profile.get_chunk_size()) .data_chunks(profile.get_data_chunks()) } pub async fn write_file_ref( &self, path: impl AsRef<Path>, file_ref: &FileReference, ) -> Result<(), ClusterError> { self.metadata.write(path, &file_ref).await?; Ok(()) } pub async fn write_file<R>( &self, path: impl AsRef<Path>, reader: &mut R, profile: &ClusterProfile, content_type: Option<String>, ) -> Result<(), ClusterError> where R: AsyncRead + Unpin, { let mut file_ref = self.get_file_writer(profile).write(reader).await?; file_ref.content_type = content_type; self.metadata.write(path, &file_ref).await.unwrap(); Ok(()) } pub async fn write_file_with_report<R>( &self, path: impl AsRef<Path>, reader: &mut R, profile: &ClusterProfile, content_type: Option<String>, ) -> (ProfileReport, Result<(), ClusterError>) where R: AsyncRead + Unpin, { let (reporter, destination) = self.get_destination_with_profiler(profile); let result = FileReference::write_builder() .destination(destination) .chunk_size((1_usize) << profile.get_chunk_size()) .data_chunks(profile.get_data_chunks()) .
random
[ { "content": "#[derive(Serialize, Deserialize)]\n\nstruct MetadataGitSerde {\n\n #[serde(default)]\n\n pub format: MetadataFormat,\n\n pub path: PathBuf,\n\n}\n\n\n\nimpl From<MetadataGitSerde> for MetadataGit {\n\n fn from(meta: MetadataGitSerde) -> Self {\n\n let MetadataGitSerde { format, path } = meta;\n\n let meta_path = MetadataPath {\n\n format,\n\n path,\n\n put_script: None,\n\n fail_on_script_error: false,\n\n };\n\n MetadataGit { meta_path }\n\n }\n\n}\n\n\n\nimpl From<MetadataGit> for MetadataGitSerde {\n", "file_path": "src/cluster/metadata.rs", "rank": 0, "score": 133144.54451408997 }, { "content": "#[derive(Clone, Serialize, Deserialize)]\n\nstruct TunablesInner {\n\n #[serde(default = \"TunablesInner::https_only\")]\n\n https_only: bool,\n\n #[serde(default = \"TunablesInner::on_conflict\")]\n\n on_conflict: OnConflict,\n\n #[serde(default = \"TunablesInner::user_agent\")]\n\n user_agent: Option<String>,\n\n}\n\n\n\nmacro_rules! default_getters {\n\n ($($field:ident: $type:ty),*,) => {\n\n impl TunablesInner {\n\n $(\n\n fn $field() -> $type {\n\n <TunablesInner as Default>::default().$field\n\n }\n\n )*\n\n }\n\n };\n\n}\n\n\n\ndefault_getters! {\n\n https_only: bool,\n\n user_agent: Option<String>,\n\n on_conflict: OnConflict,\n\n}\n\n\n", "file_path": "src/cluster/tunables.rs", "rank": 1, "score": 110212.96897251696 }, { "content": "#[derive(Deserialize)]\n\n#[serde(untagged)]\n\nenum ClusterNodesDeserializer {\n\n Single(ClusterNode),\n\n Set(Vec<ClusterNodesDeserializer>),\n\n Map(BTreeMap<String, ClusterNodesDeserializer>),\n\n}\n\n\n\nimpl From<ClusterNodesDeserializer> for ClusterNodes {\n\n fn from(des: ClusterNodesDeserializer) -> ClusterNodes {\n\n use ClusterNodesDeserializer::*;\n\n ClusterNodes(match des {\n\n Single(node) => {\n\n vec![node]\n\n },\n\n Set(mut nodes) => {\n\n let mut nodes_out = Vec::<ClusterNode>::new();\n\n for sub_nodes in nodes.drain(..) 
{\n\n let mut nodes: ClusterNodes = sub_nodes.into();\n\n nodes_out.append(&mut nodes.0);\n\n }\n\n nodes_out\n", "file_path": "src/cluster/nodes.rs", "rank": 2, "score": 98804.43890345379 }, { "content": "pub trait CollectionDestination {\n\n type Writer: ShardWriter + Send + Sync + 'static;\n\n fn get_writers(&self, count: usize) -> Result<Vec<Self::Writer>, FileWriteError>;\n\n fn get_used_writers(\n\n &self,\n\n locations: &[Option<&Location>],\n\n ) -> Result<Vec<Self::Writer>, FileWriteError> {\n\n let writers_needed = locations.iter().filter_map(|loc| *loc).count();\n\n self.get_writers(writers_needed)\n\n }\n\n fn get_context(&self) -> LocationContext {\n\n Default::default()\n\n }\n\n}\n\n\n\nimpl<T: CollectionDestination> CollectionDestination for Arc<T> {\n\n type Writer = <T as CollectionDestination>::Writer;\n\n\n\n fn get_writers(&self, count: usize) -> Result<Vec<Self::Writer>, FileWriteError> {\n\n <T as CollectionDestination>::get_writers(&self, count)\n", "file_path": "src/file/collection_destination.rs", "rank": 3, "score": 98432.26463454419 }, { "content": "struct ReadResult {\n\n result: Result<usize, String>,\n\n location: Location,\n\n start_time: Instant,\n\n end_time: Instant,\n\n}\n\n\n", "file_path": "src/file/profiler.rs", "rank": 4, "score": 98375.9212335994 }, { "content": "struct WriteResult {\n\n result: Result<(), String>,\n\n location: Location,\n\n length: usize,\n\n start_time: Instant,\n\n end_time: Instant,\n\n}\n\n\n", "file_path": "src/file/profiler.rs", "rank": 5, "score": 98375.9212335994 }, { "content": "pub fn new_profiler() -> (Profiler, ProfileReporter) {\n\n let (log_tx, mut log_rx) = mpsc::unbounded_channel::<ResultLog>();\n\n let (profile_tx, profile_rx) = oneshot::channel::<ProfileReport>();\n\n let (drop_tx, mut drop_rx) = oneshot::channel::<()>();\n\n tokio::spawn(async move {\n\n let mut profile = ProfileReport(vec![]);\n\n loop {\n\n select! 
{\n\n _ = &mut drop_rx => {\n\n let _ = profile_tx.send(profile);\n\n return;\n\n },\n\n result = log_rx.recv() => {\n\n match result {\n\n Some(result) => {\n\n profile.0.push(result);\n\n },\n\n None => {\n\n let _ = profile_tx.send(profile);\n\n return;\n", "file_path": "src/file/profiler.rs", "rank": 6, "score": 97839.92643457092 }, { "content": "#[async_trait]\n\npub trait ShardWriter {\n\n async fn write_shard(\n\n &mut self,\n\n hash: &AnyHash,\n\n bytes: &[u8],\n\n ) -> Result<Vec<Location>, ShardError>;\n\n}\n\n\n\n#[async_trait]\n\nimpl ShardWriter for Box<dyn ShardWriter + Send + Sync> {\n\n async fn write_shard(\n\n &mut self,\n\n hash: &AnyHash,\n\n bytes: &[u8],\n\n ) -> Result<Vec<Location>, ShardError> {\n\n self.as_mut().write_shard(hash, bytes).await\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]\n", "file_path": "src/file/collection_destination.rs", "rank": 7, "score": 88652.34066216022 }, { "content": "#[allow(dead_code)]\n\nstruct TestCluster {\n\n cluster: Cluster,\n\n // Kept for the drop implementation\n\n data_dir: TempDir,\n\n metadata_dir: TempDir,\n\n}\n\n\n\nimpl Deref for TestCluster {\n\n type Target = Cluster;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.cluster\n\n }\n\n}\n\n\n\nimpl DerefMut for TestCluster {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.cluster\n\n }\n\n}\n", "file_path": "tests/cluster.rs", "rank": 8, "score": 83797.352213879 }, { "content": "#[derive(Copy, Clone)]\n\nstruct FileWriteBuilderState {\n\n chunk_size: usize,\n\n data: usize,\n\n parity: usize,\n\n concurrency: usize,\n\n}\n\n\n\nimpl Default for FileWriteBuilderState {\n\n fn default() -> Self {\n\n FileWriteBuilderState {\n\n chunk_size: 1 << 20,\n\n data: 3,\n\n parity: 2,\n\n concurrency: 10,\n\n }\n\n }\n\n}\n\n\n\nimpl<D: Default> Default for FileWriteBuilder<D> {\n\n fn default() -> Self {\n", "file_path": "src/file/writer.rs", "rank": 9, "score": 
73893.53025231433 }, { "content": "pub trait Integrity {\n\n fn is_ideal(&self) -> bool;\n\n fn is_available(&self) -> bool;\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]\n\npub enum LocationIntegrity {\n\n Valid,\n\n Resilvered,\n\n Invalid,\n\n Unavailable,\n\n}\n\n\n\nimpl fmt::Display for LocationIntegrity {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n self.serialize(f)\n\n }\n\n}\n\n\n\nimpl Integrity for LocationIntegrity {\n", "file_path": "src/file/file_part.rs", "rank": 10, "score": 73639.3353575749 }, { "content": "pub fn cluster_filter(\n\n cluster: impl Into<Arc<Cluster>>,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = Rejection> + Clone {\n\n let cluster: Arc<Cluster> = cluster.into();\n\n cluster_filter_get(cluster.clone()).or(cluster_filter_put(cluster))\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) enum HttpRange {\n\n Prefix { length: u64 },\n\n Suffix { length: u64 },\n\n Range { start: u64, end: u64 },\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) enum HttpRangeError {\n\n InvalidFormat,\n\n InvalidInteger,\n\n InvalidLength,\n\n MultiRange,\n", "file_path": "src/http.rs", "rank": 11, "score": 69502.88823598847 }, { "content": "pub fn cluster_filter_get(\n\n cluster: impl Into<Arc<Cluster>>,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = Rejection> + Clone {\n\n let cluster: Arc<Cluster> = cluster.into();\n\n warp::get()\n\n .or(warp::head())\n\n .map(move |_| cluster.clone())\n\n .and(warp::path::full())\n\n .and(get_http_range())\n\n .and_then(index_get)\n\n}\n\n\n", "file_path": "src/http.rs", "rank": 12, "score": 66840.414712381 }, { "content": "pub fn cluster_filter_put(\n\n cluster: impl Into<Arc<Cluster>>,\n\n) -> impl Filter<Extract = impl warp::Reply, Error = Rejection> + Clone {\n\n let cluster: Arc<Cluster> = cluster.into();\n\n warp::put()\n\n .map(move || cluster.clone())\n\n .and(warp::path::full())\n\n 
.and(warp::header::optional::<String>(\"content-type\"))\n\n .and(warp::body::stream())\n\n .and_then(index_put)\n\n}\n\n\n", "file_path": "src/http.rs", "rank": 13, "score": 66840.414712381 }, { "content": "pub trait SizedInt {\n\n const MAX: usize;\n\n const MIN: usize;\n\n const NAME: &'static str;\n\n}\n\n\n\n#[derive(Clone, PartialEq, Eq)]\n\npub struct SizeError<T: SizedInt>(PhantomData<T>);\n\nimpl<T: SizedInt> SizeError<T> {\n\n fn new() -> Self {\n\n Self(PhantomData)\n\n }\n\n}\n\nimpl<T> std::fmt::Display for SizeError<T>\n\nwhere\n\n T: SizedInt,\n\n{\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(\n\n f,\n", "file_path": "src/cluster/sized_int.rs", "rank": 14, "score": 64422.79761194346 }, { "content": "use std::{\n\n cmp::Ordering,\n\n collections::{\n\n BTreeMap,\n\n BTreeSet,\n\n },\n\n};\n\n\n\nuse serde::{\n\n Deserialize,\n\n Serialize,\n\n};\n\n\n\nuse crate::file::WeightedLocation;\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\n#[serde(from = \"ClusterNodesDeserializer\")]\n\npub struct ClusterNodes(pub Vec<ClusterNode>);\n\n\n\nimpl From<ClusterNodes> for BTreeSet<ClusterNode> {\n\n fn from(c: ClusterNodes) -> BTreeSet<ClusterNode> {\n\n c.0.into_iter().collect()\n\n }\n\n}\n\n\n\n#[derive(Deserialize)]\n\n#[serde(untagged)]\n", "file_path": "src/cluster/nodes.rs", "rank": 15, "score": 61605.95734384742 }, { "content": " pub location: WeightedLocation,\n\n #[serde(default)]\n\n pub zones: BTreeSet<String>,\n\n #[serde(default)]\n\n pub repeat: usize,\n\n}\n\n\n\nimpl Ord for ClusterNode {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n if self.zones == other.zones {\n\n self.location.cmp(&other.location)\n\n } else {\n\n self.zones.cmp(&other.zones)\n\n }\n\n }\n\n}\n\n\n\nimpl PartialOrd for ClusterNode {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n", "file_path": "src/cluster/nodes.rs", "rank": 16, "score": 61591.786594735786 }, { "content": " 
},\n\n Map(nodes) => {\n\n let mut nodes_out = Vec::<ClusterNode>::new();\n\n for (name, sub_nodes) in nodes.into_iter() {\n\n let nodes: ClusterNodes = sub_nodes.into();\n\n for sub_node in nodes.0 {\n\n let mut sub_node = sub_node.clone();\n\n sub_node.zones.insert(name.clone());\n\n nodes_out.push(sub_node);\n\n }\n\n }\n\n nodes_out\n\n },\n\n })\n\n }\n\n}\n\n\n\n#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]\n\npub struct ClusterNode {\n\n #[serde(flatten)]\n", "file_path": "src/cluster/nodes.rs", "rank": 17, "score": 61588.90603270787 }, { "content": "use serde::{\n\n Deserialize,\n\n Serialize,\n\n};\n\n\n\nuse crate::file::{\n\n LocationContext,\n\n LocationContextBuilder,\n\n};\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\n#[serde(from = \"TunablesInner\")]\n\n#[serde(into = \"TunablesInner\")]\n\npub struct Tunables {\n\n inner: TunablesInner,\n\n location_context: LocationContext,\n\n}\n\n\n\nimpl AsRef<LocationContext> for Tunables {\n\n fn as_ref(&self) -> &LocationContext {\n", "file_path": "src/cluster/tunables.rs", "rank": 18, "score": 61566.075662333344 }, { "content": " fn default() -> Self {\n\n TunablesInner::default().into()\n\n }\n\n}\n\n\n\nimpl Tunables {\n\n pub(super) fn generate_location_context_builder(&self) -> LocationContextBuilder {\n\n self.inner.generate_location_context_builder()\n\n }\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n", "file_path": "src/cluster/tunables.rs", "rank": 19, "score": 61554.75706358709 }, { "content": " &self.location_context\n\n }\n\n}\n\n\n\nimpl From<TunablesInner> for Tunables {\n\n fn from(inner: TunablesInner) -> Self {\n\n Tunables {\n\n location_context: inner.generate_location_context_builder().build(),\n\n inner,\n\n }\n\n }\n\n}\n\n\n\nimpl From<Tunables> for TunablesInner {\n\n fn from(t: Tunables) -> Self {\n\n t.inner\n\n }\n\n}\n\n\n\nimpl Default for Tunables {\n", "file_path": "src/cluster/tunables.rs", "rank": 20, "score": 61547.96104578443 }, { "content": " if let 
Some(user_agent) = &self.user_agent {\n\n builder = builder.user_agent(user_agent.clone())\n\n }\n\n builder.build().unwrap()\n\n };\n\n let builder = LocationContext::builder().http_client(http_client);\n\n let builder = match &self.on_conflict {\n\n OnConflict::Ignore => builder.conflict_ignore(),\n\n OnConflict::Overwrite => builder.conflict_overwrite(),\n\n };\n\n builder\n\n }\n\n}\n", "file_path": "src/cluster/tunables.rs", "rank": 21, "score": 61537.56044790145 }, { "content": " ClusterWriterState,\n\n },\n\n error::FileWriteError,\n\n file::{\n\n CollectionDestination,\n\n Location,\n\n LocationContext,\n\n },\n\n};\n\n\n\n#[derive(Clone)]\n\npub struct Destination(pub(super) Arc<DestinationInner>);\n\n\n\npub(super) struct DestinationInner {\n\n pub(super) location_context: LocationContext,\n\n pub(super) nodes: ClusterNodes,\n\n pub(super) profile: ClusterProfile,\n\n}\n\n\n\nimpl AsRef<ClusterNodes> for Destination {\n", "file_path": "src/cluster/destination.rs", "rank": 22, "score": 61459.64546354573 }, { "content": " fn as_ref(&self) -> &ClusterNodes {\n\n &self.0.nodes\n\n }\n\n}\n\nimpl AsRef<ClusterProfile> for Destination {\n\n fn as_ref(&self) -> &ClusterProfile {\n\n &self.0.profile\n\n }\n\n}\n\n\n\nimpl CollectionDestination for Destination {\n\n type Writer = ClusterWriter;\n\n\n\n fn get_writers(&self, count: usize) -> Result<Vec<Self::Writer>, FileWriteError> {\n\n self.get_used_writers(&vec![None; count])\n\n }\n\n\n\n fn get_used_writers(\n\n &self,\n\n locations: &[Option<&Location>],\n", "file_path": "src/cluster/destination.rs", "rank": 23, "score": 61458.16488513516 }, { "content": "use std::{\n\n collections::HashSet,\n\n iter::{\n\n once,\n\n repeat_with,\n\n },\n\n sync::Arc,\n\n};\n\n\n\nuse tokio::sync::{\n\n oneshot,\n\n Mutex,\n\n};\n\n\n\nuse crate::{\n\n cluster::{\n\n ClusterNodes,\n\n ClusterProfile,\n\n ClusterWriter,\n\n ClusterWriterInnerState,\n", "file_path": "src/cluster/destination.rs", "rank": 24, "score": 
61452.04483464155 }, { "content": " ) -> Result<Vec<Self::Writer>, FileWriteError> {\n\n let count = locations.iter().filter(|opt| opt.is_none()).count();\n\n let DestinationInner {\n\n ref nodes,\n\n ref profile,\n\n ..\n\n } = self.0.as_ref();\n\n // Does not account for zone rules. ShardWriters will handle that\n\n let possible_nodes: usize = nodes.0.iter().map(|node| node.repeat + 1).sum();\n\n if possible_nodes < count {\n\n return Err(FileWriteError::NotEnoughWriters);\n\n }\n\n let mut inner_state = ClusterWriterInnerState {\n\n available_indexes: nodes\n\n .0\n\n .iter()\n\n .enumerate()\n\n .map(|(i, node)| (i, node.repeat + 1))\n\n .collect(),\n\n failed_indexes: HashSet::new(),\n", "file_path": "src/cluster/destination.rs", "rank": 25, "score": 61446.062215901205 }, { "content": " zone_status: profile.zone_rules.clone(),\n\n errors: vec![],\n\n rng: None,\n\n };\n\n for location in locations.iter().flatten() {\n\n let parent_nodes = nodes\n\n .0\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, node)| node.location.location.is_parent_of(location));\n\n for (index, node) in parent_nodes {\n\n inner_state.remove_availability(index, node);\n\n }\n\n }\n\n let state = Arc::new(ClusterWriterState {\n\n parent: self.0.clone(),\n\n inner_state: Mutex::new(inner_state),\n\n });\n\n\n\n let (tx_waiters, rx_waiters): (Vec<_>, Vec<_>) =\n", "file_path": "src/cluster/destination.rs", "rank": 26, "score": 61443.17274739418 }, { "content": " repeat_with(oneshot::channel::<()>).take(count).unzip();\n\n\n\n let writers = once(None)\n\n .chain(rx_waiters.into_iter().map(Some))\n\n .zip(tx_waiters)\n\n .map(|(rx, tx)| ClusterWriter {\n\n state: state.clone(),\n\n waiter: rx,\n\n staller: Some(tx),\n\n })\n\n .collect();\n\n\n\n Ok(writers)\n\n }\n\n}\n", "file_path": "src/cluster/destination.rs", "rank": 27, "score": 61431.0501470434 }, { "content": "pub trait DataVerifier: Sized {\n\n fn verify(&self, data: &[u8]) -> bool;\n\n fn verify_async<T>(&self, data: T) -> 
JoinHandle<(bool, T)>\n\n where\n\n T: AsRef<[u8]> + Send + Sync + 'static;\n\n}\n\n\n\nimpl<H> DataVerifier for H\n\nwhere\n\n H: DataHasher + Clone,\n\n{\n\n fn verify(&self, data: &[u8]) -> bool {\n\n *self == <Self as DataHasher>::from_buf(data)\n\n }\n\n\n\n fn verify_async<T>(&self, data: T) -> JoinHandle<(bool, T)>\n\n where\n\n T: AsRef<[u8]> + Send + Sync + Sized + 'static,\n\n {\n\n let inner = self.clone();\n", "file_path": "src/file/hash/any.rs", "rank": 28, "score": 61321.2365920293 }, { "content": " de::DeserializeOwned,\n\n Deserialize,\n\n Serialize,\n\n};\n\nuse tokio::{\n\n fs,\n\n io,\n\n process::Command,\n\n};\n\nuse tokio_stream::wrappers::ReadDirStream;\n\n\n\nuse crate::{\n\n error::{\n\n LocationError,\n\n LocationParseError,\n\n MetadataReadError,\n\n SerdeError,\n\n },\n\n file::Location,\n\n};\n", "file_path": "src/cluster/metadata.rs", "rank": 29, "score": 61205.00904410921 }, { "content": " pub fn from_bytes<T, U>(&self, v: &T) -> Result<U, SerdeError>\n\n where\n\n T: AsRef<[u8]>,\n\n U: DeserializeOwned,\n\n {\n\n use MetadataFormat::*;\n\n Ok(match self {\n\n JsonStrict => serde_json::from_slice(v.as_ref())?,\n\n Json | JsonPretty | Yaml => serde_yaml::from_slice(v.as_ref())?,\n\n })\n\n }\n\n\n\n pub async fn from_location<T>(\n\n &self,\n\n location: impl TryInto<Location, Error = impl Into<LocationParseError>>,\n\n ) -> Result<T, MetadataReadError>\n\n where\n\n T: DeserializeOwned,\n\n {\n\n let location: Location = TryInto::try_into(location).map_err(|err| err.into())?;\n", "file_path": "src/cluster/metadata.rs", "rank": 30, "score": 61198.71517778132 }, { "content": "\n\n#[derive(Clone, Serialize, Deserialize)]\n\n#[serde(rename_all = \"kebab-case\")]\n\n#[serde(tag = \"type\")]\n\npub enum MetadataTypes {\n\n Path(MetadataPath),\n\n Git(MetadataGit),\n\n}\n\n\n\nimpl MetadataTypes {\n\n pub async fn write<T>(\n\n &self,\n\n path: impl AsRef<Path>,\n\n payload: &T,\n\n ) -> Result<(), MetadataReadError>\n\n where\n\n T: 
Serialize,\n\n {\n\n match self {\n\n MetadataTypes::Path(meta_path) => meta_path.write(path, payload).await,\n", "file_path": "src/cluster/metadata.rs", "rank": 31, "score": 61195.655129892366 }, { "content": " }\n\n\n\n pub async fn read<T>(&self, path: impl AsRef<Path>) -> Result<T, MetadataReadError>\n\n where\n\n T: DeserializeOwned,\n\n {\n\n let path = self.sub_path(path);\n\n let bytes = fs::read(path).await.map_err(LocationError::from)?;\n\n Ok(self.format.from_bytes(&bytes)?)\n\n }\n\n\n\n pub async fn list(\n\n &self,\n\n path: &Path,\n\n ) -> io::Result<impl Stream<Item = io::Result<FileOrDirectory>> + 'static> {\n\n let self_owned = self.clone();\n\n let path = self.sub_path(path);\n\n let stream = FileOrDirectory::list(&path).await?.map(move |result| {\n\n result.map(|mut f_or_d| {\n\n self_owned.to_pub_path(&mut f_or_d);\n", "file_path": "src/cluster/metadata.rs", "rank": 32, "score": 61194.565061334404 }, { "content": " .list(path)\n\n .await\n\n .map_err(LocationError::from)?\n\n .boxed()),\n\n MetadataTypes::Git(meta_git) => Ok(meta_git\n\n .list(path)\n\n .await\n\n .map_err(LocationError::from)?\n\n .boxed()),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\npub struct MetadataPath {\n\n #[serde(default)]\n\n pub format: MetadataFormat,\n\n pub path: PathBuf,\n\n pub put_script: Option<String>,\n\n #[serde(default)]\n", "file_path": "src/cluster/metadata.rs", "rank": 33, "score": 61193.58073066598 }, { "content": " MetadataTypes::Git(meta_git) => meta_git.write(path, payload).await,\n\n }\n\n }\n\n\n\n pub async fn read<T>(&self, path: impl AsRef<Path>) -> Result<T, MetadataReadError>\n\n where\n\n T: DeserializeOwned,\n\n {\n\n match self {\n\n MetadataTypes::Path(meta_path) => meta_path.read(path).await,\n\n MetadataTypes::Git(meta_git) => meta_git.read(path).await,\n\n }\n\n }\n\n\n\n pub async fn list(\n\n &self,\n\n path: &Path,\n\n ) -> Result<impl Stream<Item = io::Result<FileOrDirectory>> + 'static, MetadataReadError> 
{\n\n match self {\n\n MetadataTypes::Path(meta_path) => Ok(meta_path\n", "file_path": "src/cluster/metadata.rs", "rank": 34, "score": 61192.255405568125 }, { "content": "\n\nimpl Default for MetadataFormat {\n\n fn default() -> Self {\n\n MetadataFormat::JsonPretty\n\n }\n\n}\n\n\n\nimpl MetadataFormat {\n\n pub fn to_string<T>(&self, payload: &T) -> Result<String, SerdeError>\n\n where\n\n T: Serialize,\n\n {\n\n use MetadataFormat::*;\n\n Ok(match self {\n\n Json | JsonStrict => serde_json::to_string(payload)?,\n\n JsonPretty => serde_json::to_string_pretty(payload)?,\n\n Yaml => serde_yaml::to_string(payload)?,\n\n })\n\n }\n\n\n", "file_path": "src/cluster/metadata.rs", "rank": 35, "score": 61192.217974091 }, { "content": " let bytes = location.read().await?;\n\n Ok(self.from_bytes(&bytes)?)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]\n\npub enum FileOrDirectory {\n\n Directory(PathBuf),\n\n File(PathBuf),\n\n}\n\n\n\nimpl fmt::Display for FileOrDirectory {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let path: &PathBuf = self.as_ref();\n\n write!(f, \"{}\", path.display())\n\n }\n\n}\n\n\n\nimpl FileOrDirectory {\n\n pub async fn from_local_path(path: PathBuf) -> io::Result<Self> {\n", "file_path": "src/cluster/metadata.rs", "rank": 36, "score": 61191.84190248563 }, { "content": " let metadata = fs::metadata(&path).await?;\n\n if metadata.is_dir() {\n\n Ok(FileOrDirectory::Directory(path))\n\n } else if metadata.is_file() {\n\n Ok(FileOrDirectory::File(path))\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::NotFound,\n\n \"Not a file or directory\",\n\n ))\n\n }\n\n }\n\n\n\n pub async fn list(\n\n path: &Path,\n\n ) -> io::Result<impl Stream<Item = io::Result<FileOrDirectory>> + 'static> {\n\n let top_level = FileOrDirectory::from_local_path(path.to_owned()).await?;\n\n let children = if let FileOrDirectory::Directory(_) = &top_level {\n\n let dir_reader = 
fs::read_dir(&path).await?;\n\n ReadDirStream::new(dir_reader)\n", "file_path": "src/cluster/metadata.rs", "rank": 37, "score": 61191.60935651093 }, { "content": "}\n\n\n\nimpl MetadataGit {\n\n pub async fn write<T>(\n\n &self,\n\n path: impl AsRef<Path>,\n\n payload: &T,\n\n ) -> Result<(), MetadataReadError>\n\n where\n\n T: Serialize,\n\n {\n\n let path = Self::check_sub_git_dir(path).map_err(|err| MetadataReadError::IoError(err))?;\n\n let orig_path: PathBuf = path.as_ref().to_owned();\n\n let path = self.sub_path(path);\n\n let payload = self.format.to_string(payload)?;\n\n fs::write(&path, payload)\n\n .await\n\n .map_err(LocationError::from)?;\n\n let res = Command::new(\"git\")\n\n .arg(\"add\")\n", "file_path": "src/cluster/metadata.rs", "rank": 38, "score": 61191.46555921604 }, { "content": " pub fail_on_script_error: bool,\n\n}\n\n\n\nimpl MetadataPath {\n\n pub async fn write<T>(\n\n &self,\n\n path: impl AsRef<Path>,\n\n payload: &T,\n\n ) -> Result<(), MetadataReadError>\n\n where\n\n T: Serialize,\n\n {\n\n let path = self.sub_path(path);\n\n let payload = self.format.to_string(payload)?;\n\n fs::write(path, payload)\n\n .await\n\n .map_err(LocationError::from)?;\n\n if let Some(put_script) = &self.put_script {\n\n let res = Command::new(\"/bin/sh\")\n\n .arg(\"-c\")\n", "file_path": "src/cluster/metadata.rs", "rank": 39, "score": 61191.12741023208 }, { "content": " .components()\n\n .filter(|c| matches!(c, Component::Normal(_))),\n\n );\n\n new_path\n\n }\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\n#[serde(from = \"MetadataGitSerde\")]\n\n#[serde(into = \"MetadataGitSerde\")]\n\npub struct MetadataGit {\n\n meta_path: MetadataPath,\n\n}\n\n\n\nimpl Deref for MetadataGit {\n\n type Target = MetadataPath;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.meta_path\n\n }\n", "file_path": "src/cluster/metadata.rs", "rank": 40, "score": 61190.885646828654 }, { "content": " path: &Path,\n\n ) -> io::Result<impl Stream<Item = 
io::Result<FileOrDirectory>> + 'static> {\n\n Self::check_sub_git_dir(path)?;\n\n let stream = self.meta_path.list(path).await?;\n\n Ok(stream.filter_map(|result| async move {\n\n match result {\n\n Ok(file_or_dir) => Ok(Self::check_sub_git_dir(file_or_dir).ok()).transpose(),\n\n Err(err) => Some(Err(err)),\n\n }\n\n }))\n\n }\n\n\n\n pub async fn read<T>(&self, path: impl AsRef<Path>) -> Result<T, MetadataReadError>\n\n where\n\n T: DeserializeOwned,\n\n {\n\n let path = Self::check_sub_git_dir(path).map_err(|err| MetadataReadError::IoError(err))?;\n\n self.meta_path.read(path).await\n\n }\n\n\n", "file_path": "src/cluster/metadata.rs", "rank": 41, "score": 61190.61121781459 }, { "content": " fn from(meta: MetadataGit) -> Self {\n\n let MetadataGit { meta_path } = meta;\n\n let MetadataPath {\n\n format,\n\n path,\n\n put_script: _,\n\n fail_on_script_error: _,\n\n } = meta_path;\n\n MetadataGitSerde { format, path }\n\n }\n\n}\n\n\n\n#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"kebab-case\")]\n\npub enum MetadataFormat {\n\n Json,\n\n JsonPretty,\n\n JsonStrict,\n\n Yaml,\n\n}\n", "file_path": "src/cluster/metadata.rs", "rank": 42, "score": 61188.949189419945 }, { "content": " .filter_map(|entry_res| async move {\n\n let entry = match entry_res {\n\n Ok(e) => e,\n\n Err(err) => return Some(Err(err)),\n\n };\n\n match FileOrDirectory::from_local_path(entry.path().clone()).await {\n\n Ok(file_or_dir) => Some(Ok::<_, io::Error>(file_or_dir)),\n\n Err(err) if err.kind() == io::ErrorKind::NotFound => None,\n\n Err(err) => Some(Err(err)),\n\n }\n\n })\n\n .boxed()\n\n } else {\n\n stream::empty().boxed()\n\n };\n\n Ok(stream::once(future::ready(Ok(top_level))).chain(children))\n\n }\n\n}\n\n\n\nimpl AsRef<Path> for FileOrDirectory {\n", "file_path": "src/cluster/metadata.rs", "rank": 43, "score": 61184.32937855201 }, { "content": " .current_dir(&self.path)\n\n .spawn()\n\n .unwrap()\n\n .wait()\n\n .await;\n\n match res {\n\n 
Ok(status) => match status.code() {\n\n Some(0) => {},\n\n Some(code) => return Err(MetadataReadError::ExitCode(code)),\n\n None => return Err(MetadataReadError::Signal),\n\n },\n\n Err(err) => {\n\n return Err(MetadataReadError::PostExec(err));\n\n },\n\n }\n\n Ok(())\n\n }\n\n\n\n pub async fn list(\n\n &self,\n", "file_path": "src/cluster/metadata.rs", "rank": 44, "score": 61184.10638091584 }, { "content": " fn as_ref(&self) -> &Path {\n\n let path: &PathBuf = self.as_ref();\n\n &path\n\n }\n\n}\n\n\n\nimpl AsRef<PathBuf> for FileOrDirectory {\n\n fn as_ref(&self) -> &PathBuf {\n\n use FileOrDirectory::*;\n\n match self {\n\n File(path) => &path,\n\n Directory(path) => &path,\n\n }\n\n }\n\n}\n\n\n\nimpl AsMut<PathBuf> for FileOrDirectory {\n\n fn as_mut(&mut self) -> &mut PathBuf {\n\n use FileOrDirectory::*;\n\n match self {\n", "file_path": "src/cluster/metadata.rs", "rank": 45, "score": 61182.5976455028 }, { "content": "use std::{\n\n convert::TryInto,\n\n fmt,\n\n ops::Deref,\n\n path::{\n\n Component,\n\n Path,\n\n PathBuf,\n\n },\n\n};\n\n\n\nuse futures::{\n\n future,\n\n stream::{\n\n self,\n\n Stream,\n\n StreamExt,\n\n },\n\n};\n\nuse serde::{\n", "file_path": "src/cluster/metadata.rs", "rank": 46, "score": 61182.423396892525 }, { "content": " File(ref mut path) => path,\n\n Directory(ref mut path) => path,\n\n }\n\n }\n\n}\n\n\n\nimpl From<FileOrDirectory> for PathBuf {\n\n fn from(f: FileOrDirectory) -> Self {\n\n use FileOrDirectory::*;\n\n match f {\n\n File(path) => path,\n\n Directory(path) => path,\n\n }\n\n }\n\n}\n", "file_path": "src/cluster/metadata.rs", "rank": 47, "score": 61181.712088241264 }, { "content": " fn check_sub_git_dir<T>(path: T) -> io::Result<T>\n\n where\n\n T: AsRef<Path>,\n\n {\n\n if Self::is_sub_git_dir(path.as_ref()) {\n\n Err(io::Error::new(\n\n io::ErrorKind::PermissionDenied,\n\n \"Access to .git is denied\",\n\n ))\n\n } else {\n\n Ok(path)\n\n }\n\n }\n\n\n\n fn is_sub_git_dir(path: impl AsRef<Path>) -> bool {\n\n 
for component in path.as_ref().components() {\n\n match component {\n\n Component::Normal(part) if part.eq(\".git\") => {\n\n return true;\n\n },\n", "file_path": "src/cluster/metadata.rs", "rank": 48, "score": 61180.41591466902 }, { "content": " .arg(put_script)\n\n .current_dir(&self.path)\n\n .spawn()\n\n .unwrap()\n\n .wait()\n\n .await;\n\n if self.fail_on_script_error {\n\n match res {\n\n Ok(status) => match status.code() {\n\n Some(0) => {},\n\n Some(code) => return Err(MetadataReadError::ExitCode(code)),\n\n None => return Err(MetadataReadError::Signal),\n\n },\n\n Err(err) => {\n\n return Err(MetadataReadError::PostExec(err));\n\n },\n\n }\n\n }\n\n }\n\n Ok(())\n", "file_path": "src/cluster/metadata.rs", "rank": 49, "score": 61179.42903858572 }, { "content": " Component::Normal(_) => {\n\n return false;\n\n },\n\n _ => {},\n\n }\n\n }\n\n false\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize)]\n", "file_path": "src/cluster/metadata.rs", "rank": 50, "score": 61179.36640987498 }, { "content": " .arg(&orig_path)\n\n .current_dir(&self.path)\n\n .spawn()\n\n .unwrap()\n\n .wait()\n\n .await;\n\n match res {\n\n Ok(status) => match status.code() {\n\n Some(0) => {},\n\n Some(code) => return Err(MetadataReadError::ExitCode(code)),\n\n None => return Err(MetadataReadError::Signal),\n\n },\n\n Err(err) => {\n\n return Err(MetadataReadError::PostExec(err));\n\n },\n\n }\n\n let res = Command::new(\"git\")\n\n .arg(\"commit\")\n\n .arg(\"-m\")\n\n .arg(format!(\"Write {}\", orig_path.display()))\n", "file_path": "src/cluster/metadata.rs", "rank": 51, "score": 61178.82784779361 }, { "content": " f_or_d\n\n })\n\n });\n\n Ok(stream)\n\n }\n\n\n\n fn to_pub_path(&self, f: &mut FileOrDirectory) {\n\n let sub_path: &PathBuf = f.as_ref();\n\n let new_sub_path: PathBuf = self.pub_path(sub_path);\n\n let sub_path: &mut PathBuf = f.as_mut();\n\n *sub_path = new_sub_path;\n\n }\n\n\n\n fn pub_path(&self, sub_path: &Path) -> PathBuf {\n\n let mut parent_components = 
self.path.components();\n\n let mut sub_components = sub_path.components().peekable();\n\n loop {\n\n match (parent_components.next(), sub_components.peek()) {\n\n (Some(x), Some(y)) if x == *y => {\n\n sub_components.next();\n", "file_path": "src/cluster/metadata.rs", "rank": 52, "score": 61177.041146679454 }, { "content": " },\n\n (Some(_), None) => {\n\n panic!(\"Parent path length exceeds child length\");\n\n },\n\n _ => {\n\n break;\n\n },\n\n }\n\n }\n\n if sub_components.peek().is_none() {\n\n PathBuf::from(\".\")\n\n } else {\n\n sub_components.collect()\n\n }\n\n }\n\n\n\n fn sub_path(&self, path: impl AsRef<Path>) -> PathBuf {\n\n let mut new_path = self.path.clone();\n\n new_path.extend(\n\n path.as_ref()\n", "file_path": "src/cluster/metadata.rs", "rank": 53, "score": 61174.167596115665 }, { "content": "pub struct ClusterProfiles {\n\n default: ClusterProfile,\n\n #[serde(flatten)]\n\n custom: BTreeMap<String, ClusterProfile>,\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for ClusterProfiles {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: serde::de::Deserializer<'de>,\n\n {\n\n deserializer.deserialize_map(hollow::HollowClusterProfilesVisitor)\n\n }\n\n}\n\n\n\nimpl ClusterProfiles {\n\n pub fn get_default(&self) -> &ClusterProfile {\n\n &self.default\n\n }\n\n\n", "file_path": "src/cluster/profile.rs", "rank": 54, "score": 61083.26892773972 }, { "content": " }\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\npub struct ZoneRule {\n\n #[serde(default)]\n\n pub minimum: i8,\n\n pub maximum: Option<i8>,\n\n #[serde(default)]\n\n pub ideal: i8,\n\n}\n\n\n\nmod hollow {\n\n use std::fmt::{\n\n self,\n\n Formatter,\n\n };\n\n\n\n use serde::de::{\n\n Error,\n", "file_path": "src/cluster/profile.rs", "rank": 55, "score": 61081.18302474165 }, { "content": "use std::{\n\n collections::{\n\n BTreeMap,\n\n HashMap,\n\n },\n\n mem::swap,\n\n};\n\n\n\nuse serde::{\n\n Deserialize,\n\n Serialize,\n\n};\n\n\n\nuse 
crate::cluster::sized_int::{\n\n ChunkSize,\n\n DataChunkCount,\n\n ParityChunkCount,\n\n};\n\n\n\n#[derive(Clone, Serialize)]\n", "file_path": "src/cluster/profile.rs", "rank": 56, "score": 61080.38677979592 }, { "content": " Ok(ClusterProfiles { default, custom })\n\n }\n\n }\n\n\n\n #[derive(Deserialize)]\n\n #[serde(rename = \"ClusterProfile\")]\n\n struct HollowClusterProfile {\n\n pub chunk_size: Option<ChunkSize>,\n\n #[serde(alias = \"data\")]\n\n pub data_chunks: Option<DataChunkCount>,\n\n #[serde(alias = \"parity\")]\n\n pub parity_chunks: Option<ParityChunkCount>,\n\n #[serde(default)]\n\n #[serde(alias = \"zone\")]\n\n #[serde(alias = \"zones\")]\n\n #[serde(alias = \"rules\")]\n\n pub zone_rules: HashMap<String, Option<ZoneRule>>,\n\n }\n\n\n\n impl HollowClusterProfile {\n", "file_path": "src/cluster/profile.rs", "rank": 57, "score": 61078.04459678001 }, { "content": " MapAccess,\n\n Visitor,\n\n };\n\n\n\n use super::*;\n\n\n\n pub(super) struct HollowClusterProfilesVisitor;\n\n\n\n impl<'de> Visitor<'de> for HollowClusterProfilesVisitor {\n\n type Value = ClusterProfiles;\n\n\n\n fn expecting(&self, f: &mut Formatter) -> fmt::Result {\n\n write!(f, \"Needs default\")\n\n }\n\n\n\n fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>\n\n where\n\n A: MapAccess<'de>,\n\n {\n\n let mut default: Option<ClusterProfile> = None;\n", "file_path": "src/cluster/profile.rs", "rank": 58, "score": 61075.41358373715 }, { "content": " Some(name) => self.custom.insert(name, profile),\n\n }\n\n }\n\n\n\n fn filter_profile_name<T>(name: Option<T>) -> Option<T>\n\n where\n\n T: AsRef<str>,\n\n {\n\n match name {\n\n None => None,\n\n Some(name) if name.as_ref().eq_ignore_ascii_case(\"default\") => None,\n\n Some(name) => Some(name),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\npub struct ClusterProfile {\n\n #[serde(default)]\n\n pub chunk_size: ChunkSize,\n", "file_path": "src/cluster/profile.rs", "rank": 59, "score": 
61074.84595539272 }, { "content": " #[serde(alias = \"data\")]\n\n pub data_chunks: DataChunkCount,\n\n #[serde(alias = \"parity\")]\n\n pub parity_chunks: ParityChunkCount,\n\n #[serde(default)]\n\n #[serde(alias = \"zone\")]\n\n #[serde(alias = \"zones\")]\n\n #[serde(alias = \"rules\")]\n\n pub zone_rules: ZoneRules,\n\n}\n\n\n\nimpl ClusterProfile {\n\n pub fn get_chunk_size(&self) -> usize {\n\n self.chunk_size.into()\n\n }\n\n\n\n pub fn get_data_chunks(&self) -> usize {\n\n self.data_chunks.into()\n\n }\n\n\n", "file_path": "src/cluster/profile.rs", "rank": 60, "score": 61073.42411983501 }, { "content": " pub fn get<'a>(&self, profile: impl Into<Option<&'a str>>) -> Option<&'_ ClusterProfile> {\n\n let profile = profile.into();\n\n match Self::filter_profile_name(profile) {\n\n None => Some(&self.default),\n\n Some(profile) => self.custom.get(profile),\n\n }\n\n }\n\n\n\n pub fn insert(\n\n &mut self,\n\n name: impl Into<Option<String>>,\n\n profile: ClusterProfile,\n\n ) -> Option<ClusterProfile> {\n\n let name = name.into();\n\n match Self::filter_profile_name(name) {\n\n None => {\n\n let mut profile = profile;\n\n swap(&mut self.default, &mut profile);\n\n Some(profile)\n\n },\n", "file_path": "src/cluster/profile.rs", "rank": 61, "score": 61072.852065933796 }, { "content": " let mut custom: HashMap<String, HollowClusterProfile> = HashMap::new();\n\n while let Some(key) = map.next_key::<String>()? 
{\n\n if key.eq_ignore_ascii_case(\"default\") {\n\n if default.is_none() {\n\n default = Some(map.next_value()?);\n\n } else {\n\n return Err(A::Error::duplicate_field(\"default\"));\n\n }\n\n } else {\n\n use std::collections::hash_map::Entry;\n\n match custom.entry(key) {\n\n Entry::Vacant(entry) => {\n\n entry.insert(map.next_value()?);\n\n },\n\n Entry::Occupied(entry) => {\n\n return Err(A::Error::custom(format!(\n\n \"duplicate field `{}`\",\n\n entry.key(),\n\n )));\n\n },\n", "file_path": "src/cluster/profile.rs", "rank": 62, "score": 61069.25149877643 }, { "content": " pub fn get_parity_chunks(&self) -> usize {\n\n self.parity_chunks.into()\n\n }\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize)]\n\npub struct ZoneRules(pub BTreeMap<String, ZoneRule>);\n\nimpl AsRef<BTreeMap<String, ZoneRule>> for ZoneRules {\n\n fn as_ref(&self) -> &BTreeMap<String, ZoneRule> {\n\n &self.0\n\n }\n\n}\n\nimpl AsMut<BTreeMap<String, ZoneRule>> for ZoneRules {\n\n fn as_mut(&mut self) -> &mut BTreeMap<String, ZoneRule> {\n\n &mut self.0\n\n }\n\n}\n\nimpl Default for ZoneRules {\n\n fn default() -> Self {\n\n ZoneRules(BTreeMap::new())\n", "file_path": "src/cluster/profile.rs", "rank": 63, "score": 61068.5048158856 }, { "content": " fn merge_with_default(self, mut def: ClusterProfile) -> ClusterProfile {\n\n let HollowClusterProfile {\n\n chunk_size,\n\n data_chunks,\n\n parity_chunks,\n\n zone_rules,\n\n } = self;\n\n if let Some(chunk_size) = chunk_size {\n\n def.chunk_size = chunk_size;\n\n }\n\n if let Some(data_chunks) = data_chunks {\n\n def.data_chunks = data_chunks;\n\n }\n\n if let Some(parity_chunks) = parity_chunks {\n\n def.parity_chunks = parity_chunks;\n\n }\n\n for (zone, rule) in zone_rules {\n\n match rule {\n\n Some(rule) => {\n\n def.zone_rules.0.insert(zone, rule);\n", "file_path": "src/cluster/profile.rs", "rank": 64, "score": 61065.55349831309 }, { "content": " }\n\n }\n\n }\n\n\n\n let default = match default {\n\n Some(default) => default,\n\n None 
=> {\n\n return Err(A::Error::missing_field(\"default\"));\n\n },\n\n };\n\n\n\n let custom = custom\n\n .into_iter()\n\n .map(|(name, hollow_profile)| {\n\n let default = default.clone();\n\n let profile = hollow_profile.merge_with_default(default);\n\n (name, profile)\n\n })\n\n .collect();\n\n\n", "file_path": "src/cluster/profile.rs", "rank": 65, "score": 61065.13693109546 }, { "content": " },\n\n None => {\n\n def.zone_rules.0.remove(&zone);\n\n },\n\n }\n\n }\n\n def\n\n }\n\n }\n\n}\n", "file_path": "src/cluster/profile.rs", "rank": 66, "score": 61058.09972784112 }, { "content": "struct ResilverPartFullReport<'a>(&'a ResilverPartReport<'a>);\n\n\n\nimpl fmt::Display for ResilverPartFullReport<'_> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let (part_integrity, write_error, chunks) = self.0.full_report();\n\n write!(f, \"part\\t{}\", part_integrity)?;\n\n if let Err(error) = write_error {\n\n write!(f, \"\\t{}\", error)?;\n\n }\n\n writeln!(f)?;\n\n for (chunk, chunk_integrity, locations, failed_locations) in chunks {\n\n writeln!(f, \"chunk\\t{}\\t{}\", chunk_integrity, chunk.hash)?;\n\n for (location, loc_integrity, error) in locations {\n\n if let Some(error) = error {\n\n writeln!(f, \"location\\t{}\\t{}\\t{}\", loc_integrity, location, error)?;\n\n } else {\n\n writeln!(f, \"location\\t{}\\t{}\", loc_integrity, location)?;\n\n }\n\n }\n\n for error in failed_locations {\n\n writeln!(f, \"error\\t{}\", error)?;\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/file/file_part.rs", "rank": 67, "score": 60609.3820833369 }, { "content": "struct VerifyPartFullReport<'a>(&'a VerifyPartReport<'a>);\n\n\n\nimpl fmt::Display for VerifyPartFullReport<'_> {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let (part_integrity, chunks) = self.0.full_report();\n\n writeln!(f, \"part\\t{}\", part_integrity)?;\n\n for (chunk, chunk_integrity, locations) in chunks {\n\n writeln!(f, \"chunk\\t{}\\t{}\", chunk_integrity, 
chunk.hash)?;\n\n for (location, loc_integrity, error) in locations {\n\n if let Some(error) = error {\n\n writeln!(f, \"location\\t{}\\t{}\\t{}\", loc_integrity, location, error)?;\n\n } else {\n\n writeln!(f, \"location\\t{}\\t{}\", loc_integrity, location)?;\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/file/file_part.rs", "rank": 68, "score": 60609.3820833369 }, { "content": "use std::fmt;\n\n\n\nuse tokio::{\n\n select,\n\n sync::{\n\n mpsc,\n\n oneshot,\n\n },\n\n time::{\n\n Duration,\n\n Instant,\n\n },\n\n};\n\n\n\nuse crate::{\n\n error::LocationError,\n\n file::Location,\n\n};\n\n\n\npub struct ProfileReporter {\n", "file_path": "src/file/profiler.rs", "rank": 69, "score": 59092.7316198762 }, { "content": " result_log_copy_children!(self, location)\n\n }\n\n\n\n fn start_time(&self) -> &Instant {\n\n result_log_copy_children!(self, start_time)\n\n }\n\n\n\n fn end_time(&self) -> &Instant {\n\n result_log_copy_children!(self, end_time)\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Profiler(mpsc::UnboundedSender<ResultLog>);\n\n\n\nimpl Profiler {\n\n pub(super) fn log_read(\n\n &self,\n\n result: &Result<Vec<u8>, LocationError>,\n\n location: Location,\n", "file_path": "src/file/profiler.rs", "rank": 70, "score": 59080.597732068825 }, { "content": " report: oneshot::Receiver<ProfileReport>,\n\n quit: oneshot::Sender<()>,\n\n}\n\n\n\nimpl ProfileReporter {\n\n pub async fn profile(self) -> ProfileReport {\n\n let ProfileReporter { report, quit } = self;\n\n drop(quit);\n\n report.await.unwrap()\n\n }\n\n}\n\n\n", "file_path": "src/file/profiler.rs", "rank": 71, "score": 59078.92561129357 }, { "content": "\n\npub struct ProfileReport(Vec<ResultLog>);\n\n\n\nimpl fmt::Display for ProfileReport {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"ReadAvg<{:?}ms> WriteAvg<{:?}ms> Total<{:?}ms> Total<{}B>\",\n\n self.average_read_duration().map(|d| d.as_millis()),\n\n self.average_write_duration().map(|d| 
d.as_millis()),\n\n self.total_time().map(|d| d.as_millis()),\n\n self.total_bytes(),\n\n )\n\n }\n\n}\n\n\n\nimpl ProfileReport {\n\n pub fn average_read_duration(&self) -> Option<Duration> {\n\n let writes = self.reads();\n\n let durations = Self::duration(writes);\n", "file_path": "src/file/profiler.rs", "rank": 72, "score": 59075.058657390255 }, { "content": " self.result.as_ref().ok().copied()\n\n }\n\n\n\n fn location(&self) -> &Location {\n\n &self.location\n\n }\n\n\n\n fn start_time(&self) -> &Instant {\n\n &self.start_time\n\n }\n\n\n\n fn end_time(&self) -> &Instant {\n\n &self.end_time\n\n }\n\n}\n\n\n\nimpl GeneralResult for WriteResult {\n\n fn error(&self) -> Result<(), &str> {\n\n match &self.result {\n\n Ok(_) => Ok(()),\n", "file_path": "src/file/profiler.rs", "rank": 73, "score": 59074.923462932275 }, { "content": " start_time: Instant,\n\n ) {\n\n let result = match result {\n\n Ok(bytes) => Ok(bytes.len()),\n\n Err(err) => Err(format!(\"{}\", err)),\n\n };\n\n let _ = self.0.send(\n\n ReadResult {\n\n result,\n\n location,\n\n start_time,\n\n end_time: Instant::now(),\n\n }\n\n .into(),\n\n );\n\n }\n\n\n\n pub(super) fn log_write(\n\n &self,\n\n result: &Result<(), LocationError>,\n", "file_path": "src/file/profiler.rs", "rank": 74, "score": 59073.972989033646 }, { "content": "\n\nmacro_rules! 
result_log_copy_children {\n\n ($self:ident, $func:ident) => {\n\n match $self {\n\n ResultLog::Read(res) => <ReadResult as GeneralResult>::$func(res),\n\n ResultLog::Write(res) => <WriteResult as GeneralResult>::$func(res),\n\n }\n\n };\n\n}\n\n\n\nimpl GeneralResult for ResultLog {\n\n fn error(&self) -> Result<(), &str> {\n\n result_log_copy_children!(self, error)\n\n }\n\n\n\n fn length(&self) -> Option<usize> {\n\n result_log_copy_children!(self, length)\n\n }\n\n\n\n fn location(&self) -> &Location {\n", "file_path": "src/file/profiler.rs", "rank": 75, "score": 59073.74927050132 }, { "content": " None\n\n }\n\n }\n\n\n\n fn duration<'a>(\n\n iter: impl Iterator<Item = &'a (impl GeneralResult + 'static)> + 'a,\n\n ) -> impl Iterator<Item = Duration> + 'a {\n\n iter.map(move |res| res.duration())\n\n }\n\n\n\n fn success<'a, T>(iter: impl Iterator<Item = &'a T> + 'a) -> impl Iterator<Item = &'a T> + 'a\n\n where\n\n T: GeneralResult + 'static,\n\n {\n\n iter.filter(|res| res.error().is_ok())\n\n }\n\n\n\n fn writes(&self) -> impl Iterator<Item = &WriteResult> {\n\n self.0.iter().filter_map(|res| match res {\n\n ResultLog::Write(res) => Some(res),\n", "file_path": "src/file/profiler.rs", "rank": 76, "score": 59071.674923350474 }, { "content": " } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn total_bytes(&self) -> usize {\n\n Self::success(self.0.iter())\n\n .map(|res| res.length().unwrap())\n\n .sum()\n\n }\n\n\n\n fn average_duration(mut durations: impl Iterator<Item = Duration>) -> Option<Duration> {\n\n if let Some(mut duration) = durations.next() {\n\n let mut count: u32 = 1;\n\n for d in durations {\n\n duration += d;\n\n count += 1;\n\n }\n\n Some(duration / count)\n\n } else {\n", "file_path": "src/file/profiler.rs", "rank": 77, "score": 59070.61183607149 }, { "content": " },\n\n }\n\n },\n\n }\n\n }\n\n });\n\n let reporter = ProfileReporter {\n\n report: profile_rx,\n\n quit: drop_tx,\n\n };\n\n let profiler = Profiler(log_tx);\n\n (profiler, 
reporter)\n\n}\n\n\n", "file_path": "src/file/profiler.rs", "rank": 78, "score": 59070.57099503475 }, { "content": " Err(err) => Err(&err),\n\n }\n\n }\n\n\n\n fn length(&self) -> Option<usize> {\n\n Some(self.length)\n\n }\n\n\n\n fn location(&self) -> &Location {\n\n &self.location\n\n }\n\n\n\n fn start_time(&self) -> &Instant {\n\n &self.start_time\n\n }\n\n\n\n fn end_time(&self) -> &Instant {\n\n &self.end_time\n\n }\n\n}\n", "file_path": "src/file/profiler.rs", "rank": 79, "score": 59069.732561287165 }, { "content": " location: Location,\n\n length: usize,\n\n start_time: Instant,\n\n ) {\n\n let result = match result {\n\n Ok(_) => Ok(()),\n\n Err(err) => Err(format!(\"{}\", err)),\n\n };\n\n let _ = self.0.send(\n\n WriteResult {\n\n result,\n\n location,\n\n length,\n\n start_time,\n\n end_time: Instant::now(),\n\n }\n\n .into(),\n\n );\n\n }\n\n}\n", "file_path": "src/file/profiler.rs", "rank": 80, "score": 59069.530651040186 }, { "content": " Self::average_duration(durations)\n\n }\n\n\n\n pub fn average_write_duration(&self) -> Option<Duration> {\n\n let writes = self.writes();\n\n let durations = Self::duration(writes);\n\n Self::average_duration(durations)\n\n }\n\n\n\n pub fn start_time(&self) -> Option<Instant> {\n\n self.0.first().map(|res| *res.start_time())\n\n }\n\n\n\n pub fn end_time(&self) -> Option<Instant> {\n\n self.0.last().map(|res| *res.end_time())\n\n }\n\n\n\n pub fn total_time(&self) -> Option<Duration> {\n\n if let (Some(start), Some(end)) = (self.start_time(), self.end_time()) {\n\n Some(end.duration_since(start))\n", "file_path": "src/file/profiler.rs", "rank": 81, "score": 59069.45780463255 }, { "content": " _ => None,\n\n })\n\n }\n\n\n\n fn reads(&self) -> impl Iterator<Item = &ReadResult> {\n\n self.0.iter().filter_map(|res| match res {\n\n ResultLog::Read(res) => Some(res),\n\n _ => None,\n\n })\n\n }\n\n}\n", "file_path": "src/file/profiler.rs", "rank": 82, "score": 59068.6477666146 }, { "content": "use reqwest::{\n\n 
header,\n\n Body,\n\n StatusCode,\n\n};\n\nuse serde::{\n\n Deserialize,\n\n Serialize,\n\n};\n\nuse tokio::{\n\n fs::{\n\n self,\n\n File,\n\n },\n\n io::{\n\n self,\n\n AsyncRead,\n\n AsyncReadExt,\n\n AsyncSeekExt,\n\n AsyncWriteExt,\n", "file_path": "src/file/location.rs", "rank": 83, "score": 58654.56683730334 }, { "content": " },\n\n sync::mpsc,\n\n time::Instant,\n\n};\n\nuse tokio_util::io::StreamReader;\n\nuse url::Url;\n\n\n\nuse crate::{\n\n error::{\n\n LocationError,\n\n LocationParseError,\n\n ShardError,\n\n },\n\n file::{\n\n hash::AnyHash,\n\n profiler::Profiler,\n\n ShardWriter,\n\n },\n\n};\n\n\n", "file_path": "src/file/location.rs", "rank": 84, "score": 58652.07493914658 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]\n\n#[serde(untagged)]\n\n#[serde(try_from = \"String\")]\n\n#[serde(into = \"String\")]\n\npub enum Location {\n\n Http { url: HttpUrl, range: Range },\n\n Local { path: PathBuf, range: Range },\n\n}\n\n\n\nimpl Location {\n\n pub async fn read(&self) -> Result<Vec<u8>, LocationError> {\n\n self.read_with_context(&Self::default_context()).await\n\n }\n\n\n\n pub async fn write<T>(&self, bytes: T) -> Result<(), LocationError>\n\n where\n\n T: AsRef<[u8]> + Into<Vec<u8>>,\n\n {\n\n self.write_with_context(&Self::default_context(), bytes)\n\n .await\n", "file_path": "src/file/location.rs", "rank": 85, "score": 58649.633032459926 }, { "content": " self.reader_with_context(cx)\n\n .await?\n\n .read_to_end(&mut out)\n\n .await?;\n\n Ok(out)\n\n }\n\n .await;\n\n\n\n if let Some((profiler, op_start)) = profiler_info {\n\n profiler.log_read(&result, self.clone(), op_start);\n\n }\n\n result\n\n }\n\n\n\n pub async fn reader_with_context(\n\n &self,\n\n cx: &LocationContext,\n\n ) -> Result<(impl AsyncRead + Send + Unpin), LocationError> {\n\n // TODO: Profiler\n\n use Location::*;\n", "file_path": "src/file/location.rs", "rank": 86, "score": 58648.709213578826 }, { "content": " }\n\n 
}\n\n}\n\n\n\nimpl AsMut<Range> for Location {\n\n fn as_mut(&mut self) -> &mut Range {\n\n use Location::*;\n\n match self {\n\n Http { range, .. } | Local { range, .. } => range,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Default, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]\n\npub struct Range {\n\n #[serde(default)]\n\n pub start: u64,\n\n pub length: Option<u64>,\n\n pub extend_zeros: bool,\n\n}\n", "file_path": "src/file/location.rs", "rank": 87, "score": 58647.27997516331 }, { "content": " pub async fn file_exists(&self, cx: &LocationContext) -> Result<bool, LocationError> {\n\n let LocationContext { http_client, .. } = cx;\n\n use Location::*;\n\n match self {\n\n Http { url, .. } => {\n\n let url: Url = url.clone().into();\n\n let resp = http_client.head(url).send().await?;\n\n Ok(resp.status().is_success())\n\n },\n\n Local { path, .. } => match fs::metadata(path).await {\n\n Ok(_) => Ok(true),\n\n Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(false),\n\n Err(err) => Err(err.into()),\n\n },\n\n }\n\n }\n\n\n\n pub async fn file_len(&self, cx: &LocationContext) -> Result<u64, LocationError> {\n\n let LocationContext { http_client, .. } = cx;\n\n use Location::*;\n", "file_path": "src/file/location.rs", "rank": 88, "score": 58646.93219316439 }, { "content": " }\n\n\n\n pub async fn write_subfile<T>(&self, name: &str, bytes: T) -> Result<Location, ShardError>\n\n where\n\n T: AsRef<[u8]> + Into<Vec<u8>>,\n\n {\n\n self.write_subfile_with_context(&Self::default_context(), name, bytes)\n\n .await\n\n }\n\n\n\n pub async fn delete(&self) -> Result<(), LocationError> {\n\n self.delete_with_context(&Self::default_context()).await\n\n }\n\n\n\n pub async fn read_with_context(&self, cx: &LocationContext) -> Result<Vec<u8>, LocationError> {\n\n let LocationContext { profiler, .. 
} = cx;\n\n let profiler_info = profiler.as_ref().map(|profiler| (profiler, Instant::now()));\n\n\n\n let result: Result<Vec<u8>, LocationError> = async move {\n\n let mut out = Vec::new();\n", "file_path": "src/file/location.rs", "rank": 89, "score": 58646.07632291607 }, { "content": " profiler.log_write(&result, self.clone(), length, op_start);\n\n }\n\n result\n\n }\n\n\n\n pub async fn write_from_reader_with_context(\n\n &self,\n\n cx: &LocationContext,\n\n reader: &mut (impl AsyncRead + Unpin),\n\n ) -> Result<u64, LocationError> {\n\n if self.range().is_specified() {\n\n return Err(LocationError::WriteToRange);\n\n }\n\n\n\n // TODO: Profiler\n\n match &cx.on_conflict {\n\n OnConflict::Overwrite => {},\n\n OnConflict::Ignore => {\n\n if self.file_exists(cx).await? {\n\n return Ok(0);\n", "file_path": "src/file/location.rs", "rank": 90, "score": 58645.53734750506 }, { "content": " range: Default::default(),\n\n }),\n\n _ => Err(LocationParseError::InvalidScheme),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]\n\n#[serde(try_from = \"Url\")]\n\n#[serde(into = \"Url\")]\n\npub struct HttpUrl(Url);\n\n\n\nimpl fmt::Display for HttpUrl {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n <Url as fmt::Display>::fmt(&self.0, f)\n\n }\n\n}\n\n\n\nimpl AsRef<Url> for HttpUrl {\n\n fn as_ref(&self) -> &Url {\n", "file_path": "src/file/location.rs", "rank": 91, "score": 58645.37166737 }, { "content": " }),\n\n }\n\n }\n\n\n\n pub async fn delete_with_context(&self, cx: &LocationContext) -> Result<(), LocationError> {\n\n let LocationContext { http_client, .. } = cx;\n\n use Location::*;\n\n match self {\n\n Http { url, .. } => {\n\n let url: Url = url.clone().into();\n\n http_client.delete(url).send().await?;\n\n Ok(())\n\n },\n\n Local { path, .. 
} => {\n\n fs::remove_file(path).await?;\n\n Ok(())\n\n },\n\n }\n\n }\n\n\n", "file_path": "src/file/location.rs", "rank": 92, "score": 58644.279245752434 }, { "content": " };\n\n result\n\n }\n\n\n\n pub async fn write_with_context<T>(\n\n &self,\n\n cx: &LocationContext,\n\n bytes: T,\n\n ) -> Result<(), LocationError>\n\n where\n\n T: AsRef<[u8]> + Into<Vec<u8>>,\n\n {\n\n if self.range().is_specified() {\n\n return Err(LocationError::WriteToRange);\n\n }\n\n\n\n let LocationContext {\n\n http_client,\n\n profiler,\n\n ..\n", "file_path": "src/file/location.rs", "rank": 93, "score": 58643.67944754292 }, { "content": " }\n\n },\n\n };\n\n\n\n use Location::*;\n\n match self {\n\n Local { path, .. } => {\n\n let mut file = File::create(&path).await?;\n\n let bytes = io::copy(reader, &mut file).await?;\n\n file.flush().await?;\n\n Ok(bytes)\n\n },\n\n Http { url, .. } => {\n\n let url: Url = url.clone().into();\n\n let (tx, rx) = mpsc::channel::<io::Result<Vec<u8>>>(5);\n\n let cx = cx.clone();\n\n let response = tokio::spawn(async move {\n\n let LocationContext { http_client, .. 
} = cx;\n\n let s = stream::unfold(rx, |mut rx| async move {\n\n rx.recv().await.map(|result| (result, rx))\n", "file_path": "src/file/location.rs", "rank": 94, "score": 58642.89618633323 }, { "content": "\n\n pub fn conflict_overwrite(mut self) -> Self {\n\n self.on_conflict = Some(OnConflict::Overwrite);\n\n self\n\n }\n\n\n\n pub fn build(self) -> LocationContext {\n\n LocationContext {\n\n on_conflict: self.on_conflict.unwrap_or(OnConflict::Overwrite),\n\n http_client: self.http_client.unwrap_or_else(reqwest::Client::new),\n\n profiler: self.profiler,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Location {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n use Location::*;\n\n match self {\n\n Http { url, range } if range.is_specified() => write!(f, \"{}{}\", range, url),\n", "file_path": "src/file/location.rs", "rank": 95, "score": 58642.541646670594 }, { "content": " },\n\n }\n\n }\n\n drop(tx);\n\n response.await.unwrap()?;\n\n Ok(total_bytes)\n\n },\n\n }\n\n }\n\n\n\n pub async fn write_subfile_with_context<T>(\n\n &self,\n\n cx: &LocationContext,\n\n name: &str,\n\n bytes: T,\n\n ) -> Result<Location, ShardError>\n\n where\n\n T: AsRef<[u8]> + Into<Vec<u8>>,\n\n {\n\n use Location::*;\n", "file_path": "src/file/location.rs", "rank": 96, "score": 58642.42207857684 }, { "content": " } = cx;\n\n let profiler_info = profiler.as_ref().map(|profiler| (profiler, Instant::now()));\n\n let length = bytes.as_ref().len();\n\n\n\n match &cx.on_conflict {\n\n OnConflict::Overwrite => {},\n\n OnConflict::Ignore => {\n\n if self.file_exists(cx).await? 
{\n\n let result = Ok(());\n\n if let Some((profiler, op_start)) = profiler_info {\n\n profiler.log_write(&result, self.clone(), length, op_start);\n\n }\n\n return result;\n\n }\n\n },\n\n };\n\n\n\n use Location::*;\n\n let result: Result<(), LocationError> = async move {\n\n match self {\n", "file_path": "src/file/location.rs", "rank": 97, "score": 58642.35695349615 }, { "content": "}\n\n\n\nimpl LocationContext {\n\n pub fn builder() -> LocationContextBuilder {\n\n Default::default()\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct LocationContextBuilder {\n\n on_conflict: Option<OnConflict>,\n\n http_client: Option<reqwest::Client>,\n\n profiler: Option<Profiler>,\n\n}\n\n\n\n#[derive(Clone)]\n", "file_path": "src/file/location.rs", "rank": 98, "score": 58641.9293020903 }, { "content": "use std::{\n\n convert::TryFrom,\n\n fmt,\n\n hash::Hash,\n\n io::Cursor,\n\n path::{\n\n Path,\n\n PathBuf,\n\n },\n\n pin::Pin,\n\n str::FromStr,\n\n string::ToString,\n\n};\n\n\n\nuse async_trait::async_trait;\n\nuse futures::{\n\n stream,\n\n stream::StreamExt,\n\n};\n\nuse lazy_static::lazy_static;\n", "file_path": "src/file/location.rs", "rank": 99, "score": 58641.368932403944 } ]
Rust
azul-layout/src/style.rs
dignifiedquire/azul
de204bf2770286866c4da5d049827e04dab22347
use crate::geometry::{Offsets, Size}; use crate::number::Number; use azul_css::PixelValue; #[derive(Copy, Clone, PartialEq, Debug)] pub enum AlignItems { FlexStart, FlexEnd, Center, Baseline, Stretch, } impl Default for AlignItems { fn default() -> AlignItems { AlignItems::Stretch } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum AlignSelf { Auto, FlexStart, FlexEnd, Center, Baseline, Stretch, } impl Default for AlignSelf { fn default() -> AlignSelf { AlignSelf::Auto } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum AlignContent { FlexStart, FlexEnd, Center, Stretch, SpaceBetween, SpaceAround, } impl Default for AlignContent { fn default() -> AlignContent { AlignContent::Stretch } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum Direction { Inherit, LTR, RTL, } impl Default for Direction { fn default() -> Direction { Direction::Inherit } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum Display { Flex, Inline, None, } impl Default for Display { fn default() -> Display { Display::Flex } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum FlexDirection { Row, Column, RowReverse, ColumnReverse, } impl Default for FlexDirection { fn default() -> FlexDirection { FlexDirection::Row } } impl FlexDirection { pub(crate) fn is_row(self) -> bool { self == FlexDirection::Row || self == FlexDirection::RowReverse } pub(crate) fn is_column(self) -> bool { self == FlexDirection::Column || self == FlexDirection::ColumnReverse } pub(crate) fn is_reverse(self) -> bool { self == FlexDirection::RowReverse || self == FlexDirection::ColumnReverse } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum JustifyContent { FlexStart, FlexEnd, Center, SpaceBetween, SpaceAround, SpaceEvenly, } impl Default for JustifyContent { fn default() -> JustifyContent { JustifyContent::FlexStart } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum Overflow { Visible, Hidden, Scroll, } impl Default for Overflow { fn default() -> Overflow { Overflow::Visible } } #[derive(Copy, Clone, PartialEq, 
Debug)] pub enum PositionType { Relative, Absolute, } impl Default for PositionType { fn default() -> PositionType { PositionType::Relative } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum FlexWrap { NoWrap, Wrap, WrapReverse, } impl Default for FlexWrap { fn default() -> FlexWrap { FlexWrap::NoWrap } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum Dimension { Undefined, Auto, Pixels(f32), Percent(f32), } impl Default for Dimension { fn default() -> Dimension { Dimension::Undefined } } impl Dimension { pub(crate) fn resolve(self, parent_width: Number) -> Number { match self { Dimension::Pixels(pixels) => Number::Defined(pixels), Dimension::Percent(percent) => parent_width * (percent / 100.0), _ => Number::Undefined, } } pub(crate) fn is_defined(self) -> bool { match self { Dimension::Pixels(_) => true, Dimension::Percent(_) => true, _ => false, } } } impl Default for Offsets<Dimension> { fn default() -> Offsets<Dimension> { Offsets { right: Default::default(), left: Default::default(), top: Default::default(), bottom: Default::default(), } } } impl Default for Size<Dimension> { fn default() -> Size<Dimension> { Size { width: Dimension::Auto, height: Dimension::Auto, } } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum BoxSizing { ContentBox, BorderBox, } impl Default for BoxSizing { fn default() -> BoxSizing { BoxSizing::ContentBox } } #[derive(Copy, Clone, Debug)] pub struct Style { pub display: Display, pub box_sizing: BoxSizing, pub position_type: PositionType, pub direction: Direction, pub flex_direction: FlexDirection, pub flex_wrap: FlexWrap, pub overflow: Overflow, pub align_items: AlignItems, pub align_self: AlignSelf, pub align_content: AlignContent, pub justify_content: JustifyContent, pub position: Offsets<Dimension>, pub margin: Offsets<Dimension>, pub padding: Offsets<Dimension>, pub border: Offsets<Dimension>, pub flex_grow: f32, pub flex_shrink: f32, pub flex_basis: Dimension, pub size: Size<Dimension>, pub min_size: Size<Dimension>, pub 
max_size: Size<Dimension>, pub aspect_ratio: Number, pub font_size_px: PixelValue, pub letter_spacing: Option<PixelValue>, pub word_spacing: Option<PixelValue>, pub line_height: Option<f32>, pub tab_width: Option<f32>, } impl Default for Style { fn default() -> Style { Style { display: Default::default(), box_sizing: Default::default(), position_type: Default::default(), direction: Default::default(), flex_direction: Default::default(), flex_wrap: Default::default(), overflow: Default::default(), align_items: Default::default(), align_self: Default::default(), align_content: Default::default(), justify_content: Default::default(), position: Default::default(), margin: Default::default(), padding: Default::default(), border: Default::default(), flex_grow: 0.0, flex_shrink: 1.0, flex_basis: Dimension::Auto, size: Default::default(), min_size: Default::default(), max_size: Default::default(), aspect_ratio: Default::default(), font_size_px: PixelValue::const_px(10), letter_spacing: None, line_height: None, word_spacing: None, tab_width: None, } } } impl Style { pub(crate) fn min_main_size(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.min_size.width, FlexDirection::Column | FlexDirection::ColumnReverse => self.min_size.height, } } pub(crate) fn max_main_size(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.max_size.width, FlexDirection::Column | FlexDirection::ColumnReverse => self.max_size.height, } } pub(crate) fn main_margin_start(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.margin.left, FlexDirection::Column | FlexDirection::ColumnReverse => self.margin.top, } } pub(crate) fn main_margin_end(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.margin.right, FlexDirection::Column | 
FlexDirection::ColumnReverse => self.margin.bottom, } } pub(crate) fn cross_size(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.size.height, FlexDirection::Column | FlexDirection::ColumnReverse => self.size.width, } } pub(crate) fn min_cross_size(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.min_size.height, FlexDirection::Column | FlexDirection::ColumnReverse => self.min_size.width, } } pub(crate) fn max_cross_size(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.max_size.height, FlexDirection::Column | FlexDirection::ColumnReverse => self.max_size.width, } } pub(crate) fn cross_margin_start(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.margin.top, FlexDirection::Column | FlexDirection::ColumnReverse => self.margin.left, } } pub(crate) fn cross_margin_end(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.margin.bottom, FlexDirection::Column | FlexDirection::ColumnReverse => self.margin.right, } } pub(crate) fn align_self(&self, parent: &Style) -> AlignSelf { if self.align_self == AlignSelf::Auto { match parent.align_items { AlignItems::FlexStart => AlignSelf::FlexStart, AlignItems::FlexEnd => AlignSelf::FlexEnd, AlignItems::Center => AlignSelf::Center, AlignItems::Baseline => AlignSelf::Baseline, AlignItems::Stretch => AlignSelf::Stretch, } } else { self.align_self } } }
use crate::geometry::{Offsets, Size}; use crate::number::Number; use azul_css::PixelValue; #[derive(Copy, Clone, PartialEq, Debug)] pub enum AlignItems { FlexStart, FlexEnd, Center, Baseline, Stretch, } impl Default for AlignItems { fn default() -> AlignItems { AlignItems::Stretch } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum AlignSelf { Auto, FlexStart, FlexEnd, Center, Baseline, Stretch, } impl Default for AlignSelf { fn default() -> AlignSelf { AlignSelf::Auto } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum AlignContent { FlexStart, FlexEnd, Center, Stretch, SpaceBetween, SpaceAround, } impl Default for AlignContent { fn default() -> AlignContent { AlignContent::Stretch } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum Direction { Inherit, LTR, RTL, } impl Default for Direction { fn default() -> Direction { Direction::Inherit } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum Display { Flex, Inline, None, } impl Default for Display { fn default() -> Display { Display::Flex } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum FlexDirection { Row, Column, RowReverse, ColumnReverse, } impl Default for FlexDirection { fn default() -> FlexDirection { FlexDirection::Row } } impl FlexDirection { pub(crate) fn is_row(self) -> bool { self == FlexDirection::Row || self == FlexDirection::RowReverse } pub(crate) fn is_column(self) -> bool { self == FlexDirection::Column || self == FlexDirection::ColumnReverse } pub(crate) fn is_reverse(self) -> bool { self == FlexDirection::RowReverse || self == FlexDirection::ColumnReverse } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum JustifyContent { FlexStart, FlexEnd, Center, SpaceBetween, SpaceAround, SpaceEvenly, } impl Default for JustifyContent { fn default() -> JustifyContent { JustifyContent::FlexStart } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum Overflow { Visible, Hidden, Scroll, } impl Default for Overflow { fn default() -> Overflow { Overflow::Visible } } #[derive(Copy, Clone, PartialEq, 
Debug)] pub enum PositionType { Relative, Absolute, } impl Default for PositionType { fn default() -> PositionType { PositionType::Relative } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum FlexWrap { NoWrap, Wrap, WrapReverse, } impl Default for FlexWrap { fn default() -> FlexWrap { FlexWrap::NoWrap } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum Dimension { Undefined, Auto, Pixels(f32), Percent(f32), } impl Default for Dimension { fn default() -> Dimension { Dimension::Undefined } } impl Dimension { pub(crate) fn resolve(self, parent_width: Number) -> Number { match self { Dimension::Pixels(pixels) => Number::Defined(pixels), Dimension::Percent(percent) => parent_width * (percent / 100.0), _ => Number::Undefined, } } pub(crate) fn is_defined(self) -> bool { match self { Dimension::Pixels(_) => true, Dimension::Percent(_) => true, _ => false, } } } impl Default for Offsets<Dimension> { fn default() -> Offsets<Dimension> { Offsets { right: Default::default(), left: Default::default(), top: Default::default(), bottom: Default::default(), } } } impl Default for Size<Dimension> { fn default() -> Size<Dimension> { Size { width: Dimension::Auto, height: Dimension::Auto, } } } #[derive(Copy, Clone, PartialEq, Debug)] pub enum BoxSizing { ContentBox, BorderBox, } impl Default for BoxSizing { fn default() -> BoxSizing { BoxSizing::ContentBox } } #[derive(Copy, Clone, Debug)] pub struct Style { pub display: Display, pub box_sizing: BoxSizing, pub position_type: PositionType, pub direction: Direction, pub flex_direction: FlexDirection, pub flex_wrap: FlexWrap, pub overflow: Overflow, pub align_items: AlignItems, pub align_self: AlignSelf, pub align_content: AlignContent, pub justify_content: JustifyContent, pub position: Offsets<Dimension>, pub margin: Offsets<Dimension>, pub padding: Offsets<Dimension>, pub border: Offsets<Dimension>, pub flex_grow: f32, pub flex_shrink: f32, pub flex_basis: Dimension, pub size: Size<Dimension>, pub min_size: Size<Dimension>, pub 
max_size: Size<Dimension>, pub aspect_ratio: Number, pub font_size_px: PixelValue, pub letter_spacing: Option<PixelValue>, pub word_spacing: Option<PixelValue>, pub line_height: Option<f32>, pub tab_width: Option<f32>, } impl Default for Style { fn default() -> Style { Style { display: Default::default(), box_sizing: Default::default(), position_type: Default::default(), direction: Default::default(), flex_direction: Default::default(), flex_wrap: Default::default(), overflow: Default::default(), align_items: Default::default(), align_self: Default::default(), align_content: Default::default(), justify_content: Default::default(), position: Default::default(), margin: Default::default(), padding: Default::default(), border: Default::default(), flex_grow: 0.0, flex_shrink: 1.0, flex_basis: Dimension::Auto, size: Default::default(), min_size: Default::default(), max_size: Default::default(), aspect_ratio: Default::default(), font_size_px: PixelValue::const_px(10), letter_spacing: None, line_height: None, word_spacing: None, tab_width: None, } } } impl Style { pub(crate) fn min_main_size(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.min_size.width, FlexDirection::Column | FlexDirection::ColumnReverse => self.min_size.height, } } pub(crate) fn max_main_size(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.max_size.width, FlexDirection::Column | FlexDirection::ColumnReverse => self.max_size.height, } } pub(crate) fn main_margin_start(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.margin.left, FlexDirection::Column | FlexDirection::ColumnReverse => self.margin.top, } } pub(crate) fn main_margin_end(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.margin.right, FlexDirection::Column | 
FlexDirection::ColumnReverse => self.margin.bottom, } } pub(crate) fn cross_size(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.size.height, FlexDirection::Column | FlexDirection::ColumnReverse => self.size.width, } } pub(crate) fn min_cross_size(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.min_size.height, FlexDirection::Column | FlexDirection::ColumnReverse => self.min_size.width, } } pub(crate) fn max_cross_size(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.max_size.height, FlexDirection::Column | FlexDirection::ColumnReverse => self.max_size.width, } }
pub(crate) fn cross_margin_end(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.margin.bottom, FlexDirection::Column | FlexDirection::ColumnReverse => self.margin.right, } } pub(crate) fn align_self(&self, parent: &Style) -> AlignSelf { if self.align_self == AlignSelf::Auto { match parent.align_items { AlignItems::FlexStart => AlignSelf::FlexStart, AlignItems::FlexEnd => AlignSelf::FlexEnd, AlignItems::Center => AlignSelf::Center, AlignItems::Baseline => AlignSelf::Baseline, AlignItems::Stretch => AlignSelf::Stretch, } } else { self.align_self } } }
pub(crate) fn cross_margin_start(&self, direction: FlexDirection) -> Dimension { match direction { FlexDirection::Row | FlexDirection::RowReverse => self.margin.top, FlexDirection::Column | FlexDirection::ColumnReverse => self.margin.left, } }
function_block-full_function
[ { "content": "/// For a given line number (**NOTE: 0-indexed!**), calculates the Y\n\n/// position of the bottom left corner\n\npub fn get_line_y_position(line_number: usize, font_size_px: f32, line_height_px: f32) -> f32 {\n\n ((font_size_px + line_height_px) * line_number as f32) + font_size_px\n\n}\n\n\n", "file_path": "azul/src/text_layout.rs", "rank": 0, "score": 335588.18824920757 }, { "content": "pub fn is_point_in_shape(point: (f32, f32), shape: &[(f32, f32)]) -> bool {\n\n if shape.len() < 3 {\n\n // Shape must at least have 3 points, i.e. be a triangle\n\n return false;\n\n }\n\n\n\n // We iterate over the shape in 2 points.\n\n //\n\n // If the mouse cursor (target point) is on the left side for all points,\n\n // then cursor is inside of the shape. If it appears on the right side for\n\n // only one point, we know that it isn't inside the target shape.\n\n // all() is lazy and will quit on the first result where the target is not\n\n // inside the shape.\n\n shape\n\n .iter()\n\n .zip(shape.iter().skip(1))\n\n .all(|(start, end)| !(side_of_point(point, *start, *end).is_sign_positive()))\n\n}\n\n\n\n/// Determine which side of a vector the point is on.\n\n///\n\n/// Depending on if the result of this function is positive or negative,\n\n/// the target point lies either right or left to the imaginary line from (start -> end)\n", "file_path": "azul-widgets/src/svg.rs", "rank": 1, "score": 267759.21521684295 }, { "content": "#[inline(always)]\n\npub fn wr_translate_border_style(input: CssBorderStyle) -> WrBorderStyle {\n\n match input {\n\n CssBorderStyle::None => WrBorderStyle::None,\n\n CssBorderStyle::Solid => WrBorderStyle::Solid,\n\n CssBorderStyle::Double => WrBorderStyle::Double,\n\n CssBorderStyle::Dotted => WrBorderStyle::Dotted,\n\n CssBorderStyle::Dashed => WrBorderStyle::Dashed,\n\n CssBorderStyle::Hidden => WrBorderStyle::Hidden,\n\n CssBorderStyle::Groove => WrBorderStyle::Groove,\n\n CssBorderStyle::Ridge => WrBorderStyle::Ridge,\n\n 
CssBorderStyle::Inset => WrBorderStyle::Inset,\n\n CssBorderStyle::Outset => WrBorderStyle::Outset,\n\n }\n\n}\n\n\n", "file_path": "azul/src/wr_translate.rs", "rank": 2, "score": 221837.54579215846 }, { "content": "/// Returns the (left-aligned!) bounding boxes of the indidividual text lines\n\npub fn word_positions_to_inline_text_layout(\n\n word_positions: &WordPositions,\n\n scaled_words: &ScaledWords,\n\n) -> InlineTextLayout {\n\n use azul_core::ui_solver::InlineTextLine;\n\n\n\n let font_size_px = word_positions.text_layout_options.font_size_px;\n\n let space_advance = scaled_words.space_advance_px;\n\n let line_height_px = space_advance\n\n * word_positions\n\n .text_layout_options\n\n .line_height\n\n .unwrap_or(DEFAULT_LINE_HEIGHT);\n\n let content_width = word_positions.content_size.width;\n\n\n\n let mut last_word_index = 0;\n\n\n\n InlineTextLayout {\n\n lines: word_positions\n\n .line_breaks\n", "file_path": "azul/src/text_layout.rs", "rank": 3, "score": 215954.3025302125 }, { "content": "pub fn match_dom_selectors<T>(\n\n ui_state: &UiState<T>,\n\n css: &Css,\n\n focused_node: &mut Option<(DomId, NodeId)>,\n\n pending_focus_target: &mut Option<FocusTarget>,\n\n hovered_nodes: &BTreeMap<NodeId, HitTestItem>,\n\n is_mouse_down: bool,\n\n) -> UiDescription<T> {\n\n use azul_css::CssDeclaration;\n\n\n\n let non_leaf_nodes = ui_state.dom.arena.node_layout.get_parents_sorted_by_depth();\n\n\n\n let mut html_tree = construct_html_cascade_tree(\n\n &ui_state.dom.arena.node_data,\n\n &ui_state.dom.arena.node_layout,\n\n &non_leaf_nodes,\n\n focused_node.as_ref().and_then(|(dom_id, node_id)| {\n\n if *dom_id == ui_state.dom_id {\n\n Some(*node_id)\n\n } else {\n", "file_path": "azul-core/src/style.rs", "rank": 4, "score": 215665.6499296578 }, { "content": "/// Performs an arithmetic operation. 
Returns None when trying to divide by zero.\n\nfn perform_operation(left_operand: f32, operation: &Event, right_operand: f32) -> Option<f32> {\n\n match operation {\n\n Event::Multiply => Some(left_operand * right_operand),\n\n Event::Subtract => Some(left_operand - right_operand),\n\n Event::Plus => Some(left_operand + right_operand),\n\n Event::Divide => {\n\n if right_operand == 0.0 {\n\n None\n\n } else {\n\n Some(left_operand / right_operand)\n\n }\n\n }\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "examples/calculator/calculator.rs", "rank": 5, "score": 212952.7109931202 }, { "content": "/// Matches a single group of items, panics on Children or DirectChildren selectors\n\n///\n\n/// The intent is to \"split\" the CSS path into groups by selectors, then store and cache\n\n/// whether the direct or any parent has matched the path correctly\n\npub fn selector_group_matches<'a, T>(\n\n selectors: &[&CssPathSelector],\n\n html_node: &HtmlCascadeInfo<'a, T>,\n\n) -> bool {\n\n use self::CssPathSelector::*;\n\n\n\n for selector in selectors {\n\n match selector {\n\n Global => {}\n\n Type(t) => {\n\n if html_node.node_data.get_node_type().get_path() != *t {\n\n return false;\n\n }\n\n }\n\n Class(c) => {\n\n if !html_node\n\n .node_data\n\n .get_classes()\n\n .iter()\n\n .any(|class| class.equals_str(c))\n", "file_path": "azul-core/src/style.rs", "rank": 6, "score": 211851.38685974124 }, { "content": "/// Returns if the style CSS path matches the DOM node (i.e. 
if the DOM node should be styled by that element)\n\npub fn matches_html_element<'a, T>(\n\n css_path: &CssPath,\n\n node_id: NodeId,\n\n node_hierarchy: &NodeHierarchy,\n\n html_node_tree: &NodeDataContainer<HtmlCascadeInfo<'a, T>>,\n\n) -> bool {\n\n use self::CssGroupSplitReason::*;\n\n\n\n if css_path.selectors.is_empty() {\n\n return false;\n\n }\n\n\n\n let mut current_node = Some(node_id);\n\n let mut direct_parent_has_to_match = false;\n\n let mut last_selector_matched = true;\n\n\n\n for (content_group, reason) in CssGroupIterator::new(&css_path.selectors) {\n\n let cur_node_id = match current_node {\n\n Some(c) => c,\n\n None => {\n", "file_path": "azul-core/src/style.rs", "rank": 7, "score": 211851.1581604411 }, { "content": "#[inline]\n\npub fn side_of_point(target: (f32, f32), start: (f32, f32), end: (f32, f32)) -> f32 {\n\n ((target.0 - start.0) * (end.1 - start.1)) - ((target.1 - start.1) * (end.0 - start.0))\n\n}\n\n\n\n/// Creates a text layout for a single string of text\n\n#[derive(Debug, Clone)]\n\npub struct SvgTextLayout {\n\n /// The words, broken up by whitespace\n\n pub words: Words,\n\n /// Words, scaled by a certain font size (with font metrics)\n\n pub scaled_words: ScaledWords,\n\n /// Layout of the positions, word-by-word\n\n pub word_positions: WordPositions,\n\n /// Positioned and horizontally aligned glyphs\n\n pub layouted_glyphs: LayoutedGlyphs,\n\n /// At what glyphs does the line actually break (necessary for aligning content)\n\n pub inline_text_layout: InlineTextLayout,\n\n}\n\n\n\n/// Since the SvgText is scaled on the GPU, the font size doesn't matter here\n", "file_path": "azul-widgets/src/svg.rs", "rank": 8, "score": 208620.7559394061 }, { "content": "/// parse the border-radius like \"5px 10px\" or \"5px 10px 6px 10px\"\n\npub fn parse_style_border_radius<'a>(\n\n input: &'a str,\n\n) -> Result<StyleBorderRadius, CssStyleBorderRadiusParseError<'a>> {\n\n let mut components = input.split_whitespace();\n\n let len = 
components.clone().count();\n\n\n\n match len {\n\n 1 => {\n\n // One value - border-radius: 15px;\n\n // (the value applies to all four corners, which are rounded equally:\n\n\n\n let uniform_radius = parse_pixel_value(components.next().unwrap())?;\n\n Ok(StyleBorderRadius::uniform(uniform_radius))\n\n }\n\n 2 => {\n\n // Two values - border-radius: 15px 50px;\n\n // (first value applies to top-left and bottom-right corners,\n\n // and the second value applies to top-right and bottom-left corners):\n\n\n\n let top_left_bottom_right = parse_pixel_value(components.next().unwrap())?;\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 9, "score": 207240.20352069376 }, { "content": "pub fn parse_style_background_position<'a>(\n\n input: &'a str,\n\n) -> Result<StyleBackgroundPosition, CssBackgroundPositionParseError<'a>> {\n\n use self::CssBackgroundPositionParseError::*;\n\n\n\n let input = input.trim();\n\n let mut whitespace_iter = input.split_whitespace();\n\n\n\n let first = whitespace_iter.next().ok_or(NoPosition(input))?;\n\n let second = whitespace_iter.next();\n\n\n\n if whitespace_iter.next().is_some() {\n\n return Err(TooManyComponents(input));\n\n }\n\n\n\n let horizontal =\n\n parse_background_position_horizontal(first).map_err(|e| FirstComponentWrong(e))?;\n\n\n\n let vertical = match second {\n\n Some(second) => {\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 10, "score": 207224.52298164504 }, { "content": "/// Maps an index number to a value, necessary for creating the column name:\n\n///\n\n/// ```no_run,ignore\n\n/// 0 -> A\n\n/// 25 -> Z\n\n/// 26 -> AA\n\n/// 27 -> AB\n\n/// ```\n\n///\n\n/// ... and so on. 
This implementation is very fast, takes ~50 to 100\n\n/// nanoseconds for 1 iteration due to almost pure-stack allocated data.\n\n/// For an explanation of the algorithm with comments, see:\n\n/// https://github.com/fschutt/street_index/blob/78b935a1303070947c0854b6d01f540ec298c9d5/src/gridconfig.rs#L155-L209\n\npub fn column_name_from_number(num: usize) -> String {\n\n const ALPHABET_LEN: usize = 26;\n\n // usize::MAX is \"GKGWBYLWRXTLPP\" with a length of 15 characters\n\n const MAX_LEN: usize = 15;\n\n\n\n #[inline(always)]\n\n fn u8_to_char(input: u8) -> u8 {\n\n 'A' as u8 + input\n\n }\n\n\n\n let mut result = [0; MAX_LEN + 1];\n\n let mut multiple_of_alphabet = num / ALPHABET_LEN;\n\n let mut character_count = 0;\n\n\n\n while multiple_of_alphabet != 0 && character_count < MAX_LEN {\n\n let remainder = (multiple_of_alphabet - 1) % ALPHABET_LEN;\n\n result[(MAX_LEN - 1) - character_count] = u8_to_char(remainder as u8);\n\n character_count += 1;\n\n multiple_of_alphabet = (multiple_of_alphabet - 1) / ALPHABET_LEN;\n\n }\n\n\n\n result[MAX_LEN] = u8_to_char((num % ALPHABET_LEN) as u8);\n\n let zeroed_characters = MAX_LEN.saturating_sub(character_count);\n\n let slice = &result[zeroed_characters..];\n\n unsafe { ::std::str::from_utf8_unchecked(slice) }.to_string()\n\n}\n\n\n", "file_path": "azul-widgets/src/table_view.rs", "rank": 11, "score": 196335.02102313284 }, { "content": "/// Parse a CSS border such as\n\n///\n\n/// \"5px solid red\"\n\npub fn parse_style_border<'a>(input: &'a str) -> Result<StyleBorderSide, CssBorderParseError<'a>> {\n\n use self::CssBorderParseError::*;\n\n\n\n let input = input.trim();\n\n\n\n // The first argument can either be a style or a pixel value\n\n\n\n let mut char_iter = input.char_indices();\n\n let first_arg_end =\n\n take_until_next_whitespace(&mut char_iter).ok_or(MissingThickness(input))?;\n\n let first_arg_str = &input[0..first_arg_end];\n\n\n\n advance_until_next_char(&mut char_iter);\n\n\n\n let second_argument_end = 
take_until_next_whitespace(&mut char_iter);\n\n let (border_width, border_width_str_end, border_style);\n\n\n\n match second_argument_end {\n\n None => {\n\n // First argument is the one and only argument, therefore has to be a style such as \"double\"\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 12, "score": 192278.38625921166 }, { "content": "pub fn parse_style_tab_width(input: &str) -> Result<StyleTabWidth, PercentageParseError> {\n\n parse_percentage_value(input).and_then(|e| Ok(StyleTabWidth(e)))\n\n}\n\n\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 13, "score": 191420.99376425808 }, { "content": "pub fn parse_style_line_height(input: &str) -> Result<StyleLineHeight, PercentageParseError> {\n\n parse_percentage_value(input).and_then(|e| Ok(StyleLineHeight(e)))\n\n}\n\n\n\ntyped_pixel_value_parser!(parse_style_font_size, StyleFontSize);\n\n\n\n#[derive(Debug, PartialEq, Copy, Clone)]\n\npub enum CssStyleFontFamilyParseError<'a> {\n\n InvalidStyleFontFamily(&'a str),\n\n UnclosedQuotes(&'a str),\n\n}\n\n\n\nimpl_display! 
{CssStyleFontFamilyParseError<'a>, {\n\n InvalidStyleFontFamily(val) => format!(\"Invalid font-family: \\\"{}\\\"\", val),\n\n UnclosedQuotes(val) => format!(\"Unclosed quotes: \\\"{}\\\"\", val),\n\n}}\n\n\n\nimpl<'a> From<UnclosedQuotesError<'a>> for CssStyleFontFamilyParseError<'a> {\n\n fn from(err: UnclosedQuotesError<'a>) -> Self {\n\n CssStyleFontFamilyParseError::UnclosedQuotes(err.0)\n\n }\n\n}\n\n\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 14, "score": 191417.58471787113 }, { "content": "pub fn word_item_is_return(item: &Word) -> bool {\n\n item.word_type == WordType::Return\n\n}\n\n\n", "file_path": "azul/src/text_layout.rs", "rank": 15, "score": 190439.48336508975 }, { "content": "pub fn transform_vertex_buffer(input: &mut [SvgVert], x: f32, y: f32) {\n\n for vert in input {\n\n vert.xy[0] += x;\n\n vert.xy[1] += y;\n\n }\n\n}\n\n\n", "file_path": "azul-widgets/src/svg.rs", "rank": 16, "score": 189353.37319138282 }, { "content": "/// Positions the words on the screen (does not layout any glyph positions!), necessary for estimating\n\n/// the intrinsic width + height of the text content.\n\npub fn position_words(\n\n words: &Words,\n\n scaled_words: &ScaledWords,\n\n text_layout_options: &ResolvedTextLayoutOptions,\n\n) -> WordPositions {\n\n use self::WordType::*;\n\n use std::f32;\n\n\n\n let font_size_px = text_layout_options.font_size_px;\n\n let space_advance = scaled_words.space_advance_px;\n\n let word_spacing_px = space_advance\n\n * text_layout_options\n\n .word_spacing\n\n .unwrap_or(DEFAULT_WORD_SPACING);\n\n let line_height_px = space_advance\n\n * text_layout_options\n\n .line_height\n\n .unwrap_or(DEFAULT_LINE_HEIGHT);\n\n let tab_width_px = space_advance * text_layout_options.tab_width.unwrap_or(DEFAULT_TAB_WIDTH);\n\n let letter_spacing_px = text_layout_options\n", "file_path": "azul/src/text_layout.rs", "rank": 17, "score": 186925.8308750775 }, { "content": "/// sin and cos are the sinus and cosinus of the 
rotation\n\npub fn rotate_vertex_buffer(input: &mut [SvgVert], sin: f32, cos: f32) {\n\n for vert in input {\n\n let (x, y) = (vert.xy[0], vert.xy[1]);\n\n let new_x = (x * cos) - (y * sin);\n\n let new_y = (x * sin) + (y * cos);\n\n vert.xy = [new_x, new_y];\n\n }\n\n}\n\n\n\n#[cfg(feature = \"svg_parsing\")]\n\n#[derive(Debug)]\n\npub enum SvgParseError {\n\n /// Syntax error in the Svg\n\n FailedToParseSvg(SvgError),\n\n /// Io error reading the Svg\n\n IoError(IoError),\n\n}\n\n\n\n#[cfg(feature = \"svg_parsing\")]\n\nimpl From<SvgError> for SvgParseError {\n", "file_path": "azul-widgets/src/svg.rs", "rank": 18, "score": 184557.64327744395 }, { "content": "fn resolve_offsets(input: Offsets<f32>) -> ResolvedOffsets {\n\n ResolvedOffsets {\n\n top: input.top,\n\n left: input.left,\n\n bottom: input.bottom,\n\n right: input.right,\n\n }\n\n}\n\n\n", "file_path": "azul-layout/src/algo.rs", "rank": 19, "score": 182435.67678779177 }, { "content": "#[inline]\n\npub fn wr_translate_border_radius(\n\n border_radius: StyleBorderRadius,\n\n rect_size: LayoutSize,\n\n) -> WrBorderRadius {\n\n let StyleBorderRadius {\n\n top_left,\n\n top_right,\n\n bottom_left,\n\n bottom_right,\n\n } = border_radius;\n\n\n\n let w = rect_size.width;\n\n let h = rect_size.height;\n\n\n\n // The \"w / h\" is necessary to convert percentage-based values into pixels, for example \"border-radius: 50%;\"\n\n\n\n let top_left_px_h = top_left\n\n .and_then(|tl| tl.get_property_or_default())\n\n .unwrap_or_default()\n\n .0\n", "file_path": "azul/src/wr_translate.rs", "rank": 20, "score": 179877.42768792834 }, { "content": "// This exists because RendererOptions isn't Clone-able\n\nfn get_renderer_opts(native: bool, device_pixel_ratio: f32) -> RendererOptions {\n\n use webrender::ProgramCache;\n\n\n\n // pre-caching shaders means to compile all shaders on startup\n\n // this can take significant time and should be only used for testing the shaders\n\n const PRECACHE_SHADER_FLAGS: 
ShaderPrecacheFlags = ShaderPrecacheFlags::EMPTY;\n\n\n\n // NOTE: If the clear_color is None, this may lead to \"black screens\"\n\n // (because black is the default color) - so instead, white should be the default\n\n // However, if the clear color is specified, then it's hard creating transparent windows\n\n // (because of bugs in webrender / handling multi-window background colors).\n\n // Therefore the background color has to be set before render() is invoked.\n\n\n\n RendererOptions {\n\n resource_override_path: None,\n\n precache_flags: PRECACHE_SHADER_FLAGS,\n\n device_pixel_ratio,\n\n enable_subpixel_aa: true,\n\n enable_aa: true,\n\n cached_programs: Some(ProgramCache::new(None)),\n\n renderer_kind: if native {\n\n RendererKind::Native\n\n } else {\n\n RendererKind::OSMesa\n\n },\n\n ..RendererOptions::default()\n\n }\n\n}\n\n\n", "file_path": "azul/src/window.rs", "rank": 21, "score": 179043.8210876758 }, { "content": "/// Returns the native style for the OS\n\npub fn native() -> Css {\n\n azul_css_parser::new_from_str(NATIVE_CSS).unwrap()\n\n}\n", "file_path": "azul-native-style/src/lib.rs", "rank": 22, "score": 178679.38574738172 }, { "content": "/// Subtracts the padding from the bounds, returning the new bounds\n\n///\n\n/// Warning: The resulting rectangle may have negative width or height\n\nfn subtract_padding(bounds: &LayoutRect, padding: &ResolvedOffsets) -> LayoutRect {\n\n let mut new_bounds = *bounds;\n\n\n\n new_bounds.origin.x += padding.left;\n\n new_bounds.size.width -= padding.right + padding.left;\n\n new_bounds.origin.y += padding.top;\n\n new_bounds.size.height -= padding.top + padding.bottom;\n\n\n\n new_bounds\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\npub struct OverrideWarning {\n\n pub default: CssProperty,\n\n pub overridden_property: CssProperty,\n\n}\n\n\n", "file_path": "azul/src/display_list.rs", "rank": 23, "score": 173765.93012336953 }, { "content": "pub fn parse_direction_corner<'a>(\n\n 
input: &'a str,\n\n) -> Result<DirectionCorner, CssDirectionCornerParseError<'a>> {\n\n match input {\n\n \"right\" => Ok(DirectionCorner::Right),\n\n \"left\" => Ok(DirectionCorner::Left),\n\n \"top\" => Ok(DirectionCorner::Top),\n\n \"bottom\" => Ok(DirectionCorner::Bottom),\n\n _ => Err(CssDirectionCornerParseError::InvalidDirection(input)),\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Copy, Clone)]\n\npub enum CssShapeParseError<'a> {\n\n ShapeErr(InvalidValueErr<'a>),\n\n}\n\n\n\nimpl_display! {CssShapeParseError<'a>, {\n\n ShapeErr(e) => format!(\"\\\"{}\\\"\", e.0),\n\n}}\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 24, "score": 172201.7158166644 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\nenum Number {\n\n Value(u8),\n\n Dot,\n\n}\n\n\n\nimpl OperandStack {\n\n /// Returns the displayable string, i.e for:\n\n /// `[3, 4, Dot, 5]` => `\"34.5\"`\n\n pub fn get_display(&self) -> String {\n\n let mut display_string = String::new();\n\n\n\n if self.negative_number {\n\n display_string.push('-');\n\n }\n\n\n\n if self.stack.is_empty() {\n\n display_string.push('0');\n\n } else {\n\n // If we get a dot at the end of the stack, i.e. 
\"35.\" - store it,\n\n // but don't display it\n", "file_path": "examples/calculator/calculator.rs", "rank": 25, "score": 170740.07824033065 }, { "content": "pub fn parse_pixel_value_no_percent<'a>(\n\n input: &'a str,\n\n) -> Result<PixelValueNoPercent, PixelParseError<'a>> {\n\n Ok(PixelValueNoPercent(parse_pixel_value_inner(\n\n input,\n\n &[\n\n (\"px\", SizeMetric::Px),\n\n (\"em\", SizeMetric::Em),\n\n (\"pt\", SizeMetric::Pt),\n\n ],\n\n )?))\n\n}\n\n\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 26, "score": 169141.16274899134 }, { "content": "pub fn parse_layout_flex_grow<'a>(\n\n input: &'a str,\n\n) -> Result<LayoutFlexGrow, FlexGrowParseError<'a>> {\n\n match parse_float_value(input) {\n\n Ok(o) => Ok(LayoutFlexGrow(o)),\n\n Err(e) => Err(FlexGrowParseError::ParseFloat(e, input)),\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum FlexShrinkParseError<'a> {\n\n ParseFloat(ParseFloatError, &'a str),\n\n}\n\n\n\nimpl_display! {FlexShrinkParseError<'a>, {\n\n ParseFloat(e, orig_str) => format!(\"flex-shrink: Could not parse floating-point value: \\\"{}\\\" - Error: \\\"{}\\\"\", orig_str, e),\n\n}}\n\n\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 27, "score": 169132.71165040223 }, { "content": "pub fn parse_layout_flex_shrink<'a>(\n\n input: &'a str,\n\n) -> Result<LayoutFlexShrink, FlexShrinkParseError<'a>> {\n\n match parse_float_value(input) {\n\n Ok(o) => Ok(LayoutFlexShrink(o)),\n\n Err(e) => Err(FlexShrinkParseError::ParseFloat(e, input)),\n\n }\n\n}\n\n\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 28, "score": 169132.71165040223 }, { "content": "pub fn parse_background_position_vertical<'a>(\n\n input: &'a str,\n\n) -> Result<BackgroundPositionVertical, PixelParseError<'a>> {\n\n Ok(match input {\n\n \"top\" => BackgroundPositionVertical::Top,\n\n \"center\" => BackgroundPositionVertical::Center,\n\n \"bottom\" => BackgroundPositionVertical::Bottom,\n\n other => 
BackgroundPositionVertical::Exact(parse_pixel_value(other)?),\n\n })\n\n}\n\n\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 29, "score": 169116.77023383437 }, { "content": "pub fn parse_background_position_horizontal<'a>(\n\n input: &'a str,\n\n) -> Result<BackgroundPositionHorizontal, PixelParseError<'a>> {\n\n Ok(match input {\n\n \"left\" => BackgroundPositionHorizontal::Left,\n\n \"center\" => BackgroundPositionHorizontal::Center,\n\n \"right\" => BackgroundPositionHorizontal::Right,\n\n other => BackgroundPositionHorizontal::Exact(parse_pixel_value(other)?),\n\n })\n\n}\n\n\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 30, "score": 169116.77023383437 }, { "content": "/// Parses a `StyleFontFamily` declaration from a `&str`\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # extern crate azul_css;\n\n/// # extern crate azul_css_parser;\n\n/// # use azul_css_parser::parse_style_font_family;\n\n/// # use azul_css::{StyleFontFamily, FontId};\n\n/// let input = \"\\\"Helvetica\\\", 'Arial', Times New Roman\";\n\n/// let fonts = vec![\n\n/// FontId(\"Helvetica\".into()),\n\n/// FontId(\"Arial\".into()),\n\n/// FontId(\"Times New Roman\".into())\n\n/// ];\n\n///\n\n/// assert_eq!(parse_style_font_family(input), Ok(StyleFontFamily { fonts }));\n\n/// ```\n\npub fn parse_style_font_family<'a>(\n\n input: &'a str,\n\n) -> Result<StyleFontFamily, CssStyleFontFamilyParseError<'a>> {\n\n let multiple_fonts = input.split(',');\n\n let mut fonts = Vec::with_capacity(1);\n\n\n\n for font in multiple_fonts {\n\n let font = font.trim();\n\n let font = font.trim_matches('\\'');\n\n let font = font.trim_matches('\\\"');\n\n let font = font.trim();\n\n fonts.push(FontId(font.into()));\n\n }\n\n\n\n Ok(StyleFontFamily { fonts: fonts })\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd)]\n\npub enum ParenthesisParseError<'a> {\n\n UnclosedBraces,\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 31, "score": 
168966.26679158345 }, { "content": "/// Parses a CSS box-shadow, such as \"5px 10px inset\"\n\npub fn parse_style_box_shadow<'a>(\n\n input: &'a str,\n\n) -> Result<BoxShadowPreDisplayItem, CssShadowParseError<'a>> {\n\n let mut input_iter = input.split_whitespace();\n\n let count = input_iter.clone().count();\n\n\n\n let mut box_shadow = BoxShadowPreDisplayItem {\n\n offset: [\n\n PixelValueNoPercent(PixelValue::const_px(0)),\n\n PixelValueNoPercent(PixelValue::const_px(0)),\n\n ],\n\n color: ColorU {\n\n r: 0,\n\n g: 0,\n\n b: 0,\n\n a: 255,\n\n },\n\n blur_radius: PixelValueNoPercent(PixelValue::const_px(0)),\n\n spread_radius: PixelValueNoPercent(PixelValue::const_px(0)),\n\n clip_mode: BoxShadowClipMode::Outset,\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 32, "score": 168956.14790847746 }, { "content": "pub fn parse_style_text_color<'a>(\n\n input: &'a str,\n\n) -> Result<StyleTextColor, CssColorParseError<'a>> {\n\n parse_css_color(input).and_then(|ok| Ok(StyleTextColor(ok)))\n\n}\n\n\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 33, "score": 168956.14790847746 }, { "content": "// parses a background, such as \"linear-gradient(red, green)\"\n\npub fn parse_style_background_content<'a>(\n\n input: &'a str,\n\n) -> Result<StyleBackgroundContent, CssBackgroundParseError<'a>> {\n\n match parse_parentheses(\n\n input,\n\n &[\n\n \"linear-gradient\",\n\n \"repeating-linear-gradient\",\n\n \"radial-gradient\",\n\n \"repeating-radial-gradient\",\n\n \"image\",\n\n ],\n\n ) {\n\n Ok((background_type, brace_contents)) => {\n\n let gradient_type = match background_type {\n\n \"linear-gradient\" => GradientType::LinearGradient,\n\n \"repeating-linear-gradient\" => GradientType::RepeatingLinearGradient,\n\n \"radial-gradient\" => GradientType::RadialGradient,\n\n \"repeating-radial-gradient\" => GradientType::RepeatingRadialGradient,\n\n \"image\" => {\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 34, "score": 
168956.14790847746 }, { "content": "#[inline]\n\npub fn wr_translate_border_side(input: CssBorderSide) -> WrBorderSide {\n\n WrBorderSide {\n\n color: wr_translate_color_u(input.color).into(),\n\n style: wr_translate_border_style(input.style),\n\n }\n\n}\n\n\n\n// NOTE: Reverse direction: Translate from webrender::LayoutRect to css::LayoutRect\n\n#[inline(always)]\n\npub const fn wr_translate_css_layout_rect(input: WrLayoutRect) -> CssLayoutRect {\n\n CssLayoutRect {\n\n origin: CssLayoutPoint {\n\n x: input.origin.x,\n\n y: input.origin.y,\n\n },\n\n size: CssLayoutSize {\n\n width: input.size.width,\n\n height: input.size.height,\n\n },\n\n }\n", "file_path": "azul/src/wr_translate.rs", "rank": 35, "score": 168510.05737532326 }, { "content": "pub fn construct_html_cascade_tree<'a, T>(\n\n input: &'a NodeDataContainer<NodeData<T>>,\n\n node_hierarchy: &NodeHierarchy,\n\n node_depths_sorted: &[(usize, NodeId)],\n\n focused_item: Option<NodeId>,\n\n hovered_items: &BTreeMap<NodeId, HitTestItem>,\n\n is_mouse_down: bool,\n\n) -> NodeDataContainer<HtmlCascadeInfo<'a, T>> {\n\n let mut nodes = (0..node_hierarchy.len())\n\n .map(|_| HtmlCascadeInfo {\n\n node_data: &input[NodeId::new(0)],\n\n index_in_parent: 0,\n\n is_last_child: false,\n\n is_hovered_over: false,\n\n is_active: false,\n\n is_focused: false,\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n for (_depth, parent_id) in node_depths_sorted {\n", "file_path": "azul-core/src/style.rs", "rank": 36, "score": 168195.00953293015 }, { "content": "/// Update the WindowStates focus node in case the previous\n\n/// frames callbacks set the focus to a specific node\n\n///\n\n/// Takes the `WindowState.pending_focus_target` and `WindowState.focused_node`\n\n/// and updates the `WindowState.focused_node` accordingly.\n\n/// Should be called before ``\n\npub fn update_focus_from_callbacks<'a, T: 'a>(\n\n self_dom_id: &DomId,\n\n pending_focus_target: &mut Option<FocusTarget>,\n\n focused_node: &mut Option<(DomId, NodeId)>,\n\n 
node_hierarchy: &NodeHierarchy,\n\n html_node_tree: &mut NodeDataContainer<HtmlCascadeInfo<'a, T>>,\n\n) -> Option<UpdateFocusWarning> {\n\n // `pending_focus_target` is `None` in most cases, since usually the callbacks\n\n // don't mess with the current focused item.\n\n let new_focus_target = pending_focus_target.clone()?;\n\n\n\n let mut warning = None;\n\n\n\n match new_focus_target {\n\n FocusTarget::Id((dom_id, node_id)) => {\n\n if dom_id == *self_dom_id && html_node_tree.len() < node_id.index() {\n\n *focused_node = Some((dom_id, node_id));\n\n } else {\n\n warning = Some(UpdateFocusWarning::FocusInvalidNodeId(node_id));\n\n }\n", "file_path": "azul-core/src/style.rs", "rank": 37, "score": 168096.9119458526 }, { "content": "/// Bezier formula for cubic curves (start, handle 1, handle 2, end).\n\n///\n\n/// ## Inputs\n\n///\n\n/// - `curve`: The 4 handles of the curve\n\n/// - `t`: The interpolation amount - usually between 0.0 and 1.0 if the point\n\n/// should be between the start and end\n\n///\n\n/// ## Returns\n\n///\n\n/// - `BezierControlPoint`: The calculated point which lies on the curve,\n\n/// according the the bezier formula\n\npub fn cubic_interpolate_bezier(curve: &[BezierControlPoint; 4], t: f32) -> BezierControlPoint {\n\n let one_minus = 1.0 - t;\n\n let one_minus_square = one_minus.powi(2);\n\n let one_minus_cubic = one_minus.powi(3);\n\n\n\n let t_pow2 = t.powi(2);\n\n let t_pow3 = t.powi(3);\n\n\n\n let x = one_minus_cubic * curve[0].x\n\n + 3.0 * one_minus_square * t * curve[1].x\n\n + 3.0 * one_minus * t_pow2 * curve[2].x\n\n + t_pow3 * curve[3].x;\n\n\n\n let y = one_minus_cubic * curve[0].y\n\n + 3.0 * one_minus_square * t * curve[1].y\n\n + 3.0 * one_minus * t_pow2 * curve[2].y\n\n + t_pow3 * curve[3].y;\n\n\n\n BezierControlPoint { x, y }\n\n}\n\n\n", "file_path": "azul-widgets/src/svg.rs", "rank": 38, "score": 164690.46360985358 }, { "content": "pub fn quadratic_interpolate_bezier(curve: &[BezierControlPoint; 3], t: f32) -> 
BezierControlPoint {\n\n let one_minus = 1.0 - t;\n\n let one_minus_square = one_minus.powi(2);\n\n\n\n let t_pow2 = t.powi(2);\n\n\n\n // TODO: Why 3.0 and not 2.0?\n\n\n\n let x = one_minus_square * curve[0].x\n\n + 2.0 * one_minus * t * curve[1].x\n\n + 3.0 * t_pow2 * curve[2].x;\n\n\n\n let y = one_minus_square * curve[0].y\n\n + 2.0 * one_minus * t * curve[1].y\n\n + 3.0 * t_pow2 * curve[2].y;\n\n\n\n BezierControlPoint { x, y }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n", "file_path": "azul-widgets/src/svg.rs", "rank": 39, "score": 164690.46360985358 }, { "content": "/// Calculates the normal vector at a certain point (perpendicular to the curve)\n\npub fn cubic_bezier_normal(curve: &[BezierControlPoint; 4], t: f32) -> BezierNormalVector {\n\n // 1. Calculate the derivative of the bezier curve\n\n //\n\n // This means, we go from 4 control points to 3 control points and redistribute\n\n // the weights of the control points according to the formula:\n\n //\n\n // w'0 = 3(w1-w0)\n\n // w'1 = 3(w2-w1)\n\n // w'2 = 3(w3-w2)\n\n\n\n let weight_1_x = 3.0 * (curve[1].x - curve[0].x);\n\n let weight_1_y = 3.0 * (curve[1].y - curve[0].y);\n\n\n\n let weight_2_x = 3.0 * (curve[2].x - curve[1].x);\n\n let weight_2_y = 3.0 * (curve[2].y - curve[1].y);\n\n\n\n let weight_3_x = 3.0 * (curve[3].x - curve[2].x);\n\n let weight_3_y = 3.0 * (curve[3].y - curve[2].y);\n\n\n\n // The first derivative of a cubic bezier curve is a quadratic bezier curve\n", "file_path": "azul-widgets/src/svg.rs", "rank": 40, "score": 164690.46360985358 }, { "content": "fn node_needs_to_clip_children(layout: &RectLayout) -> bool {\n\n !(layout.is_horizontal_overflow_visible() || layout.is_vertical_overflow_visible())\n\n}\n\n\n\n/// NOTE: This function assumes that the UiDescription has an initialized arena\n\n///\n\n/// This only looks at the user-facing styles of the `UiDescription`, not the actual\n\n/// layout. 
The layout is done only in the `into_display_list_builder` step.\n\npub(crate) fn display_list_from_ui_description<'a, T>(\n\n ui_description: &'a UiDescription<T>,\n\n ui_state: &UiState<T>,\n\n) -> DisplayList<'a, T> {\n\n let arena = &ui_description.ui_descr_arena;\n\n\n\n let mut override_warnings = Vec::new();\n\n\n\n let display_rect_arena = arena.node_data.transform(|_, node_id| {\n\n let style = &ui_description.styled_nodes[node_id];\n\n let tag = ui_state.node_ids_to_tag_ids.get(&node_id).map(|tag| *tag);\n\n let mut rect = DisplayRectangle::new(tag, style);\n", "file_path": "azul/src/display_list.rs", "rank": 41, "score": 164029.72595387482 }, { "content": "#[inline(always)]\n\npub fn wr_translate_layout_side_offsets(input: CssLayoutSideOffsets) -> WrLayoutSideOffsets {\n\n WrLayoutSideOffsets::new(\n\n input.top.get(),\n\n input.right.get(),\n\n input.bottom.get(),\n\n input.left.get(),\n\n )\n\n}\n\n\n\n#[inline(always)]\n\npub const fn wr_translate_color_u(input: CssColorU) -> WrColorU {\n\n WrColorU {\n\n r: input.r,\n\n g: input.g,\n\n b: input.b,\n\n a: input.a,\n\n }\n\n}\n\n\n\n#[inline(always)]\n\npub const fn wr_translate_color_f(input: CssColorF) -> WrColorF {\n\n WrColorF {\n\n r: input.r,\n\n g: input.g,\n\n b: input.b,\n\n a: input.a,\n\n }\n\n}\n\n\n", "file_path": "azul/src/wr_translate.rs", "rank": 42, "score": 162279.90072638364 }, { "content": "// Since there can be a small floating point error, round the item to the nearest pixel,\n\n// then compare the rects\n\nfn contains_rect_rounded(a: &LayoutRect, b: LayoutRect) -> bool {\n\n let a_x = a.origin.x.round() as isize;\n\n let a_y = a.origin.x.round() as isize;\n\n let a_width = a.size.width.round() as isize;\n\n let a_height = a.size.height.round() as isize;\n\n\n\n let b_x = b.origin.x.round() as isize;\n\n let b_y = b.origin.x.round() as isize;\n\n let b_width = b.size.width.round() as isize;\n\n let b_height = b.size.height.round() as isize;\n\n\n\n b_x >= a_x && b_y >= a_y && b_x 
+ b_width <= a_x + a_width && b_y + b_height <= a_y + a_height\n\n}\n\n\n", "file_path": "azul/src/display_list.rs", "rank": 43, "score": 157577.53969950235 }, { "content": "#[test]\n\nfn test_overflow_parsing() {\n\n use crate::prelude::Overflow;\n\n\n\n let layout1 = RectLayout::default();\n\n\n\n // The default for overflowing is overflow: auto, which clips\n\n // children, so this should evaluate to true by default\n\n assert_eq!(node_needs_to_clip_children(&layout1), true);\n\n\n\n let layout2 = RectLayout {\n\n overflow_x: Some(CssPropertyValue::Exact(Overflow::Visible)),\n\n overflow_y: Some(CssPropertyValue::Exact(Overflow::Visible)),\n\n ..Default::default()\n\n };\n\n assert_eq!(node_needs_to_clip_children(&layout2), false);\n\n\n\n let layout3 = RectLayout {\n\n overflow_x: Some(CssPropertyValue::Exact(Overflow::Hidden)),\n\n overflow_y: Some(CssPropertyValue::Exact(Overflow::Hidden)),\n\n ..Default::default()\n\n };\n\n assert_eq!(node_needs_to_clip_children(&layout3), true);\n\n}\n", "file_path": "azul/src/display_list.rs", "rank": 44, "score": 155846.99541731732 }, { "content": "fn apply_style_property(style: &mut RectStyle, layout: &mut RectLayout, property: &CssProperty) {\n\n use azul_css::CssProperty::*;\n\n\n\n match property {\n\n Display(d) => layout.display = Some(*d),\n\n Float(f) => layout.float = Some(*f),\n\n BoxSizing(bs) => layout.box_sizing = Some(*bs),\n\n\n\n TextColor(c) => style.text_color = Some(*c),\n\n FontSize(fs) => style.font_size = Some(*fs),\n\n FontFamily(ff) => style.font_family = Some(ff.clone()),\n\n TextAlign(ta) => style.text_align = Some(*ta),\n\n\n\n LetterSpacing(ls) => style.letter_spacing = Some(*ls),\n\n LineHeight(lh) => style.line_height = Some(*lh),\n\n WordSpacing(ws) => style.word_spacing = Some(*ws),\n\n TabWidth(tw) => style.tab_width = Some(*tw),\n\n Cursor(c) => style.cursor = Some(*c),\n\n\n\n Width(w) => layout.width = Some(*w),\n", "file_path": "azul/src/display_list.rs", "rank": 45, "score": 
155444.0535787503 }, { "content": "/// Parses an `direction` such as `\"50deg\"` or `\"to right bottom\"` (in the context of gradients)\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # extern crate azul_css;\n\n/// # extern crate azul_css_parser;\n\n/// # use azul_css_parser::parse_direction;\n\n/// # use azul_css::{Direction, FloatValue};\n\n/// use azul_css::DirectionCorner::*;\n\n///\n\n/// assert_eq!(parse_direction(\"to right bottom\"), Ok(Direction::FromTo(TopLeft, BottomRight)));\n\n/// assert_eq!(parse_direction(\"to right\"), Ok(Direction::FromTo(Left, Right)));\n\n/// assert_eq!(parse_direction(\"50deg\"), Ok(Direction::Angle(FloatValue::new(50.0))));\n\n/// ```\n\npub fn parse_direction<'a>(input: &'a str) -> Result<Direction, CssDirectionParseError<'a>> {\n\n use std::f32::consts::PI;\n\n\n\n let input_iter = input.split_whitespace();\n\n let count = input_iter.clone().count();\n\n let mut first_input_iter = input_iter.clone();\n\n // \"50deg\" | \"to\" | \"right\"\n\n let first_input = first_input_iter\n\n .next()\n\n .ok_or(CssDirectionParseError::Error(input))?;\n\n\n\n let deg = {\n\n if first_input.ends_with(\"grad\") {\n\n first_input.split(\"grad\").next().unwrap().parse::<f32>()? / 400.0 * 360.0\n\n } else if first_input.ends_with(\"rad\") {\n\n first_input.split(\"rad\").next().unwrap().parse::<f32>()? 
* 180.0 / PI\n\n } else if first_input.ends_with(\"deg\") || first_input.parse::<f32>().is_ok() {\n\n first_input.split(\"deg\").next().unwrap().parse::<f32>()?\n\n } else if let Ok(angle) = first_input.parse::<f32>() {\n\n angle\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 46, "score": 154588.99838912865 }, { "content": "pub fn render_table_view<T>(state: &mut TableViewState, rows: usize, columns: usize) -> Dom<T> {\n\n // div.__azul-native-table-container\n\n // |-> div.__azul-native-table-column (Column 0)\n\n // |-> div.__azul-native-table-top-left-rect .__azul-native-table-column-name\n\n // '-> div.__azul-native-table-row-numbers .__azul-native-table-row\n\n //\n\n // |-> div.__azul-native-table-column-container\n\n // |-> div.__azul-native-table-column (Column 1 ...)\n\n // |-> div.__azul-native-table-column-name\n\n // '-> div.__azul-native-table-row\n\n // '-> div.__azul-native-table-cell\n\n\n\n Dom::div()\n\n .with_class(\"__azul-native-table-container\")\n\n .with_child(\n\n Dom::div()\n\n .with_class(\"__azul-native-table-row-number-wrapper\")\n\n .with_child(\n\n // Empty rectangle at the top left of the table\n\n Dom::div()\n", "file_path": "azul-widgets/src/table_view.rs", "rank": 47, "score": 153590.69960723125 }, { "content": "fn sort_children_by_position<'a>(\n\n parent: NodeId,\n\n node_hierarchy: &NodeHierarchy,\n\n rectangles: &NodeDataContainer<DisplayRectangle<'a>>,\n\n) -> Vec<NodeId> {\n\n use azul_css::LayoutPosition::*;\n\n\n\n let mut not_absolute_children = parent\n\n .children(node_hierarchy)\n\n .filter(|id| {\n\n rectangles[*id]\n\n .layout\n\n .position\n\n .and_then(|p| p.get_property_or_default())\n\n .unwrap_or_default()\n\n != Absolute\n\n })\n\n .collect::<Vec<NodeId>>();\n\n\n\n let mut absolute_children = parent\n", "file_path": "azul/src/display_list.rs", "rank": 48, "score": 153518.91867242873 }, { "content": "/// Parse a padding value such as\n\n///\n\n/// \"10px 10px\"\n\npub fn 
parse_layout_padding<'a>(input: &'a str) -> Result<LayoutPadding, LayoutPaddingParseError> {\n\n let mut input_iter = input.split_whitespace();\n\n let first = parse_pixel_value(\n\n input_iter\n\n .next()\n\n .ok_or(LayoutPaddingParseError::TooFewValues)?,\n\n )?;\n\n let second = parse_pixel_value(match input_iter.next() {\n\n Some(s) => s,\n\n None => {\n\n return Ok(LayoutPadding {\n\n top: first,\n\n bottom: first,\n\n left: first,\n\n right: first,\n\n })\n\n }\n\n })?;\n\n let third = parse_pixel_value(match input_iter.next() {\n\n Some(s) => s,\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 49, "score": 152367.16107923072 }, { "content": "pub fn parse_layout_margin<'a>(input: &'a str) -> Result<LayoutMargin, LayoutMarginParseError> {\n\n match parse_layout_padding(input) {\n\n Ok(padding) => Ok(LayoutMargin {\n\n top: padding.top,\n\n left: padding.left,\n\n right: padding.right,\n\n bottom: padding.bottom,\n\n }),\n\n Err(LayoutPaddingParseError::PixelParseError(e)) => Err(e.into()),\n\n Err(LayoutPaddingParseError::TooManyValues) => Err(LayoutMarginParseError::TooManyValues),\n\n Err(LayoutPaddingParseError::TooFewValues) => Err(LayoutMarginParseError::TooFewValues),\n\n }\n\n}\n\n\n\nconst DEFAULT_BORDER_COLOR: ColorU = ColorU {\n\n r: 0,\n\n g: 0,\n\n b: 0,\n\n a: 255,\n\n};\n\n// Default border thickness on the web seems to be 3px\n\nconst DEFAULT_BORDER_THICKNESS: PixelValue = PixelValue::const_px(3);\n\n\n\nuse std::str::CharIndices;\n\n\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 50, "score": 152365.76820944424 }, { "content": "/// \"Y/N\" MsgBox (title, message, icon, default)\n\npub fn msg_box_yes_no(title: &str, message: &str, icon: MessageBoxIcon, default: YesNo) -> YesNo {\n\n ::tinyfiledialogs::message_box_yes_no(title, message, icon, default.into()).into()\n\n}\n\n\n", "file_path": "azul/src/dialogs.rs", "rank": 51, "score": 151442.4866329086 }, { "content": "/// Open a directory, returns `None` if the user 
canceled the dialog\n\npub fn open_directory_dialog(default_path: Option<&str>) -> Option<String> {\n\n ::tinyfiledialogs::select_folder_dialog(\"Open Folder\", default_path.unwrap_or(\"\"))\n\n}\n\n\n", "file_path": "azul/src/dialogs.rs", "rank": 52, "score": 150770.64284268275 }, { "content": "/// Opens a save file dialog, returns `None` if the user canceled the dialog\n\npub fn save_file_dialog(default_path: Option<&str>) -> Option<String> {\n\n let path = default_path.unwrap_or(\"\");\n\n ::tinyfiledialogs::save_file_dialog(\"Save File\", path)\n\n}\n\n\n\n// TODO (at least on Windows):\n\n// - Find and replace dialog\n\n// - Font picker dialog\n\n// - Page setup dialog\n\n// - Print dialog\n\n// - Print property dialog\n", "file_path": "azul/src/dialogs.rs", "rank": 53, "score": 150770.53121325275 }, { "content": "#[test]\n\nfn test_column_name_from_number() {\n\n assert_eq!(column_name_from_number(0), String::from(\"A\"));\n\n assert_eq!(column_name_from_number(1), String::from(\"B\"));\n\n assert_eq!(column_name_from_number(6), String::from(\"G\"));\n\n assert_eq!(column_name_from_number(26), String::from(\"AA\"));\n\n assert_eq!(column_name_from_number(27), String::from(\"AB\"));\n\n assert_eq!(column_name_from_number(225), String::from(\"HR\"));\n\n}\n", "file_path": "azul-widgets/src/table_view.rs", "rank": 54, "score": 148995.8279449702 }, { "content": "/// In order to figure out on which nodes to insert the :hover and :active hit-test tags,\n\n/// we need to select all items that have a :hover or :active tag.\n\nfn match_hover_selectors<'a, T>(\n\n hover_selectors: BTreeMap<CssPath, HoverGroup>,\n\n node_hierarchy: &NodeHierarchy,\n\n html_node_tree: &NodeDataContainer<HtmlCascadeInfo<'a, T>>,\n\n) -> BTreeMap<NodeId, HoverGroup> {\n\n let mut btree_map = BTreeMap::new();\n\n\n\n for (css_path, hover_selector) in hover_selectors {\n\n btree_map.extend(\n\n html_node_tree\n\n .linear_iter()\n\n .filter(|node_id| {\n\n matches_html_element(&css_path, 
*node_id, node_hierarchy, html_node_tree)\n\n })\n\n .map(|node_id| (node_id, hover_selector)),\n\n );\n\n }\n\n\n\n btree_map\n\n}\n\n\n", "file_path": "azul-core/src/style.rs", "rank": 55, "score": 148018.94550474087 }, { "content": "fn get_radii(r: &SvgRect) -> (TypedRect<f32, UnknownUnit>, BorderRadii) {\n\n let rect = TypedRect::new(\n\n TypedPoint2D::new(r.x, r.y),\n\n TypedSize2D::new(r.width, r.height),\n\n );\n\n let radii = BorderRadii {\n\n top_left: r.rx,\n\n top_right: r.rx,\n\n bottom_left: r.rx,\n\n bottom_right: r.rx,\n\n };\n\n (rect, radii)\n\n}\n\n\n", "file_path": "azul-widgets/src/svg.rs", "rank": 56, "score": 147110.4745086535 }, { "content": "/// Returns the (logical_size, physical_size) as LayoutSizes, which can then be passed to webrender\n\nfn convert_window_size(size: &WindowSize) -> (LayoutSize, DeviceIntSize) {\n\n let physical_size = size.get_physical_size();\n\n (\n\n LayoutSize::new(size.dimensions.width, size.dimensions.height),\n\n DeviceIntSize::new(physical_size.width as i32, physical_size.height as i32),\n\n )\n\n}\n\n\n\n/// Special rendering function that skips building a layout and only does\n\n/// hit-testing and rendering - called on pure scroll events, since it's\n\n/// significantly less CPU-intensive to just render the last display list instead of\n\n/// re-layouting on every single scroll event.\n", "file_path": "azul/src/app.rs", "rank": 57, "score": 146255.3746783778 }, { "content": "/// Opens the default color picker dialog\n\npub fn color_picker_dialog(\n\n title: &str,\n\n default_value: Option<ColorValue>,\n\n) -> Option<(String, [u8; 3])> {\n\n let default = default_value.unwrap_or_default().into();\n\n ::tinyfiledialogs::color_chooser_dialog(title, default)\n\n}\n\n\n", "file_path": "azul/src/dialogs.rs", "rank": 58, "score": 144145.8070118518 }, { "content": "/// Open a single file, returns `None` if the user canceled the dialog.\n\n///\n\n/// Filters are the file extensions, i.e. 
`Some(&[\"doc\", \"docx\"])` to only allow\n\n/// \"doc\" and \"docx\" files\n\npub fn open_file_dialog(\n\n default_path: Option<&str>,\n\n filter_list: Option<&[&str]>,\n\n) -> Option<String> {\n\n let filter_list = filter_list.map(|f| (f, \"\"));\n\n let path = default_path.unwrap_or(\"\");\n\n ::tinyfiledialogs::open_file_dialog(\"Open File\", path, filter_list)\n\n}\n\n\n", "file_path": "azul/src/dialogs.rs", "rank": 59, "score": 144145.1795988106 }, { "content": "/// Helper function to easily draw some lines at runtime\n\n///\n\n/// ## Inputs\n\n///\n\n/// - `lines`: Each item in `lines` is a line (represented by a `Vec<(x, y)>`).\n\n/// Lines that are shorter than 2 points are ignored / not rendered.\n\n/// - `stroke_color`: The color of the line\n\n/// - `stroke_options`: If the line should be round, square, etc.\n\npub fn quick_lines(\n\n lines: &[Vec<(f32, f32)>],\n\n stroke_color: ColorU,\n\n stroke_options: Option<SvgStrokeOptions>,\n\n) -> SvgLayerResourceDirect {\n\n let stroke_options = stroke_options.unwrap_or_default();\n\n let style = SvgStyle::stroked(stroke_color, stroke_options);\n\n\n\n let polygons = lines\n\n .iter()\n\n .filter(|line| line.len() >= 2)\n\n .map(|line| {\n\n let first_point = &line[0];\n\n let mut poly_events = vec![PathEvent::MoveTo(TypedPoint2D::new(\n\n first_point.0,\n\n first_point.1,\n\n ))];\n\n\n\n for (x, y) in line.iter().skip(1) {\n\n poly_events.push(PathEvent::LineTo(TypedPoint2D::new(*x, *y)));\n\n }\n\n\n\n SvgLayerType::Polygon(poly_events)\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n tesselate_polygon_data(&polygons, style)\n\n}\n\n\n", "file_path": "azul-widgets/src/svg.rs", "rank": 60, "score": 144140.4817782584 }, { "content": "pub fn text_on_curve(\n\n layout: &SvgTextLayout,\n\n text_style: SvgStyle,\n\n vectorized_font: &VectorizedFont,\n\n curve: &SampledBezierCurve,\n\n) -> SvgLayerResourceDirect {\n\n // NOTE: char offsets are now in unscaled glyph space!\n\n let (char_offsets, char_rotations) =\n\n 
curve.get_text_offsets_and_rotations(&layout.layouted_glyphs.glyphs, 0.0);\n\n\n\n let fill_vertices = text_style.fill.map(|_| {\n\n let fill_verts = vectorized_font.get_fill_vertices(&layout.layouted_glyphs.glyphs);\n\n curved_vector_text_to_vertices(&char_offsets, &char_rotations, fill_verts)\n\n });\n\n\n\n let stroke_vertices = text_style.stroke.map(|stroke| {\n\n let stroke_verts =\n\n vectorized_font.get_stroke_vertices(&layout.layouted_glyphs.glyphs, &stroke.1);\n\n curved_vector_text_to_vertices(&char_offsets, &char_rotations, stroke_verts)\n\n });\n\n\n\n SvgLayerResourceDirect {\n\n style: text_style,\n\n fill: fill_vertices,\n\n stroke: stroke_vertices,\n\n }\n\n}\n\n\n", "file_path": "azul-widgets/src/svg.rs", "rank": 61, "score": 144140.4817782584 }, { "content": "pub fn normal_text(\n\n layout: &SvgTextLayout,\n\n text_style: SvgStyle,\n\n vectorized_font: &VectorizedFont,\n\n) -> SvgLayerResourceDirect {\n\n let fill_vertices = text_style.fill.map(|_| {\n\n let fill_verts = vectorized_font.get_fill_vertices(&layout.layouted_glyphs.glyphs);\n\n normal_text_to_vertices(&layout.layouted_glyphs.glyphs, fill_verts)\n\n });\n\n\n\n let stroke_vertices = text_style.stroke.map(|stroke| {\n\n let stroke_verts =\n\n vectorized_font.get_stroke_vertices(&layout.layouted_glyphs.glyphs, &stroke.1);\n\n normal_text_to_vertices(&layout.layouted_glyphs.glyphs, stroke_verts)\n\n });\n\n\n\n SvgLayerResourceDirect {\n\n style: text_style,\n\n fill: fill_vertices,\n\n stroke: stroke_vertices,\n\n }\n\n}\n\n\n", "file_path": "azul-widgets/src/svg.rs", "rank": 62, "score": 144140.4817782584 }, { "content": "pub fn quick_rects(\n\n rects: &[SvgRect],\n\n stroke_color: Option<ColorU>,\n\n fill_color: Option<ColorU>,\n\n stroke_options: Option<SvgStrokeOptions>,\n\n) -> SvgLayerResourceDirect {\n\n let style = SvgStyle {\n\n stroke: stroke_color.map(|col| (col, stroke_options.unwrap_or_default())),\n\n fill: fill_color,\n\n ..Default::default()\n\n };\n\n let rects = 
rects\n\n .iter()\n\n .map(|r| SvgLayerType::Rect(*r))\n\n .collect::<Vec<_>>();\n\n tesselate_polygon_data(&rects, style)\n\n}\n\n\n\nconst BEZIER_SAMPLE_RATE: usize = 20;\n\n\n", "file_path": "azul-widgets/src/svg.rs", "rank": 63, "score": 144140.4817782584 }, { "content": "#[cfg(feature = \"native_style\")]\n\npub fn native() -> Css {\n\n azul_native_style::native()\n\n}\n\n\n\n/// Parses CSS stylesheet from a string. Convenience wrapper for `azul-css-parser::new_from_str`.\n", "file_path": "azul/src/css.rs", "rank": 64, "score": 142258.89549119642 }, { "content": "/// Parses a string (\"true\" or \"false\")\n\nfn parse_bool(input: &str) -> Option<bool> {\n\n match input {\n\n \"true\" => Some(true),\n\n \"false\" => Some(false),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "azul/src/xml.rs", "rank": 65, "score": 141933.41956572345 }, { "content": "/// \"Ok / Cancel\" MsgBox (title, message, icon, default)\n\npub fn msg_box_ok_cancel(\n\n title: &str,\n\n message: &str,\n\n icon: MessageBoxIcon,\n\n default: OkCancel,\n\n) -> OkCancel {\n\n ::tinyfiledialogs::message_box_ok_cancel(title, message, icon, default.into()).into()\n\n}\n\n\n\n/// Yes or No result, returned from the `msg_box_yes_no` function\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]\n\npub enum YesNo {\n\n Yes,\n\n No,\n\n}\n\n\n\nimpl From<YesNo> for ::tinyfiledialogs::YesNo {\n\n #[inline]\n\n fn from(e: YesNo) -> ::tinyfiledialogs::YesNo {\n\n match e {\n", "file_path": "azul/src/dialogs.rs", "rank": 66, "score": 141741.94655529174 }, { "content": "/// Open multiple files at once, returns `None` if the user canceled the dialog,\n\n/// otherwise returns the `Vec<String>` with the given file paths\n\n///\n\n/// Filters are the file extensions, i.e. 
`Some(&[\"doc\", \"docx\"])` to only allow\n\n/// \"doc\" and \"docx\" files\n\npub fn open_multiple_files_dialog(\n\n default_path: Option<&str>,\n\n filter_list: Option<&[&str]>,\n\n) -> Option<Vec<String>> {\n\n let filter_list = filter_list.map(|f| (f, \"\"));\n\n let path = default_path.unwrap_or(\"\");\n\n ::tinyfiledialogs::open_file_dialog_multi(\"Open Folder\", path, filter_list)\n\n}\n\n\n", "file_path": "azul/src/dialogs.rs", "rank": 67, "score": 141741.188655189 }, { "content": "/// Returns the (fill, stroke) vertices of a layer\n\npub fn tesselate_polygon_data(\n\n layer_data: &[SvgLayerType],\n\n style: SvgStyle,\n\n) -> SvgLayerResourceDirect // (Option<(Vec<SvgVert>, Vec<u32>)>, Option<(Vec<SvgVert>, Vec<u32>)>)\n\n{\n\n let tolerance = 0.01;\n\n let fill = style.fill.is_some();\n\n let stroke_options = style.stroke.map(|s| s.1);\n\n\n\n let mut last_index = 0;\n\n let mut fill_vertex_buf = Vec::<SvgVert>::new();\n\n let mut fill_index_buf = Vec::<u32>::new();\n\n\n\n let mut last_stroke_index = 0;\n\n let mut stroke_vertex_buf = Vec::<SvgVert>::new();\n\n let mut stroke_index_buf = Vec::<u32>::new();\n\n\n\n for layer in layer_data {\n\n let mut path = None;\n\n\n", "file_path": "azul-widgets/src/svg.rs", "rank": 68, "score": 141736.79425816183 }, { "content": "pub fn normal_text_to_vertices(\n\n glyph_ids: &[GlyphInstance],\n\n mut vertex_buffers: Vec<VertexBuffers<SvgVert, u32>>,\n\n) -> VerticesIndicesBuffer {\n\n normal_text_to_vertices_inner(glyph_ids, &mut vertex_buffers);\n\n join_vertex_buffers(&vertex_buffers)\n\n}\n\n\n", "file_path": "azul-widgets/src/svg.rs", "rank": 69, "score": 141736.79425816183 }, { "content": "pub fn get_layouted_glyphs(\n\n word_positions: &WordPositions,\n\n scaled_words: &ScaledWords,\n\n inline_text_layout: &InlineTextLayout,\n\n origin: LayoutPoint,\n\n) -> LayoutedGlyphs {\n\n use crate::text_shaping;\n\n\n\n let letter_spacing_px = word_positions\n\n .text_layout_options\n\n .letter_spacing\n\n 
.unwrap_or(0.0);\n\n let mut all_glyphs = Vec::with_capacity(scaled_words.items.len());\n\n\n\n for line in inline_text_layout.lines.iter() {\n\n let line_x = line.bounds.origin.x;\n\n let line_y = line.bounds.origin.y;\n\n\n\n let scaled_words_in_this_line = &scaled_words.items[line.word_start..line.word_end];\n\n let word_positions_in_this_line =\n", "file_path": "azul/src/text_layout.rs", "rank": 70, "score": 141736.79425816183 }, { "content": "/// Takes a text broken into semantic items and a font instance and\n\n/// scales the font accordingly.\n\npub fn words_to_scaled_words(\n\n words: &Words,\n\n font_bytes: &[u8],\n\n font_index: u32,\n\n font_size_px: f32,\n\n) -> ScaledWords {\n\n use crate::text_shaping::{self, HbBuffer, HbFont, HbScaledFont, HB_SCALE_FACTOR};\n\n use std::char;\n\n use std::mem;\n\n\n\n let hb_font = HbFont::from_bytes(font_bytes, font_index);\n\n let hb_scaled_font = HbScaledFont::from_font(&hb_font, font_size_px);\n\n\n\n // Get the dimensions of the space glyph\n\n let hb_space_buffer = HbBuffer::from_str(\" \");\n\n let hb_shaped_space = text_shaping::shape_word_hb(&hb_space_buffer, &hb_scaled_font);\n\n let space_advance_px = hb_shaped_space.glyph_positions[0].x_advance as f32 / HB_SCALE_FACTOR;\n\n let space_codepoint = hb_shaped_space.glyph_infos[0].codepoint;\n\n\n\n let internal_str = words.internal_str.replace(char::is_whitespace, \" \");\n", "file_path": "azul/src/text_layout.rs", "rank": 71, "score": 141736.79425816183 }, { "content": "pub fn reset_tag_id() {\n\n TAG_ID.swap(1, Ordering::SeqCst);\n\n}\n\n\n\nimpl ScrollTagId {\n\n pub fn new() -> ScrollTagId {\n\n ScrollTagId(new_tag_id())\n\n }\n\n}\n\n\n\nstatic DOM_ID: AtomicUsize = AtomicUsize::new(1);\n\n\n\n/// DomID - used for identifying different DOMs (for example IFrameCallbacks)\n\n/// have a different DomId than the root DOM\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct DomId {\n\n /// Unique ID for this DOM\n\n id: usize,\n\n 
/// If this DOM was generated from an IFrameCallback, stores the parents\n\n /// DomId + the NodeId (from the parent DOM) which the IFrameCallback\n", "file_path": "azul-core/src/dom.rs", "rank": 72, "score": 141736.79425816183 }, { "content": "/// Returns all CSS paths that have a `:hover` or `:active` in their path\n\n/// (since they need to have tags for hit-testing)\n\npub fn collect_hover_groups(css: &Css) -> BTreeMap<CssPath, HoverGroup> {\n\n use azul_css::{CssPathPseudoSelector::*, CssPathSelector::*};\n\n\n\n let hover_rule = PseudoSelector(Hover);\n\n let active_rule = PseudoSelector(Active);\n\n\n\n // Filter out all :hover and :active rules, since we need to create tags\n\n // for them after the main CSS styling has been done\n\n css.rules()\n\n .filter_map(|rule_block| {\n\n let pos = rule_block\n\n .path\n\n .selectors\n\n .iter()\n\n .position(|x| *x == hover_rule || *x == active_rule)?;\n\n if rule_block.declarations.is_empty() {\n\n return None;\n\n }\n\n\n\n let active_or_hover = match rule_block.path.selectors.get(pos)? {\n", "file_path": "azul-core/src/style.rs", "rank": 73, "score": 140813.3107499646 }, { "content": "/// Utility function that, given the current keyboard state and a list of\n\n/// keyboard accelerators + callbacks, checks what callback can be invoked\n\n/// and the first matching callback. 
This leads to very readable\n\n/// (but still type checked) code like this:\n\n///\n\n/// ```no_run,ignore\n\n/// use azul::prelude::{AcceleratorKey::*, VirtualKeyCode::*};\n\n///\n\n/// fn my_callback<T>(info: CallbackInfo<T>) -> UpdateScreen {\n\n/// keymap(info, &[\n\n/// [vec![Ctrl, S], save_document],\n\n/// [vec![Ctrl, N], create_new_document],\n\n/// [vec![Ctrl, O], open_new_file],\n\n/// [vec![Ctrl, Shift, N], create_new_window],\n\n/// ])\n\n/// }\n\n/// ```\n\npub fn keymap<T>(\n\n info: CallbackInfo<T>,\n\n events: &[(Vec<AcceleratorKey>, CallbackType<T>)],\n\n) -> UpdateScreen {\n\n let keyboard_state = info.state.windows[info.window_id]\n\n .get_keyboard_state()\n\n .clone();\n\n\n\n events\n\n .iter()\n\n .filter(|(keymap_character, _)| {\n\n keymap_character\n\n .iter()\n\n .all(|keymap_char| keymap_char.matches(&keyboard_state))\n\n })\n\n .next()\n\n .and_then(|(_, callback)| (callback)(info))\n\n}\n", "file_path": "azul/src/window_state.rs", "rank": 74, "score": 139713.54353139712 }, { "content": "/// Parses an XML string and returns a `Dom` with the components instantiated in the `<app></app>`\n\npub fn str_to_dom<T>(\n\n xml: &str,\n\n component_map: &mut XmlComponentMap<T>,\n\n) -> Result<Dom<T>, XmlParseError> {\n\n let root_nodes = parse_xml_string(xml)?;\n\n get_xml_components(&root_nodes, component_map)?;\n\n let app_node = get_app_node(&root_nodes)?;\n\n render_dom_from_app_node(&app_node, component_map).map_err(|e| e.into())\n\n}\n\n\n", "file_path": "azul/src/xml.rs", "rank": 75, "score": 139704.2181809758 }, { "content": "/// Returns all node IDs where the children overflow the parent, together with the\n\n/// `(parent_rect, child_rect)` - the child rect is the sum of the children.\n\n///\n\n/// TODO: The performance of this function can be theoretically improved:\n\n///\n\n/// - Unioning the rectangles is heavier than just looping through the children and\n\n/// summing up their width / height / padding + margin.\n\n/// - Scroll nodes 
only need to be inserted if the parent doesn't have `overflow: hidden`\n\n/// activated\n\n/// - Overflow for X and Y needs to be tracked seperately (for overflow-x / overflow-y separation),\n\n/// so there we'd need to track in which direction the inner_rect is overflowing.\n\nfn get_nodes_that_need_scroll_clip<'a, T: 'a>(\n\n node_hierarchy: &NodeHierarchy,\n\n display_list_rects: &NodeDataContainer<DisplayRectangle<'a>>,\n\n dom_rects: &NodeDataContainer<NodeData<T>>,\n\n layouted_rects: &NodeDataContainer<PositionedRectangle>,\n\n parents: &[(usize, NodeId)],\n\n pipeline_id: PipelineId,\n\n) -> ScrolledNodes {\n\n use azul_css::Overflow;\n\n\n\n let mut nodes = BTreeMap::new();\n\n let mut tags_to_node_ids = BTreeMap::new();\n\n\n\n for (_, parent) in parents {\n\n let parent_rect = &layouted_rects[*parent];\n\n\n\n let children_scroll_rect = match parent_rect.bounds.get_scroll_rect(\n\n parent\n\n .children(&node_hierarchy)\n\n .map(|child_id| layouted_rects[child_id].bounds),\n", "file_path": "azul/src/display_list.rs", "rank": 76, "score": 139613.07167223239 }, { "content": "// Calculates the layout for one word block\n\npub fn curved_vector_text_to_vertices(\n\n char_offsets: &[(f32, f32)],\n\n char_rotations: &[BezierCharacterRotation],\n\n mut vertex_buffers: Vec<VertexBuffers<SvgVert, u32>>,\n\n) -> VerticesIndicesBuffer {\n\n vertex_buffers\n\n .iter_mut()\n\n .zip(char_rotations.into_iter())\n\n .zip(char_offsets.iter())\n\n .for_each(|((vertex_buf, char_rot), char_offset)| {\n\n let (char_offset_x, char_offset_y) = char_offset; // weird borrow issue\n\n // 1. Rotate individual characters inside of the word\n\n let (char_sin, char_cos) = (char_rot.0.sin(), char_rot.0.cos());\n\n rotate_vertex_buffer(&mut vertex_buf.vertices, char_sin, char_cos);\n\n // 2. 
Transform characters to their respective positions\n\n transform_vertex_buffer(&mut vertex_buf.vertices, *char_offset_x, *char_offset_y);\n\n });\n\n\n\n join_vertex_buffers(&vertex_buffers)\n\n}\n", "file_path": "azul-widgets/src/svg.rs", "rank": 77, "score": 139466.62039240103 }, { "content": "#[allow(unused_variables)]\n\npub fn image_source_get_bytes(\n\n image_source: &ImageSource,\n\n) -> Result<(WrImageData, WrImageDescriptor), ImageReloadError> {\n\n use crate::wr_translate::wr_translate_image_format;\n\n\n\n match image_source {\n\n ImageSource::Embedded(bytes) => {\n\n #[cfg(feature = \"image_loading\")]\n\n {\n\n decode_image_data(bytes.to_vec()).map_err(|e| ImageReloadError::DecodingError(e))\n\n }\n\n #[cfg(not(feature = \"image_loading\"))]\n\n {\n\n Err(ImageReloadError::DecodingModuleNotActive)\n\n }\n\n }\n\n ImageSource::Raw(raw_image) => {\n\n let opaque = is_image_opaque(raw_image.data_format, &raw_image.pixels[..]);\n\n let allow_mipmaps = true;\n\n let descriptor = WrImageDescriptor::new(\n", "file_path": "azul/src/app_resources.rs", "rank": 78, "score": 139466.62039240103 }, { "content": "#[cfg(target_os = \"linux\")]\n\nfn get_xft_dpi() -> Option<f32> {\n\n // TODO!\n\n /*\n\n #include <X11/Xlib.h>\n\n #include <X11/Xatom.h>\n\n #include <X11/Xresource.h>\n\n\n\n double _glfwPlatformGetMonitorDPI(_GLFWmonitor* monitor)\n\n {\n\n char *resourceString = XResourceManagerString(_glfw.x11.display);\n\n XrmDatabase db;\n\n XrmValue value;\n\n char *type = NULL;\n\n double dpi = 0.0;\n\n\n\n XrmInitialize(); /* Need to initialize the DB before calling Xrm* functions */\n\n\n\n db = XrmGetStringDatabase(resourceString);\n\n\n\n if (resourceString) {\n", "file_path": "azul/src/window.rs", "rank": 79, "score": 139192.91790302406 }, { "content": "/// Quick helper function to generate the vertices for a black circle at runtime\n\npub fn quick_circle(circle: SvgCircle, fill_color: ColorU) -> SvgLayerResourceDirect {\n\n let style = 
SvgStyle::filled(fill_color);\n\n tesselate_polygon_data(&[SvgLayerType::Circle(circle)], style)\n\n}\n\n\n", "file_path": "azul-widgets/src/svg.rs", "rank": 80, "score": 138786.37725745892 }, { "content": "/// Quick helper function to generate the layer for **multiple** circles (in one draw call)\n\npub fn quick_circles(circles: &[SvgCircle], fill_color: ColorU) -> SvgLayerResourceDirect {\n\n let circles = circles\n\n .iter()\n\n .map(|c| SvgLayerType::Circle(*c))\n\n .collect::<Vec<_>>();\n\n let style = SvgStyle::filled(fill_color);\n\n tesselate_polygon_data(&circles, style)\n\n}\n\n\n", "file_path": "azul-widgets/src/svg.rs", "rank": 81, "score": 138786.37725745892 }, { "content": "fn advance_caret(caret: &mut f32, line_number: &mut usize, intersection: LineCaretIntersection) {\n\n use self::LineCaretIntersection::*;\n\n match intersection {\n\n NoIntersection => {}\n\n AdvanceCaretTo(x) => {\n\n *caret = x;\n\n }\n\n PushCaretOntoNextLine(num_lines, x) => {\n\n *line_number += num_lines;\n\n *caret = x;\n\n }\n\n }\n\n}\n\n\n", "file_path": "azul/src/text_layout.rs", "rank": 82, "score": 137879.295709381 }, { "content": "#[inline]\n\npub fn calculate_vertical_shift_multiplier(\n\n vertical_alignment: StyleTextAlignmentVert,\n\n) -> Option<f32> {\n\n use azul_css::StyleTextAlignmentVert::*;\n\n match vertical_alignment {\n\n Top => None,\n\n Center => Some(0.5), // move the line by the half width\n\n Bottom => Some(1.0), // move the line by the full width\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, Ord, PartialOrd)]\n\n#[repr(C)]\n\npub struct ExternalScrollId(pub u64, pub PipelineId);\n\n\n\n#[derive(Default, Debug, Clone)]\n\npub struct ScrolledNodes {\n\n pub overflowing_nodes: BTreeMap<NodeId, OverflowingScrollNode>,\n\n pub tags_to_node_ids: BTreeMap<ScrollTagId, NodeId>,\n\n}\n", "file_path": "azul-core/src/ui_solver.rs", "rank": 83, "score": 137324.75109558878 }, { "content": "#[inline]\n\npub fn 
calculate_horizontal_shift_multiplier(\n\n horizontal_alignment: StyleTextAlignmentHorz,\n\n) -> Option<f32> {\n\n use azul_css::StyleTextAlignmentHorz::*;\n\n match horizontal_alignment {\n\n Left => None,\n\n Center => Some(0.5), // move the line by the half width\n\n Right => Some(1.0), // move the line by the full width\n\n }\n\n}\n\n\n", "file_path": "azul-core/src/ui_solver.rs", "rank": 84, "score": 137324.75109558878 }, { "content": "/// Parses an XML string and returns a `String`, which contains the Rust source code\n\n/// (i.e. it compiles the XML to valid Rust)\n\npub fn str_to_rust_code<T>(\n\n xml: &str,\n\n imports: &str,\n\n component_map: &mut XmlComponentMap<T>,\n\n) -> Result<String, CompileError> {\n\n const HEADER_WARNING: &str = \"/// Auto-generated UI source code\";\n\n\n\n let root_nodes = parse_xml_string(xml).map_err(|e| format!(\"XML parse error: {}\", e))?;\n\n get_xml_components(&root_nodes, component_map)\n\n .map_err(|e| format!(\"Error parsing component: {}\", e))?;\n\n let app_node =\n\n get_app_node(&root_nodes).map_err(|e| format!(\"Could not find <app /> node: {}\", e))?;\n\n let components_source = compile_components_to_rust_code(&component_map)?;\n\n let app_source = compile_app_node_to_rust_code(&app_node, &component_map)?;\n\n\n\n let source_code = format!(\n\n \"{}\\r\\n{}\\r\\n{}\\r\\n{}\",\n\n HEADER_WARNING,\n\n imports,\n\n compile_components(components_source),\n\n app_source,\n\n );\n\n\n\n Ok(source_code)\n\n}\n\n\n", "file_path": "azul/src/xml.rs", "rank": 85, "score": 137300.53066087922 }, { "content": "#[allow(unused_variables)]\n\nfn get_hidpi_factor(window: &GliumWindow, events_loop: &EventLoop<()>) -> (f32, f32) {\n\n let monitor = window.current_monitor();\n\n let winit_hidpi_factor = monitor.hidpi_factor();\n\n\n\n #[cfg(target_os = \"linux\")]\n\n {\n\n (\n\n linux_get_hidpi_factor(&monitor, &events_loop),\n\n winit_hidpi_factor as f32,\n\n )\n\n }\n\n #[cfg(not(target_os = \"linux\"))]\n\n {\n\n 
(winit_hidpi_factor as f32, winit_hidpi_factor as f32)\n\n }\n\n}\n\n\n", "file_path": "azul/src/window.rs", "rank": 86, "score": 135663.89031424548 }, { "content": "/// Checks wheter a given input is enclosed in parentheses, prefixed\n\n/// by a certain number of stopwords.\n\n///\n\n/// On success, returns what the stopword was + the string inside the braces\n\n/// on failure returns None.\n\n///\n\n/// ```rust\n\n/// # use azul_css_parser::parse_parentheses;\n\n/// # use azul_css_parser::ParenthesisParseError::*;\n\n/// // Search for the nearest \"abc()\" brace\n\n/// assert_eq!(parse_parentheses(\"abc(def(g))\", &[\"abc\"]), Ok((\"abc\", \"def(g)\")));\n\n/// assert_eq!(parse_parentheses(\"abc(def(g))\", &[\"def\"]), Err(StopWordNotFound(\"abc\")));\n\n/// assert_eq!(parse_parentheses(\"def(ghi(j))\", &[\"def\"]), Ok((\"def\", \"ghi(j)\")));\n\n/// assert_eq!(parse_parentheses(\"abc(def(g))\", &[\"abc\", \"def\"]), Ok((\"abc\", \"def(g)\")));\n\n/// ```\n\npub fn parse_parentheses<'a>(\n\n input: &'a str,\n\n stopwords: &[&'static str],\n\n) -> Result<(&'static str, &'a str), ParenthesisParseError<'a>> {\n\n use self::ParenthesisParseError::*;\n\n\n\n let input = input.trim();\n\n if input.is_empty() {\n\n return Err(EmptyInput);\n\n }\n\n\n\n let first_open_brace = input.find('(').ok_or(NoOpeningBraceFound)?;\n\n let found_stopword = &input[..first_open_brace];\n\n\n\n // CSS does not allow for space between the ( and the stopword, so no .trim() here\n\n let mut validated_stopword = None;\n\n for stopword in stopwords {\n\n if found_stopword == *stopword {\n\n validated_stopword = Some(stopword);\n\n break;\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 87, "score": 135040.2803806846 }, { "content": "// parses a single gradient such as \"to right, 50px\"\n\npub fn parse_gradient<'a>(\n\n input: &'a str,\n\n background_type: GradientType,\n\n) -> Result<StyleBackgroundContent, CssBackgroundParseError<'a>> {\n\n let input = input.trim();\n\n\n\n 
// Splitting the input by \",\" doesn't work since rgba() might contain commas\n\n let mut comma_separated_items = Vec::<&str>::new();\n\n let mut current_input = &input[..];\n\n\n\n 'outer: loop {\n\n let (skip_next_braces_result, character_was_found) =\n\n match skip_next_braces(&current_input, ',') {\n\n Some(s) => s,\n\n None => break 'outer,\n\n };\n\n let new_push_item = if character_was_found {\n\n &current_input[..skip_next_braces_result]\n\n } else {\n\n &current_input[..]\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 88, "score": 135035.7030796117 }, { "content": "pub fn new_opengl_texture_id() -> usize {\n\n LAST_OPENGL_ID.fetch_add(1, Ordering::SeqCst)\n\n}\n\n\n\nlazy_static! {\n\n\n\n /// Non-cleaned up textures. When a GlTexture is registered, it has to stay active as long\n\n /// as WebRender needs it for drawing. To transparently do this, we store the epoch that the\n\n /// texture was originally created with, and check, **after we have drawn the frame**,\n\n /// if there are any textures that need cleanup.\n\n ///\n\n /// Because the Texture2d is wrapped in an Rc, the destructor (which cleans up the OpenGL\n\n /// texture) does not run until we remove the textures\n\n ///\n\n /// Note: Because textures could be used after the current draw call (ex. for scrolling),\n\n /// the ACTIVE_GL_TEXTURES are indexed by their epoch. Use `renderer.flush_pipeline_info()`\n\n /// to see which textures are still active and which ones can be safely removed.\n\n ///\n\n /// See: https://github.com/servo/webrender/issues/2940\n\n pub(crate) static ref ACTIVE_GL_TEXTURES: Mutex<FastHashMap<Epoch, FastHashMap<ExternalImageId, ActiveTexture>>> = Mutex::new(FastHashMap::default());\n", "file_path": "azul/src/compositor.rs", "rank": 89, "score": 135030.35679511842 }, { "content": "/// ContextBuilder is sadly not clone-able, which is why it has to be re-created\n\n/// every time you want to create a new context. 
The goals is to not crash on\n\n/// platforms that don't have VSync or SRGB (which are OpenGL extensions) installed.\n\n///\n\n/// Secondly, in order to support multi-window apps, all windows need to share\n\n/// the same OpenGL context - i.e. `builder.with_shared_lists(some_gl_window.context());`\n\n///\n\n/// `allow_sharing_context` should only be true for the root window - so that\n\n/// we can be sure the shared context can't be re-shared by the created window. Only\n\n/// the root window (via `FakeDisplay`) is allowed to manage the OpenGL context.\n\nfn create_context_builder<'a>(vsync: bool, srgb: bool) -> ContextBuilder<'a, NotCurrent> {\n\n // See #33 - specifying a specific OpenGL version\n\n // makes winit crash on older Intel drivers, which is why we\n\n // don't specify a specific OpenGL version here\n\n let mut builder = ContextBuilder::new();\n\n\n\n // #[cfg(debug_assertions)] {\n\n // builder = builder.with_gl_debug_flag(true);\n\n // }\n\n\n\n // #[cfg(not(debug_assertions))] {\n\n builder = builder.with_gl_debug_flag(false);\n\n // }\n\n\n\n if vsync {\n\n builder = builder.with_vsync(true);\n\n }\n\n\n\n if srgb {\n\n builder = builder.with_srgb(true);\n\n }\n\n\n\n builder\n\n}\n\n\n", "file_path": "azul/src/window.rs", "rank": 90, "score": 134513.60800896166 }, { "content": "/// Wrapper around `message_box_ok` with the default title \"Info\" + an info icon.\n\n///\n\n/// Note: If you are too young to remember Visual Basics glorious `MsgBox`\n\n/// then I pity you. 
Those were the days.\n\npub fn msg_box(content: &str) {\n\n msg_box_ok(\"Info\", content, MessageBoxIcon::Info);\n\n}\n\n\n\n/// Color value (hex or rgb) to open the `color_chooser_dialog` with\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\npub enum ColorValue<'a> {\n\n Hex(&'a str),\n\n RGB(&'a [u8; 3]),\n\n}\n\n\n\n/// Default color in the color picker\n\nconst DEFAULT_COLOR: [u8; 3] = [0, 0, 0];\n\n\n\nimpl<'a> Default for ColorValue<'a> {\n\n fn default() -> Self {\n\n ColorValue::RGB(&DEFAULT_COLOR)\n\n }\n\n}\n\n\n\nimpl<'a> Into<DefaultColorValue<'a>> for ColorValue<'a> {\n\n fn into(self) -> DefaultColorValue<'a> {\n\n match self {\n\n ColorValue::Hex(s) => DefaultColorValue::Hex(s),\n\n ColorValue::RGB(r) => DefaultColorValue::RGB(r),\n\n }\n\n }\n\n}\n\n\n", "file_path": "azul/src/dialogs.rs", "rank": 91, "score": 133479.71921883733 }, { "content": "/// Main parsing function, takes a stringified key / value pair and either\n\n/// returns the parsed value or an error\n\n///\n\n/// ```rust\n\n/// # extern crate azul_css_parser;\n\n/// # extern crate azul_css;\n\n///\n\n/// # use azul_css_parser;\n\n/// # use azul_css::{LayoutWidth, PixelValue, CssPropertyType, CssPropertyValue, CssProperty};\n\n/// assert_eq!(\n\n/// azul_css_parser::parse_css_property(CssPropertyType::Width, \"500px\"),\n\n/// Ok(CssProperty::Width(CssPropertyValue::Exact(LayoutWidth(PixelValue::px(500.0)))))\n\n/// )\n\n/// ```\n\npub fn parse_css_property<'a>(\n\n key: CssPropertyType,\n\n value: &'a str,\n\n) -> Result<CssProperty, CssParsingError<'a>> {\n\n use self::CssPropertyType::*;\n\n let value = value.trim();\n\n Ok(match value {\n\n \"auto\" => CssProperty::auto(key),\n\n \"none\" => CssProperty::none(key),\n\n \"initial\" => CssProperty::initial(key).into(),\n\n \"inherit\" => CssProperty::inherit(key).into(),\n\n value => match key {\n\n TextColor => parse_style_text_color(value)?.into(),\n\n FontSize => parse_style_font_size(value)?.into(),\n\n FontFamily => 
parse_style_font_family(value)?.into(),\n\n TextAlign => parse_layout_text_align(value)?.into(),\n\n LetterSpacing => parse_style_letter_spacing(value)?.into(),\n\n LineHeight => parse_style_line_height(value)?.into(),\n\n WordSpacing => parse_style_word_spacing(value)?.into(),\n\n TabWidth => parse_style_tab_width(value)?.into(),\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 92, "score": 132892.67362258426 }, { "content": "/// Parse a color of the form 'rgb([0-255], [0-255], [0-255])', or 'rgba([0-255], [0-255], [0-255],\n\n/// [0.0-1.0])' without the leading 'rgb[a](' or trailing ')'. Alpha defaults to 255.\n\npub fn parse_color_rgb<'a>(\n\n input: &'a str,\n\n parse_alpha: bool,\n\n) -> Result<ColorU, CssColorParseError<'a>> {\n\n let mut components = input.split(',').map(|c| c.trim());\n\n let rgb_color = parse_color_rgb_components(&mut components)?;\n\n let a = if parse_alpha {\n\n parse_alpha_component(&mut components)?\n\n } else {\n\n 255\n\n };\n\n if let Some(arg) = components.next() {\n\n return Err(CssColorParseError::ExtraArguments(arg));\n\n }\n\n Ok(ColorU { a, ..rgb_color })\n\n}\n\n\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 93, "score": 132887.86332753065 }, { "content": "/// Parse a color of the form 'hsl([0.0-360.0]deg, [0-100]%, [0-100]%)', or 'hsla([0.0-360.0]deg, [0-100]%, [0-100]%, [0.0-1.0])' without the leading 'hsl[a](' or trailing ')'. 
Alpha defaults to 255.\n\npub fn parse_color_hsl<'a>(\n\n input: &'a str,\n\n parse_alpha: bool,\n\n) -> Result<ColorU, CssColorParseError<'a>> {\n\n let mut components = input.split(',').map(|c| c.trim());\n\n let rgb_color = parse_color_hsl_components(&mut components)?;\n\n let a = if parse_alpha {\n\n parse_alpha_component(&mut components)?\n\n } else {\n\n 255\n\n };\n\n if let Some(arg) = components.next() {\n\n return Err(CssColorParseError::ExtraArguments(arg));\n\n }\n\n Ok(ColorU { a, ..rgb_color })\n\n}\n\n\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 94, "score": 132887.76083646764 }, { "content": "// parses \"red\" , \"red 5%\"\n\npub fn parse_gradient_stop<'a>(\n\n input: &'a str,\n\n) -> Result<GradientStopPre, CssGradientStopParseError<'a>> {\n\n use self::CssGradientStopParseError::*;\n\n\n\n let input = input.trim();\n\n\n\n // Color functions such as \"rgba(...)\" can contain spaces, so we parse right-to-left.\n\n let (color_str, percentage_str) = match (input.rfind(')'), input.rfind(char::is_whitespace)) {\n\n (Some(closing_brace), None) if closing_brace < input.len() - 1 => {\n\n // percentage after closing brace, eg. \"rgb(...)50%\"\n\n (\n\n &input[..=closing_brace],\n\n Some(&input[(closing_brace + 1)..]),\n\n )\n\n }\n\n (None, Some(last_ws)) => {\n\n // percentage after last whitespace, eg. \"... 
50%\"\n\n (&input[..=last_ws], Some(&input[(last_ws + 1)..]))\n\n }\n", "file_path": "azul-css-parser/src/css_parser.rs", "rank": 95, "score": 132882.87308796452 }, { "content": "/// The UiState contains all the tags (for hit-testing) as well as the mapping\n\n/// from Hit-testing tags to NodeIds (which are important for filtering input events\n\n/// and routing input events to the callbacks).\n\npub fn ui_state_from_dom<T>(\n\n dom: Dom<T>,\n\n parent_dom_node_id: Option<(DomId, NodeId)>,\n\n) -> UiState<T> {\n\n use crate::dom::{self, new_tag_id};\n\n\n\n // NOTE: Originally it was allowed to create a DOM with\n\n // multiple root elements using `add_sibling()` and `with_sibling()`.\n\n //\n\n // However, it was decided to remove these functions (in commit #586933),\n\n // as they aren't practical (you can achieve the same thing with one\n\n // wrapper div and multiple add_child() calls) and they create problems\n\n // when laying out elements since add_sibling() essentially modifies the\n\n // space that the parent can distribute, which in code, simply looks weird\n\n // and led to bugs.\n\n //\n\n // It is assumed that the DOM returned by the user has exactly one root node\n\n // with no further siblings and that the root node is the Node with the ID 0.\n\n\n\n // All tags that have can be focused (necessary for hit-testing)\n", "file_path": "azul-core/src/ui_state.rs", "rank": 96, "score": 132882.87308796452 }, { "content": "pub fn new_tag_id() -> TagId {\n\n TAG_ID.fetch_add(1, Ordering::SeqCst) as TagId\n\n}\n\n\n", "file_path": "azul-core/src/dom.rs", "rank": 97, "score": 132882.87308796452 }, { "content": "pub trait ToNumber {\n\n fn to_number(self) -> Number;\n\n}\n\n\n", "file_path": "azul-layout/src/number.rs", "rank": 98, "score": 132835.68227789525 }, { "content": "pub fn table_view_on_click<T>(_info: DefaultCallbackInfo<T, TableViewState>) -> CallbackReturn {\n\n println!(\"table was clicked\");\n\n DontRedraw\n\n}\n\n\n", "file_path": 
"azul-widgets/src/table_view.rs", "rank": 99, "score": 132384.54432241968 } ]
Rust
polars/polars-arrow/src/builder.rs
Spirans/polars
7774f419fdbf79bc4c4ec3bd6f0f72d87b32a70c
use crate::bit_util; use crate::vec::AlignedVec; pub use arrow::array::LargeStringBuilder; use arrow::array::{ ArrayBuilder, ArrayData, ArrayRef, BooleanArray, LargeStringArray, PrimitiveArray, }; use arrow::buffer::{Buffer, MutableBuffer}; use arrow::datatypes::{ArrowPrimitiveType, DataType}; use std::any::Any; use std::mem; use std::sync::Arc; #[derive(Debug)] pub struct BooleanBufferBuilder { buffer: MutableBuffer, len: usize, } impl BooleanBufferBuilder { #[inline] pub fn new(capacity: usize) -> Self { let byte_capacity = bit_util::ceil(capacity, 8); let buffer = MutableBuffer::from_len_zeroed(byte_capacity); Self { buffer, len: 0 } } pub fn len(&self) -> usize { self.len } pub fn is_empty(&self) -> bool { self.len == 0 } pub fn capacity(&self) -> usize { self.buffer.capacity() * 8 } #[inline] pub fn advance(&mut self, additional: usize) { let new_len = self.len + additional; let new_len_bytes = bit_util::ceil(new_len, 8); if new_len_bytes > self.buffer.len() { self.buffer.resize(new_len_bytes, 0); } self.len = new_len; } #[inline] pub fn reserve(&mut self, additional: usize) { let capacity = self.len + additional; if capacity > self.capacity() { let additional = bit_util::ceil(capacity, 8) - self.buffer.len(); self.buffer.reserve(additional); } } #[inline] pub fn append(&mut self, v: bool) { self.advance(1); if v { unsafe { bit_util::set_bit_raw(self.buffer.as_mut_ptr(), self.len - 1) }; } } #[inline] pub fn append_n(&mut self, additional: usize, v: bool) { self.advance(additional); if additional > 0 && v { let offset = self.len() - additional; (0..additional).for_each(|i| unsafe { bit_util::set_bit_raw(self.buffer.as_mut_ptr(), offset + i) }) } } #[inline] pub fn append_slice(&mut self, slice: &[bool]) { let additional = slice.len(); self.advance(additional); let offset = self.len() - additional; for (i, v) in slice.iter().enumerate() { if *v { unsafe { bit_util::set_bit_raw(self.buffer.as_mut_ptr(), offset + i) } } } } pub fn shrink_to_fit(&mut self) { let 
byte_len = bit_util::ceil(self.len(), 8); self.buffer.resize(byte_len, 0) } #[inline] pub fn finish(&mut self) -> Buffer { let buf = std::mem::replace(&mut self.buffer, MutableBuffer::new(0)); self.len = 0; buf.into() } } #[derive(Debug)] pub struct BooleanArrayBuilder { values_builder: BooleanBufferBuilder, bitmap_builder: BooleanBufferBuilder, } impl BooleanArrayBuilder { pub fn new(capacity: usize) -> Self { Self { values_builder: BooleanBufferBuilder::new(capacity), bitmap_builder: BooleanBufferBuilder::new(capacity), } } pub fn new_no_nulls(capacity: usize) -> Self { Self { values_builder: BooleanBufferBuilder::new(capacity), bitmap_builder: BooleanBufferBuilder::new(0), } } pub fn capacity(&self) -> usize { self.values_builder.capacity() } pub fn append_value(&mut self, v: bool) { self.bitmap_builder.append(true); self.values_builder.append(v); } pub fn append_null(&mut self) { self.bitmap_builder.append(false); self.values_builder.advance(1); } pub fn append_option(&mut self, v: Option<bool>) { match v { None => self.append_null(), Some(v) => self.append_value(v), }; } pub fn append_slice(&mut self, v: &[bool]) { self.bitmap_builder.append_n(v.len(), true); self.values_builder.append_slice(v); } pub fn append_values(&mut self, values: &[bool], is_valid: &[bool]) { assert_eq!(values.len(), is_valid.len()); self.bitmap_builder.append_slice(is_valid); self.values_builder.append_slice(values); } pub fn shrink_to_fit(&mut self) { self.values_builder.shrink_to_fit(); self.bitmap_builder.shrink_to_fit(); } pub fn finish_with_null_buffer(&mut self, buffer: Buffer) -> BooleanArray { self.shrink_to_fit(); let len = self.len(); let data = ArrayData::builder(DataType::Boolean) .len(len) .add_buffer(self.values_builder.finish()) .null_bit_buffer(buffer) .build(); BooleanArray::from(data) } pub fn finish(&mut self) -> BooleanArray { self.shrink_to_fit(); let len = self.len(); let null_bit_buffer = self.bitmap_builder.finish(); let null_count = len - 
null_bit_buffer.count_set_bits(); let mut builder = ArrayData::builder(DataType::Boolean) .len(len) .add_buffer(self.values_builder.finish()); if null_count > 0 { builder = builder.null_bit_buffer(null_bit_buffer); } let data = builder.build(); BooleanArray::from(data) } } impl ArrayBuilder for BooleanArrayBuilder { fn as_any(&self) -> &dyn Any { self } fn as_any_mut(&mut self) -> &mut dyn Any { self } fn into_box_any(self: Box<Self>) -> Box<dyn Any> { self } fn len(&self) -> usize { self.values_builder.len() } fn is_empty(&self) -> bool { self.values_builder.is_empty() } fn finish(&mut self) -> ArrayRef { Arc::new(self.finish()) } } pub struct PrimitiveArrayBuilder<T> where T: ArrowPrimitiveType, T::Native: Default, { values: AlignedVec<T::Native>, bitmap_builder: BooleanBufferBuilder, null_count: usize, } impl<T> PrimitiveArrayBuilder<T> where T: ArrowPrimitiveType, T::Native: Default, { pub fn new(capacity: usize) -> Self { let values = AlignedVec::<T::Native>::with_capacity_aligned(capacity); let bitmap_builder = BooleanBufferBuilder::new(capacity); Self { values, bitmap_builder, null_count: 0, } } pub fn new_no_nulls(capacity: usize) -> Self { let values = AlignedVec::<T::Native>::with_capacity_aligned(capacity); let bitmap_builder = BooleanBufferBuilder::new(0); Self { values, bitmap_builder, null_count: 0, } } #[inline] pub fn append_value(&mut self, v: T::Native) { self.values.push(v); self.bitmap_builder.append(true); } #[inline] pub fn append_slice(&mut self, other: &[T::Native]) { self.values.extend_from_slice(other) } #[inline] pub fn append_null(&mut self) { self.bitmap_builder.append(false); self.values.push(Default::default()); self.null_count += 1; } pub fn shrink_to_fit(&mut self) { self.values.shrink_to_fit(); self.bitmap_builder.shrink_to_fit(); } pub fn finish_with_null_buffer(&mut self, buffer: Buffer) -> PrimitiveArray<T> { self.shrink_to_fit(); let values = mem::take(&mut self.values); values.into_primitive_array(Some(buffer)) } pub fn 
finish(&mut self) -> PrimitiveArray<T> { self.shrink_to_fit(); let values = mem::take(&mut self.values); let null_bit_buffer = self.bitmap_builder.finish(); let buf = if self.null_count == 0 { None } else { Some(null_bit_buffer) }; values.into_primitive_array(buf) } } impl<T> ArrayBuilder for PrimitiveArrayBuilder<T> where T: ArrowPrimitiveType, { fn len(&self) -> usize { self.values.len() } fn is_empty(&self) -> bool { self.values.is_empty() } fn finish(&mut self) -> ArrayRef { Arc::new(PrimitiveArrayBuilder::finish(self)) } fn as_any(&self) -> &dyn Any { self } fn as_any_mut(&mut self) -> &mut dyn Any { self } fn into_box_any(self: Box<Self>) -> Box<dyn Any> { self } } #[derive(Debug)] pub struct NoNullLargeStringBuilder { values: AlignedVec<u8>, offsets: AlignedVec<i64>, } impl NoNullLargeStringBuilder { pub fn with_capacity(values_capacity: usize, list_capacity: usize) -> Self { let mut offsets = AlignedVec::with_capacity_aligned(list_capacity + 1); offsets.push(0); Self { values: AlignedVec::with_capacity_aligned(values_capacity), offsets, } } pub fn extend_from_slices(&mut self, values: &[u8], offsets: &[i64]) { self.values.extend_from_slice(values); self.offsets.extend_from_slice(offsets); } #[inline] pub fn append_value(&mut self, value: &str) { self.values.extend_from_slice(value.as_bytes()); self.offsets.push(self.values.len() as i64); } pub fn finish(&mut self) -> LargeStringArray { let values = mem::take(&mut self.values); let offsets = mem::take(&mut self.offsets); let offsets_len = offsets.len() - 1; let buf_offsets = offsets.into_arrow_buffer(); let buf_values = values.into_arrow_buffer(); assert_eq!(buf_values.len(), buf_values.capacity()); assert_eq!(buf_offsets.len(), buf_offsets.capacity()); let arraydata = ArrayData::builder(DataType::LargeUtf8) .len(offsets_len) .add_buffer(buf_offsets) .add_buffer(buf_values) .build(); LargeStringArray::from(arraydata) } } #[cfg(test)] mod test { use super::*; use arrow::array::Array; use 
arrow::datatypes::UInt32Type; #[test] fn test_primitive_builder() { let mut builder = PrimitiveArrayBuilder::<UInt32Type>::new(10); builder.append_value(0); builder.append_null(); let out = builder.finish(); assert_eq!(out.len(), 2); assert_eq!(out.null_count(), 1); dbg!(out); } #[test] fn test_string_builder() { let mut builder = LargeStringBuilder::with_capacity(1, 3); builder.append_value("foo").unwrap(); builder.append_null().unwrap(); builder.append_value("bar").unwrap(); let out = builder.finish(); let vals = out.iter().collect::<Vec<_>>(); assert_eq!(vals, &[Some("foo"), None, Some("bar")]); } }
use crate::bit_util; use crate::vec::AlignedVec; pub use arrow::array::LargeStringBuilder; use arrow::array::{ ArrayBuilder, ArrayData, ArrayRef, BooleanArray, LargeStringArray, PrimitiveArray, }; use arrow::buffer::{Buffer, MutableBuffer}; use arrow::datatypes::{ArrowPrimitiveType, DataType}; use std::any::Any; use std::mem; use std::sync::Arc; #[derive(Debug)] pub struct BooleanBufferBuilder { buffer: MutableBuffer, len: usize, } impl BooleanBufferBuilder { #[inline] pub fn new(capacity: usize) -> Self { let byte_capacity = bit_util::ceil(capacity, 8); let buffer = MutableBuffer::from_len_zeroed(byte_capacity); Self { buffer, len: 0 } } pub fn len(&self) -> usize { self.len } pub fn is_empty(&self) -> bool { self.len == 0 } pub fn capacity(&self) -> usize { self.buffer.capacity() * 8 } #[inline] pub fn advance(&mut self, additional: usize) { let new_len = self.len + additional; let new_len_bytes = bit_util::ceil(new_len, 8); if new_len_bytes > self.buffer.len() { self.buffer.resize(new_len_bytes, 0); } self.len = new_len; } #[inline] pub fn reserve(&mut self, additional: usize) { let capacity = self.len + additional; if capacity > self.capacity() { let additional = bit_util::ceil(capacity, 8) - self.buffer.len(); self.buffer.reserve(additional); } } #[inline] pub fn append(&mut self, v: bool) { self.advance(1); if v { unsafe { bit_util::set_bit_raw(self.buffer.as_mut_ptr(), self.len - 1) }; } } #[inline] pub fn append_n(&mut self, additional: usize, v: bool) { self.advance(additional); if additional > 0 && v { let offset = self.len() - additional; (0..additional).for_each(|i| unsafe { bit_util::set_bit_raw(self.buffer.as_mut_ptr(), offset + i) }) } } #[inline] pub fn append_slice(&mut self, slice: &[bool]) { let additional = slice.len(); self.advance(additional); let offset = self.len() - additional; for (i, v) in slice.iter().enumerate() { if *v { unsafe { bit_util::set_bit_raw(self.buffer.as_mut_ptr(), offset + i) } } } } pub fn shrink_to_fit(&mut self) { let 
byte_len = bit_util::ceil(self.len(), 8); self.buffer.resize(byte_len, 0) } #[inline] pub fn finish(&mut self) -> Buffer { let buf = std::mem::replace(&mut self.buffer, MutableBuffer::new(0)); self.len = 0; buf.into() } } #[derive(Debug)] pub struct BooleanArrayBuilder { values_builder: BooleanBufferBuilder, bitmap_builder: BooleanBufferBuilder, } impl BooleanArrayBuilder { pub fn new(capacity: usize) -> Self { Self { values_builder: BooleanBufferBuilder::new(capacity), bitmap_builder: BooleanBufferBuilder::new(capacity), } } pub fn new_no_nulls(capacity: usize) -> Self { Self { values_builder: BooleanBufferBuilder::new(capacity), bitmap_builder: BooleanBufferBuilder::new(0), } } pub fn capacity(&self) -> usize { self.values_builder.capacity() } pub fn append_value(&mut self, v: bool) { self.bitmap_builder.append(true); self.values_builder.append(v); } pub fn append_null(&mut self) { self.bitmap_builder.append(false); self.values_builder.advance(1); } pub fn append_option(&mut self, v: Option<bool>) { match v { None => self.append_null(), Some(v) => self.append_value(v), }; } pub fn append_slice(&mut self, v: &[bool]) { self.bitmap_builder.append_n(v.len(), true); self.values_builder.append_slice(v); } pub fn append_values(&mut self, values: &[bool], is_valid: &[bool]) { assert_eq!(values.len(), is_valid.len()); self.bitmap_builder.append_slice(is_valid); self.values_builder.append_slice(values); } pub fn shrink_to_fit(&mut self) { self.values_builder.shrink_to_fit(); self.bitmap_builder.shrink_to_fit(); } pub fn finish_with_null_buffer(&mut self, buffer: Buffer) -> BooleanArray { self.shrink_to_fit(); let len = self.len(); let data = ArrayData::builder(DataType::Boolean) .len(len) .add_buffer(self.values_builder.finish()) .null_bit_buffer(buffer) .build(); BooleanArray::from(data) } pub fn finish(&mut self) -> BooleanArray { self.shrink_to_fit(); let len = self.len(); let null_bit_buffer = self.bitmap_builder.finish(); let null_count = len - 
null_bit_buffer.count_set_bits(); let mut builder = ArrayData::builder(DataType::Boolean) .len(len) .add_buffer(self.values_builder.finish()); if null_count > 0 { builder = builder.null_bit_buffer(null_bit_buffer); } let data = builder.build(); BooleanArray::from(data) } } impl ArrayBuilder for BooleanArrayBuilder { fn as_any(&self) -> &dyn Any { self } fn as_any_mut(&mut self) -> &mut dyn Any { self } fn into_box_any(self: Box<Self>) -> Box<dyn Any> { self } fn len(&self) -> usize { self.values_builder.len() } fn is_empt
extend_from_slice(values); self.offsets.extend_from_slice(offsets); } #[inline] pub fn append_value(&mut self, value: &str) { self.values.extend_from_slice(value.as_bytes()); self.offsets.push(self.values.len() as i64); } pub fn finish(&mut self) -> LargeStringArray { let values = mem::take(&mut self.values); let offsets = mem::take(&mut self.offsets); let offsets_len = offsets.len() - 1; let buf_offsets = offsets.into_arrow_buffer(); let buf_values = values.into_arrow_buffer(); assert_eq!(buf_values.len(), buf_values.capacity()); assert_eq!(buf_offsets.len(), buf_offsets.capacity()); let arraydata = ArrayData::builder(DataType::LargeUtf8) .len(offsets_len) .add_buffer(buf_offsets) .add_buffer(buf_values) .build(); LargeStringArray::from(arraydata) } } #[cfg(test)] mod test { use super::*; use arrow::array::Array; use arrow::datatypes::UInt32Type; #[test] fn test_primitive_builder() { let mut builder = PrimitiveArrayBuilder::<UInt32Type>::new(10); builder.append_value(0); builder.append_null(); let out = builder.finish(); assert_eq!(out.len(), 2); assert_eq!(out.null_count(), 1); dbg!(out); } #[test] fn test_string_builder() { let mut builder = LargeStringBuilder::with_capacity(1, 3); builder.append_value("foo").unwrap(); builder.append_null().unwrap(); builder.append_value("bar").unwrap(); let out = builder.finish(); let vals = out.iter().collect::<Vec<_>>(); assert_eq!(vals, &[Some("foo"), None, Some("bar")]); } }
y(&self) -> bool { self.values_builder.is_empty() } fn finish(&mut self) -> ArrayRef { Arc::new(self.finish()) } } pub struct PrimitiveArrayBuilder<T> where T: ArrowPrimitiveType, T::Native: Default, { values: AlignedVec<T::Native>, bitmap_builder: BooleanBufferBuilder, null_count: usize, } impl<T> PrimitiveArrayBuilder<T> where T: ArrowPrimitiveType, T::Native: Default, { pub fn new(capacity: usize) -> Self { let values = AlignedVec::<T::Native>::with_capacity_aligned(capacity); let bitmap_builder = BooleanBufferBuilder::new(capacity); Self { values, bitmap_builder, null_count: 0, } } pub fn new_no_nulls(capacity: usize) -> Self { let values = AlignedVec::<T::Native>::with_capacity_aligned(capacity); let bitmap_builder = BooleanBufferBuilder::new(0); Self { values, bitmap_builder, null_count: 0, } } #[inline] pub fn append_value(&mut self, v: T::Native) { self.values.push(v); self.bitmap_builder.append(true); } #[inline] pub fn append_slice(&mut self, other: &[T::Native]) { self.values.extend_from_slice(other) } #[inline] pub fn append_null(&mut self) { self.bitmap_builder.append(false); self.values.push(Default::default()); self.null_count += 1; } pub fn shrink_to_fit(&mut self) { self.values.shrink_to_fit(); self.bitmap_builder.shrink_to_fit(); } pub fn finish_with_null_buffer(&mut self, buffer: Buffer) -> PrimitiveArray<T> { self.shrink_to_fit(); let values = mem::take(&mut self.values); values.into_primitive_array(Some(buffer)) } pub fn finish(&mut self) -> PrimitiveArray<T> { self.shrink_to_fit(); let values = mem::take(&mut self.values); let null_bit_buffer = self.bitmap_builder.finish(); let buf = if self.null_count == 0 { None } else { Some(null_bit_buffer) }; values.into_primitive_array(buf) } } impl<T> ArrayBuilder for PrimitiveArrayBuilder<T> where T: ArrowPrimitiveType, { fn len(&self) -> usize { self.values.len() } fn is_empty(&self) -> bool { self.values.is_empty() } fn finish(&mut self) -> ArrayRef { Arc::new(PrimitiveArrayBuilder::finish(self)) } 
fn as_any(&self) -> &dyn Any { self } fn as_any_mut(&mut self) -> &mut dyn Any { self } fn into_box_any(self: Box<Self>) -> Box<dyn Any> { self } } #[derive(Debug)] pub struct NoNullLargeStringBuilder { values: AlignedVec<u8>, offsets: AlignedVec<i64>, } impl NoNullLargeStringBuilder { pub fn with_capacity(values_capacity: usize, list_capacity: usize) -> Self { let mut offsets = AlignedVec::with_capacity_aligned(list_capacity + 1); offsets.push(0); Self { values: AlignedVec::with_capacity_aligned(values_capacity), offsets, } } pub fn extend_from_slices(&mut self, values: &[u8], offsets: &[i64]) { self.values.
random
[ { "content": "#[inline]\n\npub fn slice_offsets(offset: i64, length: usize, array_len: usize) -> (usize, usize) {\n\n let abs_offset = offset.abs() as usize;\n\n\n\n // The offset counted from the start of the array\n\n // negative index\n\n if offset < 0 {\n\n if abs_offset <= array_len {\n\n (array_len - abs_offset, std::cmp::min(length, abs_offset))\n\n // negative index larger that array: slice from start\n\n } else {\n\n (0, std::cmp::min(length, array_len))\n\n }\n\n // positive index\n\n } else if abs_offset <= array_len {\n\n (abs_offset, std::cmp::min(length, array_len - abs_offset))\n\n // empty slice\n\n } else {\n\n (array_len, 0)\n\n }\n\n}\n", "file_path": "polars/polars-core/src/utils.rs", "rank": 0, "score": 345930.5678328563 }, { "content": "#[inline]\n\npub fn get_bit(data: &[u8], i: usize) -> bool {\n\n (data[i >> 3] & BIT_MASK[i & 7]) != 0\n\n}\n\n\n\n/// Returns whether bit at position `i` in `data` is set or not.\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn get_bit_raw(data: *const u8, i: usize) -> bool {\n\n (*data.add(i >> 3) & BIT_MASK[i & 7]) != 0\n\n}\n\n\n\n/// Sets bit at position `i` for `data`\n", "file_path": "polars/polars-arrow/src/bit_util.rs", "rank": 1, "score": 320197.50166038645 }, { "content": "#[inline]\n\npub fn unset_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] ^= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. 
The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn unset_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) ^= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Returns the ceil of `value`/`divisor`\n", "file_path": "polars/polars-arrow/src/bit_util.rs", "rank": 2, "score": 320052.1773477109 }, { "content": "#[inline]\n\npub fn set_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data`\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn set_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n", "file_path": "polars/polars-arrow/src/bit_util.rs", "rank": 3, "score": 320052.1773477109 }, { "content": "/// Get the null count and the null bitmap of the arrow array\n\npub fn get_bitmap<T: Array + ?Sized>(arr: &T) -> (usize, Option<Buffer>) {\n\n let data = arr.data();\n\n (\n\n data.null_count(),\n\n data.null_bitmap().as_ref().map(|bitmap| {\n\n let buff = bitmap.buffer_ref();\n\n buff.clone()\n\n }),\n\n )\n\n}\n\n\n\n// Used in polars/src/chunked_array/apply.rs:24 to collect from aligned vecs and null bitmaps\n\nimpl<T> FromIterator<(AlignedVec<T::Native>, Option<Buffer>)> for ChunkedArray<T>\n\nwhere\n\n T: PolarsNumericType,\n\n{\n\n fn from_iter<I: IntoIterator<Item = (AlignedVec<T::Native>, Option<Buffer>)>>(iter: I) -> Self {\n\n let mut chunks = vec![];\n\n\n\n for (values, opt_buffer) in iter {\n\n let arr = values.into_primitive_array::<T>(opt_buffer);\n\n chunks.push(Arc::new(arr) as ArrayRef)\n\n }\n\n ChunkedArray::new_from_chunks(\"from_iter\", chunks)\n\n }\n\n}\n\n\n", "file_path": "polars/polars-core/src/chunked_array/builder/mod.rs", "rank": 4, "score": 258971.0045282343 }, { "content": "#[inline]\n\npub fn 
ceil(value: usize, divisor: usize) -> usize {\n\n let (quot, rem) = (value / divisor, value % divisor);\n\n if rem > 0 && divisor > 0 {\n\n quot + 1\n\n } else {\n\n quot\n\n }\n\n}\n\n\n\n/// Performs SIMD bitwise binary operations.\n\n///\n\n/// # Safety\n\n///\n\n/// Note that each slice should be 64 bytes and it is the callers responsibility to ensure\n\n/// that this is the case. If passed slices larger than 64 bytes the operation will only\n\n/// be performed on the first 64 bytes. Slices less than 64 bytes will panic.\n\n#[cfg(simd)]\n\npub unsafe fn bitwise_bin_op_simd<F>(left: &[u8], right: &[u8], result: &mut [u8], op: F)\n\nwhere\n\n F: Fn(u8x64, u8x64) -> u8x64,\n\n{\n\n let left_simd = u8x64::from_slice_unaligned_unchecked(left);\n\n let right_simd = u8x64::from_slice_unaligned_unchecked(right);\n\n let simd_result = op(left_simd, right_simd);\n\n simd_result.write_to_slice_unaligned_unchecked(result);\n\n}\n", "file_path": "polars/polars-arrow/src/bit_util.rs", "rank": 5, "score": 245082.2861341732 }, { "content": "pub fn get_file_like(f: PyObject, truncate: bool) -> PyResult<Box<dyn FileLike>> {\n\n use EitherRustPythonFile::*;\n\n match get_either_file(f, truncate)? 
{\n\n Py(f) => Ok(Box::new(f)),\n\n Rust(f) => Ok(Box::new(f)),\n\n }\n\n}\n", "file_path": "py-polars/src/file.rs", "rank": 6, "score": 243454.1649469842 }, { "content": "/// Use a global string cache for the Categorical Types.\n\n///\n\n/// This is used to cache the string categories locally.\n\n/// This allows join operations on categorical types.\n\npub fn toggle_string_cache(toggle: bool) {\n\n USE_STRING_CACHE.store(toggle, Ordering::Release);\n\n\n\n if !toggle {\n\n STRING_CACHE.clear()\n\n }\n\n}\n\n\n", "file_path": "polars/polars-core/src/lib.rs", "rank": 7, "score": 230118.1662383129 }, { "content": "fn pivot_agg_median<T>(builder: &mut PrimitiveChunkedBuilder<T>, v: &mut Vec<Option<T::Native>>)\n\nwhere\n\n T: PolarsNumericType,\n\n T::Native: PartialOrd,\n\n{\n\n v.sort_unstable_by(|a, b| a.partial_cmp(b).unwrap());\n\n builder.append_option(v[v.len() / 2]);\n\n}\n\n\n", "file_path": "polars/polars-core/src/frame/groupby/pivot.rs", "rank": 8, "score": 226074.02170808055 }, { "content": "pub fn split_df(df: &DataFrame, n: usize) -> Result<Vec<DataFrame>> {\n", "file_path": "polars/polars-core/src/utils.rs", "rank": 9, "score": 223303.60653006638 }, { "content": "pub fn get_list_builder(\n\n dt: &DataType,\n\n value_capacity: usize,\n\n list_capacity: usize,\n\n name: &str,\n\n) -> Box<dyn ListBuilderTrait> {\n\n macro_rules! get_primitive_builder {\n\n ($type:ty) => {{\n\n let values_builder = PrimitiveArrayBuilder::<$type>::new(value_capacity);\n\n let builder = ListPrimitiveChunkedBuilder::new(&name, values_builder, list_capacity);\n\n Box::new(builder)\n\n }};\n\n }\n\n macro_rules! 
get_bool_builder {\n\n () => {{\n\n let values_builder = BooleanArrayBuilder::new(value_capacity);\n\n let builder = ListBooleanChunkedBuilder::new(&name, values_builder, list_capacity);\n\n Box::new(builder)\n\n }};\n\n }\n", "file_path": "polars/polars-core/src/chunked_array/builder/mod.rs", "rank": 10, "score": 220852.43815567854 }, { "content": "#[cfg(not(feature = \"dtype-u8\"))]\n\nfn dummies_helper(mut groups: Vec<u32>, len: usize, name: &str) -> Int64Chunked {\n\n groups.sort_unstable();\n\n\n\n // let mut group_member_iter = groups.into_iter();\n\n let mut av = AlignedVec::with_capacity_aligned(len);\n\n for _ in 0..len {\n\n av.push(0i64)\n\n }\n\n\n\n for idx in groups {\n\n let elem = unsafe { av.inner.get_unchecked_mut(idx as usize) };\n\n *elem = 1;\n\n }\n\n\n\n ChunkedArray::new_from_aligned_vec(name, av)\n\n}\n\n\n", "file_path": "polars/polars-core/src/chunked_array/ops/unique.rs", "rank": 11, "score": 217380.86384266368 }, { "content": "#[cfg(feature = \"dtype-u8\")]\n\nfn dummies_helper(mut groups: Vec<u32>, len: usize, name: &str) -> UInt8Chunked {\n\n groups.sort_unstable();\n\n\n\n // let mut group_member_iter = groups.into_iter();\n\n let mut av = AlignedVec::with_capacity_aligned(len);\n\n for _ in 0..len {\n\n av.push(0u8)\n\n }\n\n\n\n for idx in groups {\n\n let elem = unsafe { av.inner.get_unchecked_mut(idx as usize) };\n\n *elem = 1;\n\n }\n\n\n\n ChunkedArray::new_from_aligned_vec(name, av)\n\n}\n\n\n", "file_path": "polars/polars-core/src/chunked_array/ops/unique.rs", "rank": 12, "score": 214705.13058483572 }, { "content": "fn bench_collect_bool(v: &[bool]) {\n\n let f = || v.iter().copied().collect::<ChunkedArray<_>>();\n\n criterion::black_box(f());\n\n}\n\n\n", "file_path": "polars/benches/collect.rs", "rank": 13, "score": 213875.43905680394 }, { "content": "fn pivot_agg_sum<T>(builder: &mut PrimitiveChunkedBuilder<T>, v: &[Option<T::Native>])\n\nwhere\n\n T: PolarsNumericType,\n\n T::Native: Num + Zero,\n\n{\n\n 
builder.append_option(v.iter().copied().fold_options(Zero::zero(), Add::add));\n\n}\n\n\n", "file_path": "polars/polars-core/src/frame/groupby/pivot.rs", "rank": 14, "score": 213386.20217281138 }, { "content": "fn pivot_agg_mean<T>(builder: &mut PrimitiveChunkedBuilder<T>, v: &[Option<T::Native>])\n\nwhere\n\n T: PolarsNumericType,\n\n T::Native: Num + Zero + NumCast,\n\n{\n\n builder.append_option(\n\n v.iter()\n\n .copied()\n\n .fold_options::<T::Native, T::Native, _>(Zero::zero(), Add::add)\n\n .map(|sum_val| sum_val / NumCast::from(v.len()).unwrap()),\n\n );\n\n}\n\n\n", "file_path": "polars/polars-core/src/frame/groupby/pivot.rs", "rank": 15, "score": 213386.20217281138 }, { "content": "fn pivot_agg_first<T>(builder: &mut PrimitiveChunkedBuilder<T>, v: &[Option<T::Native>])\n\nwhere\n\n T: PolarsNumericType,\n\n{\n\n builder.append_option(v[0]);\n\n}\n\n\n", "file_path": "polars/polars-core/src/frame/groupby/pivot.rs", "rank": 16, "score": 213386.20217281138 }, { "content": "fn pivot_agg_max<T>(builder: &mut PrimitiveChunkedBuilder<T>, v: &[Option<T::Native>])\n\nwhere\n\n T: PolarsNumericType,\n\n{\n\n let mut max = None;\n\n\n\n for val in v.iter().flatten() {\n\n match max {\n\n None => max = Some(*val),\n\n Some(maximum) => {\n\n if val > &maximum {\n\n max = Some(*val)\n\n }\n\n }\n\n }\n\n }\n\n\n\n builder.append_option(max);\n\n}\n\n\n", "file_path": "polars/polars-core/src/frame/groupby/pivot.rs", "rank": 17, "score": 213386.20217281138 }, { "content": "fn pivot_agg_min<T>(builder: &mut PrimitiveChunkedBuilder<T>, v: &[Option<T::Native>])\n\nwhere\n\n T: PolarsNumericType,\n\n{\n\n let mut min = None;\n\n\n\n for val in v.iter().flatten() {\n\n match min {\n\n None => min = Some(*val),\n\n Some(minimum) => {\n\n if val < &minimum {\n\n min = Some(*val)\n\n }\n\n }\n\n }\n\n }\n\n\n\n builder.append_option(min);\n\n}\n\n\n", "file_path": "polars/polars-core/src/frame/groupby/pivot.rs", "rank": 18, "score": 213386.20217281138 }, { "content": "pub fn 
get_iter_capacity<T, I: Iterator<Item = T>>(iter: &I) -> usize {\n\n match iter.size_hint() {\n\n (_lower, Some(upper)) => upper,\n\n (0, None) => 1024,\n\n (lower, None) => lower,\n\n }\n\n}\n\n\n\nmacro_rules! split_array {\n\n ($ca: expr, $n: expr, $ty : ty) => {{\n\n if $n == 1 {\n\n return Ok(vec![$ca.clone()]);\n\n }\n\n let total_len = $ca.len();\n\n let chunk_size = total_len / $n;\n\n\n\n let v = (0..$n)\n\n .map(|i| {\n\n let offset = i * chunk_size;\n\n let len = if i == ($n - 1) {\n", "file_path": "polars/polars-core/src/utils.rs", "rank": 19, "score": 210540.11014998992 }, { "content": "/// Apply a lambda with a boolean output type\n\npub fn apply_lambda_with_bool_out_type<'a>(\n\n df: &'a DataFrame,\n\n py: Python,\n\n lambda: &'a PyAny,\n\n init_null_count: usize,\n\n first_value: Option<bool>,\n\n) -> ChunkedArray<BooleanType> {\n\n let columns = df.get_columns();\n\n\n\n let skip = if first_value.is_some() { 1 } else { 0 };\n\n if init_null_count == df.height() {\n\n ChunkedArray::full_null(\"apply\", df.height())\n\n } else {\n\n let iter = ((init_null_count + skip)..df.height()).map(|idx| {\n\n let iter = columns.iter().map(|s: &Series| Wrap(s.get(idx)));\n\n let tpl = (PyTuple::new(py, iter),);\n\n match lambda.call1(tpl) {\n\n Ok(val) => val.extract::<bool>().ok(),\n\n Err(e) => panic!(\"python function failed {}\", e),\n\n }\n\n });\n\n iterator_to_bool(iter, init_null_count, first_value, \"apply\", df.height())\n\n }\n\n}\n\n\n", "file_path": "py-polars/src/apply/dataframe.rs", "rank": 20, "score": 207030.35572300036 }, { "content": "/// Find the indexes that would sort these series in order of appearance.\n\n/// That means that the first `Series` will be used to determine the ordering\n\n/// until duplicates are found. 
Once duplicates are found, the next `Series` will\n\n/// be used and so on.\n\npub fn argsort_by(by: Vec<Expr>, reverse: &[bool]) -> Expr {\n\n let reverse = reverse.to_vec();\n\n let function = NoEq::new(Arc::new(move |s: &mut [Series]| {\n\n polars_core::functions::argsort_by(&s, &reverse).map(|ca| ca.into_series())\n\n }) as Arc<dyn SeriesUdf>);\n\n\n\n Expr::Function {\n\n input: by,\n\n function,\n\n output_type: Some(DataType::UInt32),\n\n collect_groups: true,\n\n }\n\n}\n", "file_path": "polars/polars-lazy/src/functions.rs", "rank": 21, "score": 204435.9849351172 }, { "content": "#[inline]\n\npub fn round_upto_multiple_of_64(num: usize) -> usize {\n\n round_upto_power_of_2(num, 64)\n\n}\n\n\n", "file_path": "polars/polars-arrow/src/bit_util.rs", "rank": 22, "score": 201771.9362261517 }, { "content": "/// Find the indexes that would sort these series in order of appearance.\n\n/// That means that the first `Series` will be used to determine the ordering\n\n/// until duplicates are found. 
Once duplicates are found, the next `Series` will\n\n/// be used and so on.\n\npub fn argsort_by(by: &[Series], reverse: &[bool]) -> Result<UInt32Chunked> {\n\n if by.len() != reverse.len() {\n\n return Err(PolarsError::ValueError(\n\n format!(\n\n \"The amount of ordering booleans: {} does not match amount of Series: {}\",\n\n reverse.len(),\n\n by.len()\n\n )\n\n .into(),\n\n ));\n\n }\n\n let (first, by, reverse) =\n\n prepare_argsort(by.to_vec(), reverse.iter().copied().collect()).unwrap();\n\n first.argsort_multiple(&by, &reverse)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_pearson_corr() {\n\n let a = Series::new(\"a\", &[1.0f32, 2.0]);\n\n let b = Series::new(\"b\", &[1.0f32, 2.0]);\n\n assert!((cov(&a.f32().unwrap(), &b.f32().unwrap()).unwrap() - 0.5).abs() < 0.001);\n\n assert!((pearson_corr(&a.f32().unwrap(), &b.f32().unwrap()).unwrap() - 1.0).abs() < 0.001);\n\n }\n\n}\n", "file_path": "polars/polars-core/src/functions.rs", "rank": 23, "score": 199138.37669772108 }, { "content": "fn normalize(mut df: DataFrame) -> Result<DataFrame> {\n\n let cols = &FEATURES;\n\n\n\n for &col in cols {\n\n df.may_apply(col, |s| {\n\n let ca = s.f64()?;\n\n\n\n match ca.sum() {\n\n Some(sum) => Ok(ca / sum),\n\n None => Err(PolarsError::Other(\"Nulls in column\".into())),\n\n }\n\n })?;\n\n }\n\n Ok(df)\n\n}\n\n\n", "file_path": "examples/iris_classifier/src/main.rs", "rank": 24, "score": 198609.61696934904 }, { "content": "pub fn str_to_polarstype(s: &str) -> DataType {\n\n match s {\n\n \"<class 'polars.datatypes.UInt8'>\" => DataType::UInt8,\n\n \"<class 'polars.datatypes.UInt16'>\" => DataType::UInt16,\n\n \"<class 'polars.datatypes.UInt32'>\" => DataType::UInt32,\n\n \"<class 'polars.datatypes.UInt64'>\" => DataType::UInt64,\n\n \"<class 'polars.datatypes.Int8'>\" => DataType::Int8,\n\n \"<class 'polars.datatypes.Int16'>\" => DataType::Int16,\n\n \"<class 'polars.datatypes.Int32'>\" => DataType::Int32,\n\n \"<class 
'polars.datatypes.Int64'>\" => DataType::Int64,\n\n \"<class 'polars.datatypes.Float32'>\" => DataType::Float32,\n\n \"<class 'polars.datatypes.Float64'>\" => DataType::Float64,\n\n \"<class 'polars.datatypes.Boolean'>\" => DataType::Boolean,\n\n \"<class 'polars.datatypes.Utf8'>\" => DataType::Utf8,\n\n \"<class 'polars.datatypes.Date32'>\" => DataType::Date32,\n\n \"<class 'polars.datatypes.Date64'>\" => DataType::Date64,\n\n \"<class 'polars.datatypes.List'>\" => DataType::List(ArrowDataType::Null),\n\n \"<class 'polars.datatypes.Categorical'>\" => DataType::Categorical,\n\n tp => panic!(\"Type {} not implemented in str_to_polarstype\", tp),\n\n }\n\n}\n", "file_path": "py-polars/src/utils.rs", "rank": 25, "score": 196917.0929711008 }, { "content": "/// Returns the nearest multiple of `factor` that is `>=` than `num`. Here `factor` must\n\n/// be a power of 2.\n\npub fn round_upto_power_of_2(num: usize, factor: usize) -> usize {\n\n debug_assert!(factor > 0 && (factor & (factor - 1)) == 0);\n\n (num + (factor - 1)) & !(factor - 1)\n\n}\n\n\n\n/// Returns whether bit at position `i` in `data` is set or not\n", "file_path": "polars/polars-arrow/src/bit_util.rs", "rank": 26, "score": 196151.72571759875 }, { "content": "fn rename_cols(mut df: DataFrame) -> Result<DataFrame> {\n\n df.set_column_names(&[\n\n \"sepal.length\",\n\n \"sepal.width\",\n\n \"petal.width\",\n\n \"petal.length\",\n\n \"class\",\n\n ])?;\n\n Ok(df)\n\n}\n\n\n", "file_path": "examples/iris_classifier/src/main.rs", "rank": 27, "score": 196139.63749320837 }, { "content": "fn enforce_schema(mut df: DataFrame) -> Result<DataFrame> {\n\n let dtypes = &[\n\n ArrowDataType::Float64,\n\n ArrowDataType::Float64,\n\n ArrowDataType::Float64,\n\n ArrowDataType::Float64,\n\n ArrowDataType::Utf8,\n\n ];\n\n\n\n df.schema()\n\n .fields()\n\n .iter()\n\n .zip(dtypes)\n\n .try_for_each::<_, Result<_>>(|(field, dtype)| {\n\n if field.data_type() != dtype {\n\n df.may_apply(field.name(), |col| match dtype {\n\n 
ArrowDataType::Float64 => col.cast::<Float64Type>(),\n\n ArrowDataType::Utf8 => col.cast::<Utf8Type>(),\n\n _ => Err(PolarsError::Other(\"unexpected type\".into())),\n\n })?;\n\n }\n\n Ok(())\n\n })?;\n\n Ok(df)\n\n}\n\n\n", "file_path": "examples/iris_classifier/src/main.rs", "rank": 28, "score": 196139.63749320837 }, { "content": "fn arg_unique<T>(a: impl Iterator<Item = T>, capacity: usize) -> AlignedVec<u32>\n\nwhere\n\n T: Hash + Eq,\n\n{\n\n let mut set = HashSet::with_hasher(RandomState::new());\n\n let mut unique = AlignedVec::with_capacity_aligned(capacity);\n\n a.enumerate().for_each(|(idx, val)| {\n\n if set.insert(val) {\n\n unique.push(idx as u32)\n\n }\n\n });\n\n unique\n\n}\n\n\n\nmacro_rules! arg_unique_ca {\n\n ($ca:expr) => {{\n\n match $ca.null_count() {\n\n 0 => arg_unique($ca.into_no_null_iter(), $ca.len()),\n\n _ => arg_unique($ca.into_iter(), $ca.len()),\n\n }\n", "file_path": "polars/polars-core/src/chunked_array/ops/unique.rs", "rank": 29, "score": 194312.7435902206 }, { "content": "pub fn lit(value: &PyAny) -> PyExpr {\n\n if let Ok(int) = value.downcast::<PyInt>() {\n\n let val = int.extract::<i64>().unwrap();\n\n dsl::lit(val).into()\n\n } else if let Ok(float) = value.downcast::<PyFloat>() {\n\n let val = float.extract::<f64>().unwrap();\n\n dsl::lit(val).into()\n\n } else if let Ok(pystr) = value.downcast::<PyString>() {\n\n dsl::lit(\n\n pystr\n\n .to_str()\n\n .expect(\"could not transform Python string to Rust Unicode\"),\n\n )\n\n .into()\n\n } else if let Ok(series) = value.extract::<PySeries>() {\n\n dsl::lit(series.series.clone()).into()\n\n } else if value.is_none() {\n\n dsl::lit(Null {}).into()\n\n } else {\n\n panic!(\"could not convert value {:?} as a Literal\", value)\n\n }\n\n}\n\n\n", "file_path": "py-polars/src/lazy/dsl.rs", "rank": 30, "score": 194011.8539411872 }, { "content": "fn one_hot_encode(mut df: DataFrame) -> Result<DataFrame> {\n\n let y = df[\"class\"].utf8().unwrap();\n\n\n\n let unique = y.unique()?;\n\n 
let n_unique = unique.len();\n\n\n\n let mut ohe = y\n\n .into_iter()\n\n .map(|opt_s| {\n\n let ohe = (0..n_unique)\n\n .map(|i| if unique.get(i) == opt_s { 1 } else { 0 })\n\n .collect::<Vec<u32>>();\n\n match opt_s {\n\n Some(s) => UInt32Chunked::new_from_slice(s, &ohe).into_series(),\n\n None => UInt32Chunked::new_from_slice(\"null\", &ohe).into_series(),\n\n }\n\n })\n\n .collect::<ListChunked>()\n\n .into_series();\n\n ohe.rename(\"ohe\");\n\n df.with_column(ohe)?;\n\n\n\n Ok(df)\n\n}\n\n\n", "file_path": "examples/iris_classifier/src/main.rs", "rank": 31, "score": 193754.9882041256 }, { "content": "/// Given two datatypes, determine the supertype that both types can safely be cast to\n\npub fn get_supertype(l: &DataType, r: &DataType) -> Result<DataType> {\n\n match _get_supertype(l, r) {\n\n Some(dt) => Ok(dt),\n\n None => _get_supertype(r, l).ok_or_else(|| {\n\n PolarsError::Other(\n\n format!(\"Failed to determine supertype of {:?} and {:?}\", l, r).into(),\n\n )\n\n }),\n\n }\n\n}\n\n\n", "file_path": "polars/polars-core/src/utils.rs", "rank": 32, "score": 193133.14121568928 }, { "content": "pub fn to_datafusion_lit(lit: LiteralValue) -> Result<ScalarValue> {\n\n use LiteralValue::*;\n\n let sv = match lit {\n\n Boolean(v) => ScalarValue::Boolean(Some(v)),\n\n // should this be large utf8?\n\n Utf8(v) => ScalarValue::Utf8(Some(v)),\n\n UInt32(v) => ScalarValue::UInt32(Some(v)),\n\n Int32(v) => ScalarValue::Int32(Some(v)),\n\n Int64(v) => ScalarValue::Int64(Some(v)),\n\n Float32(v) => ScalarValue::Float32(Some(v)),\n\n Float64(v) => ScalarValue::Float64(Some(v)),\n\n #[cfg(all(feature = \"temporal\", feature = \"dtype-date64\"))]\n\n DateTime(v) => ScalarValue::Date64(Some(v.timestamp_millis())),\n\n lit => {\n\n return Err(PolarsError::Other(\n\n format!(\"Literal conversion for literal {:?} not yet supported\", lit).into(),\n\n ))\n\n }\n\n };\n\n Ok(sv)\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/datafusion/conversion.rs", "rank": 33, "score": 
192978.0425238491 }, { "content": "/// [Cast](Expr::Cast) expression.\n\npub fn cast(expr: Expr, data_type: DataType) -> Expr {\n\n Expr::Cast {\n\n expr: Box::new(expr),\n\n data_type,\n\n }\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 34, "score": 191876.6573659433 }, { "content": "pub fn get_either_file(py_f: PyObject, truncate: bool) -> PyResult<EitherRustPythonFile> {\n\n let gil = Python::acquire_gil();\n\n let py = gil.python();\n\n\n\n if let Ok(pstring) = py_f.cast_as::<PyString>(py) {\n\n let rstring = pstring.to_string();\n\n let str_slice: &str = rstring.borrow();\n\n let f = if truncate {\n\n File::create(str_slice)?\n\n } else {\n\n File::open(str_slice)?\n\n };\n\n Ok(EitherRustPythonFile::Rust(f))\n\n } else {\n\n let f = PyFileLikeObject::with_requirements(py_f, true, true, true)?;\n\n Ok(EitherRustPythonFile::Py(f))\n\n }\n\n}\n\n\n", "file_path": "py-polars/src/file.rs", "rank": 35, "score": 187544.33457658702 }, { "content": "pub fn accumulate_dataframes_horizontal(dfs: Vec<DataFrame>) -> Result<DataFrame> {\n\n let mut iter = dfs.into_iter();\n\n let mut acc_df = iter.next().unwrap();\n\n for df in iter {\n\n acc_df.hstack_mut(df.get_columns())?;\n\n }\n\n Ok(acc_df)\n\n}\n\n\n\n#[cfg(target_os = \"linux\")]\n\nextern \"C\" {\n\n #[allow(dead_code)]\n\n pub fn malloc_trim(__pad: usize) -> std::os::raw::c_int;\n\n}\n\n\n", "file_path": "polars/polars-core/src/utils.rs", "rank": 36, "score": 187196.20898384403 }, { "content": "// Helper function for fold DataFrames. 
It appends DataFrames to the accumulator,\n\n// if the acumulator is the default DataFrame, then, return the right DataFrame, as the\n\n// accumulator.\n\nfn right_or_append(mut accumulator: DataFrame, right: DataFrame) -> PolarResult<DataFrame> {\n\n if accumulator.width() == 0 {\n\n Ok(right)\n\n } else {\n\n accumulator.vstack_mut(&right)?;\n\n Ok(accumulator)\n\n }\n\n}\n\n\n", "file_path": "examples/aggregate_multiple_files_in_chunks/src/main.rs", "rank": 37, "score": 183593.86634565875 }, { "content": "pub fn combine_predicates<I>(iter: I, arena: &mut Arena<AExpr>) -> Node\n\nwhere\n\n I: Iterator<Item = Node>,\n\n{\n\n let mut single_pred = None;\n\n for node in iter {\n\n single_pred = match single_pred {\n\n None => Some(node),\n\n Some(left) => Some(arena.add(AExpr::BinaryExpr {\n\n left,\n\n op: Operator::And,\n\n right: node,\n\n })),\n\n };\n\n }\n\n single_pred.expect(\"an empty iterator was passed\")\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/logical_plan/optimizer/predicate_pushdown.rs", "rank": 38, "score": 181917.49143770477 }, { "content": "pub fn accumulate_dataframes_vertical<I>(dfs: I) -> Result<DataFrame>\n\nwhere\n\n I: IntoIterator<Item = DataFrame>,\n\n{\n\n let mut iter = dfs.into_iter();\n\n let mut acc_df = iter.next().unwrap();\n\n for df in iter {\n\n acc_df.vstack_mut(&df)?;\n\n }\n\n Ok(acc_df)\n\n}\n\n\n", "file_path": "polars/polars-core/src/utils.rs", "rank": 39, "score": 181048.72427331487 }, { "content": "fn index_of<T>(slice: &[T], item: &T) -> Option<usize> {\n\n debug_assert!(std::mem::size_of::<T>() > 0);\n\n let ptr = item as *const T;\n\n unsafe {\n\n if slice.as_ptr() < ptr && slice.as_ptr().add(slice.len()) > ptr {\n\n Some(index_of_unchecked(slice, item))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n/// Used to split the mantissa and exponent of floating point numbers\n\n/// https://stackoverflow.com/questions/39638363/how-can-i-use-a-hashmap-with-f64-as-key-in-rust\n\npub(crate) fn integer_decode_f64(val: f64) 
-> (u64, i16, i8) {\n\n let bits: u64 = val.to_bits();\n\n let sign: i8 = if bits >> 63 == 0 { 1 } else { -1 };\n\n let mut exponent: i16 = ((bits >> 52) & 0x7ff) as i16;\n\n let mantissa = if exponent == 0 {\n\n (bits & 0xfffffffffffff) << 1\n", "file_path": "polars/polars-core/src/utils.rs", "rank": 40, "score": 178945.81049431057 }, { "content": "pub fn split_series(s: &Series, n: usize) -> Result<Vec<Series>> {\n\n split_array!(s, n, i64)\n\n}\n\n\n", "file_path": "polars/polars-core/src/utils.rs", "rank": 41, "score": 175363.68177044354 }, { "content": "/// Accumulate over multiple columns horizontally / row wise.\n\npub fn fold_exprs<F: 'static>(mut acc: Expr, f: F, exprs: Vec<Expr>) -> Expr\n\nwhere\n\n F: Fn(Series, Series) -> Result<Series> + Send + Sync + Copy,\n\n{\n\n for e in exprs {\n\n acc = map_binary(acc, e, f, None);\n\n }\n\n acc\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 42, "score": 173167.47848945754 }, { "content": "// Compute the mean for every field:\n\n// - calories_mean from calories_sum_sum and calories_count_sum\n\n// - fats_g_mean from fats_g_sum_sum and fats_g_count_sum\n\n// - sugars_g_mean from sugars_g_sum_sum and sugars_g_count_sum\n\n//\n\n// The input is the dataframe used to get the '${field}_count_sum' and\n\n// '${field}_sum_sum' fiels. 
It shall be mutable, as the fields are going\n\n// to be dropped when computed the '${field}_mean'.\n\n//\n\n// The output is a result containg the Vector of mean Series computed.\n\nfn compute_all_means(dataframe: &mut DataFrame) -> PolarResult<Vec<Series>> {\n\n const SERIES_NAMES: &[(&str, &str, &str)] = &[\n\n (\"calories_sum_sum\", \"calories_count_sum\", \"calories_mean\"),\n\n (\"fats_g_sum_sum\", \"fats_g_count_sum\", \"fats_g_mean\"),\n\n (\"sugars_g_sum_sum\", \"sugars_g_count_sum\", \"sugars_g_mean\"),\n\n ];\n\n\n\n let mut result = Vec::with_capacity(SERIES_NAMES.len());\n\n for (sum_column_name, count_column_name, mean_column_name) in SERIES_NAMES {\n\n let mean_column = compute_mean(\n\n dataframe,\n\n sum_column_name,\n\n count_column_name,\n\n mean_column_name,\n\n )?;\n\n result.push(mean_column);\n\n }\n\n\n\n Ok(result)\n\n}\n\n\n", "file_path": "examples/aggregate_multiple_files_in_chunks/src/main.rs", "rank": 43, "score": 172012.32866415082 }, { "content": "fn fmt_float<T: Num + NumCast>(f: &mut Formatter<'_>, width: usize, v: T) -> fmt::Result {\n\n let v: f64 = NumCast::from(v).unwrap();\n\n let v = (v * 1000.).round() / 1000.;\n\n if v == 0.0 {\n\n write!(f, \"{:>width$.1}\", v, width = width)\n\n } else if !(0.0001..=9999.).contains(&v) {\n\n write!(f, \"{:>width$e}\", v, width = width)\n\n } else {\n\n write!(f, \"{:>width$}\", v, width = width)\n\n }\n\n}\n\n\n\nimpl Display for AnyValue<'_> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n let width = 0;\n\n match self {\n\n AnyValue::Null => write!(f, \"null\"),\n\n AnyValue::UInt8(v) => write!(f, \"{}\", v),\n\n AnyValue::UInt16(v) => write!(f, \"{}\", v),\n\n AnyValue::UInt32(v) => write!(f, \"{}\", v),\n", "file_path": "polars/polars-core/src/fmt.rs", "rank": 44, "score": 171438.9257220584 }, { "content": "pub fn binary_function(\n\n input_a: PyExpr,\n\n input_b: PyExpr,\n\n lambda: PyObject,\n\n output_type: &PyAny,\n\n) -> PyExpr {\n\n let input_a = 
input_a.inner;\n\n let input_b = input_b.inner;\n\n\n\n let output_field = match output_type.is_none() {\n\n true => Field::new(\"binary_function\", DataType::Null),\n\n false => {\n\n let str_repr = output_type.str().unwrap().to_str().unwrap();\n\n let data_type = str_to_polarstype(str_repr);\n\n Field::new(\"binary_function\", data_type)\n\n }\n\n };\n\n\n\n let func = move |a: Series, b: Series| {\n\n let gil = Python::acquire_gil();\n", "file_path": "py-polars/src/lazy/dsl.rs", "rank": 45, "score": 170568.5261034381 }, { "content": "pub trait PolarsDataType: Send + Sync {\n\n fn get_dtype() -> DataType;\n\n}\n\n\n\nmacro_rules! impl_polars_datatype {\n\n ($ca:ident, $variant:ident) => {\n\n impl PolarsDataType for $ca {\n\n fn get_dtype() -> DataType {\n\n DataType::$variant\n\n }\n\n }\n\n };\n\n}\n\n\n\nimpl_polars_datatype!(UInt8Type, UInt8);\n\nimpl_polars_datatype!(UInt16Type, UInt16);\n\nimpl_polars_datatype!(UInt32Type, UInt32);\n\nimpl_polars_datatype!(UInt64Type, UInt64);\n\nimpl_polars_datatype!(Int8Type, Int8);\n\nimpl_polars_datatype!(Int16Type, Int16);\n", "file_path": "polars/polars-core/src/datatypes.rs", "rank": 46, "score": 168785.00788378017 }, { "content": "/// Any type that is not nested\n\npub trait PolarsSingleType: PolarsDataType {}\n\n\n\nimpl<T> PolarsSingleType for T where T: ArrowPrimitiveType + PolarsDataType {}\n\n\n\nimpl PolarsSingleType for Utf8Type {}\n\n\n\npub type ListChunked = ChunkedArray<ListType>;\n\npub type BooleanChunked = ChunkedArray<BooleanType>;\n\npub type UInt8Chunked = ChunkedArray<UInt8Type>;\n\npub type UInt16Chunked = ChunkedArray<UInt16Type>;\n\npub type UInt32Chunked = ChunkedArray<UInt32Type>;\n\npub type UInt64Chunked = ChunkedArray<UInt64Type>;\n\npub type Int8Chunked = ChunkedArray<Int8Type>;\n\npub type Int16Chunked = ChunkedArray<Int16Type>;\n\npub type Int32Chunked = ChunkedArray<Int32Type>;\n\npub type Int64Chunked = ChunkedArray<Int64Type>;\n\npub type Float32Chunked = 
ChunkedArray<Float32Type>;\n\npub type Float64Chunked = ChunkedArray<Float64Type>;\n\npub type Utf8Chunked = ChunkedArray<Utf8Type>;\n\npub type Date32Chunked = ChunkedArray<Date32Type>;\n\npub type Date64Chunked = ChunkedArray<Date64Type>;\n\npub type DurationNanosecondChunked = ChunkedArray<DurationNanosecondType>;\n\npub type DurationMillisecondChunked = ChunkedArray<DurationMillisecondType>;\n\npub type Time64NanosecondChunked = ChunkedArray<Time64NanosecondType>;\n\npub type CategoricalChunked = ChunkedArray<CategoricalType>;\n\n\n", "file_path": "polars/polars-core/src/datatypes.rs", "rank": 47, "score": 168715.42405861247 }, { "content": "/// Reset the global string cache used for the Categorical Types.\n\npub fn reset_string_cache() {\n\n STRING_CACHE.clear()\n\n}\n\n\n\n/// Check if string cache is set.\n\npub(crate) fn use_string_cache() -> bool {\n\n USE_STRING_CACHE.load(Ordering::Acquire)\n\n}\n", "file_path": "polars/polars-core/src/lib.rs", "rank": 48, "score": 168447.4026359978 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn build_csv_reader<R: 'static + Read + Seek + Sync + Send>(\n\n mut reader: R,\n\n n_rows: Option<usize>,\n\n skip_rows: usize,\n\n mut projection: Option<Vec<usize>>,\n\n max_records: Option<usize>,\n\n delimiter: Option<u8>,\n\n has_header: bool,\n\n ignore_parser_errors: bool,\n\n schema: Option<SchemaRef>,\n\n columns: Option<Vec<String>>,\n\n encoding: CsvEncoding,\n\n n_threads: Option<usize>,\n\n path: Option<PathBuf>,\n\n schema_overwrite: Option<&Schema>,\n\n sample_size: usize,\n\n chunk_size: usize,\n\n low_memory: bool,\n\n) -> Result<SequentialReader<R>> {\n\n // check if schema should be inferred\n", "file_path": "polars/polars-io/src/csv_core/csv.rs", "rank": 49, "score": 168268.33486050038 }, { "content": "pub fn naive_datetime_to_date32(v: &NaiveDateTime) -> i32 {\n\n (naive_datetime_to_date64(v) / (MILLISECONDS_IN_SECOND * SECONDS_IN_DAY)) as i32\n\n}\n\n\n\npub(crate) fn 
naive_time_to_time64_nanoseconds(v: &NaiveTime) -> i64 {\n\n // 3600 seconds in an hour\n\n v.hour() as i64 * 3600 * NANOSECONDS_IN_SECOND\n\n // 60 seconds in a minute\n\n + v.minute() as i64 * 60 * NANOSECONDS_IN_SECOND\n\n + v.second() as i64 * NANOSECONDS_IN_SECOND\n\n + v.nanosecond() as i64\n\n}\n\n\n\npub(crate) fn time64_nanosecond_as_time(v: i64) -> NaiveTime {\n\n NaiveTime::from_num_seconds_from_midnight(\n\n // extract seconds from nanoseconds\n\n (v / NANOSECONDS_IN_SECOND) as u32,\n\n // discard extracted seconds\n\n (v % NANOSECONDS_IN_SECOND) as u32,\n\n )\n\n}\n", "file_path": "polars/polars-core/src/chunked_array/temporal/conversions_utils.rs", "rank": 50, "score": 166711.59303500643 }, { "content": "// date64 is number of milliseconds since the Unix Epoch\n\npub fn naive_datetime_to_date64(v: &NaiveDateTime) -> i64 {\n\n v.timestamp_millis()\n\n}\n\n\n", "file_path": "polars/polars-core/src/chunked_array/temporal/conversions_utils.rs", "rank": 51, "score": 166711.59303500643 }, { "content": "#[cfg(feature = \"private\")]\n\npub fn private_left_join_multiple_keys(a: &DataFrame, b: &DataFrame) -> Vec<(u32, Option<u32>)> {\n\n left_join_multiple_keys(a, b)\n\n}\n\n\n\npub(crate) fn left_join_multiple_keys(a: &DataFrame, b: &DataFrame) -> Vec<(u32, Option<u32>)> {\n\n // we assume that the b DataFrame is the shorter relation.\n\n // b will be used for the build phase.\n\n\n\n let n_threads = n_join_threads();\n\n let dfs_a = split_df(&a, n_threads).unwrap();\n\n let dfs_b = split_df(&b, n_threads).unwrap();\n\n\n\n let (build_hashes, random_state) = df_rows_to_hashes_threaded(&dfs_b, None);\n\n let (probe_hashes, _) = df_rows_to_hashes_threaded(&dfs_a, Some(random_state));\n\n\n\n let hash_tbls = create_build_table(&build_hashes, b);\n\n // early drop to reduce memory pressure\n\n drop(build_hashes);\n\n\n\n let n_tables = hash_tbls.len() as u64;\n", "file_path": "polars/polars-core/src/frame/hash_join/multiple_keys.rs", "rank": 52, "score": 
164979.6852897232 }, { "content": "fn q4(c: &mut Criterion) {\n\n c.bench_function(\"groupby q4\", |b| {\n\n b.iter(|| {\n\n DATA.clone()\n\n .lazy()\n\n .groupby(vec![col(\"id4\")])\n\n .agg(vec![col(\"v1\").mean(), col(\"v2\").mean(), col(\"v3\").mean()])\n\n .collect()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "polars/benches/groupby.rs", "rank": 53, "score": 164081.71103997802 }, { "content": "fn q8(c: &mut Criterion) {\n\n c.bench_function(\"groupby q8\", |b| {\n\n b.iter(|| {\n\n DATA.clone()\n\n .lazy()\n\n // todo! accept slice of str\n\n .drop_nulls(Some(vec![col(\"v3\")]))\n\n .sort(\"v3\", true)\n\n .groupby(vec![col(\"id6\")])\n\n .agg(vec![col(\"v3\").head(Some(2)).alias(\"v3_top_2\")])\n\n .explode(&[col(\"v3_top_2\")])\n\n .collect()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "polars/benches/groupby.rs", "rank": 54, "score": 164081.71103997802 }, { "content": "fn q6(c: &mut Criterion) {\n\n c.bench_function(\"groupby q6\", |b| {\n\n b.iter(|| {\n\n DATA.clone()\n\n .lazy()\n\n .groupby(vec![col(\"id4\"), col(\"id5\")])\n\n .agg(vec![\n\n col(\"v3\").median().alias(\"v3_median\"),\n\n col(\"v3\").std().alias(\"v3_std\"),\n\n ])\n\n .collect()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "polars/benches/groupby.rs", "rank": 55, "score": 164081.71103997802 }, { "content": "fn q7(c: &mut Criterion) {\n\n c.bench_function(\"groupby q7\", |b| {\n\n b.iter(|| {\n\n DATA.clone()\n\n .lazy()\n\n .groupby(vec![col(\"id3\")])\n\n .agg(vec![\n\n col(\"v1\").max().alias(\"v1\"),\n\n col(\"v2\").min().alias(\"v2\"),\n\n ])\n\n .select(vec![\n\n col(\"id3\"),\n\n (col(\"v1\") - col(\"v2\")).alias(\"range_v1_v2\"),\n\n ])\n\n .collect()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "polars/benches/groupby.rs", "rank": 56, "score": 164081.71103997802 }, { "content": "fn q3(c: &mut Criterion) {\n\n c.bench_function(\"groupby q3\", |b| {\n\n b.iter(|| {\n\n DATA.clone()\n\n .lazy()\n\n .groupby(vec![col(\"id3\")])\n\n 
.agg(vec![col(\"v1\").sum(), col(\"v3\").mean()])\n\n .collect()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "polars/benches/groupby.rs", "rank": 57, "score": 164081.71103997802 }, { "content": "fn q10(c: &mut Criterion) {\n\n c.bench_function(\"groupby q10\", |b| {\n\n b.iter(|| {\n\n DATA.clone()\n\n .lazy()\n\n .groupby(vec![\n\n col(\"id1\"),\n\n col(\"id2\"),\n\n col(\"id3\"),\n\n col(\"id4\"),\n\n col(\"id5\"),\n\n col(\"id6\"),\n\n ])\n\n .agg(vec![\n\n col(\"v3\").sum().alias(\"v3\"),\n\n col(\"v1\").count().alias(\"v1\"),\n\n ])\n\n .collect()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(name = benches;\n\nconfig = Criterion::default().sample_size(100);\n\ntargets = q1, q2, q3, q4, q5, q6, q7, q8, q9, q10);\n\ncriterion_main!(benches);\n", "file_path": "polars/benches/groupby.rs", "rank": 58, "score": 164081.71103997802 }, { "content": "fn q1(c: &mut Criterion) {\n\n c.bench_function(\"groupby q1\", |b| {\n\n b.iter(|| {\n\n DATA.clone()\n\n .lazy()\n\n .groupby(vec![col(\"id1\")])\n\n .agg(vec![col(\"v1\").sum()])\n\n .collect()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "polars/benches/groupby.rs", "rank": 59, "score": 164081.71103997802 }, { "content": "fn q2(c: &mut Criterion) {\n\n c.bench_function(\"groupby q2\", |b| {\n\n b.iter(|| {\n\n DATA.clone()\n\n .lazy()\n\n .groupby(vec![col(\"id1\"), col(\"id2\")])\n\n .agg(vec![col(\"v1\").sum()])\n\n .collect()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "polars/benches/groupby.rs", "rank": 60, "score": 164081.71103997802 }, { "content": "fn q5(c: &mut Criterion) {\n\n c.bench_function(\"groupby q5\", |b| {\n\n b.iter(|| {\n\n DATA.clone()\n\n .lazy()\n\n .groupby(vec![col(\"id6\")])\n\n .agg(vec![col(\"v1\").sum(), col(\"v2\").sum(), col(\"v3\").sum()])\n\n .collect()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "polars/benches/groupby.rs", "rank": 61, "score": 164081.71103997802 }, { "content": "fn q9(c: &mut Criterion) {\n\n 
c.bench_function(\"groupby q9\", |b| {\n\n b.iter(|| {\n\n DATA.clone()\n\n .lazy()\n\n .drop_nulls(Some(vec![col(\"v1\"), col(\"v2\")]))\n\n .groupby(vec![col(\"id2\"), col(\"id4\")])\n\n .agg(vec![pearson_corr(col(\"v1\"), col(\"v2\"))\n\n .alias(\"r2\")\n\n .pow(2.0)])\n\n .collect()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "polars/benches/groupby.rs", "rank": 62, "score": 164081.71103997802 }, { "content": "pub fn array_to_rust(obj: &PyAny) -> PyResult<ArrayRef> {\n\n // prepare a pointer to receive the Array struct\n\n let (array_ptr, schema_ptr) = ffi::ArrowArray::into_raw(unsafe { ffi::ArrowArray::empty() });\n\n\n\n // make the conversion through PyArrow's private API\n\n // this changes the pointer's memory and is thus unsafe. In particular, `_export_to_c` can go out of bounds\n\n obj.call_method1(\n\n \"_export_to_c\",\n\n (array_ptr as uintptr_t, schema_ptr as uintptr_t),\n\n )?;\n\n let array = unsafe { make_array_from_raw(array_ptr, schema_ptr) }.expect(\"arrow array\");\n\n Ok(array)\n\n}\n\n\n", "file_path": "py-polars/src/arrow_interop/to_rust.rs", "rank": 63, "score": 163874.58197398443 }, { "content": "/// Replace None values with a value\n\npub trait ChunkFillNoneValue<T> {\n\n /// Replace None values with a give value `T`.\n\n fn fill_none_with_value(&self, value: T) -> Result<Self>\n\n where\n\n Self: Sized;\n\n}\n\n\n", "file_path": "polars/polars-core/src/chunked_array/ops/mod.rs", "rank": 64, "score": 163517.85779540695 }, { "content": "#[pyfunction]\n\nfn toggle_string_cache(toggle: bool) {\n\n polars::toggle_string_cache(toggle)\n\n}\n\n\n", "file_path": "py-polars/src/lib.rs", "rank": 65, "score": 163095.3576179424 }, { "content": "pub fn apply_lambda_unknown<'a>(\n\n df: &'a DataFrame,\n\n py: Python,\n\n lambda: &'a PyAny,\n\n) -> PyResult<Series> {\n\n let columns = df.get_columns();\n\n let mut null_count = 0;\n\n\n\n for idx in 0..df.height() {\n\n let iter = columns.iter().map(|s: &Series| Wrap(s.get(idx)));\n\n let 
arg = (PyTuple::new(py, iter),);\n\n let out = lambda.call1(arg)?;\n\n\n\n if out.is_none() {\n\n null_count += 1;\n\n continue;\n\n } else if out.is_instance::<PyInt>().unwrap() {\n\n let first_value = out.extract::<i64>().ok();\n\n return Ok(apply_lambda_with_primitive_out_type::<Int64Type>(\n\n df,\n", "file_path": "py-polars/src/apply/dataframe.rs", "rank": 66, "score": 161907.57597867495 }, { "content": "#[bench]\n\nfn bench_warmup(b: &mut Bencher) {\n\n let s: Series = (0u32..1000).collect();\n\n b.iter(|| {\n\n s.u32().unwrap().into_iter();\n\n });\n\n}\n\n\n", "file_path": "polars/benches/bench.rs", "rank": 67, "score": 161749.04785592662 }, { "content": "fn add_benchmark(c: &mut Criterion) {\n\n let idx = create_random_idx(1024);\n\n let ca = create_primitive_ca(1024, 0.0, 1);\n\n c.bench_function(\"take primitive 1024 0% nulls array;\", |b| {\n\n b.iter(|| bench_take(&ca, &idx))\n\n });\n\n let ca = create_primitive_ca(1024, 0.05, 1);\n\n c.bench_function(\"take primitive 1024 5% nulls array;\", |b| {\n\n b.iter(|| bench_take(&ca, &idx))\n\n });\n\n let ca = create_primitive_ca(1024, 0.05, 3);\n\n c.bench_function(\"take primitive 1024 5% nulls array; 3 chunks\", |b| {\n\n b.iter(|| bench_take(&ca, &idx))\n\n });\n\n\n\n let idx = create_random_idx(4096);\n\n let ca = create_primitive_ca(4096, 0.0, 1);\n\n c.bench_function(\"take primitive 4096 0% nulls array;\", |b| {\n\n b.iter(|| bench_take(&ca, &idx))\n\n });\n", "file_path": "polars/benches/take.rs", "rank": 68, "score": 161749.04785592662 }, { "content": "#[bench]\n\nfn bench_group_by(b: &mut Bencher) {\n\n let s1: Series = Series::new(\"item\", (0u32..10000).collect::<Vec<u32>>());\n\n let s2: Series = Series::new(\"group\", iter::repeat(0).take(10000).collect::<Vec<u32>>());\n\n\n\n let df1 = DataFrame::new(vec![s1, s2]).unwrap();\n\n\n\n b.iter(|| {\n\n df1.groupby(\"group\").unwrap().select(\"item\").sum().unwrap();\n\n });\n\n}\n", "file_path": "polars/benches/bench.rs", "rank": 69, "score": 
161749.04785592662 }, { "content": "fn add_benchmark(c: &mut Criterion) {\n\n let v = vec![true; 1024];\n\n c.bench_function(\"collect bool 1024\", |b| b.iter(|| bench_collect_bool(&v)));\n\n let v = vec![true; 4096];\n\n c.bench_function(\"collect bool 4096\", |b| b.iter(|| bench_collect_bool(&v)));\n\n\n\n let v = vec![1.0; 1024];\n\n c.bench_function(\"collect num 1024\", |b| b.iter(|| bench_collect_num(&v)));\n\n let v = vec![1.0; 4096];\n\n c.bench_function(\"collect num 4096\", |b| b.iter(|| bench_collect_num(&v)));\n\n\n\n let v = create_array(1024, 0.05);\n\n c.bench_function(\"collect optional_num 1024\", |b| {\n\n b.iter(|| bench_collect_optional_num(&v))\n\n });\n\n let v = create_array(4096, 0.05);\n\n c.bench_function(\"collect optional_num 4096\", |b| {\n\n b.iter(|| bench_collect_optional_num(&v))\n\n });\n\n\n", "file_path": "polars/benches/collect.rs", "rank": 70, "score": 161749.04785592662 }, { "content": "/// Set values in a primitive array based on a mask array. This is fast when large chunks of bits are set or unset.\n\npub fn set_with_mask<T>(\n\n array: &PrimitiveArray<T>,\n\n mask: &BooleanArray,\n\n value: T::Native,\n\n) -> PrimitiveArray<T>\n\nwhere\n\n T: ArrowNumericType,\n\n T::Native: ArrowNativeType,\n\n{\n\n debug_assert!(mask.null_count() == 0);\n\n let values = array.values();\n\n\n\n if array.null_count() == 0 {\n\n let mut av = AlignedVec::with_capacity_aligned(array.len());\n\n BinaryMaskedSliceIterator::new(mask)\n\n .into_iter()\n\n .for_each(|(lower, upper, truthy)| {\n\n if truthy {\n\n av.extend((lower..upper).map(|_| value))\n\n } else {\n", "file_path": "polars/polars-arrow/src/kernels/set.rs", "rank": 71, "score": 159876.6272466008 }, { "content": "/// Apply a lambda with list output type\n\npub fn apply_lambda_with_list_out_type<'a>(\n\n df: &'a DataFrame,\n\n py: Python,\n\n lambda: &'a PyAny,\n\n init_null_count: usize,\n\n first_value: Option<&Series>,\n\n dt: &DataType,\n\n) -> ListChunked {\n\n let columns = 
df.get_columns();\n\n\n\n let skip = if first_value.is_some() { 1 } else { 0 };\n\n if init_null_count == df.height() {\n\n ChunkedArray::full_null(\"apply\", df.height())\n\n } else {\n\n let iter = ((init_null_count + skip)..df.height()).map(|idx| {\n\n let iter = columns.iter().map(|s: &Series| Wrap(s.get(idx)));\n\n let tpl = PyTuple::new(py, iter);\n\n match lambda.call1(tpl) {\n\n Ok(val) => val.extract::<PySeries>().ok().map(|ps| ps.series),\n\n Err(e) => panic!(\"python function failed {}\", e),\n\n }\n\n });\n\n iterator_to_list(dt, iter, init_null_count, first_value, \"apply\", df.height())\n\n }\n\n}\n", "file_path": "py-polars/src/apply/dataframe.rs", "rank": 72, "score": 159871.3407190615 }, { "content": "/// Apply a lambda with utf8 output type\n\npub fn apply_lambda_with_utf8_out_type<'a>(\n\n df: &'a DataFrame,\n\n py: Python,\n\n lambda: &'a PyAny,\n\n init_null_count: usize,\n\n first_value: Option<&str>,\n\n) -> Utf8Chunked {\n\n let columns = df.get_columns();\n\n\n\n let skip = if first_value.is_some() { 1 } else { 0 };\n\n if init_null_count == df.height() {\n\n ChunkedArray::full_null(\"apply\", df.height())\n\n } else {\n\n let iter = ((init_null_count + skip)..df.height()).map(|idx| {\n\n let iter = columns.iter().map(|s: &Series| Wrap(s.get(idx)));\n\n let tpl = (PyTuple::new(py, iter),);\n\n match lambda.call1(tpl) {\n\n Ok(val) => val.extract::<&str>().ok(),\n\n Err(e) => panic!(\"python function failed {}\", e),\n\n }\n\n });\n\n iterator_to_utf8(iter, init_null_count, first_value, \"apply\", df.height())\n\n }\n\n}\n\n\n", "file_path": "py-polars/src/apply/dataframe.rs", "rank": 73, "score": 159871.3407190615 }, { "content": "#[bench]\n\nfn bench_join_2_frames(b: &mut Bencher) {\n\n let s1: Series = Series::new(\"id\", (0u32..10000).collect::<Vec<u32>>());\n\n let s2: Series = Series::new(\"id\", (0u32..10000).collect::<Vec<u32>>());\n\n\n\n let df1 = DataFrame::new(vec![s1]).unwrap();\n\n\n\n let df2 = 
DataFrame::new(vec![s2]).unwrap();\n\n\n\n let mut sum = 0;\n\n\n\n b.iter(|| {\n\n let df3 = df1.inner_join(&df2, \"id\", \"id\").unwrap();\n\n sum += df3.shape().1;\n\n });\n\n\n\n println!(\"{}\", sum)\n\n}\n\n\n", "file_path": "polars/benches/bench.rs", "rank": 74, "score": 159520.10309112 }, { "content": "#[bench]\n\nfn bench_num_2_chunks(b: &mut Bencher) {\n\n let mut s: Series = (0u32..500).collect();\n\n let s2: Series = (500u32..1000).collect();\n\n s.append(&s2).unwrap();\n\n let mut sum = 0;\n\n b.iter(|| {\n\n sum = s\n\n .u32()\n\n .unwrap()\n\n .into_iter()\n\n .map(|opt| opt.unwrap())\n\n .sum::<u32>()\n\n });\n\n println!(\"{}\", sum)\n\n}\n\n\n", "file_path": "polars/benches/bench.rs", "rank": 75, "score": 159520.10309112 }, { "content": "fn csv_parsing_benchmark(c: &mut Criterion) {\n\n c.bench_function(\"parse csv\", |b| {\n\n b.iter(|| {\n\n let reader = prepare_reader().expect(\"file does not exist?\");\n\n reader.finish().unwrap();\n\n })\n\n });\n\n}\n\n\n\ncriterion_group!(benches, csv_parsing_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "polars/benches/csv.rs", "rank": 76, "score": 159520.10309112 }, { "content": "#[bench]\n\nfn bench_std_iter(b: &mut Bencher) {\n\n let v: Vec<u32> = (0..1000).collect();\n\n let mut sum = 0;\n\n b.iter(|| sum = v.iter().sum::<u32>());\n\n println!(\"{}\", sum)\n\n}\n\n\n", "file_path": "polars/benches/bench.rs", "rank": 77, "score": 159520.10309112 }, { "content": "#[bench]\n\nfn bench_num_iter(b: &mut Bencher) {\n\n let s: Series = (0u32..1000).collect();\n\n let mut sum = 0;\n\n b.iter(|| {\n\n sum = s\n\n .u32()\n\n .unwrap()\n\n .into_iter()\n\n .map(|opt| opt.unwrap())\n\n .sum::<u32>()\n\n });\n\n println!(\"{}\", sum)\n\n}\n\n\n", "file_path": "polars/benches/bench.rs", "rank": 78, "score": 159520.10309112 }, { "content": "/// Simple wrapper to parallelize functions that can be divided over threads aggregated and\n\n/// finally aggregated in the main thread. 
This can be done for sum, min, max, etc.\n\npub fn parallel_op_series<F>(f: F, s: Series, n_threads: Option<usize>) -> Result<Series>\n\nwhere\n\n F: Fn(Series) -> Result<Series> + Send + Sync,\n\n{\n\n let n_threads = n_threads.unwrap_or_else(|| POOL.current_num_threads());\n\n let slices = split_series(&s, n_threads)?;\n\n\n\n let chunks = POOL.install(|| slices.into_par_iter().map(&f).collect::<Result<Vec<_>>>())?;\n\n\n\n let mut iter = chunks.into_iter();\n\n let first = iter.next().unwrap();\n\n let out = iter.fold(first, |mut acc, s| {\n\n acc.append(&s).unwrap();\n\n acc\n\n });\n\n\n\n f(out)\n\n}\n\n\n", "file_path": "polars/polars-core/src/utils.rs", "rank": 79, "score": 159145.7820828529 }, { "content": "fn prepare_row(row: Vec<AnyValue>, n_first: usize, n_last: usize) -> Vec<String> {\n\n fn make_str_val(v: &AnyValue) -> String {\n\n let string_limit = 32;\n\n if let AnyValue::Utf8(s) = v {\n\n if s.len() > string_limit {\n\n format!(\"\\\"{}...\\\"\", &s[..string_limit])\n\n } else {\n\n format!(\"\\\"{}\\\"\", s)\n\n }\n\n } else {\n\n format!(\"{}\", v)\n\n }\n\n }\n\n\n\n let reduce_columns = n_first + n_last < row.len();\n\n let mut row_str = Vec::with_capacity(n_first + n_last + reduce_columns as usize);\n\n for v in row[0..n_first].iter() {\n\n row_str.push(make_str_val(v));\n\n }\n\n if reduce_columns {\n", "file_path": "polars/polars-core/src/fmt.rs", "rank": 80, "score": 158876.32188543817 }, { "content": "pub fn split_ca<T>(ca: &ChunkedArray<T>, n: usize) -> Result<Vec<ChunkedArray<T>>> {\n\n split_array!(ca, n, i64)\n\n}\n\n\n", "file_path": "polars/polars-core/src/utils.rs", "rank": 81, "score": 154769.0117503768 }, { "content": "/// Start a when-then-otherwise expression\n\npub fn when(predicate: Expr) -> When {\n\n When { predicate }\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 82, "score": 153950.08045138055 }, { "content": "pub fn unix_time() -> NaiveDateTime {\n\n NaiveDateTime::from_timestamp(0, 
0)\n\n}\n\n\n\n#[cfg(all(test, feature = \"temporal\"))]\n\nmod test {\n\n use crate::prelude::*;\n\n use chrono::{NaiveDateTime, NaiveTime};\n\n\n\n #[test]\n\n fn from_time() {\n\n let times: Vec<_> = [\"23:56:04\", \"00:00:00\"]\n\n .iter()\n\n .map(|s| NaiveTime::parse_from_str(s, \"%H:%M:%S\").unwrap())\n\n .collect();\n\n let t = Time64NanosecondChunked::new_from_naive_time(\"times\", &times);\n\n // NOTE: the values are checked and correct.\n\n assert_eq!([86164000000000, 0], t.cont_slice().unwrap());\n\n }\n\n\n", "file_path": "polars/polars-core/src/chunked_array/temporal/mod.rs", "rank": 83, "score": 152525.22572327236 }, { "content": "/// Efficiently sets value at the indices from the iterator to `set_value`.\n\n/// The new array is initialized with a `memcpy` from the old values.\n\npub fn set_at_idx_no_null<T, I>(\n\n array: &PrimitiveArray<T>,\n\n idx: I,\n\n set_value: T::Native,\n\n) -> Result<PrimitiveArray<T>>\n\nwhere\n\n T: ArrowPrimitiveType,\n\n T::Native: ArrowNativeType,\n\n I: IntoIterator<Item = usize>,\n\n{\n\n debug_assert_eq!(array.null_count(), 0);\n\n let mut av = AlignedVec::new_from_slice(array.values());\n\n idx.into_iter().try_for_each::<_, Result<_>>(|idx| {\n\n let val = av\n\n .inner\n\n .get_mut(idx)\n\n .ok_or_else(|| PolarsError::OutOfBounds(\"idx is out of bounds\".into()))?;\n\n *val = set_value;\n\n Ok(())\n\n })?;\n", "file_path": "polars/polars-arrow/src/kernels/set.rs", "rank": 84, "score": 152469.19020321523 }, { "content": "/// Apply a lambda with a primitive output type\n\npub fn apply_lambda_with_primitive_out_type<'a, D>(\n\n df: &'a DataFrame,\n\n py: Python,\n\n lambda: &'a PyAny,\n\n init_null_count: usize,\n\n first_value: Option<D::Native>,\n\n) -> ChunkedArray<D>\n\nwhere\n\n D: PyArrowPrimitiveType,\n\n D::Native: ToPyObject + FromPyObject<'a>,\n\n{\n\n let columns = df.get_columns();\n\n\n\n let skip = if first_value.is_some() { 1 } else { 0 };\n\n if init_null_count == df.height() {\n\n 
ChunkedArray::full_null(\"apply\", df.height())\n\n } else {\n\n let iter = ((init_null_count + skip)..df.height()).map(|idx| {\n\n let iter = columns.iter().map(|s: &Series| Wrap(s.get(idx)));\n\n let tpl = (PyTuple::new(py, iter),);\n\n match lambda.call1(tpl) {\n\n Ok(val) => val.extract::<D::Native>().ok(),\n\n Err(e) => panic!(\"python function failed {}\", e),\n\n }\n\n });\n\n iterator_to_primitive(iter, init_null_count, first_value, \"apply\", df.height())\n\n }\n\n}\n\n\n", "file_path": "py-polars/src/apply/dataframe.rs", "rank": 85, "score": 152462.44931165304 }, { "content": "/// [Not](Expr::Not) expression.\n\npub fn not(expr: Expr) -> Expr {\n\n Expr::Not(Box::new(expr))\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 86, "score": 151823.30700186398 }, { "content": "pub fn when(predicate: PyExpr) -> When {\n\n When { predicate }\n\n}\n\n\n", "file_path": "py-polars/src/lazy/dsl.rs", "rank": 87, "score": 151823.30700186398 }, { "content": "// This function reads in parallel the files in the `paths` slice, and\n\n// returns the aggregation of all the files in the slice.\n\n//\n\n// The steps are:\n\n// 1. Read DataFrame from CSV in parallel.\n\n// 2. Append the files to the same DataFrame as soon as the DataFrame\n\n// is available.\n\n// 3. Group by category.\n\n// 4. Aggregate computing the sum and the count of calories, fats_g and\n\n// sugars_g. 
At this point the schema will change to: ['category',\n\n// 'calories_sum', 'calories_count', 'fats_g_sum', 'fats_g_count',\n\n// 'sugars_g_sum', 'sugars_g_count']\n\n//\n\n// The input is a slice of paths to CSV files.\n\n// The output is the aggregated DataFrame for all CSVs in the slice.\n\nfn process_files_parallel(paths: &[PathBuf]) -> PolarResult<DataFrame> {\n\n paths\n\n .into_par_iter()\n\n .map(read_csv)\n\n .try_reduce(DataFrame::default, right_or_append)?\n\n .groupby(&[\"category\"])?\n\n .agg(&[\n\n (\"calories\", &[\"sum\", \"count\"]),\n\n (\"fats_g\", &[\"sum\", \"count\"]),\n\n (\"sugars_g\", &[\"sum\", \"count\"]),\n\n ])\n\n}\n\n\n", "file_path": "examples/aggregate_multiple_files_in_chunks/src/main.rs", "rank": 88, "score": 150625.52597014507 }, { "content": "/// Exclude a column from selection.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use polars_core::prelude::*;\n\n/// use polars_lazy::prelude::*;\n\n///\n\n/// // Select all columns except foo.\n\n/// fn example(df: DataFrame) -> LazyFrame {\n\n/// df.lazy()\n\n/// .select(&[\n\n/// col(\"*\"), except(\"foo\")\n\n/// ])\n\n/// }\n\n/// ```\n\npub fn except(name: &str) -> Expr {\n\n match name {\n\n \"*\" => panic!(\"cannot use a wildcard as a column exception\"),\n\n _ => Expr::Except(Box::new(col(name))),\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub enum Operator {\n\n Eq,\n\n NotEq,\n\n Lt,\n\n LtEq,\n\n Gt,\n\n GtEq,\n\n Plus,\n\n Minus,\n\n Multiply,\n\n Divide,\n\n Modulus,\n\n And,\n\n Or,\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 89, "score": 149802.13026907615 }, { "content": "/// Sum all the values in this Expression.\n\npub fn sum(name: &str) -> Expr {\n\n col(name).sum()\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 90, "score": 149792.89942540758 }, { "content": "/// Find the median of all the values in this Expression.\n\npub fn median(name: &str) -> Expr {\n\n col(name).median()\n\n}\n\n\n", 
"file_path": "polars/polars-lazy/src/dsl.rs", "rank": 91, "score": 149792.84037477605 }, { "content": "/// Find the mean of all the values in this Expression.\n\npub fn mean(name: &str) -> Expr {\n\n col(name).mean()\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 92, "score": 149792.84037477605 }, { "content": "/// Find the minimum of all the values in this Expression.\n\npub fn min(name: &str) -> Expr {\n\n col(name).min()\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 93, "score": 149792.84037477605 }, { "content": "/// Find the mean of all the values in this Expression.\n\npub fn avg(name: &str) -> Expr {\n\n col(name).mean()\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 94, "score": 149792.84037477605 }, { "content": "/// Find the maximum of all the values in this Expression.\n\npub fn max(name: &str) -> Expr {\n\n col(name).max()\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 95, "score": 149792.84037477605 }, { "content": "/// Count the number of values in this Expression.\n\npub fn count(name: &str) -> Expr {\n\n match name {\n\n \"\" => col(name).count().alias(\"count\"),\n\n _ => col(name).count(),\n\n }\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 96, "score": 149792.84037477605 }, { "content": "/// Create a Column Expression based on a column name.\n\npub fn col(name: &str) -> Expr {\n\n match name {\n\n \"*\" => Expr::Wildcard,\n\n _ => Expr::Column(Arc::new(name.to_owned())),\n\n }\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 97, "score": 149787.07174225053 }, { "content": "/// [IsNotNull](Expr::IsNotNull) expression.\n\npub fn is_not_null(expr: Expr) -> Expr {\n\n Expr::IsNotNull(Box::new(expr))\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 98, "score": 149787.07174225053 }, { "content": "/// [IsNull](Expr::IsNotNull) expression\n\npub fn is_null(expr: Expr) -> Expr {\n\n 
Expr::IsNull(Box::new(expr))\n\n}\n\n\n", "file_path": "polars/polars-lazy/src/dsl.rs", "rank": 99, "score": 149787.07174225053 } ]
Rust
crates/holochain/tests/authored_test/mod.rs
MCYBA/holochain
74a9ac250285d38985fad9d41de3955646549606
use std::convert::TryFrom; use std::convert::TryInto; use std::time::Duration; use holo_hash::AnyDhtHash; use holo_hash::EntryHash; use holochain_state::prelude::fresh_reader_test; use holochain_wasm_test_utils::TestWasm; use holochain_zome_types::Entry; use holochain::test_utils::conductor_setup::ConductorTestData; use holochain::test_utils::host_fn_caller::*; use holochain::test_utils::wait_for_integration; use rusqlite::named_params; #[tokio::test(flavor = "multi_thread")] async fn authored_test() { observability::test_run().ok(); let num_attempts = 100; let delay_per_attempt = Duration::from_millis(100); let zomes = vec![TestWasm::Create]; let mut conductor_test = ConductorTestData::two_agents(zomes, true).await; let handle = conductor_test.handle(); let alice_call_data = conductor_test.alice_call_data(); let bob_call_data = conductor_test.bob_call_data().unwrap(); let entry = Post("Hi there".into()); let entry_hash = EntryHash::with_data_sync(&Entry::try_from(entry.clone()).unwrap()); alice_call_data .get_api(TestWasm::Create) .commit_entry(entry.clone().try_into().unwrap(), POST_ID) .await; let triggers = handle.get_cell_triggers(&alice_call_data.cell_id).unwrap(); triggers.publish_dht_ops.trigger(); fresh_reader_test(alice_call_data.authored_env.clone(), |txn| { let basis: AnyDhtHash = entry_hash.clone().into(); let has_authored_entry: bool = txn .query_row( "SELECT EXISTS(SELECT 1 FROM DhtOp JOIN Header ON DhtOp.header_hash = Header.hash WHERE basis_hash = :hash AND Header.author = :author)", named_params! 
{ ":hash": basis, ":author": alice_call_data.cell_id.agent_pubkey(), }, |row| row.get(0), ) .unwrap(); assert!(has_authored_entry); }); let expected_count = 3 + 14; wait_for_integration( &bob_call_data.dht_env, expected_count, num_attempts, delay_per_attempt.clone(), ) .await; fresh_reader_test(bob_call_data.authored_env.clone(), |txn| { let basis: AnyDhtHash = entry_hash.clone().into(); let has_authored_entry: bool = txn .query_row( "SELECT EXISTS(SELECT 1 FROM DhtOp JOIN Header ON DhtOp.header_hash = Header.hash WHERE basis_hash = :hash AND Header.author = :author)", named_params! { ":hash": basis, ":author": bob_call_data.cell_id.agent_pubkey(), }, |row| row.get(0), ) .unwrap(); assert!(!has_authored_entry); }); fresh_reader_test(bob_call_data.dht_env.clone(), |txn| { let basis: AnyDhtHash = entry_hash.clone().into(); let has_integrated_entry: bool = txn .query_row( "SELECT EXISTS(SELECT 1 FROM DhtOp WHERE basis_hash = :hash)", named_params! { ":hash": basis, }, |row| row.get(0), ) .unwrap(); assert!(has_integrated_entry); }); bob_call_data .get_api(TestWasm::Create) .commit_entry(entry.clone().try_into().unwrap(), POST_ID) .await; let triggers = handle.get_cell_triggers(&bob_call_data.cell_id).unwrap(); triggers.publish_dht_ops.trigger(); fresh_reader_test(bob_call_data.authored_env.clone(), |txn| { let basis: AnyDhtHash = entry_hash.clone().into(); let has_authored_entry: bool = txn .query_row( "SELECT EXISTS(SELECT 1 FROM DhtOp JOIN Header ON DhtOp.header_hash = Header.hash WHERE basis_hash = :hash AND Header.author = :author)", named_params! { ":hash": basis, ":author": bob_call_data.cell_id.agent_pubkey(), }, |row| row.get(0), ) .unwrap(); assert!(has_authored_entry); }); conductor_test.shutdown_conductor().await; }
use std::convert::TryFrom; use std::convert::TryInto; use std::time::Duration; use holo_hash::AnyDhtHash; use holo_hash::EntryHash; use holochain_state::prelude::fresh_reader_test; use holochain_wasm_test_utils::TestWasm; use holochain_zome_types::Entry; use holochain::test_utils::conductor_setup::ConductorTestData; use holochain::test_utils::host_fn_caller::*; use holochain::test_utils::wait_for_integration; use rusqlite::named_params; #[tokio::test(flavor = "multi_thread")] async fn authored_test() { observability::test_run().ok(); let num_attempts = 100; let delay_per_attempt = Duration::from_millis(100); let zomes = vec![TestWasm::Create]; let mut conductor_test = ConductorT
.await; let triggers = handle.get_cell_triggers(&alice_call_data.cell_id).unwrap(); triggers.publish_dht_ops.trigger(); fresh_reader_test(alice_call_data.authored_env.clone(), |txn| { let basis: AnyDhtHash = entry_hash.clone().into(); let has_authored_entry: bool = txn .query_row( "SELECT EXISTS(SELECT 1 FROM DhtOp JOIN Header ON DhtOp.header_hash = Header.hash WHERE basis_hash = :hash AND Header.author = :author)", named_params! { ":hash": basis, ":author": alice_call_data.cell_id.agent_pubkey(), }, |row| row.get(0), ) .unwrap(); assert!(has_authored_entry); }); let expected_count = 3 + 14; wait_for_integration( &bob_call_data.dht_env, expected_count, num_attempts, delay_per_attempt.clone(), ) .await; fresh_reader_test(bob_call_data.authored_env.clone(), |txn| { let basis: AnyDhtHash = entry_hash.clone().into(); let has_authored_entry: bool = txn .query_row( "SELECT EXISTS(SELECT 1 FROM DhtOp JOIN Header ON DhtOp.header_hash = Header.hash WHERE basis_hash = :hash AND Header.author = :author)", named_params! { ":hash": basis, ":author": bob_call_data.cell_id.agent_pubkey(), }, |row| row.get(0), ) .unwrap(); assert!(!has_authored_entry); }); fresh_reader_test(bob_call_data.dht_env.clone(), |txn| { let basis: AnyDhtHash = entry_hash.clone().into(); let has_integrated_entry: bool = txn .query_row( "SELECT EXISTS(SELECT 1 FROM DhtOp WHERE basis_hash = :hash)", named_params! 
{ ":hash": basis, }, |row| row.get(0), ) .unwrap(); assert!(has_integrated_entry); }); bob_call_data .get_api(TestWasm::Create) .commit_entry(entry.clone().try_into().unwrap(), POST_ID) .await; let triggers = handle.get_cell_triggers(&bob_call_data.cell_id).unwrap(); triggers.publish_dht_ops.trigger(); fresh_reader_test(bob_call_data.authored_env.clone(), |txn| { let basis: AnyDhtHash = entry_hash.clone().into(); let has_authored_entry: bool = txn .query_row( "SELECT EXISTS(SELECT 1 FROM DhtOp JOIN Header ON DhtOp.header_hash = Header.hash WHERE basis_hash = :hash AND Header.author = :author)", named_params! { ":hash": basis, ":author": bob_call_data.cell_id.agent_pubkey(), }, |row| row.get(0), ) .unwrap(); assert!(has_authored_entry); }); conductor_test.shutdown_conductor().await; }
estData::two_agents(zomes, true).await; let handle = conductor_test.handle(); let alice_call_data = conductor_test.alice_call_data(); let bob_call_data = conductor_test.bob_call_data().unwrap(); let entry = Post("Hi there".into()); let entry_hash = EntryHash::with_data_sync(&Entry::try_from(entry.clone()).unwrap()); alice_call_data .get_api(TestWasm::Create) .commit_entry(entry.clone().try_into().unwrap(), POST_ID)
random
[ { "content": "fn consistency(bench: &mut Criterion) {\n\n observability::test_run().ok();\n\n let mut group = bench.benchmark_group(\"consistency\");\n\n group.sample_size(\n\n std::env::var_os(\"BENCH_SAMPLE_SIZE\")\n\n .and_then(|s| s.to_string_lossy().parse::<usize>().ok())\n\n .unwrap_or(100),\n\n );\n\n let runtime = rt();\n\n\n\n let (mut producer, mut consumer, others) = runtime.block_on(setup());\n\n if let Some(n) = std::env::var_os(\"BENCH_NUM_OPS\") {\n\n let num_ops = n.to_string_lossy().parse::<usize>().unwrap();\n\n runtime.block_on(async {\n\n producer.fill(num_ops).await;\n\n let mut cells = vec![&consumer.cell, &producer.cell];\n\n cells.extend(others.cells.iter());\n\n let num_tries = std::env::var_os(\"BENCH_NUM_WAITS\")\n\n .and_then(|s| s.to_string_lossy().parse::<usize>().ok())\n\n .unwrap_or(100);\n", "file_path": "crates/holochain/benches/consistency.rs", "rank": 0, "score": 178542.13994723168 }, { "content": "pub fn to_single_zome(zomes_to_invoke: ZomesToInvoke) -> AppValidationResult<Zome> {\n\n match zomes_to_invoke {\n\n ZomesToInvoke::All => Err(AppValidationError::LinkMultipleZomes),\n\n ZomesToInvoke::One(z) => Ok(z),\n\n }\n\n}\n\n\n\npub async fn element_to_op(\n\n element: Element,\n\n op_type: DhtOpType,\n\n cascade: &mut Cascade,\n\n) -> AppValidationOutcome<(Op, Option<Entry>)> {\n\n use DhtOpType::*;\n\n let mut activity_entry = None;\n\n let (shh, entry) = element.into_inner();\n\n let mut entry = entry.into_option();\n\n let (header, _) = shh.into_inner();\n\n // Register agent activity doesn't store the entry so we need to\n\n // save it so we can reconstruct the element later.\n\n if matches!(op_type, RegisterAgentActivity) {\n\n activity_entry = entry.take();\n\n }\n\n let dht_op = DhtOp::from_type(op_type, header, entry)?;\n\n Ok((dhtop_to_op(dht_op, cascade).await?, activity_entry))\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 1, "score": 177126.61896232684 }, { 
"content": "fn bootstrap(bench: &mut Criterion) {\n\n let mut group = bench.benchmark_group(\"bootstrap\");\n\n group.sample_size(\n\n std::env::var_os(\"BENCH_SAMPLE_SIZE\")\n\n .and_then(|s| s.to_string_lossy().parse::<usize>().ok())\n\n .unwrap_or(100),\n\n );\n\n let runtime = rt();\n\n let client = reqwest::Client::new();\n\n\n\n let mut url = url2!(\"http://127.0.0.1:0\");\n\n let (driver, addr) = runtime.block_on(async {\n\n kitsune_p2p_bootstrap::run(([127, 0, 0, 1], 0), vec![])\n\n .await\n\n .unwrap()\n\n });\n\n runtime.spawn(async move {\n\n driver.await;\n\n println!(\"BOOTSTRAP CLOSED\");\n\n });\n", "file_path": "crates/kitsune_p2p/bootstrap/benches/bench.rs", "rank": 2, "score": 174151.7824773412 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let rt = tokio::runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap();\n\n\n\n let t_mem = rt.block_on(async { Share::new(Some(Test::new(TT::Mem).await)) });\n\n let t_quic = rt.block_on(async { Share::new(Some(Test::new(TT::Quic).await)) });\n\n\n\n c.bench_function(\"thru-mem\", |b| b.iter(|| thru(&rt, &t_mem)));\n\n c.bench_function(\"thru-quic\", |b| b.iter(|| thru(&rt, &t_quic)));\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "crates/kitsune_p2p/proxy/benches/thru.rs", "rank": 3, "score": 174151.7824773412 }, { "content": "fn simple_bench(bench: &mut Criterion) {\n\n let _g = observability::test_run().ok();\n\n\n\n let runtime = rt();\n\n\n\n let (listener, listener_address, jh) = runtime.block_on(setup());\n\n let (mut send, mut recv) = runtime.block_on(setup_client(listener_address));\n\n\n\n let mut group = bench.benchmark_group(\"simple_bench\");\n\n // group.sample_size(100);\n\n group.bench_function(BenchmarkId::new(\"client\", \"request\"), |b| {\n\n b.iter(|| {\n\n runtime.block_on(client_request(&mut send));\n\n });\n\n });\n\n group.bench_function(BenchmarkId::new(\"client\", \"signal\"), |b| 
{\n\n b.iter(|| {\n\n runtime.block_on(client_signal(&mut send));\n\n });\n\n });\n", "file_path": "crates/holochain_websocket/benches/bench.rs", "rank": 4, "score": 174151.7824773412 }, { "content": "fn zome_id_to_zome(zome_id: ZomeId, dna_def: &DnaDef) -> AppValidationResult<Zome> {\n\n let zome_index = u8::from(zome_id) as usize;\n\n Ok(dna_def\n\n .zomes\n\n .get(zome_index)\n\n .ok_or(AppValidationError::ZomeId(zome_id))?\n\n .clone()\n\n .into())\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 5, "score": 172383.73327986564 }, { "content": "pub fn wasm_call_n(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"wasm_call_n\");\n\n\n\n for n in vec![\n\n 1, // 1 byte\n\n 1_000, // 1 kb\n\n 1_000_000, // 1 mb\n\n ] {\n\n group.throughput(Throughput::Bytes(n as _));\n\n\n\n group.bench_function(BenchmarkId::from_parameter(n), |b| {\n\n // bytes\n\n let bytes = vec![0; n];\n\n let _g = TOKIO_RUNTIME.lock().unwrap().enter();\n\n let ha = HOST_ACCESS_FIXTURATOR.lock().unwrap().next().unwrap();\n\n\n\n b.iter(|| {\n\n let zome: Zome = TestWasm::Bench.into();\n\n let i = ZomeCallInvocation {\n\n cell_id: CELL_ID.lock().unwrap().clone(),\n", "file_path": "crates/holochain/benches/bench.rs", "rank": 6, "score": 172147.26431544882 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let rt = tokio::runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap();\n\n\n\n let t = rt.block_on(async { Share::new(Some(Test::new().await)) });\n\n\n\n c.bench_function(\"api-thru-mem\", |b| b.iter(|| api_thru(&rt, &t)));\n\n}\n\n\n\ncriterion_group!(benches, criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "crates/kitsune_p2p/types/benches/api_thru.rs", "rank": 7, "score": 172088.05456835445 }, { "content": "pub fn websocket_concurrent_install(c: &mut Criterion) {\n\n observability::test_run().ok();\n\n\n\n static REQ_TIMEOUT_MS: u64 = 15000;\n\n static 
NUM_DNA_CONCURRENCY: &[(u16, usize)] = &[(1, 1), (8, 4), (64, 10)];\n\n let admin_port = std::sync::atomic::AtomicUsize::new(9910);\n\n\n\n let mut group = c.benchmark_group(\"websocket\");\n\n for (i, j) in NUM_DNA_CONCURRENCY {\n\n group.throughput(Throughput::Elements(*i as u64 * *j as u64));\n\n\n\n group.sample_size(10);\n\n group.measurement_time(Duration::from_secs(20));\n\n\n\n let bench_id = format!(\"{}_{}\", i, j);\n\n let bench_fn = group.bench_function(BenchmarkId::from_parameter(bench_id.clone()), |b| {\n\n // separate the holochain spawn time from the measured time\n\n b.iter_batched(\n\n || {\n\n tokio_helper::block_forever_on(async {\n", "file_path": "crates/holochain/benches/websocket.rs", "rank": 8, "score": 169997.7244760336 }, { "content": "fn insert_header_and_op(txn: &mut Transaction, u: &mut Unstructured, header: &Header) -> DhtOpHash {\n\n let timestamp = Timestamp::arbitrary(u).unwrap();\n\n let op_order = OpOrder::new(DhtOpType::RegisterAgentActivity, timestamp);\n\n let any_hash: AnyDhtHash = EntryHash::arbitrary(u).unwrap().into();\n\n let header = SignedHeaderHashed::with_presigned(\n\n HeaderHashed::from_content_sync(header.clone()),\n\n Signature::arbitrary(u).unwrap(),\n\n );\n\n let hash = header.as_hash().clone();\n\n let op_hash = DhtOpHash::arbitrary(u).unwrap();\n\n mutations::insert_header(txn, &header).unwrap();\n\n mutations::insert_op_lite(\n\n txn,\n\n &DhtOpLight::RegisterAgentActivity(hash, any_hash.clone()),\n\n &op_hash,\n\n &op_order,\n\n &timestamp,\n\n )\n\n .unwrap();\n\n\n\n op_hash\n\n}\n\n\n", "file_path": "crates/holochain_state/tests/cache_tests/mod.rs", "rank": 9, "score": 164763.34471277398 }, { "content": "pub fn zome_info(\n\n ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n _input: (),\n\n) -> Result<ZomeInfo, WasmError> {\n\n match HostFnAccess::from(&call_context.host_context()) {\n\n HostFnAccess{ bindings_deterministic: Permission::Allow, .. 
} => {\n\n ribosome.zome_info(call_context.zome.clone()).map_err(|e| match e {\n\n RibosomeError::WasmError(wasm_error) => wasm_error,\n\n other_error => WasmError::Host(other_error.to_string()),\n\n })\n\n },\n\n _ => Err(WasmError::Host(RibosomeError::HostFnPermissions(\n\n call_context.zome.zome_name().clone(),\n\n call_context.function_name().clone(),\n\n \"zome_info\".into()\n\n ).to_string()))\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/zome_info.rs", "rank": 10, "score": 164470.6500104686 }, { "content": "/// Helper to create a zome invocation for tests\n\npub fn new_invocation<P, Z: Into<Zome>>(\n\n cell_id: &CellId,\n\n func: &str,\n\n payload: P,\n\n zome: Z,\n\n) -> Result<ZomeCallInvocation, SerializedBytesError>\n\nwhere\n\n P: serde::Serialize + std::fmt::Debug,\n\n{\n\n Ok(ZomeCallInvocation {\n\n cell_id: cell_id.clone(),\n\n zome: zome.into(),\n\n cap_secret: Some(CapSecretFixturator::new(Unpredictable).next().unwrap()),\n\n fn_name: func.into(),\n\n payload: ExternIO::encode(payload)?,\n\n provenance: cell_id.agent_pubkey().clone(),\n\n })\n\n}\n\n\n", "file_path": "crates/holochain/src/test_utils.rs", "rank": 11, "score": 164040.36203635138 }, { "content": "fn record_instant(buffer: &mut VecDeque<Instant>) {\n\n if buffer.len() > MAX_HISTORY {\n\n buffer.pop_front();\n\n }\n\n buffer.push_back(Instant::now());\n\n}\n\n\n\nimpl std::fmt::Display for Metrics {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n static TRACE: once_cell::sync::Lazy<bool> = once_cell::sync::Lazy::new(|| {\n\n std::env::var(\"GOSSIP_METRICS\").map_or(false, |s| s == \"trace\")\n\n });\n\n let trace = *TRACE;\n\n write!(f, \"Metrics:\")?;\n\n let mut average_last_completion = std::time::Duration::default();\n\n let mut max_last_completion = std::time::Duration::default();\n\n let mut average_completion_frequency = std::time::Duration::default();\n\n let mut complete_rounds = 0;\n\n let mut min_complete_rounds = 
usize::MAX;\n\n for (key, info) in &self.map {\n", "file_path": "crates/kitsune_p2p/kitsune_p2p/src/metrics.rs", "rank": 12, "score": 162209.20206293202 }, { "content": "/// Fill a buffer with data that is readable as latency information.\n\n/// Note, the minimum message size to get the timing data across is 16 bytes.\n\npub fn fill_with_latency_info(buf: &mut [u8]) {\n\n if buf.is_empty() {\n\n return;\n\n }\n\n\n\n // make sure we call this first, so we don't go back in time\n\n let epoch = *LOC_EPOCH;\n\n\n\n let now = tokio::time::Instant::now();\n\n let now = now.duration_since(epoch).as_secs_f64();\n\n\n\n // create a pattern of tag/marker\n\n let mut pat = [0_u8; 16];\n\n pat[0..8].copy_from_slice(LAT_TAG);\n\n pat[8..16].copy_from_slice(&now.to_le_bytes());\n\n\n\n // copy the tag/marker pattern repeatedly into the buffer\n\n let mut offset = 0;\n\n while offset < buf.len() {\n\n let len = std::cmp::min(pat.len(), buf.len() - offset);\n\n buf[offset..offset + len].copy_from_slice(&pat[..len]);\n\n offset += len;\n\n }\n\n}\n\n\n\n/// Return the duration since the time encoded in a latency info buffer.\n\n/// Returns a unit error if we could not parse the buffer into time data.\n", "file_path": "crates/kitsune_p2p/types/src/tx2/tx2_utils/latency.rs", "rank": 13, "score": 158739.00713066093 }, { "content": "/// An InlineZome with simple Create and Read operations\n\nfn simple_crud_zome() -> InlineZome {\n\n let string_entry_def = EntryDef::default_with_id(\"string\");\n\n let unit_entry_def = EntryDef::default_with_id(\"unit\");\n\n\n\n InlineZome::new_unique(vec![string_entry_def.clone(), unit_entry_def.clone()])\n\n .callback(\"create_string\", move |api, s: AppString| {\n\n let entry_def_id: EntryDefId = string_entry_def.id.clone();\n\n let entry = Entry::app(AppString::from(s).try_into().unwrap()).unwrap();\n\n let hash = api.create(CreateInput::new(\n\n entry_def_id,\n\n entry,\n\n ChainTopOrdering::default(),\n\n ))?;\n\n Ok(hash)\n\n })\n\n 
.callback(\"create_unit\", move |api, ()| {\n\n let entry_def_id: EntryDefId = unit_entry_def.id.clone();\n\n let entry = Entry::app(().try_into().unwrap()).unwrap();\n\n let hash = api.create(CreateInput::new(\n\n entry_def_id,\n", "file_path": "crates/holochain/tests/inline_zome_spec/mod.rs", "rank": 14, "score": 157180.53351480808 }, { "content": "/// Simple zome which contains a validation rule which can fail\n\nfn simple_validation_zome() -> InlineZome {\n\n let entry_def = EntryDef::default_with_id(\"string\");\n\n\n\n InlineZome::new_unique(vec![entry_def.clone()])\n\n .callback(\"create\", move |api, s: AppString| {\n\n let entry_def_id: EntryDefId = entry_def.id.clone();\n\n let entry = Entry::app(s.try_into().unwrap()).unwrap();\n\n let hash = api.create(CreateInput::new(\n\n entry_def_id,\n\n entry,\n\n ChainTopOrdering::default(),\n\n ))?;\n\n Ok(hash)\n\n })\n\n .callback(\"read\", |api, hash: HeaderHash| {\n\n api.get(vec![GetInput::new(hash.into(), GetOptions::default())])\n\n .map_err(Into::into)\n\n })\n\n .callback(\"validate\", |_api, data: Op| {\n\n let s = match data {\n", "file_path": "crates/holochain/tests/inline_zome_spec/mod.rs", "rank": 15, "score": 157180.47855315797 }, { "content": "fn append_location(mut base: Vec<u8>) -> Vec<u8> {\n\n let mut loc_bytes = holo_dht_location_bytes(&base);\n\n base.append(&mut loc_bytes);\n\n base\n\n}\n\n\n\nfixturator!(\n\n with_vec 0 5;\n\n AgentPubKey;\n\n curve Empty AgentPubKey::from_raw_36(ThirtySixHashBytesFixturator::new_indexed(Empty, get_fixt_index!()).next().unwrap());\n\n curve Unpredictable AgentPubKey::from_raw_36(ThirtySixHashBytesFixturator::new_indexed(Unpredictable, get_fixt_index!()).next().unwrap());\n\n curve Predictable {\n\n // these agent keys match what the mock keystore spits out for the first two agents\n\n // don't mess with this unless you also update the keystore!!!\n\n let agents = vec![\n\n 
AgentPubKey::try_from(\"uhCAkJCuynkgVdMn_bzZ2ZYaVfygkn0WCuzfFspczxFnZM1QAyXoo\")\n\n .unwrap(),\n\n AgentPubKey::try_from(\"uhCAk39SDf7rynCg5bYgzroGaOJKGKrloI1o57Xao6S-U5KNZ0dUH\")\n\n .unwrap(),\n\n ];\n", "file_path": "crates/holo_hash/src/fixt.rs", "rank": 16, "score": 157045.54088681444 }, { "content": "fn check_line_for_admin_port(mut line: &str) -> Option<u16> {\n\n line = line.strip_prefix(\"###\")?;\n\n line = line.strip_suffix(\"###\")?;\n\n\n\n let port = line.strip_prefix(\"ADMIN_PORT:\")?;\n\n port.parse::<u16>().ok()\n\n}\n\n\n\npub async fn check_started(holochain: &mut Child) {\n\n let started = tokio::time::timeout(std::time::Duration::from_secs(1), holochain.wait()).await;\n\n if let Ok(status) = started {\n\n panic!(\"Holochain failed to start. status: {:?}\", status);\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/tests/test_utils/mod.rs", "rank": 17, "score": 156748.6586215541 }, { "content": "fn links_zome() -> InlineZome {\n\n InlineZome::new_unique(vec![])\n\n .callback(\"create_link\", move |api, base_target: BaseTarget| {\n\n let hash = api.create_link(CreateLinkInput::new(\n\n base_target.0,\n\n base_target.1,\n\n ().into(),\n\n ChainTopOrdering::default(),\n\n ))?;\n\n Ok(hash)\n\n })\n\n .callback(\n\n \"get_links\",\n\n move |api: BoxApi, base: EntryHash| -> InlineZomeResult<Vec<Vec<Link>>> {\n\n Ok(api.get_links(vec![GetLinksInput::new(base, None)])?)\n\n },\n\n )\n\n}\n\n\n\n/// A single link with an AgentPubKey for the base and target is committed by\n", "file_path": "crates/holochain/tests/agent_scaling/mod.rs", "rank": 18, "score": 154168.39201123544 }, { "content": "// Out(x..y) -> (y)\n\n// Out(x..y, z..)) -> (Out(z..), y)\n\n//\n\n// Take the awaiting dependencies and if there's a sequence from the start\n\n// then remove it and return the end of the sequence.\n\nfn find_consecutive(awaiting_deps: &mut Vec<u32>) -> Option<u32> {\n\n if awaiting_deps.len() == 1 {\n\n awaiting_deps.pop()\n\n } else {\n\n let 
last_consecutive_pos = awaiting_deps\n\n .iter()\n\n .zip(awaiting_deps.iter().skip(1))\n\n .position(|(n, delta)| {\n\n n.checked_add(1)\n\n .map(|n_prime| n_prime != *delta)\n\n .unwrap_or(true)\n\n });\n\n match last_consecutive_pos {\n\n Some(pos) => {\n\n let r = awaiting_deps.get(pos).copied();\n\n // Drop the consecutive seqs.\n\n drop(awaiting_deps.drain(..=pos));\n\n awaiting_deps.shrink_to_fit();\n\n r\n\n }\n", "file_path": "crates/holochain_types/src/db_cache.rs", "rank": 19, "score": 153443.36140153714 }, { "content": "fn spawn_output(holochain: &mut Child, config: oneshot::Sender<u16>) {\n\n let stdout = holochain.stdout.take();\n\n let stderr = holochain.stderr.take();\n\n tokio::task::spawn(async move {\n\n let mut needs_setup = true;\n\n let mut config = Some(config);\n\n if let Some(stdout) = stdout {\n\n let mut reader = BufReader::new(stdout).lines();\n\n while let Ok(Some(line)) = reader.next_line().await {\n\n if needs_setup {\n\n match check_sandbox(&line, &mut needs_setup) {\n\n (true, Some(port)) => {\n\n if let Some(config) = config.take() {\n\n config\n\n .send(port)\n\n .expect(\"Failed to send admin port from config\");\n\n }\n\n continue;\n\n }\n\n (true, None) => continue,\n", "file_path": "crates/hc_sandbox/src/run.rs", "rank": 20, "score": 152235.19859049405 }, { "content": "fn update_activity_inner(prev_state: &mut ActivityState, new_bounds: &ActivityBounds) {\n\n if new_bounds.integrated.is_some() {\n\n prev_state.bounds.integrated = new_bounds.integrated;\n\n }\n\n update_ready_to_integrate(prev_state, new_bounds.ready_to_integrate);\n\n}\n\n\n", "file_path": "crates/holochain_types/src/db_cache.rs", "rank": 21, "score": 151818.53977348233 }, { "content": "/// reset used both for requeuing into thread local, and for clear()\n\nfn reset(v: &mut Vec<u8>, do_truncate: bool) {\n\n crate::metrics::metric_push_pool_buf_release_size(v.capacity() as u64);\n\n if do_truncate && v.capacity() > POOL_BUF_SHRINK_TO_CAPACITY {\n\n 
v.truncate(POOL_BUF_SHRINK_TO_CAPACITY);\n\n v.shrink_to_fit();\n\n }\n\n v.resize(POOL_BUF_PRE_WRITE_SPACE, 0);\n\n}\n\n\n\nimpl PoolBuf {\n\n /// Create a new PoolBuf.\n\n pub fn new() -> Self {\n\n let inner = BUF_POOL.with(|p| {\n\n let mut p = p.borrow_mut();\n\n if p.is_empty() {\n\n vec![0; POOL_BUF_PRE_WRITE_SPACE]\n\n } else {\n\n p.remove(0)\n\n }\n\n });\n", "file_path": "crates/kitsune_p2p/types/src/tx2/tx2_utils/pool_buf.rs", "rank": 22, "score": 151746.6563059956 }, { "content": "/// An InlineZome with simple Create and Read operations\n\npub fn simple_create_read_zome() -> InlineZome {\n\n let entry_def = EntryDef::default_with_id(\"entrydef\");\n\n\n\n InlineZome::new_unique(vec![entry_def.clone()])\n\n .callback(\"create\", move |api, ()| {\n\n let entry_def_id: EntryDefId = entry_def.id.clone();\n\n let entry = Entry::app(().try_into().unwrap()).unwrap();\n\n let hash = api.create(CreateInput::new(\n\n entry_def_id,\n\n entry,\n\n ChainTopOrdering::default(),\n\n ))?;\n\n Ok(hash)\n\n })\n\n .callback(\"read\", |api, hash: HeaderHash| {\n\n api.get(vec![GetInput::new(hash.into(), GetOptions::default())])\n\n .map(|e| e.into_iter().next().unwrap())\n\n .map_err(Into::into)\n\n })\n\n}\n", "file_path": "crates/holochain/src/test_utils/inline_zomes.rs", "rank": 23, "score": 151555.73786772453 }, { "content": "/// Updates the ready to integrate state of an activity.\n\n/// This function is a bit complex but is heavily tested and maintains the\n\n/// chain activity can only be set to ready if it makes sense to.\n\nfn update_ready_to_integrate(prev_state: &mut ActivityState, new_ready: Option<u32>) {\n\n // There is a new chain item that is ready for integration.\n\n if let Some(new_ready) = new_ready {\n\n match prev_state {\n\n // Nothing is integrated or currently ready to integrate but there could\n\n // be other chain items that are awaiting dependencies.\n\n ActivityState {\n\n bounds:\n\n ActivityBounds {\n\n integrated: None,\n\n 
ready_to_integrate: ready @ None,\n\n },\n\n awaiting_deps,\n\n } => {\n\n // (0) -> Ready(0)\n\n //\n\n // If we have no state and new_ready is zero\n\n // then the new ready_to_integrate is set to zero.\n\n if new_ready == 0 {\n\n *ready = Some(new_ready);\n", "file_path": "crates/holochain_types/src/db_cache.rs", "rank": 24, "score": 148513.2425534654 }, { "content": "/// Get the zome information.\n\n/// There are no inputs to [ `zome_info` ].\n\n///\n\n/// Zome information includes zome name, id and properties.\n\n///\n\n/// In general any holochain compatible wasm can be compiled and run in any zome so the zome info\n\n/// needs to be looked up at runtime to e.g. know where to send/receive `call_remote` rpc calls to.\n\npub fn zome_info() -> ExternResult<ZomeInfo> {\n\n HDK.with(|h| h.borrow().zome_info(()))\n\n}\n\n\n", "file_path": "crates/hdk/src/info.rs", "rank": 25, "score": 147650.66475740587 }, { "content": "/// Write [`ConductorConfig`] to [`CONDUCTOR_CONFIG`]\n\npub fn write_config(mut path: PathBuf, config: &ConductorConfig) -> PathBuf {\n\n path.push(CONDUCTOR_CONFIG);\n\n std::fs::write(path.clone(), serde_yaml::to_string(&config).unwrap()).unwrap();\n\n path\n\n}\n\n\n", "file_path": "crates/hc_sandbox/src/config.rs", "rank": 26, "score": 147122.69404242016 }, { "content": "pub fn put(txn: &mut Transaction, wasm: DnaWasmHashed) -> StateMutationResult<()> {\n\n mutations::insert_wasm(txn, wasm)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use holo_hash::HasHash;\n\n use holochain_sqlite::prelude::DatabaseResult;\n\n use holochain_types::dna::wasm::DnaWasm;\n\n\n\n #[tokio::test(flavor = \"multi_thread\")]\n\n async fn wasm_store_round_trip() -> DatabaseResult<()> {\n\n use holochain_sqlite::prelude::*;\n\n observability::test_run().ok();\n\n\n\n // all the stuff needed to have a WasmBuf\n\n let env = crate::test_utils::test_wasm_env();\n\n\n\n // a wasm\n", "file_path": "crates/holochain_state/src/wasm.rs", "rank": 27, "score": 
147122.69404242016 }, { "content": "fn insert_locally_validated_op(txn: &mut Transaction, op: DhtOpHashed) -> StateMutationResult<()> {\n\n // These checks are redundant but cheap and future proof this function\n\n // against anyone using it with private entries.\n\n if is_private_store_entry(op.as_content()) {\n\n return Ok(());\n\n }\n\n let op = filter_private_entry(op)?;\n\n let hash = op.as_hash();\n\n\n\n let dependency = get_dependency(op.get_type(), &op.header());\n\n\n\n // Insert the op.\n\n insert_op(txn, &op)?;\n\n // Set the status to valid because we authored it.\n\n set_validation_status(txn, hash, holochain_zome_types::ValidationStatus::Valid)?;\n\n // Set the stage to awaiting integration.\n\n if let Dependency::Null = dependency {\n\n // This set the validation stage to pending which is correct when\n\n // it's integrated.\n\n set_validation_stage(txn, hash, ValidationLimboStatus::Pending)?;\n\n set_when_integrated(txn, hash, holochain_zome_types::Timestamp::now())?;\n\n } else {\n\n set_validation_stage(txn, hash, ValidationLimboStatus::AwaitingIntegration)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/holochain_state/src/integrate.rs", "rank": 28, "score": 146983.0128236887 }, { "content": "fn tx_p2p_put(txn: &mut Transaction, record: P2pRecord) -> DatabaseResult<()> {\n\n txn.execute(\n\n sql_p2p_agent_store::INSERT,\n\n named_params! 
{\n\n \":agent\": &record.agent.0,\n\n\n\n \":encoded\": &record.encoded,\n\n\n\n \":signed_at_ms\": &record.signed_at_ms,\n\n \":expires_at_ms\": &record.expires_at_ms,\n\n \":storage_center_loc\": &record.storage_center_loc,\n\n\n\n \":is_active\": &record.is_active,\n\n\n\n \":storage_start_loc\": &record.storage_start_loc,\n\n \":storage_end_loc\": &record.storage_end_loc,\n\n },\n\n )?;\n\n Ok(())\n\n}\n", "file_path": "crates/holochain_sqlite/src/db/p2p_agent_store.rs", "rank": 29, "score": 146983.0128236887 }, { "content": "#[hdk_extern]\n\nfn zome_info(_: ()) -> ExternResult<ZomeInfo> {\n\n hdk::prelude::zome_info()\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/zome_info/src/lib.rs", "rank": 30, "score": 146415.47328613757 }, { "content": "fn check_sandbox(line: &str, needs_setup: &mut bool) -> (bool, Option<u16>) {\n\n if let Some(line) = line.strip_prefix(\"###\") {\n\n if let Some(line) = line.strip_suffix(\"###\") {\n\n match line {\n\n \"HOLOCHAIN_SANDBOX\" => tracing::info!(\"Found config\"),\n\n \"HOLOCHAIN_SANDBOX_END\" => *needs_setup = false,\n\n _ => {\n\n if let Some(v) = line.strip_prefix(\"ADMIN_PORT:\") {\n\n if let Ok(port) = v.parse::<u16>() {\n\n return (true, Some(port));\n\n }\n\n }\n\n }\n\n }\n\n return (true, None);\n\n }\n\n }\n\n (false, None)\n\n}\n", "file_path": "crates/hc_sandbox/src/run.rs", "rank": 31, "score": 146072.57395246415 }, { "content": "pub fn write_config(mut path: PathBuf, config: &ConductorConfig) -> PathBuf {\n\n path.push(\"conductor_config.yml\");\n\n std::fs::write(path.clone(), serde_yaml::to_string(&config).unwrap()).unwrap();\n\n path\n\n}\n\n\n\n#[instrument(skip(response))]\n\npub async fn check_timeout<T>(\n\n response: impl Future<Output = Result<T, WebsocketError>>,\n\n timeout_ms: u64,\n\n) -> T {\n\n check_timeout_named(\"<unnamed>\", response, timeout_ms).await\n\n}\n\n\n\n#[instrument(skip(response))]\n\nasync fn check_timeout_named<T>(\n\n name: &'static str,\n\n response: impl 
Future<Output = Result<T, WebsocketError>>,\n\n timeout_millis: u64,\n\n) -> T {\n", "file_path": "crates/holochain/tests/test_utils/mod.rs", "rank": 32, "score": 145537.90955710865 }, { "content": "pub fn put(txn: &mut Transaction, dna_def: DnaDef) -> StateMutationResult<()> {\n\n mutations::insert_dna_def(txn, &DnaDefHashed::from_content_sync(dna_def))\n\n}\n", "file_path": "crates/holochain_state/src/dna_def.rs", "rank": 33, "score": 145537.90955710865 }, { "content": "/// Insert a [`DhtOp`] into the database.\n\npub fn insert_op(txn: &mut Transaction, op: &DhtOpHashed) -> StateMutationResult<()> {\n\n let hash = op.as_hash();\n\n let op = op.as_content();\n\n let op_light = op.to_light();\n\n let header = op.header();\n\n let timestamp = header.timestamp();\n\n let signature = op.signature().clone();\n\n if let Some(entry) = op.entry() {\n\n let entry_hash = header\n\n .entry_hash()\n\n .ok_or_else(|| DhtOpError::HeaderWithoutEntry(header.clone()))?;\n\n\n\n insert_entry(txn, entry_hash, entry)?;\n\n }\n\n let dependency = get_dependency(op_light.get_type(), &header);\n\n let header_hashed = HeaderHashed::with_pre_hashed(header, op_light.header_hash().to_owned());\n\n let header_hashed = SignedHeaderHashed::with_presigned(header_hashed, signature);\n\n let op_order = OpOrder::new(op_light.get_type(), header_hashed.header().timestamp());\n\n insert_header(txn, &header_hashed)?;\n\n insert_op_lite(txn, &op_light, hash, &op_order, &timestamp)?;\n\n set_dependency(txn, hash, dependency)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/holochain_state/src/mutations.rs", "rank": 34, "score": 145537.90955710865 }, { "content": "/// Insert a [`DnaWasm`] into the database.\n\npub fn insert_wasm(txn: &mut Transaction, wasm: DnaWasmHashed) -> StateMutationResult<()> {\n\n let (wasm, hash) = wasm.into_inner();\n\n sql_insert!(txn, Wasm, {\n\n \"hash\": hash,\n\n \"blob\": wasm.code.as_ref(),\n\n })?;\n\n Ok(())\n\n}\n\n\n", "file_path": 
"crates/holochain_state/src/mutations.rs", "rank": 35, "score": 145537.90955710865 }, { "content": "/// Unlock the chain by dropping all records in the lock table.\n\n/// This should be done very carefully as it can e.g. invalidate a shared\n\n/// countersigning session that is inflight.\n\npub fn unlock_chain(txn: &mut Transaction, author: &AgentPubKey) -> StateMutationResult<()> {\n\n txn.execute(\"DELETE FROM ChainLock WHERE author = ?\", [author])?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/holochain_state/src/mutations.rs", "rank": 36, "score": 145537.90955710865 }, { "content": "/// Read the [`ConductorConfig`] from the file [`CONDUCTOR_CONFIG`] in the provided path.\n\npub fn read_config(mut path: PathBuf) -> anyhow::Result<Option<ConductorConfig>> {\n\n path.push(CONDUCTOR_CONFIG);\n\n\n\n match std::fs::read_to_string(path) {\n\n Ok(yaml) => Ok(Some(serde_yaml::from_str(&yaml)?)),\n\n Err(_) => Ok(None),\n\n }\n\n}\n", "file_path": "crates/hc_sandbox/src/config.rs", "rank": 37, "score": 144430.26433666132 }, { "content": "/// Load sandbox paths from the `.hc` file.\n\npub fn load(mut hc_dir: PathBuf) -> anyhow::Result<Vec<PathBuf>> {\n\n let mut paths = Vec::new();\n\n hc_dir.push(\".hc\");\n\n if hc_dir.exists() {\n\n let existing = std::fs::read_to_string(hc_dir)?;\n\n for sandbox in existing.lines() {\n\n let path = PathBuf::from(sandbox);\n\n let mut config_path = path.clone();\n\n config_path.push(CONDUCTOR_CONFIG);\n\n if config_path.exists() {\n\n paths.push(path);\n\n } else {\n\n tracing::error!(\"Failed to load path {} from existing .hc\", path.display());\n\n }\n\n }\n\n }\n\n Ok(paths)\n\n}\n\n\n", "file_path": "crates/hc_sandbox/src/save.rs", "rank": 38, "score": 144430.26433666132 }, { "content": "/// Unset withhold publish for a [`DhtOp`].\n\npub fn unset_withhold_publish(txn: &mut Transaction, hash: &DhtOpHash) -> StateMutationResult<()> {\n\n dht_op_update!(txn, hash, {\n\n \"withhold_publish\": Null,\n\n })?;\n\n Ok(())\n\n}\n\n\n", 
"file_path": "crates/holochain_state/src/mutations.rs", "rank": 39, "score": 144007.67982733197 }, { "content": "/// Set withhold publish for a [`DhtOp`].\n\npub fn set_withhold_publish(txn: &mut Transaction, hash: &DhtOpHash) -> StateMutationResult<()> {\n\n dht_op_update!(txn, hash, {\n\n \"withhold_publish\": true,\n\n })?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/holochain_state/src/mutations.rs", "rank": 40, "score": 144007.67982733197 }, { "content": "fn map_outcome(\n\n outcome: Result<app_validation_workflow::Outcome, AppValidationError>,\n\n) -> WorkflowResult<()> {\n\n match outcome.map_err(SourceChainError::other)? {\n\n app_validation_workflow::Outcome::Accepted => {}\n\n app_validation_workflow::Outcome::Rejected(reason) => {\n\n return Err(SourceChainError::InvalidCommit(reason).into());\n\n }\n\n // when the wasm is being called directly in a zome invocation any\n\n // state other than valid is not allowed for new entries\n\n // e.g. we require that all dependencies are met when committing an\n\n // entry to a local source chain\n\n // this is different to the case where we are validating data coming in\n\n // from the network where unmet dependencies would need to be\n\n // rescheduled to attempt later due to partitions etc.\n\n app_validation_workflow::Outcome::AwaitingDeps(hashes) => {\n\n return Err(SourceChainError::InvalidCommit(format!(\"{:?}\", hashes)).into());\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "crates/holochain/src/core/workflow/call_zome_workflow.rs", "rank": 41, "score": 143331.53074562625 }, { "content": "/// Insert a [`DnaDef`] into the database.\n\npub fn insert_dna_def(txn: &mut Transaction, dna_def: &DnaDefHashed) -> StateMutationResult<()> {\n\n let hash = dna_def.as_hash();\n\n let dna_def = dna_def.as_content();\n\n sql_insert!(txn, DnaDef, {\n\n \"hash\": hash,\n\n \"blob\": to_blob(dna_def)?,\n\n })?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/holochain_state/src/mutations.rs", "rank": 42, "score": 
142529.23552262617 }, { "content": "pub fn spawn_output(holochain: &mut Child) -> tokio::sync::oneshot::Receiver<u16> {\n\n let stdout = holochain.stdout.take();\n\n let stderr = holochain.stderr.take();\n\n let (tx, rx) = tokio::sync::oneshot::channel();\n\n let mut tx = Some(tx);\n\n tokio::task::spawn(async move {\n\n if let Some(stdout) = stdout {\n\n let mut reader = BufReader::new(stdout).lines();\n\n while let Ok(Some(line)) = reader.next_line().await {\n\n trace!(\"holochain bin stdout: {}\", line);\n\n tx = tx\n\n .take()\n\n .and_then(|tx| match check_line_for_admin_port(&line) {\n\n Some(port) => {\n\n let _ = tx.send(port);\n\n None\n\n }\n\n None => Some(tx),\n\n });\n\n }\n", "file_path": "crates/holochain/tests/test_utils/mod.rs", "rank": 43, "score": 141982.2202112479 }, { "content": "/// Remove sandboxes by their index in the file.\n\n/// You can get the index by calling [`load`].\n\n/// If no sandboxes are passed in then all are deleted.\n\n/// If all sandboxes are deleted the `.hc` file will be removed.\n\npub fn clean(mut hc_dir: PathBuf, sandboxes: Vec<usize>) -> anyhow::Result<()> {\n\n let existing = load(hc_dir.clone())?;\n\n let sandboxes_len = sandboxes.len();\n\n let to_remove: Vec<_> = if sandboxes.is_empty() {\n\n existing.iter().collect()\n\n } else {\n\n sandboxes\n\n .into_iter()\n\n .filter_map(|i| existing.get(i))\n\n .collect()\n\n };\n\n let to_remove_len = to_remove.len();\n\n for p in to_remove {\n\n if p.exists() && p.is_dir() {\n\n if let Err(e) = std::fs::remove_dir_all(p) {\n\n tracing::error!(\"Failed to remove {} because {:?}\", p.display(), e);\n\n }\n\n }\n\n }\n\n if sandboxes_len == 0 || sandboxes_len == to_remove_len {\n", "file_path": "crates/hc_sandbox/src/save.rs", "rank": 44, "score": 141982.2202112479 }, { "content": "#[test_case(vec![] => (None, vec![]))]\n\n#[test_case(vec![0] => (Some(0), vec![]))]\n\n#[test_case(vec![0, 1] => (Some(1), vec![]))]\n\n#[test_case(vec![0, 1, 2] => (Some(2), 
vec![]))]\n\n#[test_case(vec![0, 1, 3] => (Some(1), vec![3]))]\n\n#[test_case(vec![0, 1, 3, 4] => (Some(1), vec![3, 4]))]\n\n#[test_case(vec![0, 3, 4] => (Some(0), vec![3, 4]))]\n\n#[test_case(vec![1, 3, 4] => (Some(1), vec![3, 4]))]\n\nfn find_consecutive_test(mut awaiting_deps: Vec<u32>) -> (Option<u32>, Vec<u32>) {\n\n let r = find_consecutive(&mut awaiting_deps);\n\n (r, awaiting_deps)\n\n}\n", "file_path": "crates/holochain_types/src/db_cache/tests.rs", "rank": 45, "score": 141315.25012157313 }, { "content": "/// Helper to create a zome invocation for tests\n\npub fn new_zome_call<P, Z: Into<ZomeName>>(\n\n cell_id: &CellId,\n\n func: &str,\n\n payload: P,\n\n zome: Z,\n\n) -> Result<ZomeCall, SerializedBytesError>\n\nwhere\n\n P: serde::Serialize + std::fmt::Debug,\n\n{\n\n Ok(ZomeCall {\n\n cell_id: cell_id.clone(),\n\n zome_name: zome.into(),\n\n cap_secret: Some(CapSecretFixturator::new(Unpredictable).next().unwrap()),\n\n fn_name: func.into(),\n\n payload: ExternIO::encode(payload)?,\n\n provenance: cell_id.agent_pubkey().clone(),\n\n })\n\n}\n\n\n", "file_path": "crates/holochain/src/test_utils.rs", "rank": 46, "score": 140483.531555739 }, { "content": "/// Parse a list of dnas.\n\n/// If paths are directories then each directory\n\n/// will be searched for the first file that matches\n\n/// `*.dna`.\n\npub fn parse_dnas(mut dnas: Vec<PathBuf>) -> anyhow::Result<Vec<PathBuf>> {\n\n if dnas.is_empty() {\n\n dnas.push(std::env::current_dir()?);\n\n }\n\n for dna in dnas.iter_mut() {\n\n if dna.is_dir() {\n\n let file_path = search_for_dna(dna)?;\n\n *dna = file_path;\n\n }\n\n ensure!(\n\n dna.file_name()\n\n .map(|f| f.to_string_lossy().ends_with(\".dna\"))\n\n .unwrap_or(false),\n\n \"File {} is not a valid dna file name: (e.g. 
my-dna.dna)\",\n\n dna.display()\n\n );\n\n }\n\n Ok(dnas)\n\n}\n\n\n", "file_path": "crates/hc_sandbox/src/bundles.rs", "rank": 47, "score": 140397.4357259364 }, { "content": "/// Save all sandboxes to the `.hc` file in the `hc_dir` directory.\n\npub fn save(mut hc_dir: PathBuf, paths: Vec<PathBuf>) -> anyhow::Result<()> {\n\n use std::io::Write;\n\n std::fs::create_dir_all(&hc_dir)?;\n\n hc_dir.push(\".hc\");\n\n let mut file = std::fs::OpenOptions::new()\n\n .append(true)\n\n .create(true)\n\n .open(hc_dir)?;\n\n\n\n for path in paths {\n\n writeln!(file, \"{}\", path.display())?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/hc_sandbox/src/save.rs", "rank": 48, "score": 140397.4357259364 }, { "content": "fn create_link_zomes_to_invoke(\n\n create_link: &CreateLink,\n\n dna_def: &DnaDef,\n\n) -> AppValidationOutcome<ZomesToInvoke> {\n\n let zome = zome_id_to_zome(create_link.zome_id, dna_def)?;\n\n Ok(ZomesToInvoke::One(zome))\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 49, "score": 139299.3363103219 }, { "content": "/// Insert a p2p record from within a write transaction.\n\npub fn p2p_put_single(txn: &mut Transaction<'_>, signed: &AgentInfoSigned) -> DatabaseResult<()> {\n\n let record = P2pRecord::from_signed(signed)?;\n\n tx_p2p_put(txn, record)\n\n}\n\n\n", "file_path": "crates/holochain_sqlite/src/db/p2p_agent_store.rs", "rank": 50, "score": 138407.56192511172 }, { "content": "fn get_ops(txn: &mut Transaction<'_>) -> HashMap<Arc<DhtOpHash>, DhtOpHashed> {\n\n txn.prepare(\n\n \"\n\n SELECT DhtOp.hash, DhtOp.type AS dht_type,\n\n Header.blob AS header_blob, Entry.blob AS entry_blob\n\n FROM DHtOp\n\n JOIN Header ON DhtOp.header_hash = Header.hash\n\n LEFT JOIN Entry ON Header.entry_hash = Entry.hash\n\n \",\n\n )\n\n .unwrap()\n\n .query_map([], |row| {\n\n let header = from_blob::<SignedHeader>(row.get(\"header_blob\")?).unwrap();\n\n let op_type: DhtOpType = row.get(\"dht_type\")?;\n\n let hash: 
DhtOpHash = row.get(\"hash\")?;\n\n // Check the entry isn't private before gossiping it.\n\n let e: Option<Vec<u8>> = row.get(\"entry_blob\")?;\n\n let entry = e.map(|entry| from_blob::<Entry>(entry).unwrap());\n\n let op = DhtOp::from_type(op_type, header, entry).unwrap();\n\n let op = DhtOpHashed::with_pre_hashed(op, hash.clone());\n\n Ok((Arc::new(hash), op))\n\n })\n\n .unwrap()\n\n .collect::<Result<HashMap<_, _>, _>>()\n\n .unwrap()\n\n}\n\n\n", "file_path": "crates/holochain/src/test_utils/network_simulation.rs", "rank": 51, "score": 138407.56192511172 }, { "content": "fn check_already_tried(inner: &mut Inner, agent: &Arc<KitsuneAgent>) -> bool {\n\n if inner.already_tried.contains(agent) {\n\n true\n\n } else {\n\n inner.already_tried.insert(agent.clone());\n\n false\n\n }\n\n}\n\n\n", "file_path": "crates/kitsune_p2p/kitsune_p2p/src/spawn/actor/space/rpc_multi_logic.rs", "rank": 52, "score": 138379.59758512737 }, { "content": "fn set_integrated(db: &DbWrite<DbKindDht>, u: &mut Unstructured, op_hash: &DhtOpHash) {\n\n db.test_commit(|txn| {\n\n mutations::set_validation_stage(txn, op_hash, ValidationLimboStatus::Pending).unwrap();\n\n mutations::set_when_integrated(txn, op_hash, Timestamp::arbitrary(u).unwrap()).unwrap();\n\n });\n\n}\n\n\n", "file_path": "crates/holochain_state/tests/cache_tests/mod.rs", "rank": 53, "score": 137025.10266891267 }, { "content": "/// Decode message-pack data from given reader into an owned item.\n\n/// You may wish to first wrap your reader in a BufReader.\n\npub fn rmp_decode<R, D>(r: &mut R) -> Result<D, std::io::Error>\n\nwhere\n\n R: std::io::Read,\n\n for<'de> D: Sized + serde::Deserialize<'de>,\n\n{\n\n let mut de = rmp_serde::decode::Deserializer::new(r);\n\n D::deserialize(&mut de).map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))\n\n}\n\n\n", "file_path": "crates/kitsune_p2p/types/src/codec.rs", "rank": 54, "score": 136112.48569050682 }, { "content": "/// Creates a db with some data in it then corrupts 
the db.\n\nfn create_corrupt_db<Kind: DbKindT>(kind: Kind, u: &mut arbitrary::Unstructured) -> TempDir {\n\n let testdir = tempfile::Builder::new()\n\n .prefix(\"corrupt_source_chain\")\n\n .tempdir()\n\n .unwrap();\n\n let path = testdir.path().join(kind.filename());\n\n std::fs::create_dir_all(path.parent().unwrap()).unwrap();\n\n let mut conn = Connection::open(&path).unwrap();\n\n holochain_sqlite::schema::SCHEMA_CELL\n\n .initialize(&mut conn, Some(kind.kind()))\n\n .unwrap();\n\n let op = DhtOpHashed::from_content_sync(DhtOp::RegisterAgentActivity(\n\n Signature::arbitrary(u).unwrap(),\n\n Header::arbitrary(u).unwrap(),\n\n ));\n\n let mut txn = conn\n\n .transaction_with_behavior(holochain_sqlite::rusqlite::TransactionBehavior::Exclusive)\n\n .unwrap();\n\n mutations_helpers::insert_valid_integrated_op(&mut txn, &op).unwrap();\n\n txn.commit().unwrap();\n\n conn.close().unwrap();\n\n corrupt_db(path.as_ref());\n\n testdir\n\n}\n", "file_path": "crates/holochain_state/tests/corrupt_db/mod.rs", "rank": 55, "score": 135959.51779969828 }, { "content": "#[cfg(feature = \"full-dna-def\")]\n\npub fn random_uid() -> String {\n\n nanoid::nanoid!()\n\n}\n\n\n\n#[cfg(feature = \"full-dna-def\")]\n\nimpl DnaDefBuilder {\n\n /// Provide a random UID\n\n pub fn random_uid(&mut self) -> &mut Self {\n\n self.uid = Some(random_uid());\n\n self\n\n }\n\n}\n\n\n\n/// A DnaDef paired with its DnaHash\n\n#[cfg(feature = \"full-dna-def\")]\n\npub type DnaDefHashed = HoloHashed<DnaDef>;\n\n\n\n#[cfg(feature = \"full-dna-def\")]\n\nimpl_hashable_content!(DnaDef, Dna);\n", "file_path": "crates/holochain_zome_types/src/dna_def.rs", "rank": 56, "score": 135912.60963314926 }, { "content": "#[hdk_extern]\n\npub fn zome_info(_: ()) -> ExternResult<ZomeInfo> {\n\n hdk::prelude::zome_info()\n\n}", "file_path": "crates/test_utils/wasm/wasm_workspace/entry_defs/src/lib.rs", "rank": 57, "score": 135764.00299349072 }, { "content": "fn register_agent_activity(mut a: TestData) -> (Vec<Db>, 
Vec<Db>, &'static str) {\n\n a.link_add.header_seq = 5;\n\n let dep = DhtOp::RegisterAgentActivity(a.signature.clone(), a.link_add.clone().into());\n\n let hash = HeaderHash::with_data_sync(&Header::CreateLink(a.link_add.clone()));\n\n let mut new_header = a.link_add.clone();\n\n new_header.prev_header = hash;\n\n new_header.header_seq += 1;\n\n let op = DhtOp::RegisterAgentActivity(a.signature.clone(), new_header.clone().into());\n\n let pre_state = vec![Db::Integrated(dep.clone()), Db::IntQueue(op.clone())];\n\n let expect = vec![\n\n Db::Integrated(dep.clone()),\n\n Db::MetaActivity(a.link_add.clone().into()),\n\n Db::Integrated(op.clone()),\n\n Db::MetaActivity(new_header.clone().into()),\n\n ];\n\n (pre_state, expect, \"register agent activity\")\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/tests.rs", "rank": 58, "score": 134577.05854349927 }, { "content": "pub fn entry_creation_zomes_to_invoke(\n\n header: &EntryCreationHeader,\n\n dna_def: &DnaDef,\n\n) -> AppValidationOutcome<ZomesToInvoke> {\n\n match header {\n\n EntryCreationHeader::Create(Create {\n\n entry_type: EntryType::App(aet),\n\n ..\n\n })\n\n | EntryCreationHeader::Update(Update {\n\n entry_type: EntryType::App(aet),\n\n ..\n\n }) => {\n\n let zome = zome_id_to_zome(aet.zome_id(), dna_def)?;\n\n Ok(ZomesToInvoke::One(zome))\n\n }\n\n _ => Ok(ZomesToInvoke::All),\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 59, "score": 134526.9879200161 }, { "content": "/// Separate gossip into chunks to keep messages under the max size.\n\nfn into_chunks(gossip: &mut Vec<ShardedGossipWire>, ops: Vec<KOp>, complete: u8) {\n\n let mut chunk = Vec::with_capacity(ops.len());\n\n let mut size = 0;\n\n\n\n // If there are no ops missing we send back an empty final chunk\n\n // so the other side knows we're done.\n\n if ops.is_empty() {\n\n gossip.push(ShardedGossipWire::missing_ops(\n\n Vec::with_capacity(0),\n\n 
complete,\n\n ));\n\n }\n\n\n\n for op in ops {\n\n // Bytes for this op.\n\n let bytes = op.size();\n\n\n\n // Check if this op will fit without going over the max.\n\n if size + bytes <= MAX_SEND_BUF_BYTES {\n\n // Op will fit so add it to the chunk and update the size.\n", "file_path": "crates/kitsune_p2p/kitsune_p2p/src/gossip/sharded_gossip/ops.rs", "rank": 60, "score": 133239.12375395512 }, { "content": "/// A fixture example dna for unit testing.\n\npub fn fake_dna_zomes(uid: &str, zomes: Vec<(ZomeName, DnaWasm)>) -> DnaFile {\n\n fake_dna_zomes_named(uid, \"test\", zomes)\n\n}\n\n\n", "file_path": "crates/holochain_types/src/test_utils.rs", "rank": 61, "score": 133194.2785765563 }, { "content": "/// A fixture CapSecret for unit testing.\n\npub fn fake_cap_secret() -> CapSecret {\n\n [0; CAP_SECRET_BYTES].into()\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 62, "score": 132036.75017353226 }, { "content": "#[cfg(feature = \"packing\")]\n\npub fn prune_path<P: AsRef<Path>>(mut path: PathBuf, subpath: P) -> UnpackingResult<PathBuf> {\n\n if path.ends_with(&subpath) {\n\n for _ in subpath.as_ref().components() {\n\n let _ = path.pop();\n\n }\n\n Ok(path)\n\n } else {\n\n Err(UnpackingError::ManifestPathSuffixMismatch(\n\n path,\n\n subpath.as_ref().to_owned(),\n\n ))\n\n }\n\n}\n", "file_path": "crates/mr_bundle/src/util.rs", "rank": 63, "score": 131674.5677642687 }, { "content": "/// Construct a bound async read/write memory channel\n\npub fn bound_async_mem_channel(\n\n max_bytes: usize,\n\n maybe_active: Option<&Active>,\n\n) -> (\n\n Box<dyn futures::io::AsyncWrite + 'static + Send + Unpin>,\n\n Box<dyn futures::io::AsyncRead + 'static + Send + Unpin>,\n\n) {\n\n let buf = Vec::with_capacity(max_bytes);\n\n\n\n let inner = Arc::new(Share::new(MemInner {\n\n buf,\n\n max_bytes,\n\n closed: false,\n\n want_read_waker: None,\n\n want_write_waker: None,\n\n }));\n\n\n\n if let Some(active) = maybe_active {\n\n let k_inner = 
inner.clone();\n\n active.register_kill_cb(move || {\n", "file_path": "crates/kitsune_p2p/types/src/tx2/tx2_utils/mem_chan.rs", "rank": 64, "score": 131307.39441999354 }, { "content": "/// Encode a serde::Serialize item as message-pack data to given writer.\n\n/// You may wish to first wrap your writer in a BufWriter.\n\npub fn rmp_encode<W, S>(write: &mut W, item: S) -> Result<(), std::io::Error>\n\nwhere\n\n W: std::io::Write,\n\n S: serde::Serialize,\n\n{\n\n let mut se = rmp_serde::encode::Serializer::new(write)\n\n .with_struct_map()\n\n .with_string_variants();\n\n item.serialize(&mut se)\n\n .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/kitsune_p2p/types/src/codec.rs", "rank": 65, "score": 131218.0816292628 }, { "content": "/// Another fixture AgentPubKey for unit testing.\n\n/// NB: This must match up with AgentPubKeyFixturator's Predictable curve\n\npub fn fake_agent_pubkey_2() -> AgentPubKey {\n\n AgentPubKey::try_from(\"uhCAk39SDf7rynCg5bYgzroGaOJKGKrloI1o57Xao6S-U5KNZ0dUH\").unwrap()\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 66, "score": 130208.31035475791 }, { "content": "/// A fixture AgentPubKey for unit testing.\n\n/// NB: This must match up with AgentPubKeyFixturator's Predictable curve\n\npub fn fake_agent_pubkey_1() -> AgentPubKey {\n\n AgentPubKey::try_from(\"uhCAkJCuynkgVdMn_bzZ2ZYaVfygkn0WCuzfFspczxFnZM1QAyXoo\").unwrap()\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 67, "score": 130208.31035475791 }, { "content": "#[hdk_extern]\n\nfn set_access(_: ()) -> ExternResult<()> {\n\n let mut functions: GrantedFunctions = BTreeSet::new();\n\n functions.insert((hdk::prelude::zome_info()?.name, \"call_info\".into()));\n\n functions.insert((hdk::prelude::zome_info()?.name, \"remote_call_info\".into()));\n\n create_cap_grant(CapGrantEntry {\n\n tag: \"\".into(),\n\n // empty access converts to unrestricted\n\n 
access: ().into(),\n\n functions,\n\n })?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/zome_info/src/lib.rs", "rank": 68, "score": 130208.31035475791 }, { "content": "/// WIP: Fact: The headers form a valid SourceChain\n\npub fn valid_chain() -> Facts<'static, Header> {\n\n facts![ValidChainFact::default(),]\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/header/facts.rs", "rank": 69, "score": 127933.41358439896 }, { "content": "/// A named dna for unit testing.\n\npub fn fake_dna_zomes_named(uid: &str, name: &str, zomes: Vec<(ZomeName, DnaWasm)>) -> DnaFile {\n\n let mut dna = DnaDef {\n\n name: name.to_string(),\n\n properties: YamlProperties::new(serde_yaml::from_str(\"p: hi\").unwrap())\n\n .try_into()\n\n .unwrap(),\n\n uid: uid.to_string(),\n\n origin_time: Timestamp::HOLOCHAIN_EPOCH,\n\n zomes: Vec::new(),\n\n };\n\n tokio_helper::block_forever_on(async move {\n\n let mut wasm_code = Vec::new();\n\n for (zome_name, wasm) in zomes {\n\n let wasm = crate::dna::wasm::DnaWasmHashed::from_content(wasm).await;\n\n let (wasm, wasm_hash) = wasm.into_inner();\n\n dna.zomes\n\n .push((zome_name, ZomeDef::Wasm(WasmZome { wasm_hash })));\n\n wasm_code.push(wasm);\n\n }\n\n DnaFile::new(dna, wasm_code).await\n", "file_path": "crates/holochain_types/src/test_utils.rs", "rank": 70, "score": 127316.01221205937 }, { "content": "pub fn is_new_entry_header() -> Facts<'static, Header> {\n\n facts![or(\n\n \"is NewEntryHeader\",\n\n is_of_type(HeaderType::Create),\n\n is_of_type(HeaderType::Update)\n\n )]\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/header/facts.rs", "rank": 71, "score": 126104.9737656246 }, { "content": "pub fn header_and_entry_match() -> Facts<'static, Pair> {\n\n facts![\n\n brute(\n\n \"Header type matches Entry existence\",\n\n |(header, entry): &Pair| {\n\n let has_header = header.entry_data().is_some();\n\n let has_entry = entry.is_some();\n\n has_header == has_entry\n\n }\n\n ),\n\n mapped(\n\n 
\"If there is entry data, the header must point to it\",\n\n |pair: &Pair| {\n\n if let Some(entry) = &pair.1 {\n\n // NOTE: this could be a `lens` if the previous check were short-circuiting,\n\n // but it is possible that this check will run even if the previous check fails,\n\n // so use a prism instead.\n\n facts![prism(\n\n \"header's entry hash\",\n\n |pair: &mut Pair| pair.0.entry_data_mut().map(|(hash, _)| hash),\n", "file_path": "crates/holochain_zome_types/src/element/facts.rs", "rank": 72, "score": 126104.9737656246 }, { "content": "/// Fact: The header must be a NewEntryHeader\n\npub fn new_entry_header() -> Facts<'static, Header> {\n\n facts![brute(\"Is a NewEntryHeader\", |h: &Header| {\n\n matches!(h.header_type(), HeaderType::Create | HeaderType::Update)\n\n }),]\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_valid_chain_fact() {\n\n let mut u = Unstructured::new(&crate::NOISE);\n\n\n\n let chain = build_seq(&mut u, 5, valid_chain());\n\n check_seq(chain.as_slice(), valid_chain()).unwrap();\n\n\n\n let hashes: Vec<_> = chain\n\n .iter()\n", "file_path": "crates/holochain_zome_types/src/header/facts.rs", "rank": 73, "score": 126104.9737656246 }, { "content": "#[deprecated = \"Raising visibility into a change that needs to happen after `use_existing` is implemented\"]\n\npub fn we_must_remember_to_rework_cell_panic_handling_after_implementing_use_existing_cell_resolution(\n\n) {\n\n}\n\n\n\n/// The answer to the question:\n\n/// \"how do we concretely assign DNAs to the open roles of this App?\"\n\n/// Includes the DNAs selected to fill the roles and the details of the role assignments.\n\n// TODO: rework, make fields private\n\n#[allow(missing_docs)]\n\n#[derive(PartialEq, Eq, Debug)]\n\npub struct AppRoleResolution {\n\n pub agent: AgentPubKey,\n\n pub dnas_to_register: Vec<(DnaFile, Option<MembraneProof>)>,\n\n pub role_assignments: Vec<(AppRoleId, 
AppRoleAssignment)>,\n\n}\n\n\n\n#[allow(missing_docs)]\n\nimpl AppRoleResolution {\n\n pub fn new(agent: AgentPubKey) -> Self {\n\n Self {\n", "file_path": "crates/holochain_types/src/app/app_bundle.rs", "rank": 74, "score": 124848.31042841231 }, { "content": "fn make_call_zome_handle(cell_id: CellId) -> CellConductorReadHandle {\n\n let handle = Arc::new(MockConductorHandleT::new());\n\n let cell_conductor_api = CellConductorApi::new(handle, cell_id);\n\n Arc::new(cell_conductor_api)\n\n}\n\n\n\nfixturator!(\n\n CellConductorReadHandle;\n\n vanilla fn make_call_zome_handle(CellId);\n\n);\n\n\n\nfixturator!(\n\n ZomeCallHostAccess;\n\n constructor fn new(HostFnWorkspace, MetaLairClient, HolochainP2pDna, SignalBroadcaster, CellConductorReadHandle);\n\n);\n\n\n\nfixturator!(\n\n EntryDefsInvocation;\n\n constructor fn new();\n\n);\n", "file_path": "crates/holochain/src/fixt.rs", "rank": 75, "score": 124570.91555172228 }, { "content": "#[hdk_extern]\n\nfn call_info(_: ()) -> ExternResult<CallInfo> {\n\n // Commit something here so we can show the as_at won't shift in the call\n\n // info returned.\n\n create_entry(Thing)?;\n\n hdk::prelude::call_info()\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/zome_info/src/lib.rs", "rank": 76, "score": 124570.91555172228 }, { "content": "#[hdk_extern]\n\nfn dna_info(_: ()) -> ExternResult<DnaInfo> {\n\n hdk::prelude::dna_info()\n\n}\n\n\n\n/// `serde_yaml::Value` approach to handling properties.\n\n/// As yaml is much more loosely typed then Rust is, everything in the yaml\n\n/// ends up in a generic nested `Value` enum. Consider the following yaml:\n\n///\n\n/// foo:\n\n/// bar: 1\n\n/// bing: baz\n\n/// -2: 6.0\n\n///\n\n/// Here we have key/value of a mapping of ints, floats, strings all in\n\n/// positions that Rust doesn't handle particularly well. These keys and values\n\n/// can all be present or absent. 
Rust would represent this as enums for every\n\n/// key/value that can be multiple types and `Option` along with default values\n\n/// for anything that can be absent.\n\n///\n\n/// For well known or relatively simple properties it may be ergonomic to\n", "file_path": "crates/test_utils/wasm/wasm_workspace/zome_info/src/lib.rs", "rank": 77, "score": 124570.91555172228 }, { "content": "/// A fixture HeaderHash for unit testing.\n\npub fn fake_header_hash(name: u8) -> HeaderHash {\n\n fake_holo_hash(name, hash_type::Header::new())\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 78, "score": 124344.13019200218 }, { "content": "/// A fixture DnaHash for unit testing.\n\npub fn fake_dna_hash(name: u8) -> DnaHash {\n\n fake_holo_hash(name, hash_type::Dna::new())\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 79, "score": 124344.13019200218 }, { "content": "/// A fixture example CellId for unit testing.\n\npub fn fake_cell_id(name: u8) -> CellId {\n\n (fake_dna_hash(name), fake_agent_pubkey_1()).into()\n\n}\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 80, "score": 124344.13019200218 }, { "content": "/// A fixture EntryHash for unit testing.\n\npub fn fake_entry_hash(name: u8) -> EntryHash {\n\n fake_holo_hash(name, hash_type::Entry::new())\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 81, "score": 124344.13019200218 }, { "content": "pub fn element_with_no_entry(signature: Signature, header: Header) -> Element {\n\n let shh =\n\n SignedHeaderHashed::with_presigned(HeaderHashed::from_content_sync(header), signature);\n\n Element::new(shh, None)\n\n}\n\n\n\nfixturator!(\n\n Entry;\n\n variants [\n\n Agent(AgentPubKey)\n\n App(AppEntryBytes)\n\n CapClaim(CapClaim)\n\n CapGrant(ZomeCallCapGrant)\n\n ];\n\n\n\n curve AppEntry {\n\n Entry::App(\n\n AppEntryBytesFixturator::new_indexed(Unpredictable, get_fixt_index!()).next().unwrap()\n\n )\n\n 
};\n", "file_path": "crates/holochain_zome_types/src/fixt.rs", "rank": 82, "score": 122863.94189777234 }, { "content": "/// A fixture AgentPubKey for unit testing.\n\npub fn fake_agent_pub_key(name: u8) -> AgentPubKey {\n\n fake_holo_hash(name, hash_type::Agent::new())\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 83, "score": 121010.77239588303 }, { "content": "/// A fixture DhtOpHash for unit testing.\n\npub fn fake_dht_op_hash(name: u8) -> DhtOpHash {\n\n fake_holo_hash(name, hash_type::DhtOp::new())\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 84, "score": 121010.77239588303 }, { "content": "#[hdk_extern]\n\nfn dna_info_nested(_: ()) -> ExternResult<Option<i64>> {\n\n Ok(MaybePropertiesDirect::try_from(hdk::prelude::dna_info()?.properties)?.0.and_then(|properties| properties.baz[\"foo\"][\"bar\"].as_i64()))\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use hdk::prelude::*;\n\n use ::fixt::prelude::*;\n\n\n\n #[test]\n\n fn zome_info_smoke() {\n\n let mut mock_hdk = hdk::prelude::MockHdkT::new();\n\n\n\n let output = fixt!(ZomeInfo);\n\n let output_closure = output.clone();\n\n mock_hdk.expect_zome_info()\n\n .with(hdk::prelude::mockall::predicate::eq(()))\n\n .times(1)\n\n .return_once(move |_| Ok(output_closure));\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/zome_info/src/lib.rs", "rank": 85, "score": 121010.77239588303 }, { "content": "#[hdk_extern]\n\nfn dna_info_bar_direct(_: ()) -> ExternResult<Option<String>> {\n\n Ok(MaybePropertiesDirect::try_from(hdk::prelude::dna_info()?.properties)?.0.and_then(|properties| properties.bar))\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/zome_info/src/lib.rs", "rank": 86, "score": 119431.66156675712 }, { "content": "#[hdk_extern]\n\nfn dna_info_foo_direct(_: ()) -> ExternResult<Option<Foo>> {\n\n Ok(MaybePropertiesDirect::try_from(hdk::prelude::dna_info()?.properties)?.0.and_then(|properties| 
properties.foo))\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/zome_info/src/lib.rs", "rank": 87, "score": 119431.66156675712 }, { "content": "pub fn is_of_type(header_type: HeaderType) -> Facts<'static, Header> {\n\n facts![brute(\"header is of type\", move |h: &Header| h\n\n .header_type()\n\n == header_type)]\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/header/facts.rs", "rank": 88, "score": 119406.17054304454 }, { "content": "#[hdk_extern]\n\nfn dna_info_value(k: String) -> ExternResult<serde_yaml::Value> {\n\n Ok(\n\n YamlProperties::try_from(hdk::prelude::dna_info()?.properties)?.into_inner()[k].clone()\n\n )\n\n}\n\n\n\n/// Yaml doesn't enforce the type of any value.\n\n/// Rust can support multiple options for the type of a value as an enum.\n\n/// Serialization will fail unless `#[serde(untagged)]` is applied to the enum\n\n/// so that variant names are ignored.\n", "file_path": "crates/test_utils/wasm/wasm_workspace/zome_info/src/lib.rs", "rank": 89, "score": 116196.53669628172 }, { "content": "#[hdk_extern]\n\nfn remote_call_info(agent: AgentPubKey) -> ExternResult<CallInfo> {\n\n match call_remote(\n\n agent,\n\n hdk::prelude::zome_info()?.name,\n\n \"call_info\".to_string().into(),\n\n None,\n\n &(),\n\n )? 
{\n\n ZomeCallResponse::Ok(extern_io) => Ok(extern_io.decode()?),\n\n not_ok => {\n\n tracing::warn!(?not_ok);\n\n Err(WasmError::Guest(format!(\"{:?}\", not_ok)))\n\n },\n\n }\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/zome_info/src/lib.rs", "rank": 90, "score": 115009.63174437585 }, { "content": "#[hdk_extern]\n\nfn try_cap_claim(cap_for: crate::CapFor) -> ExternResult<ZomeCallResponse> {\n\n call_remote(\n\n cap_for.1,\n\n zome_info()?.name,\n\n \"needs_cap_claim\".to_string().into(),\n\n Some(cap_for.0),\n\n &(),\n\n )\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/capability/src/lib.rs", "rank": 91, "score": 114723.3853413443 }, { "content": "#[hdk_extern]\n\nfn remote_remote_call_info(agent: AgentPubKey) -> ExternResult<CallInfo> {\n\n match call_remote(\n\n agent,\n\n hdk::prelude::zome_info()?.name,\n\n \"remote_call_info\".to_string().into(),\n\n None,\n\n agent_info()?.agent_initial_pubkey,\n\n )? {\n\n ZomeCallResponse::Ok(extern_io) => Ok(extern_io.decode()?),\n\n not_ok => {\n\n tracing::warn!(?not_ok);\n\n Err(WasmError::Guest(format!(\"{:?}\", not_ok)))\n\n },\n\n }\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/zome_info/src/lib.rs", "rank": 92, "score": 113632.12180373544 }, { "content": " pub trait HostFnApiT {\n\n $(\n\n fn $f(&self, _: $in_arg) -> Result<$out_arg, HostFnApiError>;\n\n )*\n\n }\n\n }\n\n}\n\n\n\n// Every externed function that the zome developer exposes to holochain returns `ExternIO`.\n\n// The zome developer can expose callbacks in a \"sparse\" way based on names and the functions\n\n// can take different input (e.g. validation vs. hooks like init, etc.).\n\n// All we can say is that some SerializedBytes are being received and returned.\n\n// In the case of ZomeExtern functions exposed to a client, the data input/output is entirely\n\n// arbitrary so we can't say anything at all. 
In this case the happ developer must BYO\n\n// deserialization context to match the client, either directly or via. the HDK.\n\n// Note though, that _unlike_ zome externs, the host _does_ know exactly the guest should be\n\n// returning for callbacks, it's just that the unpacking of the return happens in two steps:\n\n// - first the sparse callback is triggered with SB input/output\n\n// - then the guest inflates the expected input or the host the expected output based on the\n\n// callback flavour\n", "file_path": "crates/holochain_zome_types/src/zome_io.rs", "rank": 93, "score": 113079.00607016293 }, { "content": "/// Called from Conductor: \n\n///\n\n/// - Conductor receiving a call_zome request (TODO: tracing?)\n\n/// - \n\n/// - Initialization of zomes\n\n///\n\n/// - Receive callback\n\n///\n\n///\n\n///\n\n/// Parameters (expected types/structures):\n\n///\n\n/// ZomeCall Params\n\n/// As-At (header_seq that is equal to chain_head of Source Chain at the time of initiating the Call ZomeFn workflow)\n\n/// Provenance\n\n/// Capability token secret (if this is not calling a Public trait function)\n\n/// Cell Context (also handle to keystore)\n\n///\n\n///\n\n/// Data X (data & structure) from Store Y:\n", "file_path": "docs/cell/agent/call_zome_fn.rs", "rank": 94, "score": 112169.32208060453 }, { "content": "///\n\n/// - Get source chain head as our \"as at\"\n\n/// - Private entry CAS to look up for the Capability secret we have a parameter\n\n///\n\n///\n\n///\n\n/// Functions / Workflows:\n\n///\n\n/// 1. Check if there is a Capability token secret in the parameters. If there isn't and the function to be called isn't public, we stop the process and return an error.\n\n///\n\n/// 1.1 If there is a secret, we look up our private CAS and see if it matches any secret for a Capability Grant entry that we have stored. 
If it does, check that this Capability Grant is not revoked and actually grants permissions to call the ZomeFn that is being called.\n\n///\n\n/// 1.2 Check if the Capability Grant has assignees=None (means this Capability is transferable). If it has assignees=Vec<Address> (means this Capability is on Assigned mode, check that the provenance's agent key is in that assignees.\n\n///\n\n/// 1.3 If the CapabiltyGrant has pre-filled parameters, check that the ui is passing exactly the parameters needed and no more to complete the call.\n\n///\n\n/// 2. Set Context (Cascading Cursor w/ Pre-flight chain extension)\n\n///\n\n/// 3. Invoke WASM (w/ Cursor)\n\n///\n", "file_path": "docs/cell/agent/call_zome_fn.rs", "rank": 95, "score": 112164.93200683537 }, { "content": "/// Write the new Headers records on Source Chain, with dht_transforms_completed=false.\n\n/// Store CRUDstatus=Live in CAS-meta\n\n/// Write the new chain_head on Source Chain.\n\n///\n\n///\n\n/// 5. Return WASM Result & Destroy temp workspace\n\n///\n\n///\n\n///\n\n/// Persisted X Changes to Store Y (data & structure):\n\n///\n\n/// New Headers to Source Chain\n\n/// New Chain head to Source Chain\n\n/// New Headers and Entries to CAS\n\n/// Store CRUDstatus=Live in CAS-meta\n\n///\n\n///\n\n/// Returned Results (type & structure):\n\n///\n\n/// Return WASM result to the caller\n\n///\n\n///\n\n/// Triggers:\n\n///\n\n/// Publish to DHT (Public Chain Entries, Headers)\n", "file_path": "docs/cell/agent/call_zome_fn.rs", "rank": 96, "score": 112157.54828410357 }, { "content": "/// WASM receives external call handles: (gets & commits via cascading cursor, crypto functions & bridge calls via conductor, send via network function call for send direct message)\n\n/// 4. When the WASM code execution finishes, If workspace has new chain entries:\n\n///\n\n/// 4.1. 
Call system validation of list of entries and headers:\n\n///\n\n/// Check entry hash\n\n/// Check header hash\n\n/// Check header signature\n\n/// Check header timestamp is later than previous timestamp\n\n/// Check entry content matches entry schema\n\n/// Depending on the type of the commit, validate all possible validations for the DHT Op that would be produced by it\n\n/// 4.2. Call app validation of list of entries and headers:\n\n///\n\n/// Call validate_set_of_entries_and_headers (any necessary get results where we receive None / Timeout on retrieving validation dependencies, should produce error/fail)\n\n/// 4.3. Write output results via SC gatekeeper (wrap in transaction):\n\n///\n\n/// Get write handle to Source Chain\n\n/// Check if chain_head === 'as-at'. If it is not, fail the whole process. It is important that we read after we have opened the write handle since this will lock the handle and we'll avoid race conditions.\n\n/// Write the new Entries and Headers into the CAS.\n\n/// Write the new Entries and Headers CRUDstatus=Live to CAS-Meta.\n", "file_path": "docs/cell/agent/call_zome_fn.rs", "rank": 97, "score": 112157.54828410357 }, { "content": "fn fake_holo_hash<T: holo_hash::HashType>(name: u8, hash_type: T) -> HoloHash<T> {\n\n HoloHash::from_raw_36_and_type([name; HOLO_HASH_UNTYPED_LEN].to_vec(), hash_type)\n\n}\n\n\n", "file_path": "crates/holochain_zome_types/src/test_utils.rs", "rank": 98, "score": 105299.57571739113 }, { "content": "#[cfg(test)]\n\n#[cfg(feature = \"slow_tests\")]\n\npub mod test {\n\n use holochain_wasm_test_utils::TestWasm;\n\n use holochain_zome_types::prelude::*;\n\n use crate::core::ribosome::wasm_test::RibosomeTestFixture;\n\n\n\n #[tokio::test(flavor = \"multi_thread\")]\n\n async fn zome_info_test() {\n\n observability::test_run().ok();\n\n let RibosomeTestFixture {\n\n conductor, alice, ..\n\n } = RibosomeTestFixture::new(TestWasm::EntryDefs).await;\n\n\n\n let zome_info: ZomeInfo = conductor.call(&alice, 
\"zome_info\", ()).await;\n\n assert_eq!(zome_info.name, \"entry_defs\".into());\n\n assert_eq!(\n\n zome_info.id,\n\n ZomeId::new(0)\n\n );\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/zome_info.rs", "rank": 99, "score": 105179.54019310814 } ]
Rust
src/soft_f32/soft_f32_add.rs
Inokinoki/softfpu-rs
152f71f131d5d38a4112bf5d2e2c2975af2c1e15
use super::util::{
    f32_shift_right_jam,
    f32_norm_round_and_pack,
    f32_round_and_pack,
    f32_pack_raw,
    f32_pack,
    f32_propagate_nan,
    f32_sign,
    f32_exp,
    f32_frac,
};

use crate::soft_f32::f32_sub;

/// Adds two IEEE 754 single-precision values given as raw 32-bit patterns
/// and returns the raw bit pattern of the rounded sum.
///
/// Mirrors Berkeley SoftFloat's `softfloat_addMagsF32`: operands of opposite
/// sign are delegated to `f32_sub`, so from here on both share `r_sign`.
pub fn f32_add(a: u32, b: u32) -> u32 {
    let a_sign = f32_sign(a);
    let b_sign = f32_sign(b);
    // Opposite signs: the addition is really a magnitude subtraction.
    if a_sign != b_sign {
        return f32_sub(a, b);
    }
    // Same sign from here on; the result carries it.
    let r_sign = a_sign;

    let a_exp = f32_exp(a);
    let b_exp = f32_exp(b);
    let mut r_exp;

    let mut a_frac = f32_frac(a);
    let mut b_frac = f32_frac(b);
    let mut r_frac;

    let diff_exp = a_exp - b_exp;
    if diff_exp == 0 {
        // Equal exponents: significands align directly, no shifting needed.
        if a_exp == 0 {
            // Both subnormal (or zero). A carry out of the 23-bit fraction
            // lands in the exponent field naturally through f32_pack_raw.
            return f32_pack_raw(r_sign, 0, a_frac + b_frac);
        }
        if a_exp == 0xFF {
            // Inf/NaN: NaN if either fraction is non-zero, else Inf + Inf = Inf.
            if (a_frac | b_frac) != 0 {
                return f32_propagate_nan(a, b);
            }
            return f32_pack_raw(r_sign, 0xFF, 0);
        }
        // Normal + normal: the two hidden bits sum to a guaranteed carry,
        // represented explicitly as 0x0100_0000.
        r_exp = a_exp;
        r_frac = 0x01000000 + a_frac + b_frac;
        if (r_frac & 0x01) == 0 && r_exp < 0xFE {
            // Low bit is zero, so a one-bit renormalising shift is exact.
            return f32_pack_raw(r_sign, r_exp, r_frac >> 1);
        }
        // Move to the 6-bits-left working format used by f32_round_and_pack.
        r_frac <<= 6;
    } else {
        // Different exponents: pre-shift 6 bits left so sticky bits produced
        // by the alignment shift are retained for correct rounding.
        a_frac <<= 6;
        b_frac <<= 6;
        if diff_exp < 0 {
            // |b| has the larger exponent.
            if b_exp == 0xFF {
                // BUGFIX: NaN is signalled by a non-zero FRACTION, not the
                // sign bit (the original tested b_sign). Zero-ness of b_frac
                // is unaffected by the <<6 above.
                if b_frac != 0 {
                    return f32_propagate_nan(a, b);
                }
                // finite + Inf = Inf with the shared sign.
                return f32_pack_raw(r_sign, 0xFF, 0);
            }
            r_exp = b_exp;
            // Restore a's hidden bit (normal) or double a subnormal fraction.
            if a_exp != 0 {
                a_frac += 0x20000000;
            } else {
                a_frac += a_frac;
            }
            // Right-shift with sticky ("jam") bit to align with b.
            a_frac = f32_shift_right_jam(a_frac, -diff_exp);
        } else {
            // |a| has the larger exponent.
            if a_exp == 0xFF {
                // BUGFIX: same as above — test the fraction, not the sign.
                // The original also returned pack_raw(a_sign, 0xFF, a_frac << 6)
                // here, which corrupts the bit pattern for NaN inputs.
                if a_frac != 0 {
                    return f32_propagate_nan(a, b);
                }
                // Inf + finite = Inf with the shared sign.
                return f32_pack_raw(r_sign, 0xFF, 0);
            }
            r_exp = a_exp;
            if b_exp != 0 {
                b_frac += 0x20000000;
            } else {
                b_frac += b_frac;
            }
            b_frac = f32_shift_right_jam(b_frac, diff_exp);
        }
        // The larger operand's hidden bit is 0x2000_0000 in this format.
        r_frac = 0x20000000 + a_frac + b_frac;
        if r_frac < 0x40000000 {
            // No carry out: renormalise one bit to the left.
            r_exp -= 1;
            r_frac <<= 1;
        }
    }
    f32_round_and_pack(r_sign, r_exp, r_frac)
}

#[cfg(test)]
mod tests {
    #[test]
    fn test_f32_add() {
        assert_eq!(crate::soft_f32::f32_add(0x3DCCCCCD, 0x3E4CCCCD), 0x3E99999A);
        assert_eq!(crate::soft_f32::f32_add(0xBDCCCCCD, 0xBE4CCCCD), 0xBE99999A);
        assert_eq!(crate::soft_f32::f32_add(0x4640E400, 0x47849900), 0x479CB580);
        assert_eq!(crate::soft_f32::f32_add(0xC640E400, 0xC7849900), 0xC79CB580);
        assert_eq!(crate::soft_f32::f32_add(0x3B03126F, 0x3B03126F), 0x3B83126F);
        assert_eq!(crate::soft_f32::f32_add(0xBB03126F, 0xBB03126F), 0xBB83126F);
        assert_eq!(crate::soft_f32::f32_add(0xBDCCCCCD, 0x3E4CCCCD), 0x3DCCCCCD);
        assert_eq!(crate::soft_f32::f32_add(0x3DCCCCCD, 0xBE4CCCCD), 0xBDCCCCCD);
    }

    #[test]
    fn test_f32_add_inf_nan() {
        assert_eq!(crate::soft_f32::f32_add(0x7F800000, 0x3F800000), 0x7F800000);
        assert_eq!(crate::soft_f32::f32_add(0xFF800000, 0x3F800000), 0xFF800000);
        assert_eq!(crate::soft_f32::f32_is_nan(crate::soft_f32::f32_add(0xFF800000, 0x7F800000)), true);
        assert_eq!(crate::soft_f32::f32_add(0x7F800000, 0x3F800000), 0x7F800000);
        assert_eq!(crate::soft_f32::f32_add(0xFF800000, 0x3F800000), 0xFF800000);
        assert_eq!(crate::soft_f32::f32_add(0xFFFFFFFF, 0x3F800000), 0xFFFFFFFF);
        assert_eq!(crate::soft_f32::f32_is_nan(crate::soft_f32::f32_add(0xFFFFFFFF, 0x3F800000)), true);
        assert_eq!(crate::soft_f32::f32_is_nan(crate::soft_f32::f32_add(0xFFFFFFFF, 0x7F800000)), true);
        assert_eq!(crate::soft_f32::f32_is_nan(crate::soft_f32::f32_add(0xFFFFFFFF, 0xFF800000)), true);
        assert_eq!(crate::soft_f32::f32_add(0x0, 0xBDCCCCCE), 0xBDCCCCCE);
        assert_eq!(crate::soft_f32::f32_add(0x0, 0x3DCCCCCE), 0x3DCCCCCE);
        assert_eq!(crate::soft_f32::f32_add(0x80000000, 0xBDCCCCCE), 0xBDCCCCCE);
        assert_eq!(crate::soft_f32::f32_add(0x0, 0x0), 0x0);
        assert_eq!(crate::soft_f32::f32_add(0x0, 0x80000000), 0x0);
    }

    #[test]
    fn test_f32_add_same_sign_inf_nan() {
        // Regression tests for the sign-vs-fraction NaN check: same-sign
        // operands with unequal exponents where one is Inf or NaN.
        // -Inf + -1.0 = -Inf (previously returned NaN).
        assert_eq!(crate::soft_f32::f32_add(0xFF800000, 0xBF800000), 0xFF800000);
        // +1.0 + +Inf = +Inf.
        assert_eq!(crate::soft_f32::f32_add(0x3F800000, 0x7F800000), 0x7F800000);
        // qNaN + same-sign finite propagates NaN (previously corrupted bits).
        assert_eq!(crate::soft_f32::f32_is_nan(crate::soft_f32::f32_add(0x7FC00000, 0x3F800000)), true);
        assert_eq!(crate::soft_f32::f32_is_nan(crate::soft_f32::f32_add(0x3F800000, 0x7FC00000)), true);
        assert_eq!(crate::soft_f32::f32_is_nan(crate::soft_f32::f32_add(0xFFC00000, 0xBF800000)), true);
    }
}
use super::util::{ f32_shift_right_jam, f32_norm_round_and_pack, f32_round_and_pack, f32_pack_raw, f32_pack, f32_propagate_nan, f32_sign, f32_exp, f32_frac, }; use crate::soft_f32::f32_sub; pub fn f32_add(a: u32, b: u32) -> u32 { let mut a_sign = f32_sign(a); let mut b_sign = f32_sign(b); let mut r_sign; if a_sign != b_sign { return f32_sub(a, b); } let mut a_exp = f32_exp(a); let mut b_exp = f32_exp(b); let mut r_exp; let mut a_frac = f32_frac(a); let mut b_frac = f32_frac(b); let mut r_frac; let diff_exp = a_exp - b_exp; if diff_exp == 0 { if a_exp == 0 { r_sign = a_sign; r_exp = a_exp; r_frac = a_frac + b_frac; return f32_pack_raw(r_sign, r_exp, r_frac); } if a_exp == 0xFF { if (a_frac | b_frac) != 0 { return f32_propagate_nan(a, b); } else { r_sign = a_sign; r_exp = a_exp; r_frac = a_frac + b_frac; return f32_pack_raw(r_sign, r_exp, r_frac); } } r_sign = a_sign; r_exp = a_exp; r_frac = 0x01000000 + a_frac + b_frac; if (r_frac & 0x01) == 0 && r_exp < 0xFE { return f32_pack_raw(r_sign, r_exp, r_frac >> 1); } r_frac <<= 6; } else { r_sign = a_sign; a_frac <<= 6; b_frac <<= 6; if diff_exp < 0 { if b_exp == 0xFF { if b_sign != 0 { return f32_propagate_nan(a, b); } else { return f32_pack_raw(r_sign, 0xFF, 0); } } r_exp = b_exp; if a_exp != 0 { a_frac += 0x20000000; } else { a_frac += a_frac; } a_frac = f32_shift_right_jam(a_frac, -diff_exp); } else { if a_exp == 0xFF { if a_sign != 0 { return f32_propagate_nan(a, b); } else { return f32_pack_raw(a_sign, a_exp, a_frac); } } r_exp = a_exp; if b_exp != 0 { b_frac += 0x20000000; } else { b_frac += b_frac; } b_frac = f32_shift_right_jam(b_frac, diff_exp); } r_frac = 0x20000000 + a_frac + b_frac; if r_frac < 0x40000000 { r_exp -= 1; r_frac <<= 1; } } f32_round_and_pack(r_sign, r_exp, r_frac) } #[cfg(test)] mod tests { #[test] fn test_f32_add() { assert_eq!(crate::soft_f32::f32_add(0x3DCCCCCD, 0x3E4CCCCD), 0x3E99999A); assert_eq!(crate::soft_f32::f32_add(0xBDCCCCCD, 0xBE4CCCCD), 0xBE99999A); 
assert_eq!(crate::soft_f32::f32_add(0x4640E400, 0x47849900), 0x479CB580); assert_eq!(crate::soft_f32::f32_add(0xC640E400, 0xC7849900), 0xC79CB580); assert_eq!(crate::soft_f32::f32_add(0x3B03126F, 0x3B03126F), 0x3B83126F); assert_eq!(crate::soft_f32::f32_add(0xBB03126F, 0xBB03126F), 0xBB83126F); assert_eq!(crate::soft_f32::f32_add(0xBDCCCCCD, 0x3E4CCCCD), 0x3DCCCCCD); assert_eq!(crate::soft_f32::f32_add(0x3DCCCCCD, 0xBE4CCCCD), 0xBDCCCCCD); } #[test]
}
fn test_f32_add_inf_nan() { assert_eq!(crate::soft_f32::f32_add(0x7F800000, 0x3F800000), 0x7F800000); assert_eq!(crate::soft_f32::f32_add(0xFF800000, 0x3F800000), 0xFF800000); assert_eq!(crate::soft_f32::f32_is_nan(crate::soft_f32::f32_add(0xFF800000, 0x7F800000)), true); assert_eq!(crate::soft_f32::f32_add(0x7F800000, 0x3F800000), 0x7F800000); assert_eq!(crate::soft_f32::f32_add(0xFF800000, 0x3F800000), 0xFF800000); assert_eq!(crate::soft_f32::f32_add(0xFFFFFFFF, 0x3F800000), 0xFFFFFFFF); assert_eq!(crate::soft_f32::f32_is_nan(crate::soft_f32::f32_add(0xFFFFFFFF, 0x3F800000)), true); assert_eq!(crate::soft_f32::f32_is_nan(crate::soft_f32::f32_add(0xFFFFFFFF, 0x7F800000)), true); assert_eq!(crate::soft_f32::f32_is_nan(crate::soft_f32::f32_add(0xFFFFFFFF, 0xFF800000)), true); assert_eq!(crate::soft_f32::f32_add(0x0, 0xBDCCCCCE), 0xBDCCCCCE); assert_eq!(crate::soft_f32::f32_add(0x0, 0x3DCCCCCE), 0x3DCCCCCE); assert_eq!(crate::soft_f32::f32_add(0x80000000, 0xBDCCCCCE), 0xBDCCCCCE); assert_eq!(crate::soft_f32::f32_add(0x0, 0x0), 0x0); assert_eq!(crate::soft_f32::f32_add(0x0, 0x80000000), 0x0); }
function_block-full_function
[ { "content": "pub fn f32_div(a: u32, b: u32) -> u32 {\n\n // Sign\n\n let mut a_sign = f32_sign(a);\n\n let mut b_sign = f32_sign(b);\n\n let mut r_sign = a_sign ^ b_sign;\n\n\n\n // Exp\n\n let mut a_exp = f32_exp(a);\n\n let mut b_exp = f32_exp(b);\n\n let mut r_exp;\n\n\n\n // Frac\n\n let mut a_frac = f32_frac(a);\n\n let mut b_frac = f32_frac(b);\n\n let mut r_frac;\n\n\n\n if a_exp == 0xFF {\n\n if a_frac != 0 {\n\n return f32_propagate_nan(a, b);\n\n }\n", "file_path": "src/soft_f32/soft_f32_div.rs", "rank": 0, "score": 84023.7260045667 }, { "content": "pub fn f32_sub(a: u32, b: u32) -> u32 {\n\n // Sign\n\n let mut a_sign = f32_sign(a);\n\n let mut b_sign = f32_sign(b);\n\n let mut r_sign = a_sign;\n\n\n\n // Exp\n\n let mut a_exp = f32_exp(a);\n\n let mut b_exp = f32_exp(b);\n\n let mut r_exp = 0;\n\n\n\n // Frac\n\n let mut a_frac = f32_frac(a);\n\n let mut b_frac = f32_frac(b);\n\n let mut r_frac = 0;\n\n\n\n let diff_exp = a_exp - b_exp;\n\n\n\n a_frac <<= 7;\n\n b_frac <<= 7;\n", "file_path": "src/soft_f32/soft_f32_sub.rs", "rank": 1, "score": 84023.7260045667 }, { "content": "pub fn f32_mul(a: u32, b: u32) -> u32 {\n\n // Sign\n\n let mut a_sign = f32_sign(a);\n\n let mut b_sign = f32_sign(b);\n\n let mut r_sign = a_sign ^ b_sign;\n\n\n\n // Exp\n\n let mut a_exp = f32_exp(a);\n\n let mut b_exp = f32_exp(b);\n\n let mut r_exp;\n\n\n\n // Frac\n\n let mut a_frac = f32_frac(a);\n\n let mut b_frac = f32_frac(b);\n\n let mut r_frac;\n\n\n\n if a_exp == 0xFF {\n\n // NaN\n\n if a_frac != 0 || (b_exp == 0xFF && b_frac != 0) {\n\n return f32_propagate_nan(a, b);\n", "file_path": "src/soft_f32/soft_f32_mul.rs", "rank": 2, "score": 84023.7260045667 }, { "content": "pub fn f32_ne(a: u32, b: u32) -> bool {\n\n if f32_is_nan(a) || f32_is_nan(b) {\n\n // Unable to compare\n\n return false;\n\n }\n\n\n\n !f32_eq(a, b)\n\n}\n\n\n", "file_path": "src/soft_f32/soft_f32_comp.rs", "rank": 4, "score": 79219.66762850093 }, { "content": "pub fn f32_eq(a: u32, b: u32) -> 
bool {\n\n if f32_is_nan(a) || f32_is_nan(b) {\n\n // Unable to compare\n\n return false;\n\n }\n\n\n\n a == b || ((a | b) << 1) == 0\n\n}\n\n\n", "file_path": "src/soft_f32/soft_f32_comp.rs", "rank": 5, "score": 79219.66762850093 }, { "content": "pub fn f32_ge(a: u32, b: u32) -> bool {\n\n if f32_is_nan(a) || f32_is_nan(b) {\n\n // Unable to compare\n\n return false;\n\n }\n\n\n\n !f32_lt(a, b)\n\n}\n\n\n", "file_path": "src/soft_f32/soft_f32_comp.rs", "rank": 6, "score": 79219.66762850093 }, { "content": "pub fn f32_le(a: u32, b: u32) -> bool {\n\n if f32_is_nan(a) || f32_is_nan(b) {\n\n // Unable to compare\n\n return false;\n\n }\n\n\n\n // Sign\n\n let mut a_sign = f32_sign(a);\n\n let mut b_sign = f32_sign(b);\n\n\n\n if a_sign != b_sign {\n\n // Different sign\n\n // FIXME: Add a test for this case\n\n if (a | b) << 1 == 0 || a_sign == 1 {\n\n return true;\n\n }\n\n } else {\n\n // Same sign\n\n if a == b {\n\n return true;\n", "file_path": "src/soft_f32/soft_f32_comp.rs", "rank": 7, "score": 79219.66762850093 }, { "content": "pub fn f32_gt(a: u32, b: u32) -> bool {\n\n if f32_is_nan(a) || f32_is_nan(b) {\n\n // Unable to compare\n\n return false;\n\n }\n\n\n\n !f32_le(a, b)\n\n}\n\n\n", "file_path": "src/soft_f32/soft_f32_comp.rs", "rank": 8, "score": 79219.66762850093 }, { "content": "pub fn f32_lt(a: u32, b: u32) -> bool {\n\n if f32_is_nan(a) || f32_is_nan(b) {\n\n // Unable to compare\n\n return false;\n\n }\n\n\n\n // Sign\n\n let mut a_sign = f32_sign(a);\n\n let mut b_sign = f32_sign(b);\n\n\n\n if a_sign != b_sign {\n\n // Different sign\n\n // FIXME: Add a test for this case\n\n if (a | b) << 1 != 0 {\n\n return a_sign == 1;\n\n }\n\n } else {\n\n // Same sign\n\n if a != b {\n\n if a_sign != 0 {\n\n return a > b;\n\n } else {\n\n return a < b;\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/soft_f32/soft_f32_comp.rs", "rank": 9, "score": 79219.66762850093 }, { "content": "pub fn f32_round(a: u32) -> u32 {\n\n // Sign\n\n let mut a_sign 
= f32_sign(a);\n\n\n\n // Exp\n\n let mut a_exp = f32_exp(a);\n\n\n\n // Frac\n\n let mut a_frac = f32_frac(a);\n\n\n\n if (a_exp < 0x7E) {\n\n if ((a << 1) == 0) {\n\n return a;\n\n }\n\n let mut z = a & f32_pack_raw(1, 0, 0);\n\n if (a_frac != 0) {\n\n if (a_exp == 0x7E) {\n\n z |= f32_pack_raw(0, 0x7F, 0);\n\n }\n\n }\n", "file_path": "src/soft_f32/soft_f32_round.rs", "rank": 10, "score": 72932.53920991665 }, { "content": "pub fn f32_sqrt(a: u32) -> u32 {\n\n // Sign\n\n let mut a_sign = f32_sign(a);\n\n\n\n // Exp\n\n let mut a_exp = f32_exp(a);\n\n let mut r_exp;\n\n\n\n // Frac\n\n let mut a_frac = f32_frac(a);\n\n let mut r_frac;\n\n\n\n if a_exp == 0xFF {\n\n if a_frac != 0 {\n\n return f32_propagate_nan(a, 0);\n\n }\n\n if a_sign == 0 {\n\n return a;\n\n }\n\n\n", "file_path": "src/soft_f32/soft_f32_sqrt.rs", "rank": 11, "score": 72932.53920991665 }, { "content": "pub fn f32_pack(in_sign: i32, in_exp: i32, in_frac: i32) -> u32 {\n\n ((in_sign << 31) | ((in_exp & 0x0FF) << 23) | (in_frac & 0x007fffff)) as u32\n\n}\n\n\n\npub(crate) fn f32_round_and_pack(in_sign: i32, in_exp: i32, in_frac: i32) -> u32 {\n\n let rounding_mode = RoundingMode::NearEven;\n\n let detect_tininess = DetectTininess::After;\n\n\n\n let mut round_increment = 0x40;\n\n\n\n let mut sign = in_sign;\n\n let mut exp = in_exp;\n\n let mut frac = in_frac;\n\n\n\n match &rounding_mode {\n\n NearEven => { /* Do nothing */ }\n\n NearMaxMag => { /* Do nothing */ }\n\n _ => {\n\n if sign == 1 {\n\n match &rounding_mode {\n", "file_path": "src/soft_f32/util.rs", "rank": 12, "score": 68474.29446758796 }, { "content": "pub fn f32_is_nan(a: u32) -> bool {\n\n let is_exp_nan = ((a & 0x7F800000) == 0x7F800000);\n\n let is_frac_nan = ((a & 0x007FFFFF) != 0);\n\n\n\n is_exp_nan && is_frac_nan\n\n}\n\n\n\npub(crate) fn f32_is_frac_nan(a: u32) -> bool {\n\n let is_exp_nan = ((a & 0x7FC00000) == 0x7F800000);\n\n let is_frac_nan = (a & 0x003FFFFF) != 0;\n\n\n\n is_exp_nan && 
is_frac_nan\n\n}\n\n\n\npub(crate) fn f32_propagate_nan(in_a: u32, in_b: u32) -> u32 {\n\n let mut a = in_a | 0x00400000;\n\n let mut b = in_b | 0x00400000;\n\n\n\n let is_a_frac_nan = f32_is_frac_nan(in_a);\n\n let is_b_frac_nan = f32_is_frac_nan(in_b);\n", "file_path": "src/soft_f32/util.rs", "rank": 13, "score": 68419.41342344432 }, { "content": "// TODO: Add more convertors\n\npub fn to_int32(a: u32) -> i32 {\n\n let p = f32_round(a);\n\n let mut r: i32 = 0;\n\n\n\n if (f32_is_nan(p)) {\n\n return std::i32::MAX;\n\n } else if (p == 0x7F800000 || p == 0xFF800000) {\n\n // Infinity\n\n if (f32_sign(p) != 0) {\n\n return std::i32::MIN;\n\n }\n\n return std::i32::MAX;\n\n } else {\n\n if (p == 0 || p == 0x80000000) {\n\n // +- 0\n\n return 0;\n\n }\n\n\n\n let sign = f32_sign(a);\n\n let exp = f32_exp(a);\n", "file_path": "src/soft_f32/soft_f32_round.rs", "rank": 14, "score": 66269.54680940496 }, { "content": "pub fn from_int32(a: i32) -> u32 {\n\n /*\n\n 1. Convert the int representation into a sign and a positive binary number\n\n 2. Convert the positive binary number to a fixed point representation\n\n where the integral part = 1.xxxxx\n\n (This step uses shift operations - you shift the decimal point to the left\n\n until you find the most significant 1 bit in the binary number)\n\n Let M be the mantissa with the leading 1 bit omitted\n\n Let E be the exponent of the fixed point representation\n\n 3. Express the exponent E in excess 127 code\n\n 4. 
Assemble:\n\n Sign Exponent Mantissa into a IEEE 754 respresentation \n\n */\n\n if (a == 0) {\n\n return 0;\n\n }\n\n\n\n let frac: i32;\n\n if (a < 0) {\n\n frac = (!a) + 1;\n\n } else {\n\n frac = a;\n\n }\n\n let leading_zero = f32_count_leading_zero(frac);\n\n let shift = (32 - leading_zero - 1);\n\n let exp = shift + 0x7F;\n\n let sign = if (a < 0) { 1 } else { 0 };\n\n f32_pack(sign, exp, (frac << (24 - shift - 1)) & 0x7fffff)\n\n}\n\n\n", "file_path": "src/soft_f32/soft_f32_round.rs", "rank": 15, "score": 66269.54680940496 }, { "content": "use soft_f32_comp::f32_gt;\n\nuse soft_f32_comp::f32_le;\n\nuse soft_f32_comp::f32_ge;\n\n\n\n// Others\n\nuse soft_f32_sqrt::f32_sqrt;\n\nuse soft_f32_round::f32_round;\n\n\n\n// Utilities\n\npub use util::{\n\n f32_is_nan\n\n};\n\n\n\n// F32 struct\n\npub struct F32 {\n\n value: u32,\n\n}\n\n\n\nimpl F32 {\n\n pub fn from_u32(value: u32) -> F32 {\n", "file_path": "src/soft_f32/mod.rs", "rank": 16, "score": 24256.012025395514 }, { "content": " f32_le(self.value, other.value)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_f32_add_with_struct() {\n\n let v0_1 = crate::soft_f32::F32::from_u32(0x3DCCCCCD);\n\n let v0_2 = crate::soft_f32::F32::from_u32(0x3E4CCCCD);\n\n\n\n let v0_3 = v0_1 + v0_2;\n\n\n\n assert_eq!(v0_3.value(), 0x3E99999A);\n\n }\n\n\n\n #[test]\n\n fn test_f32_sub_with_struct() {\n\n let v0_1 = crate::soft_f32::F32::from_u32(0x3DCCCCCD);\n\n let v0_2 = crate::soft_f32::F32::from_u32(0x3E4CCCCD);\n", "file_path": "src/soft_f32/mod.rs", "rank": 17, "score": 24254.63007921323 }, { "content": " F32 {\n\n value: value\n\n }\n\n }\n\n\n\n pub fn value(self) -> u32 {\n\n self.value\n\n }\n\n\n\n pub fn sqrt(self) -> Self {\n\n F32 {\n\n value: f32_sqrt(self.value)\n\n }\n\n }\n\n}\n\n\n\nuse std::ops;\n\n\n\nimpl ops::Add<F32> for F32 {\n\n type Output = F32;\n", "file_path": "src/soft_f32/mod.rs", "rank": 18, "score": 24254.387104786943 }, { "content": "\n\nmod soft_f32_add;\n\nmod 
soft_f32_sub;\n\nmod soft_f32_mul;\n\nmod soft_f32_div;\n\nmod soft_f32_comp;\n\nmod soft_f32_sqrt;\n\nmod soft_f32_round;\n\nmod util;\n\n\n\n// Operations\n\nuse soft_f32_add::f32_add;\n\nuse soft_f32_sub::f32_sub;\n\nuse soft_f32_mul::f32_mul;\n\nuse soft_f32_div::f32_div;\n\n\n\n// Comparisons\n\nuse soft_f32_comp::f32_eq;\n\nuse soft_f32_comp::f32_ne;\n\nuse soft_f32_comp::f32_lt;\n", "file_path": "src/soft_f32/mod.rs", "rank": 19, "score": 24253.931433339054 }, { "content": "\n\n let v0_1_result = v0_2 - v0_1;\n\n\n\n assert_eq!(v0_1_result.value(), 0x3DCCCCCD);\n\n }\n\n\n\n #[test]\n\n fn test_f32_mul_with_struct() {\n\n let v0_1 = crate::soft_f32::F32::from_u32(0x3DCCCCCD);\n\n let v0_2 = crate::soft_f32::F32::from_u32(0x3E4CCCCD);\n\n\n\n let v0_02 = v0_2 * v0_1;\n\n\n\n assert_eq!(v0_02.value(), 0x3CA3D70B);\n\n }\n\n\n\n #[test]\n\n fn test_f32_div_with_struct() {\n\n let v0_1 = crate::soft_f32::F32::from_u32(0x3DCCCCCD);\n\n let v0_2 = crate::soft_f32::F32::from_u32(0x3E4CCCCD);\n", "file_path": "src/soft_f32/mod.rs", "rank": 20, "score": 24252.919434451647 }, { "content": " assert_eq!(v0_1 >= v0_2, false);\n\n assert_eq!(v0_1 > v0_1, false);\n\n assert_eq!(v0_1 >= v0_1, true);\n\n }\n\n\n\n #[test]\n\n fn test_f32_sqrt_with_struct() {\n\n // sqrt(0.01) = 0.1\n\n let v0_01 = crate::soft_f32::F32::from_u32(0x3C23D70A);\n\n\n\n let v0_1 = v0_01.sqrt();\n\n\n\n assert_eq!(v0_1.value(), 0x3DCCCCCD);\n\n }\n\n}\n", "file_path": "src/soft_f32/mod.rs", "rank": 21, "score": 24251.678465025605 }, { "content": "\n\n let v0_5 = v0_1 / v0_2;\n\n\n\n assert_eq!(v0_5.value(), 0x3F000000);\n\n }\n\n\n\n #[test]\n\n fn test_f32_compare_with_struct() {\n\n let v0_1 = crate::soft_f32::F32::from_u32(0x3DCCCCCD);\n\n let v0_2 = crate::soft_f32::F32::from_u32(0x3E4CCCCD);\n\n\n\n assert_eq!(v0_1 == v0_2, false);\n\n assert_eq!(v0_1 != v0_2, true);\n\n\n\n assert_eq!(v0_1 < v0_2, true);\n\n assert_eq!(v0_1 <= v0_2, true);\n\n assert_eq!(v0_1 < v0_1, false);\n\n 
assert_eq!(v0_1 <= v0_1, true);\n\n\n\n assert_eq!(v0_1 > v0_2, false);\n", "file_path": "src/soft_f32/mod.rs", "rank": 22, "score": 24251.47018407326 }, { "content": "\n\n fn mul(self, other: Self) -> Self {\n\n F32 {\n\n value: crate::soft_f32::soft_f32_mul::f32_mul(self.value, other.value)\n\n }\n\n }\n\n}\n\n\n\nimpl ops::Div<F32> for F32 {\n\n type Output = Self;\n\n\n\n fn div(self, other: Self) -> Self {\n\n F32 {\n\n value: crate::soft_f32::soft_f32_div::f32_div(self.value, other.value)\n\n }\n\n }\n\n}\n\n\n\nuse std::cmp;\n\n\n", "file_path": "src/soft_f32/mod.rs", "rank": 23, "score": 24249.40005865094 }, { "content": "impl cmp::PartialEq for F32 {\n\n // Implement equal with only symmetric and transitive for F32.\n\n //\n\n // Ref: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html\n\n // For example, in floating point numbers NaN != NaN,\n\n // so floating point types implement PartialEq but not Eq.\n\n fn eq(&self, other: &Self) -> bool {\n\n f32_eq(self.value, other.value)\n\n }\n\n\n\n fn ne(&self, other: &Self) -> bool {\n\n f32_ne(self.value, other.value)\n\n }\n\n}\n\n\n\nimpl cmp::PartialOrd for F32 {\n\n // Implement compare with only symmetric and transitive for F32.\n\n //\n\n // Ref: https://doc.rust-lang.org/std/cmp/trait.PartialOrd.html\n\n fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {\n", "file_path": "src/soft_f32/mod.rs", "rank": 24, "score": 24247.839550169436 }, { "content": " if f32_eq(self.value, other.value) { return Some(cmp::Ordering::Equal); }\n\n if f32_lt(self.value, other.value) { return Some(cmp::Ordering::Less); }\n\n if f32_gt(self.value, other.value) { return Some(cmp::Ordering::Greater); }\n\n\n\n None\n\n }\n\n\n\n fn gt(&self, other: &Self) -> bool {\n\n f32_gt(self.value, other.value)\n\n }\n\n\n\n fn lt(&self, other: &Self) -> bool {\n\n f32_lt(self.value, other.value)\n\n }\n\n\n\n fn ge(&self, other: &Self) -> bool {\n\n f32_ge(self.value, other.value)\n\n }\n\n\n\n fn le(&self, other: &Self) 
-> bool {\n", "file_path": "src/soft_f32/mod.rs", "rank": 25, "score": 24247.839550169436 }, { "content": "\n\n fn add(self, other: F32) -> F32 {\n\n F32 {\n\n value: crate::soft_f32::soft_f32_add::f32_add(self.value, other.value)\n\n }\n\n }\n\n}\n\n\n\nimpl ops::Sub<F32> for F32 {\n\n type Output = Self;\n\n\n\n fn sub(self, other: Self) -> Self {\n\n F32 {\n\n value: crate::soft_f32::soft_f32_sub::f32_sub(self.value, other.value)\n\n }\n\n }\n\n}\n\n\n\nimpl ops::Mul<F32> for F32 {\n\n type Output = Self;\n", "file_path": "src/soft_f32/mod.rs", "rank": 26, "score": 24247.839550169436 }, { "content": " let mut frac = f32_frac(a);\n\n\n\n frac |= 0x800000;\n\n\n\n if (exp < 0x7F) {\n\n // It must be a zero, because it is too tiny\n\n return 0;\n\n }\n\n\n\n let shift = exp - 0x7F;\n\n if (sign == 0) {\n\n return frac >> (23 - shift);\n\n } else {\n\n return -!((frac >> (23 - shift)) - 1);\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/soft_f32/soft_f32_round.rs", "rank": 27, "score": 6.584390983109742 }, { "content": "\n\npub(crate) fn f32_shift_right_jam(a: i32, dist: i32) -> i32 {\n\n if dist < 31 {\n\n if (a << (-dist & 31)) != 0 {\n\n (a >> dist) | 1\n\n } else {\n\n (a >> dist) | 0\n\n }\n\n } else {\n\n if a != 0 {\n\n 1\n\n } else {\n\n 0\n\n }\n\n }\n\n}\n\n\n\nuse crate::soft_float::{ RoundingMode, DetectTininess };\n\n\n\npub(crate) fn f32_sign(a: u32) -> i32 {\n", "file_path": "src/soft_f32/util.rs", "rank": 28, "score": 6.29165210724212 }, { "content": "\n\nmod soft_float;\n\npub mod soft_f32;\n", "file_path": "src/lib.rs", "rank": 29, "score": 6.104391920742319 }, { "content": " ((a >> 31) & 0x01) as i32\n\n}\n\n\n\npub(crate) fn f32_exp(a: u32) -> i32 {\n\n ((a >> 23) & 0x0FF) as i32\n\n}\n\n\n\npub(crate) fn f32_frac(a: u32) -> i32 {\n\n (a & 0x7FFFFF) as i32\n\n}\n\n\n\npub(crate) fn f32_frac_old(a: u32) -> i32 {\n\n ((a & 0x7FFFFF) | (0x01 << 23)) as i32\n\n}\n\n\n\npub(crate) fn f32_pack_raw(in_sign: i32, in_exp: i32, 
in_frac: i32) -> u32 {\n\n // FIXME: why + is not equivalent to |\n\n ((in_sign << 31) | (in_exp << 23) + in_frac) as u32\n\n}\n\n\n", "file_path": "src/soft_f32/util.rs", "rank": 30, "score": 5.806471390092008 }, { "content": " let mut e_sqr_r0: u32 = r0 as u32 * r0 as u32;\n\n\n\n if odd_exp == 0 {\n\n e_sqr_r0 <<= 1;\n\n }\n\n\n\n let delta0 = !(((e_sqr_r0 as u64 * a as u64)>>23) as u32);\n\n\n\n let mut r: u32 = ((r0 as u32) << 16) + (((r0 as u64) * delta0 as u64) >> 25) as u32;\n\n\n\n let sqr_delta0 = (((delta0 as u64) * (delta0 as u64)) >> 32) & 0xFFFFFFFF;\n\n\n\n let r_temp_left_u64 = ((r >> 1) as u64) + ((r >> 3) as u64) - ((r0 as u64) << 14);\n\n let r_temp: u64 = (r_temp_left_u64 * (sqr_delta0 as u64));\n\n r += ((r_temp >> 48) & 0xFFFFFFFF) as u32;\n\n\n\n if r & 0x80000000 == 0 {\n\n r = 0x80000000;\n\n }\n\n\n\n r\n\n}\n", "file_path": "src/soft_f32/util.rs", "rank": 31, "score": 5.321726702939805 }, { "content": "# Software-emulated FPU in Rust\n\n\n\nUse 32-bit unsigned integer to represent Float32 in [IEEE-754](https://en.wikipedia.org/wiki/IEEE_754) and do float calculation.\n\n\n\n# Usage\n\n\n\nThere is an operator-trait-based API:\n\n\n\n## Add\n\n\n\n```rust\n\nlet v0_1 = soft_f32::F32::from_u32(0x3DCCCCCD); // 0.1\n\nlet v0_2 = soft_f32::F32::from_u32(0x3E4CCCCD); // 0.2\n\n\n\nlet v0_3 = v0_1 + v0_2; // 0.1 + 0.2\n\n\n\nassert_eq!(v0_3.value(), 0x3E99999A);\n\n```\n\n\n\nand a procedure-style API:\n\n\n\n```rust\n\nlet v0_1 = 0x3DCCCCCD; // 0.1\n\nlet v0_2 = 0x3E4CCCCD; // 0.2\n\n\n\nlet v0_3 = soft_f32::f32_add(v0_1, v0_2); // 0.1 + 0.2\n\n\n\nassert_eq!(v0_3, 0x3E99999A);\n\n```\n\n\n\n## Subtract\n\n\n\n```rust\n\nlet v0_1 = soft_f32::F32::from_u32(0x3DCCCCCD); // 0.1\n\nlet v0_2 = soft_f32::F32::from_u32(0x3E4CCCCD); // 0.2\n\n\n\nlet v0_1_result = v0_2 - v0_1; // 0.2 - 0.1\n\n\n\nassert_eq!(v0_1_result.value(), 0x3DCCCCCD);\n\n```\n\n\n\n## Multiply\n\n\n\n```rust\n\nlet v0_1 = soft_f32::F32::from_u32(0x3DCCCCCD); // 0.1\n\nlet v0_2 = 
soft_f32::F32::from_u32(0x3E4CCCCD); // 0.2\n\n\n\nlet v0_02 = v0_2 * v0_1; // 0.2 * 0.1\n\n\n\nassert_eq!(v0_02.value(), 0x3CA3D70B);\n\n```\n\n\n\n## Division\n\n\n\n```rust\n\nlet v0_1 = soft_f32::F32::from_u32(0x3DCCCCCD); // 0.1\n\nlet v0_2 = soft_f32::F32::from_u32(0x3E4CCCCD); // 0.2\n\n\n\nlet v0_5 = v0_1 / v0_2; // 0.1 / 0.2\n\n\n\nassert_eq!(v0_5.value(), 0x3F000000);\n\n```\n\n\n\n## Squared-root\n\n\n\n```rust\n\nlet v0_01 = crate::soft_f32::F32::from_u32(0x3C23D70A); // 0.01\n\n\n\nlet v0_1 = v0_01.sqrt(); // sqrt(0.01)\n\n\n\nassert_eq!(v0_1.value(), 0x3DCCCCCD);\n\n```\n\n\n\n## Comparison\n\n\n\n```rust\n\nlet v0_1 = 0x3DCCCCCD; // 0.1\n\nlet v0_2 = 0x3E4CCCCD; // 0.2\n\n\n\nassert_eq!(v0_1 < v0_2, true);\n\nassert_eq!(v0_1 <= v0_2, true);\n\nassert_eq!(v0_1 < v0_1, false);\n\nassert_eq!(v0_1 <= v0_1, true);\n\nassert_eq!(v0_1 == v0_1, true);\n\nassert_eq!(v0_1 != v0_1, false);\n\n```\n\n\n\n# Development\n\n\n\nCurrently only aiming at implementing Float32.\n\n\n\n## TODOs\n\n\n\n- [ ] Publish on crate.io\n\n- [ ] Float32 Log2 (v0.2.X)\n\n- [ ] Float32 Exp (v0.2.X)\n\n- [ ] Float32 Sin, Cos (v0.2.X)\n\n- [ ] Float80 (v0.3.X)\n\n\n\n# Conclusion\n\n\n\nIt is a by-product of one of my projects to bring a float number subsystem into a PL that is not capable of handling float numbers. 
And finally, implement a more complex system.\n", "file_path": "README.md", "rank": 32, "score": 5.288761709639278 }, { "content": " a_frac = frac;\n\n }\n\n\n\n r_exp = a_exp - b_exp + 0x7E;\n\n a_frac |= 0x00800000;\n\n b_frac |= 0x00800000;\n\n\n\n // Use u64 to divide u32\n\n let mut a_frac_u64 = a_frac as u64;\n\n if a_frac < b_frac {\n\n r_exp -= 1;\n\n a_frac_u64 <<= 31;\n\n } else {\n\n a_frac_u64 <<= 30;\n\n }\n\n let mut r_frac_u64 = a_frac_u64 / (b_frac as u64);\n\n\n\n if (r_frac_u64 & 0x3F) == 0 {\n\n if r_frac_u64 * (b_frac as u64) != a_frac_u64 {\n\n r_frac_u64 = r_frac_u64 | 0x01;\n", "file_path": "src/soft_f32/soft_f32_div.rs", "rank": 33, "score": 4.963080340758205 }, { "content": "pub(crate) fn f32_norm_subnormal_frac(frac: i32) -> (i32, i32) {\n\n let shift_count = f32_count_leading_zero(frac) - 8;\n\n\n\n (1 - shift_count, frac << shift_count)\n\n}\n\n\n\nuse std::convert::TryFrom;\n\n\n\npub(crate) fn f32_short_shift_right_jam64(a: u64, count: i32) -> i32 {\n\n let b: i32 = i32::try_from((a >> count) & 0xFFFFFFFF).unwrap();\n\n if (a & ((0x01 << count) - 1)) != 0 {\n\n return b | 1;\n\n }\n\n b\n\n}\n\n\n\npub(crate) fn f32_approx_recip(a: u32) -> u32 {\n\n let k0s: &[u64] = &[\n\n 0xFFC4, 0xF0BE, 0xE363, 0xD76F, 0xCCAD, 0xC2F0, 0xBA16, 0xB201,\n\n 0xAA97, 0xA3C6, 0x9D7A, 0x97A6, 0x923C, 0x8D32, 0x887E, 0x8417,\n", "file_path": "src/soft_f32/util.rs", "rank": 34, "score": 4.909161523348207 }, { "content": "\n\n let result = (r + (r * sqr_delta0) >> 48) as u32;\n\n\n\n result\n\n}\n\n\n\npub(crate) fn f32_approx_recip_sqrt(odd_exp: u32, a: u32) -> u32 {\n\n let k0s: &[u16] = &[\n\n 0xB4C9, 0xFFAB, 0xAA7D, 0xF11C, 0xA1C5, 0xE4C7, 0x9A43, 0xDA29,\n\n 0x93B5, 0xD0E5, 0x8DED, 0xC8B7, 0x88C6, 0xC16D, 0x8424, 0xBAE1,\n\n ];\n\n\n\n let k1s: &[u16] = &[\n\n 0xA5A5, 0xEA42, 0x8C21, 0xC62D, 0x788F, 0xAA7F, 0x6928, 0x94B6,\n\n 0x5CC7, 0x8335, 0x52A6, 0x74E2, 0x4A3E, 0x68FE, 0x432B, 0x5EFD,\n\n ];\n\n\n\n let index = (((a >> 27) & 0x0E) + odd_exp) as 
usize;\n\n let eps = (a >> 12) & 0x0000FFFF; // Only use the low 16 bits\n\n let r0: u16 = (k0s[index] - (((k1s[index] as u64 * eps as u64) >> 20) & 0xFFFF) as u16);\n", "file_path": "src/soft_f32/util.rs", "rank": 35, "score": 4.899691670688425 }, { "content": " }\n\n count + f32_count_leading_zeros_8[((frac >> 24) & 0xFF) as usize]\n\n}\n\n\n\npub(crate) fn f32_norm_round_and_pack(in_sign: i32, in_exp: i32, in_frac: i32) -> u32 {\n\n let shift_count = f32_count_leading_zero(in_frac) - 1;\n\n let mut sign = in_sign;\n\n let mut exp = in_exp - shift_count;\n\n let mut frac = in_frac;\n\n\n\n if exp < 0xFD && shift_count >= 7 {\n\n if frac == 0 {\n\n exp = 0;\n\n }\n\n return f32_pack_raw(sign, exp, frac << shift_count);\n\n } else {\n\n return f32_round_and_pack(sign, exp, frac << shift_count);\n\n }\n\n}\n\n\n", "file_path": "src/soft_f32/util.rs", "rank": 36, "score": 4.894776796860981 }, { "content": " let neg_rem = (r_shifted_frac as u64) * (r_shifted_frac as u64);\n\n r_frac &= (!0x03);\n\n\n\n if neg_rem & 0x80000000 != 0 {\n\n r_frac |= 0x01;\n\n } else {\n\n if neg_rem != 0 {\n\n r_frac -= 1;\n\n }\n\n }\n\n }\n\n\n\n f32_round_and_pack(0, r_exp, r_frac)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_f32_sqrt() {\n\n // sqrt(0.01) = 0.1\n", "file_path": "src/soft_f32/soft_f32_sqrt.rs", "rank": 37, "score": 4.827476566018401 }, { "content": " a_frac = frac;\n\n }\n\n\n\n r_exp = ((a_exp - 0x7F) >> 1) + 0x7E;\n\n a_exp &= 1;\n\n\n\n let a_frac_u32: u32 = ((a_frac | 0x00800000) as u32) << 8;\n\n let mut result = f32_approx_recip_sqrt(a_exp as u32, a_frac_u32);\n\n\n\n let r_frac_u64: u64 = (a_frac_u32 as u64) * (result as u64);\n\n let r_frac_u32 = (r_frac_u64>> 32) as u32;\n\n r_frac = r_frac_u32 as i32;\n\n\n\n if a_exp != 0 {\n\n r_frac >>= 1;\n\n }\n\n r_frac += 2;\n\n\n\n if (r_frac & 0x3F) < 2 {\n\n let r_shifted_frac = (r_frac >> 2) as u32;\n", "file_path": "src/soft_f32/soft_f32_sqrt.rs", "rank": 38, "score": 4.807180640989356 }, { 
"content": " }\n\n\n\n if a_sign != 0 {\n\n return a > b;\n\n } else {\n\n return a < b;\n\n }\n\n }\n\n false\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_f32_eq() {\n\n // 0.3 == 0.2 - false\n\n assert_eq!(crate::soft_f32::f32_eq(0x3E99999A, 0x3E4CCCCD), false);\n\n\n\n // 0.2 == 0.3 - false\n\n assert_eq!(crate::soft_f32::f32_eq(0x3E4CCCCD, 0x3E99999A), false);\n", "file_path": "src/soft_f32/soft_f32_comp.rs", "rank": 39, "score": 4.578777380891589 }, { "content": "\n\n if diff_exp == 0 {\n\n if a_exp == 0xFF {\n\n if (a_sign | b_sign) != 0 {\n\n // Propagate NaN\n\n return f32_propagate_nan(a, b);\n\n } else {\n\n // Return a NaN\n\n // FIXME: 0x7FC00000 is used in IBM IEEE, while 0xFFC00000 is used otherwise\n\n return f32_pack_raw(0, 0xFF, 0);\n\n }\n\n }\n\n\n\n if a_exp == 0 {\n\n a_exp = 1;\n\n b_exp = 1;\n\n }\n\n\n\n if a_frac > b_frac {\n\n // Fraction of A is greater\n", "file_path": "src/soft_f32/soft_f32_sub.rs", "rank": 41, "score": 4.121301335943735 }, { "content": " }\n\n }\n\n\n\n r_frac = (r_frac_u64 & 0xFFFFFFFF) as i32;\n\n\n\n f32_round_and_pack(r_sign, r_exp, r_frac)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_f32_div() {\n\n // 0.1 / 0.2 = 0.5\n\n assert_eq!(crate::soft_f32::f32_div(0x3DCCCCCD, 0x3E4CCCCD), 0x3F000000);\n\n // -0.1 / -0.2 = 0.5\n\n assert_eq!(crate::soft_f32::f32_div(0xBDCCCCCD, 0xBE4CCCCD), 0x3F000000);\n\n\n\n // 12345 / 67890 = 8.381021E8\n\n assert_eq!(crate::soft_f32::f32_div(0x4640E400, 0x47849900), 0x3E3A33D0);\n\n // -12345 / -67890 = 8.381021E8\n", "file_path": "src/soft_f32/soft_f32_div.rs", "rank": 42, "score": 3.8709267498895636 }, { "content": " r_frac = b_frac - a_frac;\n\n }\n\n return f32_norm_round_and_pack(r_sign, r_exp - 1, r_frac);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_f32_sub() {\n\n // 0.3 - 0.2 = 0.1\n\n assert_eq!(crate::soft_f32::f32_sub(0x3E99999A, 0x3E4CCCCD), 0x3DCCCCCE);\n\n\n\n // 0.2 - 0.3 = -0.1\n\n 
assert_eq!(crate::soft_f32::f32_sub(0x3E4CCCCD, 0x3E99999A), 0xBDCCCCCE);\n\n\n\n // FIXME: 0.2 - -0.1 = 0.3\n\n // 0x3DCCCCCC\n\n // assert_eq!(crate::soft_f32::f32_sub(0x3E4CCCCD, 0xBDCCCCCE), 0x3E99999A);\n\n\n\n // FIXME: -0.2 - 0.1 = -0.3\n", "file_path": "src/soft_f32/soft_f32_sub.rs", "rank": 43, "score": 3.818399747708396 }, { "content": " Min => round_increment = 0x7F,\n\n _ => round_increment = 0,\n\n }\n\n } else {\n\n match &rounding_mode {\n\n Max => round_increment = 0x7F,\n\n _ => round_increment = 0,\n\n }\n\n }\n\n }\n\n }\n\n let mut round_bits = frac & 0x7F;\n\n\n\n if exp >= 0xFD { // FIXME: exponential value is wrongly detected here\n\n if exp < 0 {\n\n let is_tiny = (\n\n match detect_tininess {\n\n Before => true,\n\n _ => false,\n\n } || exp < -1 || (frac as u32) + (round_increment as u32) < 0x80000000\n", "file_path": "src/soft_f32/util.rs", "rank": 44, "score": 3.7136615162146773 }, { "content": " r_frac <<= 1;\n\n }\n\n\n\n f32_round_and_pack(r_sign, r_exp, r_frac)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn test_f32_mul() {\n\n // 0.1 x 0.2 = 0.02\n\n assert_eq!(crate::soft_f32::f32_mul(0x3DCCCCCD, 0x3E4CCCCD), 0x3CA3D70B);\n\n // -0.1 x -0.2 = 0.02\n\n assert_eq!(crate::soft_f32::f32_mul(0xBDCCCCCD, 0xBE4CCCCD), 0x3CA3D70B);\n\n\n\n // 12345 x 67890 = 8.381021E8\n\n assert_eq!(crate::soft_f32::f32_mul(0x4640E400, 0x47849900), 0x4E47D1B1);\n\n // -12345 + -67890 = 8.381021E8\n\n assert_eq!(crate::soft_f32::f32_mul(0xC640E400, 0xC7849900), 0x4E47D1B1);\n\n\n", "file_path": "src/soft_f32/soft_f32_mul.rs", "rank": 45, "score": 3.623073027364049 }, { "content": " return z;\n\n }\n\n if (0x96 <= a_exp) {\n\n if (a_exp == 0xFF && a_frac != 0) {\n\n return f32_propagate_nan(a, 0);\n\n }\n\n return a;\n\n }\n\n let last_bit_mask = (1 << (0x96 - a_exp));\n\n let round_bits_mask = last_bit_mask - 1;\n\n // By default use near even round mode\n\n let mut r = a;\n\n r += (last_bit_mask >> 1);\n\n if ((r & round_bits_mask) == 0) {\n\n 
r &= (!last_bit_mask);\n\n }\n\n r &= (!round_bits_mask);\n\n return r;\n\n}\n\n\n", "file_path": "src/soft_f32/soft_f32_round.rs", "rank": 48, "score": 3.2613888204494454 }, { "content": "\n\n let (exp, frac) = f32_norm_subnormal_frac(b_frac);\n\n b_exp = exp;\n\n b_frac = frac;\n\n }\n\n\n\n r_exp = a_exp + b_exp - 0x7F;\n\n\n\n a_frac = (a_frac | 0x00800000) << 7;\n\n b_frac = (b_frac | 0x00800000) << 8;\n\n\n\n let a_frac_u32: u32 = a_frac as u32;\n\n let b_frac_u32: u32 = b_frac as u32;\n\n let a_frac_u64: u64 = a_frac_u32 as u64;\n\n let b_frac_u64: u64 = b_frac_u32 as u64;\n\n let frac_prod: u64 = a_frac_u64 * b_frac_u64;\n\n r_frac = f32_short_shift_right_jam64(frac_prod, 32);\n\n\n\n if r_frac < 0x40000000 {\n\n r_exp -= 1;\n", "file_path": "src/soft_f32/soft_f32_mul.rs", "rank": 49, "score": 3.1593538246554207 }, { "content": " ];\n\n\n\n let k1s: &[u64] = &[\n\n 0xF0F1, 0xD62C, 0xBFA1, 0xAC77, 0x9C0A, 0x8DDB, 0x8185, 0x76BA,\n\n 0x6D3B, 0x64D4, 0x5D5C, 0x56B1, 0x50B6, 0x4B55, 0x4679, 0x4211,\n\n ];\n\n\n\n let a_u64 = a as u64;\n\n\n\n let index = ((a >> 27) & 0x0F) as usize;\n\n\n\n let eps = (a >> 11) as u64;\n\n\n\n let r0: u64 = k0s[index] - ((k1s[index] * eps) >> 20);\n\n \n\n let delta0: u32 = ((r0 * a_u64) >> 7) as u32;\n\n\n\n let r: u64 = (r0 << 16) + ((r0 * delta0 as u64) >> 24);\n\n\n\n let sqr_delta0 = (delta0 as u64 * delta0 as u64) >> 32;\n", "file_path": "src/soft_f32/util.rs", "rank": 50, "score": 3.148587978024583 }, { "content": " 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n\n ];\n\n\n\n let mut count = 0;\n\n let mut frac = 
in_frac;\n\n if frac < 0x10000 {\n\n count = 16;\n\n frac <<= 16;\n\n }\n\n if frac < 0x1000000 {\n\n count += 8;\n\n frac <<= 8;\n", "file_path": "src/soft_f32/util.rs", "rank": 51, "score": 3.1191699657354905 }, { "content": "pub enum RoundingMode {\n\n NearEven,\n\n MinMag,\n\n Min,\n\n Max,\n\n NearMaxMag,\n\n Odd,\n\n}\n\n\n\npub enum DetectTininess {\n\n Before,\n\n After,\n\n}\n", "file_path": "src/soft_float.rs", "rank": 52, "score": 2.988733268731477 }, { "content": "use super::util::{\n\n f32_sign, f32_exp, f32_frac,\n\n f32_is_nan,\n\n};\n\n\n", "file_path": "src/soft_f32/soft_f32_comp.rs", "rank": 53, "score": 2.466274338446018 }, { "content": " assert_eq!(crate::soft_f32::f32_sqrt(0x3C23D70A), 0x3DCCCCCD);\n\n // sqrt(4) = 2\n\n assert_eq!(crate::soft_f32::f32_sqrt(0x40800000), 0x40000000);\n\n\n\n // sqrt(0) = 0\n\n assert_eq!(crate::soft_f32::f32_sqrt(0x00), 0x00);\n\n // sqrt(-0) = -0\n\n assert_eq!(crate::soft_f32::f32_sqrt(0x80000000), 0x80000000);\n\n }\n\n\n\n #[test]\n\n fn test_f32_sqrt_inf_nan() {\n\n // TODO: add some tests\n\n }\n\n}\n\n\n", "file_path": "src/soft_f32/soft_f32_sqrt.rs", "rank": 54, "score": 2.445979925636375 }, { "content": " );\n\n frac = f32_shift_right_jam(frac, -exp);\n\n exp = 0;\n\n round_bits = frac & 0x7F;\n\n if is_tiny && round_bits != 0 {\n\n // Underflow\n\n }\n\n } else if exp > 0xFD || (frac as u32) + (round_increment as u32) >= 0x80000000 {\n\n // Overflow and inexact\n\n return (((sign << 31) | ((exp & 0x0FF) << 23) | (frac & 0x7fffff)) - ! 
round_increment) as u32\n\n }\n\n }\n\n\n\n frac = (frac + round_increment) >> 7;\n\n\n\n // if ( roundBits ) { softfloat_exceptionFlags |= softfloat_flag_inexact; }\n\n\n\n match &rounding_mode {\n\n NearEven => {\n\n if round_bits ^ 0x40 == 0 {\n", "file_path": "src/soft_f32/util.rs", "rank": 56, "score": 2.376793178146132 }, { "content": " #[test]\n\n fn test_f32_round() {\n\n // round(0.01) = 0\n\n assert_eq!(crate::soft_f32::f32_round(0x3C23D70A), 0x00000000);\n\n // round(4) = 4\n\n assert_eq!(crate::soft_f32::f32_round(0x40800000), 0x40800000);\n\n }\n\n\n\n #[test]\n\n fn test_f32_to_i32() {\n\n // round(0.01) = 0\n\n assert_eq!(crate::soft_f32::soft_f32_round::to_int32(0x3C23D70A), 0);\n\n // round(4) = 4\n\n assert_eq!(crate::soft_f32::soft_f32_round::to_int32(0x40800000), 4);\n\n // round(244.5) = 244\n\n assert_eq!(crate::soft_f32::soft_f32_round::to_int32(0x43748000), 244);\n\n // round(128.2) = 128\n\n assert_eq!(crate::soft_f32::soft_f32_round::to_int32(0x43003333), 128);\n\n\n\n // FIXME: round(0x4f000000) = 2147483647)\n", "file_path": "src/soft_f32/soft_f32_round.rs", "rank": 57, "score": 2.351814810703016 }, { "content": "use super::util::{\n\n f32_shift_right_jam,\n\n f32_norm_round_and_pack,\n\n f32_round_and_pack,\n\n f32_pack_raw, f32_pack,\n\n f32_propagate_nan,\n\n f32_sign, f32_exp, f32_frac,\n\n f32_norm_subnormal_frac,\n\n f32_short_shift_right_jam64,\n\n};\n\n\n\nuse crate::soft_f32::f32_sub;\n\n\n", "file_path": "src/soft_f32/soft_f32_mul.rs", "rank": 58, "score": 2.2016876516749133 }, { "content": " frac &= 0x7FFFFFFE;\n\n } else {\n\n frac &= 0x7FFFFFFF;\n\n }\n\n }\n\n }\n\n if frac == 0 { exp = 0; }\n\n\n\n f32_pack_raw(sign, exp, frac)\n\n}\n\n\n\npub(crate) fn f32_count_leading_zero(in_frac: i32) -> i32 {\n\n let f32_count_leading_zeros_8: &[i32] = &[\n\n 8, 7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4,\n\n 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,\n\n 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\n\n 2, 2, 2, 2, 2, 2, 2, 
2, 2, 2, 2, 2, 2, 2, 2, 2,\n\n 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n\n 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n\n 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", "file_path": "src/soft_f32/util.rs", "rank": 59, "score": 1.8954245278978639 }, { "content": "use super::util::{\n\n f32_shift_right_jam,\n\n f32_norm_round_and_pack,\n\n f32_round_and_pack,\n\n f32_pack_raw, f32_pack,\n\n f32_propagate_nan,\n\n f32_sign, f32_exp, f32_frac,\n\n};\n\n\n", "file_path": "src/soft_f32/soft_f32_sub.rs", "rank": 60, "score": 1.8401571253117694 }, { "content": " if b_frac != 0 {\n\n // Propagate NaN\n\n return f32_propagate_nan(a, b);\n\n } else {\n\n // Return a NaN\n\n return f32_pack_raw(r_sign ^ 1, 0xFF, 0);\n\n }\n\n }\n\n\n\n if a_exp != 0 {\n\n a_frac += 0x40000000;\n\n } else {\n\n a_frac += a_frac;\n\n }\n\n\n\n a_frac = f32_shift_right_jam(a_frac, -diff_exp);\n\n b_frac |= 0x40000000;\n\n\n\n r_sign ^= 1;\n\n r_exp = b_exp;\n", "file_path": "src/soft_f32/soft_f32_sub.rs", "rank": 61, "score": 1.798786515950551 }, { "content": "\n\n // -Inf < 0.004 - true\n\n assert_eq!(crate::soft_f32::f32_lt(0xFF800000, 0x3B83126F), true);\n\n\n\n // -Inf < Inf - true\n\n assert_eq!(crate::soft_f32::f32_lt(0xFF800000, 0x7F800000), true);\n\n\n\n // -Inf < Nan - false\n\n assert_eq!(crate::soft_f32::f32_lt(0xFF800000, 0xFFFFFFFF), false);\n\n\n\n // Inf < Nan - false\n\n assert_eq!(crate::soft_f32::f32_lt(0x7F800000, 0xFFFFFFFF), false);\n\n\n\n // Nan < Nan - false\n\n assert_eq!(crate::soft_f32::f32_lt(0xFFFFFFFF, 0xFFFFFFFF), false);\n\n }\n\n\n\n #[test]\n\n fn test_f32_le() {\n\n // 0.3 <= 0.2 - false\n", "file_path": "src/soft_f32/soft_f32_comp.rs", "rank": 62, "score": 1.722236171570541 }, { "content": " r_exp = a_exp;\n\n r_frac = a_frac;\n\n }\n\n return f32_pack_raw(r_sign, r_exp, r_frac);\n\n }\n\n\n\n if b_exp != 0 {\n\n b_frac += 0x40000000;\n\n } else {\n\n b_frac += b_frac;\n\n }\n\n\n\n b_frac = f32_shift_right_jam(b_frac, diff_exp);\n\n a_frac 
|= 0x40000000; // Add the implicit 1\n\n\n\n r_exp = a_exp;\n\n r_frac = a_frac - b_frac;\n\n } else {\n\n // Exp of B is greater\n\n if b_exp == 0xFF {\n", "file_path": "src/soft_f32/soft_f32_sub.rs", "rank": 63, "score": 1.693234551154772 }, { "content": "use super::util::{\n\n f32_shift_right_jam,\n\n f32_norm_round_and_pack,\n\n f32_round_and_pack,\n\n f32_pack_raw, f32_pack,\n\n f32_propagate_nan,\n\n f32_sign, f32_exp, f32_frac,\n\n f32_is_nan,\n\n f32_count_leading_zero,\n\n};\n\n\n", "file_path": "src/soft_f32/soft_f32_round.rs", "rank": 64, "score": 1.6888345596769945 }, { "content": " assert_eq!(crate::soft_f32::f32_eq(0xFF800000, 0x7F800000), false);\n\n\n\n // -Inf == Nan - false\n\n assert_eq!(crate::soft_f32::f32_eq(0xFF800000, 0xFFFFFFFF), false);\n\n\n\n // Inf == Nan - false\n\n assert_eq!(crate::soft_f32::f32_eq(0x7F800000, 0xFFFFFFFF), false);\n\n\n\n // Nan == Nan - false\n\n assert_eq!(crate::soft_f32::f32_eq(0xFFFFFFFF, 0xFFFFFFFF), false);\n\n\n\n // -Inf == -Inf - true\n\n assert_eq!(crate::soft_f32::f32_eq(0xFF800000, 0xFF800000), true);\n\n\n\n // Inf == Inf - true\n\n assert_eq!(crate::soft_f32::f32_eq(0x7F800000, 0x7F800000), true);\n\n }\n\n\n\n #[test]\n\n fn test_f32_lt() {\n", "file_path": "src/soft_f32/soft_f32_comp.rs", "rank": 65, "score": 1.6088822935264635 }, { "content": " // -0.1 x 0.2 = -0.02\n\n assert_eq!(crate::soft_f32::f32_mul(0xBDCCCCCD, 0x3E4CCCCD), 0xBCA3D70B);\n\n // 0.1 x -0.2 = -0.02\n\n assert_eq!(crate::soft_f32::f32_mul(0x3DCCCCCD, 0xBE4CCCCD), 0xBCA3D70B);\n\n }\n\n\n\n #[test]\n\n fn test_f32_mul_inf_nan() {\n\n // Inf x 1 = Inf\n\n assert_eq!(crate::soft_f32::f32_mul(0x7F800000, 0x3F800000), 0x7F800000);\n\n\n\n // -Inf x 1 = -Inf\n\n assert_eq!(crate::soft_f32::f32_mul(0xFF800000, 0x3F800000), 0xFF800000);\n\n\n\n // -Inf x Inf = -Inf\n\n assert_eq!(crate::soft_f32::f32_mul(0xFF800000, 0x7F800000), 0xFF800000);\n\n\n\n // Inf x -1 = -Inf\n\n assert_eq!(crate::soft_f32::f32_mul(0x7F800000, 0xBF800000), 
0xFF800000);\n\n\n", "file_path": "src/soft_f32/soft_f32_mul.rs", "rank": 66, "score": 1.59836221742684 }, { "content": " // assert_eq!(crate::soft_f32::soft_f32_round::to_int32(0x4F000000), std::i32::MAX);\n\n // FIXME: round(0xcf000000) = -2147483648)\n\n // assert_eq!(crate::soft_f32::soft_f32_round::to_int32(0xCF000000), std::i32::MIN);\n\n }\n\n\n\n #[test]\n\n fn test_i32_to_f32() {\n\n // from_int32(0) = 0.0 \n\n assert_eq!(crate::soft_f32::soft_f32_round::from_int32(0), 0x0);\n\n\n\n // from_int32(4) = 4.0\n\n assert_eq!(crate::soft_f32::soft_f32_round::from_int32(4), 0x40800000);\n\n\n\n // from_int32(-4) = -4.0\n\n assert_eq!(crate::soft_f32::soft_f32_round::from_int32(-4), 0xC0800000);\n\n\n\n // from_int32(80235) = 0x479CB580\n\n assert_eq!(crate::soft_f32::soft_f32_round::from_int32(80235), 0x479CB580);\n\n\n\n // from_int32(80235) = 0x479CB580\n", "file_path": "src/soft_f32/soft_f32_round.rs", "rank": 67, "score": 1.59836221742684 }, { "content": " r_exp = a_exp;\n\n r_frac = a_frac - b_frac;\n\n } else if b_frac > a_frac {\n\n // Fraction of B is greater\n\n r_sign ^= 1;\n\n r_exp = b_exp;\n\n r_frac = b_frac - a_frac;\n\n } else {\n\n // Same, will cause a 0\n\n return f32_pack(0, 0, 0);\n\n }\n\n return f32_norm_round_and_pack(r_sign, r_exp - 1, r_frac);\n\n } else if diff_exp > 0 {\n\n // Exp of A is greater\n\n if a_exp == 0xFF {\n\n if a_frac != 0 {\n\n // Propagate NaN\n\n return f32_propagate_nan(a, b);\n\n } else {\n\n r_sign = a_sign;\n", "file_path": "src/soft_f32/soft_f32_sub.rs", "rank": 68, "score": 1.5886660861932467 }, { "content": " assert_eq!(crate::soft_f32::f32_div(0xC640E400, 0xC7849900), 0x3E3A33D0);\n\n\n\n // -0.1 / 0.2 = -0.5\n\n assert_eq!(crate::soft_f32::f32_div(0xBDCCCCCD, 0x3E4CCCCD), 0xBF000000);\n\n // 0.1 / -0.2 = -0.5\n\n assert_eq!(crate::soft_f32::f32_div(0x3DCCCCCD, 0xBE4CCCCD), 0xBF000000);\n\n }\n\n\n\n #[test]\n\n fn test_f32_div_inf_nan() {\n\n // 1 / 0 = Inf\n\n assert_eq!(crate::soft_f32::f32_div(0x3F800000, 
0x00000000), 0x7F800000);\n\n\n\n // 1 / -0 = -Inf\n\n assert_eq!(crate::soft_f32::f32_div(0x3F800000, 0x80000000), 0xFF800000);\n\n\n\n // Inf / 1 = Inf\n\n assert_eq!(crate::soft_f32::f32_div(0x7F800000, 0x3F800000), 0x7F800000);\n\n\n\n // -Inf / 1 = -Inf\n", "file_path": "src/soft_f32/soft_f32_div.rs", "rank": 69, "score": 1.587978823850619 }, { "content": "use super::util::{\n\n f32_shift_right_jam,\n\n f32_norm_round_and_pack,\n\n f32_round_and_pack,\n\n f32_pack_raw, f32_pack,\n\n f32_propagate_nan,\n\n f32_sign, f32_exp, f32_frac,\n\n f32_norm_subnormal_frac,\n\n f32_short_shift_right_jam64,\n\n f32_approx_recip,\n\n};\n\n\n\n\n", "file_path": "src/soft_f32/soft_f32_div.rs", "rank": 70, "score": 1.5605084815026635 }, { "content": "use super::util::{\n\n f32_shift_right_jam,\n\n f32_norm_round_and_pack,\n\n f32_round_and_pack,\n\n f32_pack_raw, f32_pack,\n\n f32_propagate_nan,\n\n f32_sign, f32_exp, f32_frac,\n\n f32_norm_subnormal_frac,\n\n f32_short_shift_right_jam64,\n\n f32_approx_recip_sqrt,\n\n f32_is_nan,\n\n};\n\n\n\n\n", "file_path": "src/soft_f32/soft_f32_sqrt.rs", "rank": 71, "score": 1.503390909427781 } ]
Rust
third_party/rust_crates/vendor/tokio-executor/src/park.rs
casey/fuchsia
2b965e9a1e8f2ea346db540f3611a5be16bb4d6b
use std::marker::PhantomData; use std::rc::Rc; use std::sync::Arc; use std::time::Duration; use crossbeam_utils::sync::{Parker, Unparker}; pub trait Park { type Unpark: Unpark; type Error; fn unpark(&self) -> Self::Unpark; fn park(&mut self) -> Result<(), Self::Error>; fn park_timeout(&mut self, duration: Duration) -> Result<(), Self::Error>; } pub trait Unpark: Sync + Send + 'static { fn unpark(&self); } impl Unpark for Box<dyn Unpark> { fn unpark(&self) { (**self).unpark() } } impl Unpark for Arc<dyn Unpark> { fn unpark(&self) { (**self).unpark() } } #[derive(Debug)] pub struct ParkThread { _anchor: PhantomData<Rc<()>>, } #[derive(Debug)] pub struct ParkError { _p: (), } #[derive(Clone, Debug)] pub struct UnparkThread { inner: Unparker, } thread_local! { static CURRENT_PARKER: Parker = Parker::new(); } impl ParkThread { pub fn new() -> ParkThread { ParkThread { _anchor: PhantomData, } } fn with_current<F, R>(&self, f: F) -> R where F: FnOnce(&Parker) -> R, { CURRENT_PARKER.with(|inner| f(inner)) } } impl Park for ParkThread { type Unpark = UnparkThread; type Error = ParkError; fn unpark(&self) -> Self::Unpark { let inner = self.with_current(|inner| inner.unparker().clone()); UnparkThread { inner } } fn park(&mut self) -> Result<(), Self::Error> { self.with_current(|inner| inner.park()); Ok(()) } fn park_timeout(&mut self, duration: Duration) -> Result<(), Self::Error> { self.with_current(|inner| inner.park_timeout(duration)); Ok(()) } } impl Unpark for UnparkThread { fn unpark(&self) { self.inner.unpark(); } }
use std::marker::PhantomData; use std::rc::Rc; use std::sync::Arc; use std::time::Duration; use crossbeam_utils::sync::{Parker, Unparker}; pub trait Park { type Unpark: Unpark; type Error; fn unpark(&self) -> Self::Unpark; fn park(&mut self) -> Result<(), Self::Error>; fn park_timeout(&mut self, duration: Duration) -> Result<(), Self::Error>; } pub trait Unpark: Sync + Send + 'static { fn unpark(&self); } impl Unpark for Box<dyn Unpark> { fn unpark(&self) { (**self).unpark() } } impl Unpark for Arc<dyn Unpark> { fn unpark(&self) { (**self).unpark() } } #[derive(Debug)] pub struct ParkThread { _anchor: PhantomData<Rc<()>>, } #[derive(Debug)] pub struct ParkError { _p: (), } #[derive(Clone, Debug)] pub struct UnparkThread { inner: Unparker, } thread_local! { static CURRENT_PARKER: Parker = Parker::new(); } impl ParkThread { pub fn new() -> ParkThread { ParkThread { _anchor: PhantomData, } }
} impl Park for ParkThread { type Unpark = UnparkThread; type Error = ParkError; fn unpark(&self) -> Self::Unpark { let inner = self.with_current(|inner| inner.unparker().clone()); UnparkThread { inner } } fn park(&mut self) -> Result<(), Self::Error> { self.with_current(|inner| inner.park()); Ok(()) } fn park_timeout(&mut self, duration: Duration) -> Result<(), Self::Error> { self.with_current(|inner| inner.park_timeout(duration)); Ok(()) } } impl Unpark for UnparkThread { fn unpark(&self) { self.inner.unpark(); } }
fn with_current<F, R>(&self, f: F) -> R where F: FnOnce(&Parker) -> R, { CURRENT_PARKER.with(|inner| f(inner)) }
function_block-function_prefix_line
[]
Rust
src/db.rs
lovesh/merkle_trees
0db6b68bbfb219d584a96d503e2d6e4e4c7147a6
use crate::errors::{MerkleTreeError, MerkleTreeErrorKind}; use num_bigint::BigUint; use std::collections::HashMap; use std::iter::FromIterator; pub trait HashValueDb<H, V: Clone> { fn put(&mut self, hash: H, value: V) -> Result<(), MerkleTreeError>; fn get(&self, hash: &H) -> Result<V, MerkleTreeError>; } #[derive(Clone, Debug)] pub struct InMemoryHashValueDb<V: Clone> { db: HashMap<Vec<u8>, V>, } impl<V: Clone> HashValueDb<Vec<u8>, V> for InMemoryHashValueDb<V> { fn put(&mut self, hash: Vec<u8>, value: V) -> Result<(), MerkleTreeError> { self.db.insert(hash, value); Ok(()) } fn get(&self, hash: &Vec<u8>) -> Result<V, MerkleTreeError> { match self.db.get(hash) { Some(val) => Ok(val.clone()), None => Err(MerkleTreeErrorKind::HashNotFoundInDB { hash: hash.to_vec(), } .into()), } } } impl<T: Clone> InMemoryHashValueDb<T> { pub fn new() -> Self { let db = HashMap::<Vec<u8>, T>::new(); Self { db } } } #[derive(Clone, Debug)] pub struct InMemoryBigUintHashDb<V: Clone> { db: HashMap<Vec<u8>, V>, } impl<V: Clone> HashValueDb<BigUint, V> for InMemoryBigUintHashDb<V> { fn put(&mut self, hash: BigUint, value: V) -> Result<(), MerkleTreeError> { self.db.insert(hash.to_bytes_be(), value); Ok(()) } fn get(&self, hash: &BigUint) -> Result<V, MerkleTreeError> { let b = hash.to_bytes_be(); match self.db.get(&b) { Some(val) => Ok(val.clone()), None => Err(MerkleTreeErrorKind::HashNotFoundInDB { hash: b }.into()), } } } impl<T: Clone> InMemoryBigUintHashDb<T> { pub fn new() -> Self { let db = HashMap::<Vec<u8>, T>::new(); Self { db } } } #[cfg(test)] pub mod unqlite_db { use super::{HashValueDb, MerkleTreeError, MerkleTreeErrorKind}; extern crate unqlite; use unqlite::{Config, Cursor, UnQLite, KV}; pub struct UnqliteHashValueDb { db_name: String, db: UnQLite, } impl UnqliteHashValueDb { pub fn new(db_name: String) -> Self { let db = UnQLite::create(&db_name); Self { db_name, db } } } impl HashValueDb<Vec<u8>, Vec<u8>> for UnqliteHashValueDb { fn put(&mut self, hash: Vec<u8>, value: 
Vec<u8>) -> Result<(), MerkleTreeError> { self.db.kv_store(&hash, &value).unwrap(); Ok(()) } fn get(&self, hash: &Vec<u8>) -> Result<Vec<u8>, MerkleTreeError> { self.db.kv_fetch(hash).map_err(|_| { MerkleTreeError::from_kind(MerkleTreeErrorKind::HashNotFoundInDB { hash: hash.to_vec(), }) }) } } } #[cfg(test)] pub mod rusqlite_db { use super::{HashValueDb, MerkleTreeError, MerkleTreeErrorKind}; extern crate rusqlite; use rusqlite::{params, Connection, NO_PARAMS}; pub struct RusqliteHashValueDb { db_path: String, pub table_name: String, pub db_conn: Connection, } impl RusqliteHashValueDb { pub fn new(db_path: String, table_name: String) -> Self { let db_conn = Connection::open(&db_path).unwrap(); let sql = format!( "create table if not exists {} (key string primary key, value blob not null)", table_name ); db_conn.execute(&sql, NO_PARAMS).unwrap(); Self { db_path, table_name, db_conn, } } pub fn hash_to_hex(hash: &Vec<u8>) -> String { format!("{:x?}", hash) .replace(", ", "") .replace("[", "") .replace("]", "") } } impl HashValueDb<Vec<u8>, Vec<u8>> for RusqliteHashValueDb { fn put(&mut self, hash: Vec<u8>, value: Vec<u8>) -> Result<(), MerkleTreeError> { let hash_hex = Self::hash_to_hex(&hash); let sql = format!( "insert into {} (key, value) values (?1, ?2)", self.table_name ); self.db_conn .execute(&sql, params![hash_hex, value]) .unwrap(); Ok(()) } fn get(&self, hash: &Vec<u8>) -> Result<Vec<u8>, MerkleTreeError> { let sql = format!( "select value from {} where key='{}'", self.table_name, Self::hash_to_hex(hash) ); self.db_conn .query_row(&sql, NO_PARAMS, |row| row.get(0)) .map_err(|_| { MerkleTreeError::from_kind(MerkleTreeErrorKind::HashNotFoundInDB { hash: hash.to_vec(), }) }) } } } #[cfg(test)] pub mod sled_db { use super::{HashValueDb, MerkleTreeError, MerkleTreeErrorKind}; extern crate sled; use self::sled::{Config, Db}; use crate::sha2::{Digest, Sha256}; use std::marker::PhantomData; pub struct SledHashDb { config: Config, db: Db, } impl SledHashDb { pub fn 
new() -> Self { let config = Config::new().temporary(true); let db = config.open().unwrap(); Self { config, db } } } impl HashValueDb<Vec<u8>, Vec<u8>> for SledHashDb { fn put(&mut self, hash: Vec<u8>, value: Vec<u8>) -> Result<(), MerkleTreeError> { self.db.insert(hash, value); Ok(()) } fn get(&self, hash: &Vec<u8>) -> Result<Vec<u8>, MerkleTreeError> { match self.db.get(hash) { Ok(Some(ivec)) => Ok(ivec.to_vec()), _ => Err(MerkleTreeErrorKind::HashNotFoundInDB { hash: hash.to_vec(), } .into()), } } } } #[cfg(test)] mod tests { use super::*; use crate::sha2::{Digest, Sha256}; use std::fs; fn check_db_put_get(db: &mut HashValueDb<Vec<u8>, Vec<u8>>) { let data_1 = "Hello world!".as_bytes().to_vec(); let mut hasher = Sha256::new(); hasher.input(&data_1); let hash_1 = hasher.result().to_vec(); db.put(hash_1.clone(), data_1.clone()).unwrap(); assert_eq!(db.get(&hash_1).unwrap(), data_1); let data_2 = "Byte!".as_bytes().to_vec(); let mut hasher = Sha256::new(); hasher.input(&data_2); let hash_2 = hasher.result().to_vec(); assert!(db.get(&hash_2).is_err()); db.put(hash_2.clone(), data_2.clone()).unwrap(); assert_eq!(db.get(&hash_2).unwrap(), data_2); } #[test] fn test_in_memory_db_string_val() { let mut db = InMemoryHashValueDb::<String>::new(); let data_1 = String::from("Hello world!"); let mut hasher = Sha256::new(); hasher.input(data_1.as_bytes()); let hash_1 = hasher.result().to_vec(); db.put(hash_1.clone(), data_1.clone()).unwrap(); assert_eq!(db.get(&hash_1).unwrap(), data_1); let data_2 = String::from("Byte!"); let mut hasher = Sha256::new(); hasher.input(data_2.as_bytes()); let hash_2 = hasher.result().to_vec(); assert!(db.get(&hash_2).is_err()); db.put(hash_2.clone(), data_2.clone()).unwrap(); assert_eq!(db.get(&hash_2).unwrap(), data_2); } #[test] fn test_unqlite_db_string_val() { let db_name = "unqlite_test.db"; fs::remove_file(db_name); let mut db = unqlite_db::UnqliteHashValueDb::new(String::from(db_name)); check_db_put_get(&mut db); } #[test] fn 
test_rusqlite_db_string_val() { let db_path = "./rusqlite_test.db"; fs::remove_file(db_path); let mut db = rusqlite_db::RusqliteHashValueDb::new(String::from(db_path), String::from("kv_table")); check_db_put_get(&mut db); } #[test] fn test_sled_db_string_val() { let mut db = sled_db::SledHashDb::new(); check_db_put_get(&mut db); } }
use crate::errors::{MerkleTreeError, MerkleTreeErrorKind}; use num_bigint::BigUint; use std::collections::HashMap; use std::iter::FromIterator; pub trait HashValueDb<H, V: Clone> { fn put(&mut self, hash: H, value: V) -> Result<(), MerkleTreeError>; fn get(&self, hash: &H) -> Result<V, MerkleTreeError>; } #[derive(Clone, Debug)] pub struct InMemoryHashValueDb<V: Clone> { db: HashMap<Vec<u8>, V>, } impl<V: Clone> HashValueDb<Vec<u8>, V> for InMemoryHashValueDb<V> { fn put(&mut self, hash: Vec<u8>, value: V) -> Result<(), MerkleTreeError> { self.db.insert(hash, value); Ok(()) } fn get(&self, hash: &Vec<u8>) -> Result<V, MerkleTreeError> { match self.db.get(hash) { Some(val) => Ok(val.clone()), None => Err(MerkleTreeErrorKind::HashNotFoundInDB { hash: hash.to_vec(), } .into()), } } } impl<T: Clone> InMemoryHashValueDb<T> { pub fn new() -> Self { let db = HashMap::<Vec<u8>, T>::new(); Self { db } } } #[derive(Clone, Debug)] pub struct InMemoryBigUintHashDb<V: Clone> { db: HashMap<Vec<u8>, V>, } impl<V: Clone> HashValueDb<BigUint, V> for InMemoryBigUintHashDb<V> { fn put(&mut self, hash: BigUint, value: V) -> Result<(), MerkleTreeError> { self.db.insert(hash.to_bytes_be(), value); Ok(()) }
} impl<T: Clone> InMemoryBigUintHashDb<T> { pub fn new() -> Self { let db = HashMap::<Vec<u8>, T>::new(); Self { db } } } #[cfg(test)] pub mod unqlite_db { use super::{HashValueDb, MerkleTreeError, MerkleTreeErrorKind}; extern crate unqlite; use unqlite::{Config, Cursor, UnQLite, KV}; pub struct UnqliteHashValueDb { db_name: String, db: UnQLite, } impl UnqliteHashValueDb { pub fn new(db_name: String) -> Self { let db = UnQLite::create(&db_name); Self { db_name, db } } } impl HashValueDb<Vec<u8>, Vec<u8>> for UnqliteHashValueDb { fn put(&mut self, hash: Vec<u8>, value: Vec<u8>) -> Result<(), MerkleTreeError> { self.db.kv_store(&hash, &value).unwrap(); Ok(()) } fn get(&self, hash: &Vec<u8>) -> Result<Vec<u8>, MerkleTreeError> { self.db.kv_fetch(hash).map_err(|_| { MerkleTreeError::from_kind(MerkleTreeErrorKind::HashNotFoundInDB { hash: hash.to_vec(), }) }) } } } #[cfg(test)] pub mod rusqlite_db { use super::{HashValueDb, MerkleTreeError, MerkleTreeErrorKind}; extern crate rusqlite; use rusqlite::{params, Connection, NO_PARAMS}; pub struct RusqliteHashValueDb { db_path: String, pub table_name: String, pub db_conn: Connection, } impl RusqliteHashValueDb { pub fn new(db_path: String, table_name: String) -> Self { let db_conn = Connection::open(&db_path).unwrap(); let sql = format!( "create table if not exists {} (key string primary key, value blob not null)", table_name ); db_conn.execute(&sql, NO_PARAMS).unwrap(); Self { db_path, table_name, db_conn, } } pub fn hash_to_hex(hash: &Vec<u8>) -> String { format!("{:x?}", hash) .replace(", ", "") .replace("[", "") .replace("]", "") } } impl HashValueDb<Vec<u8>, Vec<u8>> for RusqliteHashValueDb { fn put(&mut self, hash: Vec<u8>, value: Vec<u8>) -> Result<(), MerkleTreeError> { let hash_hex = Self::hash_to_hex(&hash); let sql = format!( "insert into {} (key, value) values (?1, ?2)", self.table_name ); self.db_conn .execute(&sql, params![hash_hex, value]) .unwrap(); Ok(()) } fn get(&self, hash: &Vec<u8>) -> Result<Vec<u8>, 
MerkleTreeError> { let sql = format!( "select value from {} where key='{}'", self.table_name, Self::hash_to_hex(hash) ); self.db_conn .query_row(&sql, NO_PARAMS, |row| row.get(0)) .map_err(|_| { MerkleTreeError::from_kind(MerkleTreeErrorKind::HashNotFoundInDB { hash: hash.to_vec(), }) }) } } } #[cfg(test)] pub mod sled_db { use super::{HashValueDb, MerkleTreeError, MerkleTreeErrorKind}; extern crate sled; use self::sled::{Config, Db}; use crate::sha2::{Digest, Sha256}; use std::marker::PhantomData; pub struct SledHashDb { config: Config, db: Db, } impl SledHashDb { pub fn new() -> Self { let config = Config::new().temporary(true); let db = config.open().unwrap(); Self { config, db } } } impl HashValueDb<Vec<u8>, Vec<u8>> for SledHashDb { fn put(&mut self, hash: Vec<u8>, value: Vec<u8>) -> Result<(), MerkleTreeError> { self.db.insert(hash, value); Ok(()) } fn get(&self, hash: &Vec<u8>) -> Result<Vec<u8>, MerkleTreeError> { match self.db.get(hash) { Ok(Some(ivec)) => Ok(ivec.to_vec()), _ => Err(MerkleTreeErrorKind::HashNotFoundInDB { hash: hash.to_vec(), } .into()), } } } } #[cfg(test)] mod tests { use super::*; use crate::sha2::{Digest, Sha256}; use std::fs; fn check_db_put_get(db: &mut HashValueDb<Vec<u8>, Vec<u8>>) { let data_1 = "Hello world!".as_bytes().to_vec(); let mut hasher = Sha256::new(); hasher.input(&data_1); let hash_1 = hasher.result().to_vec(); db.put(hash_1.clone(), data_1.clone()).unwrap(); assert_eq!(db.get(&hash_1).unwrap(), data_1); let data_2 = "Byte!".as_bytes().to_vec(); let mut hasher = Sha256::new(); hasher.input(&data_2); let hash_2 = hasher.result().to_vec(); assert!(db.get(&hash_2).is_err()); db.put(hash_2.clone(), data_2.clone()).unwrap(); assert_eq!(db.get(&hash_2).unwrap(), data_2); } #[test] fn test_in_memory_db_string_val() { let mut db = InMemoryHashValueDb::<String>::new(); let data_1 = String::from("Hello world!"); let mut hasher = Sha256::new(); hasher.input(data_1.as_bytes()); let hash_1 = hasher.result().to_vec(); 
db.put(hash_1.clone(), data_1.clone()).unwrap(); assert_eq!(db.get(&hash_1).unwrap(), data_1); let data_2 = String::from("Byte!"); let mut hasher = Sha256::new(); hasher.input(data_2.as_bytes()); let hash_2 = hasher.result().to_vec(); assert!(db.get(&hash_2).is_err()); db.put(hash_2.clone(), data_2.clone()).unwrap(); assert_eq!(db.get(&hash_2).unwrap(), data_2); } #[test] fn test_unqlite_db_string_val() { let db_name = "unqlite_test.db"; fs::remove_file(db_name); let mut db = unqlite_db::UnqliteHashValueDb::new(String::from(db_name)); check_db_put_get(&mut db); } #[test] fn test_rusqlite_db_string_val() { let db_path = "./rusqlite_test.db"; fs::remove_file(db_path); let mut db = rusqlite_db::RusqliteHashValueDb::new(String::from(db_path), String::from("kv_table")); check_db_put_get(&mut db); } #[test] fn test_sled_db_string_val() { let mut db = sled_db::SledHashDb::new(); check_db_put_get(&mut db); } }
fn get(&self, hash: &BigUint) -> Result<V, MerkleTreeError> { let b = hash.to_bytes_be(); match self.db.get(&b) { Some(val) => Ok(val.clone()), None => Err(MerkleTreeErrorKind::HashNotFoundInDB { hash: b }.into()), } }
function_block-full_function
[ { "content": "/// Interface for the database used to store the leaf and node hashes\n\npub trait HashDb<H> {\n\n /// The database stores all leaves\n\n fn add_leaf(&mut self, leaf_hash: H) -> Result<(), MerkleTreeError>;\n\n\n\n /// The database stores roots of all full subtrees of the datbase\n\n fn add_full_subtree_root(&mut self, node_hash: H) -> Result<(), MerkleTreeError>;\n\n\n\n fn get_leaf(&self, leaf_index: TreeSizeType) -> Result<H, MerkleTreeError>;\n\n\n\n fn get_full_subtree_root(&self, node_index: TreeSizeType) -> Result<H, MerkleTreeError>;\n\n}\n\n\n\n/// Uses an in-memory vectors for storing leaf and node hashes. Used for testing.\n\n#[derive(Clone, Debug)]\n\npub struct InMemoryHashDb<H> {\n\n leaves: Vec<H>,\n\n nodes: Vec<H>,\n\n}\n\n\n\nimpl<H: Clone> HashDb<H> for InMemoryHashDb<H> {\n", "file_path": "src/compact_merkle_tree.rs", "rank": 1, "score": 154255.13346540404 }, { "content": "/// Serialize node for creating hash or storing in database\n\npub trait PatriciaTrieNodeSerializer<H, V, S: KnownLength> {\n\n fn is_empty_root(&self, root: &H) -> bool;\n\n fn serialize(&self, node: NodeType<H, V>) -> Result<S, MerkleTreeError>;\n\n fn deserialize(&self, serz: S) -> Result<NodeType<H, V>, MerkleTreeError>;\n\n\n\n /// For leaf and extension nodes. 
The return type causes heap allocation but avoiding\n\n /// it (like an array with negative number when there is only one flag) pushes the logic\n\n /// of handling 1 or 2 nibbles to serializer.\n\n fn get_flagged_prefix_for_leaf(path_in_nibbles: &[u8]) -> Vec<u8> {\n\n if path_in_nibbles.len() % 2 == 1 {\n\n // path is odd, add only 1 nibble\n\n vec![3]\n\n } else {\n\n // path is even, add 2 nibbles\n\n vec![2, 0]\n\n }\n\n }\n\n\n\n fn get_flagged_prefix_for_extension(path_in_nibbles: &[u8]) -> Vec<u8> {\n\n if path_in_nibbles.len() % 2 == 1 {\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 2, "score": 114187.61097041823 }, { "content": "/// To be used with a binary tree\n\n/// `D` is the type of data the leaf has, like a string or a big number, etc.\n\n/// `H` is the type for the hash\n\npub trait Arity2Hasher<D, H> {\n\n /// Hash the given leaf data to get the leaf hash\n\n fn hash_leaf_data(&self, leaf: D) -> Result<H, MerkleTreeError>;\n\n\n\n /// Hash 2 adjacent nodes (leaves or inner nodes) to get their root hash\n\n fn hash_tree_nodes(&self, left_node: H, right_node: H) -> Result<H, MerkleTreeError>;\n\n}\n\n\n", "file_path": "src/hasher.rs", "rank": 3, "score": 107560.08565895172 }, { "content": "/// To be used with a 4-ary tree\n\n/// `D` is the type of data the leaf has, like a string or a big number, etc.\n\n/// `H` is the type for the hash\n\npub trait Arity4Hasher<D, H> {\n\n /// Hash the given leaf data to get the leaf hash\n\n fn hash_leaf_data(&self, leaf: D) -> Result<H, MerkleTreeError>;\n\n\n\n /// Hash 4 adjacent nodes (leaves or inner nodes) to get their root hash\n\n fn hash_tree_nodes(\n\n &self,\n\n node_0: H,\n\n node_1: H,\n\n node_2: H,\n\n node_3: H,\n\n ) -> Result<H, MerkleTreeError>;\n\n}\n\n\n\n/// When SHA-256 is used for hashing in a merkle tree. 
Since SHA-256 is used for hashing leaf data and\n\n/// nodes, a domain separator is used to differentiate\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct Sha256Hasher {\n\n pub leaf_data_domain_separator: u8,\n\n pub node_domain_separator: u8,\n", "file_path": "src/hasher.rs", "rank": 4, "score": 107560.08565895172 }, { "content": "pub trait NodeHasher<I, H> {\n\n fn output_size(&self) -> usize;\n\n fn hash(&self, node: I) -> Result<H, MerkleTreeError>;\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Sha3Hasher {}\n\n\n\nimpl NodeHasher<Vec<u8>, Vec<u8>> for Sha3Hasher {\n\n fn output_size(&self) -> usize {\n\n 32\n\n }\n\n fn hash(&self, node: Vec<u8>) -> Result<Vec<u8>, MerkleTreeError> {\n\n let mut hasher = Sha3_256::new();\n\n hasher.input(&node);\n\n Ok(hasher.result().to_vec())\n\n }\n\n}\n\n\n\n/// The type `V` is for the value of the data being stored in the trie.\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 5, "score": 104136.4369522523 }, { "content": "/// Type for indexing leaves of a sparse merkle tree\n\npub trait LeafIndex {\n\n /// Path from root to leaf\n\n // TODO: Return type can be arrayvec?\n\n fn to_leaf_path(&self, arity: u8, tree_depth: usize) -> Vec<u8>;\n\n}\n\n\n\n/// When sparse merkle tree can have 2^64 leaves at max\n\nimpl LeafIndex for u64 {\n\n /// Returns the representation of the `u64` as a byte array in MSB form\n\n fn to_leaf_path(&self, arity: u8, tree_depth: usize) -> Vec<u8> {\n\n assert!(arity.is_power_of_two());\n\n let shift = (arity as f32).log2() as u64;\n\n let arity_minus_1 = (arity - 1) as u64;\n\n let mut path = vec![];\n\n let mut leaf_index = self.clone();\n\n while (path.len() != tree_depth) && (leaf_index != 0) {\n\n // Get last `shift` bytes\n\n path.push((leaf_index & arity_minus_1) as u8);\n\n // Remove last `shift` bytes\n\n leaf_index >>= shift;\n", "file_path": "src/types.rs", "rank": 6, "score": 80187.51619271032 }, { "content": "pub trait Key {\n\n fn to_nibbles(&self) -> 
Vec<u8>;\n\n}\n\n\n\nimpl Key for Vec<u8> {\n\n fn to_nibbles(&self) -> Vec<u8> {\n\n bytes_to_nibbles(self)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\n\npub enum NodeType<H, V> {\n\n Empty,\n\n Leaf(Leaf<V>),\n\n Extension(Extension<H, V>),\n\n Branch(Branch<H, V>),\n\n}\n\n\n\nimpl<H, V> NodeType<H, V> {\n\n fn is_empty(&self) -> bool {\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 7, "score": 77415.25695851512 }, { "content": "pub trait KnownLength {\n\n fn len(&self) -> usize;\n\n}\n\n\n\nimpl KnownLength for Vec<u8> {\n\n fn len(&self) -> usize {\n\n self.len()\n\n }\n\n}\n\n\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 8, "score": 74910.0502193571 }, { "content": "fn bytes_to_nibbles(bytes: &[u8]) -> Vec<u8> {\n\n // XXX: Each iteration results in creation of a heap allocation (Vector). A simple for loop\n\n // might be a better choice\n\n bytes\n\n .into_iter()\n\n .flat_map(|b| vec![b >> 4, b & 15])\n\n .collect::<Vec<u8>>()\n\n}\n\n\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 23, "score": 32324.774729102483 }, { "content": "fn nibbles_to_bytes(nibbles: &[u8]) -> Vec<u8> {\n\n assert_eq!(nibbles.len() % 2, 0);\n\n (0..nibbles.len())\n\n .step_by(2)\n\n .map(|i| (nibbles[i] << 4) + nibbles[i + 1])\n\n .collect::<Vec<u8>>()\n\n}\n\n\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 24, "score": 32324.774729102483 }, { "content": "/// Least significant bit set in `n`,\n\nfn least_significant_set_bit(n: TreeSizeType) -> u8 {\n\n if n == 0 {\n\n return 0;\n\n }\n\n n.trailing_zeros() as u8\n\n}\n\n\n", "file_path": "src/compact_merkle_tree.rs", "rank": 25, "score": 31494.30294991192 }, { "content": "/// Number of bits required to represent `n`\n\nfn num_bits(mut n: TreeSizeType) -> u8 {\n\n if n == 0 {\n\n return 0;\n\n }\n\n let mut index = 0;\n\n while n != 0 {\n\n index += 1;\n\n n >>= 1;\n\n }\n\n index\n\n}\n\n\n", "file_path": "src/compact_merkle_tree.rs", "rank": 26, "score": 
31450.259033679326 }, { "content": "/// Largest power of 2 less than `n`, 2^k < n <= 2^{k+1}, return 2^k\n\nfn largest_power_of_2_less_than(n: TreeSizeType) -> TreeSizeType {\n\n if n < 2 {\n\n return 0;\n\n }\n\n let mut cur = 1u64;\n\n let mut largest = 1u64;\n\n while cur < n {\n\n largest = cur;\n\n let r = cur.wrapping_shl(1);\n\n if r < cur {\n\n // `cur` has wrapped around\n\n break;\n\n } else {\n\n cur = r;\n\n }\n\n }\n\n largest\n\n}\n\n\n", "file_path": "src/compact_merkle_tree.rs", "rank": 27, "score": 30747.960889774167 }, { "content": "/// Returns the number of bits set in `n`\n\nfn count_set_bits(mut n: TreeSizeType) -> u8 {\n\n // Brian Kernighan's was https://graphics.stanford.edu/~seander/bithacks.html#CountBitsSetKernighan\n\n let mut count = 0;\n\n while n != 0 {\n\n n &= n - 1;\n\n count += 1\n\n }\n\n count\n\n}\n\n\n", "file_path": "src/compact_merkle_tree.rs", "rank": 28, "score": 30643.635499453536 }, { "content": "/// Break a number on decreasing powers of 2, eg 4 -> 4, 5 -> [4, 1], 6 -> [4, 2], 7 -> [4, 2, 1], 8 -> [8]\n\nfn powers_of_2(mut n: TreeSizeType) -> Vec<TreeSizeType> {\n\n if n == 0 {\n\n return vec![];\n\n }\n\n let mut powers = vec![];\n\n loop {\n\n if n.is_power_of_two() {\n\n powers.push(n);\n\n break;\n\n } else {\n\n let p = largest_power_of_2_less_than(n);\n\n n = n - p;\n\n powers.push(p);\n\n }\n\n }\n\n powers\n\n}\n\n\n", "file_path": "src/compact_merkle_tree.rs", "rank": 29, "score": 29163.61081650958 }, { "content": "## Database\n\nThe database needs to support a key-value style CRU (create, read, update) operations, hence the trait `HashValueDb` is provided.\n\n```rust\n\n/// Database to map hashes to values (H -> V)\n\n/// `H` is the type for the hash\n\n/// `V` is the type for the value\n\npub trait HashValueDb<H, V: Clone> {\n\n fn put(&mut self, hash: H, value: V) -> Result<(), MerkleTreeError>;\n\n\n\n fn get(&self, hash: &H) -> Result<V, MerkleTreeError>;\n\n}\n\n```\n\n\n\nFor most of the testing an 
in-memory implementation is used which keeps a `HashMap`. Since most of the code uses SHA-2 or SHA-3, \n\nthe hash output can be treated as bytes \n\n```rust\n\n/// Uses an in-memory hashmap and assumes the hash is bytes\n\n#[derive(Clone, Debug)]\n\npub struct InMemoryHashValueDb<V: Clone> {\n\n db: HashMap<Vec<u8>, V>,\n\n}\n\n\n\nimpl<V: Clone> HashValueDb<Vec<u8>, V> for InMemoryHashValueDb<V> {\n\n fn put(&mut self, hash: Vec<u8>, value: V) -> Result<(), MerkleTreeError> {\n\n ...\n\n }\n\n\n\n fn get(&self, hash: &Vec<u8>) -> Result<V, MerkleTreeError> {\n\n ...\n\n }\n\n}\n\n```\n\n\n\nAn implementation that assumes the hash output to be big numbers (like when MiMC is used) can be supported as well\n\n```rust\n\nimpl<V: Clone> HashValueDb<BigUint, V> for InMemoryBigUintHashDb<V> {\n\n fn put(&mut self, hash: BigUint, value: V) -> Result<(), MerkleTreeError> {\n\n ...\n\n }\n\n\n\n fn get(&self, hash: &BigUint) -> Result<V, MerkleTreeError> {\n\n ...\n\n }\n\n}\n\n```\n\n\n\nFor demonstration, `HashValueDb` is implemented for persistent some databases as well like sqlite.\n\n```rust\n\n/// Testing implementation for sqlite\n\npub struct RusqliteHashValueDb {\n\n ...\n\n}\n\n\n\nimpl HashValueDb<Vec<u8>, Vec<u8>> for RusqliteHashValueDb {\n\n fn put(&mut self, hash: Vec<u8>, value: Vec<u8>) -> Result<(), MerkleTreeError> {\n\n ...\n\n }\n\n\n\n fn get(&self, hash: &Vec<u8>) -> Result<Vec<u8>, MerkleTreeError> {\n\n ...\n\n }\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 30, "score": 35.87972027494799 }, { "content": " hash_db,\n\n )?;\n\n match new_trie.get(key, &mut None, hash_db) {\n\n Ok(v) => Ok(v == *value),\n\n Err(_) => Ok(false),\n\n }\n\n }\n\n\n\n /// Get all key-value pairs in a tree with root `root_node`. If `proof` is not None, it is populated\n\n /// with a proof. 
The argument `nibbles_to_key` is a function to convert nibbles to keys.\n\n /// Don't want to require `Key` trait to have a nibble_to_key function as this function might not be\n\n /// needed by all implementations.\n\n pub fn get_key_values<K>(\n\n &self,\n\n root_node: &NodeType<H, V>,\n\n proof: &mut Option<Vec<NodeType<H, V>>>,\n\n hash_db: &dyn HashValueDb<H, S>,\n\n nibbles_to_key: &dyn Fn(&[u8]) -> K, // TODO: nibbles_to_key can return an error, return type should be a result\n\n ) -> Result<Vec<(K, V)>, MerkleTreeError> {\n\n // TODO: Return value should be a iterator as the tree can contain lots of keys\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 31, "score": 29.226275192556496 }, { "content": " let new_root_hash = self.store_root_node_in_db(new_root_node.clone(), hash_db)?;\n\n self.root_node = new_root_node;\n\n Ok(new_root_hash)\n\n }\n\n\n\n /// Verify that a tree with root hash `root_hash` has a key `key` with value `value`\n\n pub fn verify_proof(\n\n key: &dyn Key,\n\n value: &V,\n\n proof: Vec<NodeType<H, V>>,\n\n hasher: NH,\n\n node_serializer: NS,\n\n root_hash: &H,\n\n hash_db: &mut dyn HashValueDb<H, S>,\n\n ) -> Result<bool, MerkleTreeError> {\n\n let new_trie = Self::initialize_with_given_nodes_and_root_hash(\n\n hasher,\n\n node_serializer,\n\n root_hash,\n\n proof,\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 32, "score": 27.29914661677914 }, { "content": " hash_db: &dyn HashValueDb<H, S>,\n\n ) -> Result<V, MerkleTreeError> {\n\n self.get_from_tree_with_root(&self.root_node, key, proof, hash_db)\n\n }\n\n\n\n /// Get value of the given key in a tree with root `tree_root`. 
If `proof` is not None, it is populated with a proof.\n\n pub fn get_from_tree_with_root(\n\n &self,\n\n tree_root: &NodeType<H, V>,\n\n key: &dyn Key,\n\n proof: &mut Option<Vec<NodeType<H, V>>>,\n\n hash_db: &dyn HashValueDb<H, S>,\n\n ) -> Result<V, MerkleTreeError> {\n\n let path = key.to_nibbles();\n\n let need_proof = proof.is_some();\n\n let mut proof_nodes = Vec::<NodeType<H, V>>::new();\n\n let val = self.get_from_subtree(tree_root, path, (need_proof, &mut proof_nodes), hash_db);\n\n if need_proof {\n\n match proof {\n\n Some(v) => {\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 33, "score": 24.363290973002236 }, { "content": " v.push(tree_root.clone());\n\n v.append(&mut proof_nodes);\n\n }\n\n None => (),\n\n }\n\n }\n\n val\n\n }\n\n\n\n /// Insert a key-value into the trie.\n\n pub fn insert(\n\n &mut self,\n\n key: &dyn Key,\n\n value: V,\n\n hash_db: &mut dyn HashValueDb<H, S>,\n\n ) -> Result<H, MerkleTreeError> {\n\n let path = key.to_nibbles();\n\n let old_root_node = self.root_node.clone();\n\n // XXX: Maybe i should pass old_root_node and not its reference\n\n let new_root_node = self.insert_into_subtree(&old_root_node, path, value, hash_db)?;\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 34, "score": 24.26390869462731 }, { "content": " subtree_root: &NodeType<H, V>,\n\n mut path: Vec<u8>,\n\n value: V,\n\n hash_db: &mut dyn HashValueDb<H, S>,\n\n ) -> Result<NodeType<H, V>, MerkleTreeError> {\n\n match subtree_root {\n\n NodeType::Empty => {\n\n let leaf_node = NodeType::Leaf(Leaf::new(path, value));\n\n Ok(leaf_node)\n\n }\n\n NodeType::Leaf(leaf_node) => {\n\n if leaf_node.has_path(&path) {\n\n // Updating value of an existing leaf\n\n let leaf_node = NodeType::Leaf(Leaf::new(path, value));\n\n Ok(leaf_node)\n\n } else {\n\n // Creating a node, will result in creation of more than one new node.\n\n let cur_path = &leaf_node.path;\n\n let common_prefix = Self::get_common_prefix_in_paths(cur_path, &path);\n\n\n", 
"file_path": "src/merkle_patricia_trie.rs", "rank": 35, "score": 23.904933377317786 }, { "content": " let serz_node = hash_db.get(root_hash)?;\n\n let root_node = node_serializer.deserialize(serz_node)?;\n\n Ok(Self {\n\n root_node,\n\n hasher,\n\n node_serializer,\n\n phantom_1: PhantomData,\n\n phantom_2: PhantomData,\n\n })\n\n }\n\n\n\n pub fn get_root_hash(&self) -> Result<H, MerkleTreeError> {\n\n self.hash_node(self.root_node.clone()).map(|t| t.0)\n\n }\n\n\n\n /// Get value of the given key. If `proof` is not None, it is populated with a proof.\n\n pub fn get(\n\n &self,\n\n key: &dyn Key,\n\n proof: &mut Option<Vec<NodeType<H, V>>>,\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 36, "score": 23.86327715888134 }, { "content": " ext_key: &HashOrBranch<H, V>,\n\n path: Vec<u8>,\n\n value: V,\n\n hash_db: &mut dyn HashValueDb<H, S>,\n\n ) -> Result<HashOrBranch<H, V>, MerkleTreeError> {\n\n let new_node = match ext_key {\n\n HashOrBranch::Hash(h) => {\n\n let inner_node = self.get_node_from_db(h, hash_db)?;\n\n self.insert_into_subtree(&inner_node, path, value, hash_db)?\n\n }\n\n HashOrBranch::Branch(branch) => {\n\n self.insert_into_subtree(&NodeType::Branch(branch.clone()), path, value, hash_db)?\n\n }\n\n };\n\n\n\n // The new node must be a branch.\n\n if !new_node.is_branch() {\n\n let msg = String::from(\"The node should have been a branch and nothing else\");\n\n return Err(MerkleTreeErrorKind::IncorrectNodeType { msg }.into());\n\n }\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 37, "score": 23.841840636638644 }, { "content": " node_index: usize,\n\n branch: &Branch<H, V>,\n\n hash_db: &dyn HashValueDb<H, S>,\n\n ) -> Result<NodeType<H, V>, MerkleTreeError> {\n\n let hash_or_node = branch.path[node_index].as_ref();\n\n match hash_or_node {\n\n HashOrNode::Hash(h) => self.get_node_from_db(h, hash_db),\n\n HashOrNode::Node(n) => Ok(n.clone()),\n\n }\n\n }\n\n\n\n fn serialize_node(&self, node: NodeType<H, V>) -> Result<S, 
MerkleTreeError> {\n\n self.node_serializer.serialize(node)\n\n }\n\n\n\n fn deserialize_node(&self, node: S) -> Result<NodeType<H, V>, MerkleTreeError> {\n\n self.node_serializer.deserialize(node)\n\n }\n\n\n\n /// Serialize and hash the serialized node. Returns the hash as well as the serialized value\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 38, "score": 23.7574666390425 }, { "content": " let new_node = self.insert_into_subtree(&node, path, value, hash_db)?;\n\n\n\n // Create a new branch with the updated node\n\n let mut new_branch = branch.clone();\n\n new_branch.path[node_index] =\n\n Box::new(self.store_node_in_db_if_needed(new_node, hash_db)?);\n\n Ok(NodeType::Branch(new_branch))\n\n }\n\n }\n\n }\n\n }\n\n\n\n /// Get value from the subtree at root `subtree_root` at the path `path`\n\n fn get_from_subtree(\n\n &self,\n\n subtree_root: &NodeType<H, V>,\n\n mut path: Vec<u8>,\n\n (need_proof, proof_nodes): (bool, &mut Vec<NodeType<H, V>>),\n\n hash_db: &dyn HashValueDb<H, S>,\n\n ) -> Result<V, MerkleTreeError> {\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 39, "score": 23.741973334026223 }, { "content": "{\n\n /// Create a new empty trie\n\n pub fn new(hasher: NH, node_serializer: NS) -> Result<Self, MerkleTreeError> {\n\n Ok(Self {\n\n root_node: NodeType::Empty,\n\n hasher,\n\n node_serializer,\n\n phantom_1: PhantomData,\n\n phantom_2: PhantomData,\n\n })\n\n }\n\n\n\n /// Initialize a trie with a given root hash and database. 
The root hash must be\n\n /// present in the database.\n\n pub fn initialize_with_root_hash(\n\n hasher: NH,\n\n node_serializer: NS,\n\n root_hash: &H,\n\n hash_db: &mut dyn HashValueDb<H, S>,\n\n ) -> Result<Self, MerkleTreeError> {\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 40, "score": 23.421314002320837 }, { "content": " &mut self,\n\n idx: &dyn LeafIndex,\n\n val: D,\n\n hash_db: &mut dyn HashValueDb<H, (NodeType<H>, H)>,\n\n ) -> Result<(), MerkleTreeError> {\n\n let path = idx.to_leaf_path(2, self.depth);\n\n let hash = self.hasher.hash_leaf_data(val)?;\n\n let new_root = self._update(path, hash, self.root.clone(), 0, hash_db)?;\n\n self.root = new_root;\n\n Ok(())\n\n }\n\n\n\n pub fn get(\n\n &self,\n\n idx: &dyn LeafIndex,\n\n proof: &mut Option<Vec<(NodeType<H>, H)>>,\n\n hash_db: &dyn HashValueDb<H, (NodeType<H>, H)>,\n\n ) -> Result<H, MerkleTreeError> {\n\n let mut path = idx.to_leaf_path(2, self.depth);\n\n let mut cur_node = self.root.clone();\n", "file_path": "src/sparse_merkle_tree.rs", "rank": 41, "score": 23.412180177096918 }, { "content": " Self::initialize_with_root_hash(hasher, node_serializer, root_hash, hash_db)\n\n }\n\n\n\n /// Return serialized node if size after serialization is less than hash output otherwise store node in db as key-value\n\n /// where key is the hash of the serialized node and value is the serialized node and return the hash (db key)\n\n fn store_node_in_db_if_needed(\n\n &self,\n\n node: NodeType<H, V>,\n\n hash_db: &mut dyn HashValueDb<H, S>,\n\n ) -> Result<HashOrNode<H, V>, MerkleTreeError> {\n\n let serz_node = self.serialize_node(node.clone())?;\n\n if serz_node.len() < self.hasher.output_size() {\n\n Ok(HashOrNode::Node(node))\n\n } else {\n\n let hash = self.hasher.hash(serz_node.clone())?;\n\n hash_db.put(hash.clone(), serz_node)?;\n\n Ok(HashOrNode::Hash(hash))\n\n }\n\n }\n\n\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 42, "score": 23.405989337213434 }, { "content": " fn 
hash_node(&self, node: NodeType<H, V>) -> Result<(H, S), MerkleTreeError> {\n\n let serz_node = self.serialize_node(node)?;\n\n let hash = self.hasher.hash(serz_node.clone())?;\n\n Ok((hash, serz_node))\n\n }\n\n\n\n /// Store the node after serialization in the db. The key is the hash of the serialized node.\n\n fn store_root_node_in_db(\n\n &self,\n\n node: NodeType<H, V>,\n\n hash_db: &mut dyn HashValueDb<H, S>,\n\n ) -> Result<H, MerkleTreeError> {\n\n let (hash, serz_node) = self.hash_node(node.clone())?;\n\n hash_db.put(hash.clone(), serz_node)?;\n\n Ok(hash)\n\n }\n\n\n\n fn get_node_from_db(\n\n &self,\n\n hash: &H,\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 43, "score": 23.400175332264446 }, { "content": "where\n\n MTH: Arity4Hasher<D, H>,\n\n{\n\n /// Create a new tree. `empty_leaf_val` is the default value for leaf of empty tree. It could be zero.\n\n /// Requires a database to hold leaves and nodes. The db should implement the `HashValueDb` trait\n\n pub fn new(\n\n empty_leaf_val: D,\n\n hasher: MTH,\n\n depth: usize,\n\n hash_db: &mut dyn HashValueDb<H, [H; 4]>,\n\n ) -> Result<VanillaArity4SparseMerkleTree<D, H, MTH>, MerkleTreeError> {\n\n assert!(depth > 0);\n\n let mut cur_hash = hasher.hash_leaf_data(empty_leaf_val)?;\n\n for _ in 0..depth {\n\n let val = [\n\n cur_hash.clone(),\n\n cur_hash.clone(),\n\n cur_hash.clone(),\n\n cur_hash.clone(),\n\n ];\n", "file_path": "src/vanilla_sparse_merkle_tree.rs", "rank": 44, "score": 23.282019493430507 }, { "content": "#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\n\npub struct Leaf<V> {\n\n /// path in nibbles, does not contain nibbles for flag\n\n path: Vec<u8>,\n\n value: V,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\n\npub struct Extension<H, V> {\n\n /// path in nibbles, does not contain nibbles for flag\n\n path: Vec<u8>,\n\n key: HashOrBranch<H, V>,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\n\npub struct Branch<H, V> 
{\n\n path: [Box<HashOrNode<H, V>>; 16],\n\n value: V,\n\n}\n\n\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 45, "score": 22.98438253792252 }, { "content": "#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\n\npub enum HashOrBranch<H, V> {\n\n /// hash of a serialized branch node\n\n Hash(H),\n\n Branch(Branch<H, V>),\n\n}\n\n\n\n/// Either hash of a serialized node or a node\n\n#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\n\npub enum HashOrNode<H, V> {\n\n Hash(H),\n\n Node(NodeType<H, V>),\n\n}\n\n\n\nimpl<H, V> Default for HashOrNode<H, V> {\n\n fn default() -> Self {\n\n HashOrNode::Node(NodeType::Empty)\n\n }\n\n}\n\n\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 46, "score": 22.73700771573988 }, { "content": " let need_proof = proof.is_some();\n\n let mut proof_nodes = Vec::<NodeType<H, V>>::new();\n\n let nv =\n\n self.get_key_nibbles_and_values(root_node, (need_proof, &mut proof_nodes), hash_db)?;\n\n\n\n if need_proof {\n\n match proof {\n\n Some(v) => {\n\n v.push(root_node.clone());\n\n v.append(&mut proof_nodes);\n\n }\n\n None => (),\n\n }\n\n }\n\n\n\n // Since the keys are in nibbles, convert them to a key using the passed function\n\n Ok(nv\n\n .into_iter()\n\n .map(|(n, v)| (nibbles_to_key(&n), v))\n\n .collect::<Vec<(K, V)>>())\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 47, "score": 22.61100547290243 }, { "content": "use crate::db::HashValueDb;\n\nuse crate::errors::MerkleTreeError;\n\nuse crate::hasher::Arity2Hasher;\n\nuse crate::types::LeafIndex;\n\nuse std::marker::PhantomData;\n\n\n\n// Following idea described here https://ethresear.ch/t/optimizing-sparse-merkle-trees/3751\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub enum NodeType<H> {\n\n Path(Vec<u8>),\n\n SubtreeHash(H),\n\n}\n\n\n\n/// The types `D`, `H` and `MTH` correspond to the types of data, hash and merkle tree hasher\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct BinarySparseMerkleTree<D: 
Clone, H: Clone, MTH>\n\nwhere\n\n MTH: Arity2Hasher<D, H>,\n\n{\n", "file_path": "src/sparse_merkle_tree.rs", "rank": 48, "score": 22.534834433400743 }, { "content": " }\n\n\n\n /// Get all key-value pairs in a tree with root `root_node` and prefix `prefix_key`. If `proof` is\n\n /// not None, it is populated with a proof.\n\n pub fn get_keys_values_with_prefix<K>(\n\n &self,\n\n prefix_key: &dyn Key,\n\n node: &NodeType<H, V>,\n\n proof: &mut Option<Vec<NodeType<H, V>>>,\n\n hash_db: &dyn HashValueDb<H, S>,\n\n nibbles_to_key: &dyn Fn(&[u8]) -> K, // TODO: nibbles_to_key can return an error, return type should be a result\n\n ) -> Result<Vec<(K, V)>, MerkleTreeError> {\n\n // TODO: Return value should be a iterator\n\n let path = prefix_key.to_nibbles();\n\n let need_proof = proof.is_some();\n\n let mut proof_nodes = Vec::<NodeType<H, V>>::new();\n\n\n\n // Nibbles of the prefix before the prefix_node\n\n let mut seen_prefix_nibbles = vec![];\n\n\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 49, "score": 22.447491508383305 }, { "content": "# Merkle trees in Rust\n\n\n\nVarious kinds of merkle trees with hash function and tree storage abstracted.\n\n\n\n1. [Vanilla (inefficient) sparse merkle tree](./src/vanilla_sparse_merkle_tree.rs)\n\n1. [Sparse merkle tree](./src/sparse_merkle_tree.rs) with optimizations from V. Buterin \n\n1. [Ethereum's Merkle Patricia trie](./src/merkle_patricia_trie.rs)\n\n1. [Compact merkle tree](./src/compact_merkle_tree.rs) as described by Google's certificate transparency.\n\n\n\n\n\n## Hashing\n\n\n\nThe hash function is abstract such that a hash function like SHA-2, SHA-3 or an algebraic hash function like MiMC can be \n\nused. 
For a binary tree, this is the hasher's trait\n\n\n\n```rust\n\n/// To be used with a binary tree\n\n/// `D` is the type of data the leaf has, like a string or a big number, etc.\n\n/// `H` is the type for the hash\n\npub trait Arity2Hasher<D, H> {\n\n /// Hash the given leaf data to get the leaf hash\n\n fn hash_leaf_data(&self, leaf: D) -> Result<H, MerkleTreeError>;\n\n \n\n /// Hash 2 adjacent nodes (leaves or inner nodes) to get their root hash\n\n fn hash_tree_nodes(&self, left_node: H, right_node: H) -> Result<H, MerkleTreeError>;\n\n}\n\n```\n\n\n\nFor a 4-ary tree, this is the hasher's trait\n\n```rust\n\n/// To be used with a 4-ary tree\n\n/// `D` is the type of data the leaf has, like a string or a big number, etc.\n\n/// `H` is the type for the hash\n\npub trait Arity4Hasher<D, H> {\n\n /// Hash the given leaf data to get the leaf hash\n\n fn hash_leaf_data(&self, leaf: D) -> Result<H, MerkleTreeError>;\n\n\n\n /// Hash 4 adjacent nodes (leaves or inner nodes) to get their root hash\n\n fn hash_tree_nodes(\n\n &self,\n\n node_0: H,\n\n node_1: H,\n\n node_2: H,\n\n node_3: H,\n\n ) -> Result<H, MerkleTreeError>;\n\n}\n\n```\n\n\n\nSay, i need to use SHA-256 in a binary merkle tree, then such an implementation can be used\n\n```rust\n\npub struct Sha256Hasher {\n\n .....\n", "file_path": "README.md", "rank": 50, "score": 22.424539611459835 }, { "content": "\n\nimpl<D: Clone, H: Clone + PartialEq, MTH> VanillaBinarySparseMerkleTree<D, H, MTH>\n\nwhere\n\n MTH: Arity2Hasher<D, H>,\n\n{\n\n /// Create a new tree. `empty_leaf_val` is the default value for leaf of empty tree. It could be zero.\n\n /// Requires a database to hold leaves and nodes. 
The db should implement the `HashValueDb` trait\n\n pub fn new(\n\n empty_leaf_val: D,\n\n hasher: MTH,\n\n depth: usize,\n\n hash_db: &mut dyn HashValueDb<H, (H, H)>,\n\n ) -> Result<VanillaBinarySparseMerkleTree<D, H, MTH>, MerkleTreeError> {\n\n assert!(depth > 0);\n\n let mut cur_hash = hasher.hash_leaf_data(empty_leaf_val)?;\n\n for _ in 0..depth {\n\n let val = (cur_hash.clone(), cur_hash.clone());\n\n cur_hash = hasher.hash_tree_nodes(cur_hash.clone(), cur_hash.clone())?;\n\n hash_db.put(cur_hash.clone(), val)?;\n\n }\n", "file_path": "src/vanilla_sparse_merkle_tree.rs", "rank": 51, "score": 22.419748703336843 }, { "content": " // Check if root is equal to cur_hash\n\n match root {\n\n Some(r) => Ok(cur_hash == *r),\n\n None => Ok(cur_hash == self.root),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct VanillaArity4SparseMerkleTree<D: Clone, H: Clone, MTH>\n\nwhere\n\n MTH: Arity4Hasher<D, H>,\n\n{\n\n pub depth: usize,\n\n pub root: H,\n\n hasher: MTH,\n\n phantom: PhantomData<D>,\n\n}\n\n\n\nimpl<D: Clone, H: Clone + PartialEq + Default, MTH> VanillaArity4SparseMerkleTree<D, H, MTH>\n", "file_path": "src/vanilla_sparse_merkle_tree.rs", "rank": 52, "score": 22.36867516935931 }, { "content": " }\n\n }\n\n };\n\n Ok(key)\n\n }\n\n\n\n fn initialize_with_given_nodes_and_root_hash(\n\n hasher: NH,\n\n node_serializer: NS,\n\n root_hash: &H,\n\n nodes: Vec<NodeType<H, V>>,\n\n hash_db: &mut dyn HashValueDb<H, S>,\n\n ) -> Result<Self, MerkleTreeError> {\n\n for node in nodes {\n\n let serz_node = node_serializer.serialize(node)?;\n\n if serz_node.len() >= hasher.output_size() {\n\n let hash = hasher.hash(serz_node.clone())?;\n\n hash_db.put(hash.clone(), serz_node)?;\n\n }\n\n }\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 53, "score": 22.314715247627017 }, { "content": "use crate::db::HashValueDb;\n\nuse crate::errors::MerkleTreeError;\n\nuse crate::hasher::{Arity2Hasher, Arity4Hasher};\n\nuse 
crate::types::LeafIndex;\n\nuse std::marker::PhantomData;\n\n\n\n// TODO: Have prehashed versions of the methods below that do not call `hash_leaf_data` but assume\n\n// that leaf data being passed is already hashed.\n\n\n\n/// The types `D`, `H` and `MTH` correspond to the types of data, hash and merkle tree hasher\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct VanillaBinarySparseMerkleTree<D: Clone, H: Clone, MTH>\n\nwhere\n\n MTH: Arity2Hasher<D, H>,\n\n{\n\n pub depth: usize,\n\n pub root: H,\n\n pub hasher: MTH,\n\n pub phantom: PhantomData<D>,\n\n}\n", "file_path": "src/vanilla_sparse_merkle_tree.rs", "rank": 54, "score": 21.89364536307879 }, { "content": "\n\nimpl<D: Clone, H: Clone + PartialEq, MTH> CompactMerkleTree<D, H, MTH>\n\nwhere\n\n MTH: Arity2Hasher<D, H>,\n\n{\n\n pub fn new(hasher: MTH) -> Self {\n\n Self {\n\n size: 0,\n\n full_subtree_roots: vec![],\n\n hasher,\n\n phantom: PhantomData,\n\n }\n\n }\n\n\n\n /// Takes a hash db and returns a new tree of the size `tree_size` based on the leaves and nodes\n\n /// present in hash db.\n\n pub fn new_from_hash_db(\n\n hasher: MTH,\n\n tree_size: TreeSizeType,\n\n hash_db: &dyn HashDb<H>,\n", "file_path": "src/compact_merkle_tree.rs", "rank": 55, "score": 21.798248922414913 }, { "content": "\n\n fn get_full_subtree_root(&self, node_index: TreeSizeType) -> Result<H, MerkleTreeError> {\n\n let i = node_index as usize;\n\n if i >= self.nodes.len() {\n\n Err(MerkleTreeError::from_kind(\n\n MerkleTreeErrorKind::NodeIndexNotFoundInDB { index: i as u64 },\n\n ))\n\n } else {\n\n Ok(self.nodes[i].clone())\n\n }\n\n }\n\n}\n\n\n\nimpl<H> InMemoryHashDb<H> {\n\n pub fn new() -> Self {\n\n Self {\n\n leaves: vec![],\n\n nodes: vec![],\n\n }\n\n }\n", "file_path": "src/compact_merkle_tree.rs", "rank": 56, "score": 21.703214147726264 }, { "content": " cur_hash = hasher.hash_tree_nodes(\n\n cur_hash.clone(),\n\n cur_hash.clone(),\n\n cur_hash.clone(),\n\n cur_hash.clone(),\n\n )?;\n\n 
hash_db.put(cur_hash.clone(), val)?;\n\n }\n\n Ok(Self {\n\n depth,\n\n root: cur_hash,\n\n hasher,\n\n phantom: PhantomData,\n\n })\n\n }\n\n\n\n /// Create a new tree with a given root hash\n\n pub fn initialize_with_root_hash(hasher: MTH, depth: usize, root: H) -> Self {\n\n Self {\n\n depth,\n", "file_path": "src/vanilla_sparse_merkle_tree.rs", "rank": 57, "score": 21.263336187516877 }, { "content": " for i in 0..keys.len() {\n\n let key = keys[i];\n\n let value = &values[i];\n\n match new_trie.get(key, &mut None, hash_db) {\n\n Ok(v) => {\n\n if v != *value {\n\n return Ok(false);\n\n }\n\n }\n\n Err(_) => return Ok(false),\n\n }\n\n }\n\n\n\n Ok(true)\n\n }\n\n\n\n /// Get the node from which all keys having the prefix `prefix_nibbles` diverge. This node can then\n\n /// be used to traverse all keys with the prefix.\n\n fn get_last_node_for_prefix_key(\n\n &self,\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 58, "score": 21.031074889017958 }, { "content": " fn is_branch(&self) -> bool {\n\n match self {\n\n NodeType::Branch(_) => true,\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n\nimpl<H, V> Default for NodeType<H, V> {\n\n fn default() -> Self {\n\n NodeType::Empty\n\n }\n\n}\n\n\n\npub enum KeyValueNodeType<H, V> {\n\n Leaf(Leaf<V>),\n\n Extension(Extension<H, V>),\n\n}\n\n\n\n/// Either a hash (which would be a hash of a serialized branch node) or a branch node\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 59, "score": 20.328911164925827 }, { "content": " let node = self.get_node_from_branch(node_index, branch, hash_db)?;\n\n if need_proof {\n\n proof_nodes.push(node.clone());\n\n }\n\n self.get_from_subtree(&node, path, (need_proof, proof_nodes), hash_db)\n\n }\n\n }\n\n };\n\n val\n\n }\n\n\n\n /// Get all key-value pairs in a tree with root `root_node`. If `proof` is not None, it is populated\n\n /// with a proof. 
The keys are returned as nibbles.\n\n fn get_key_nibbles_and_values(\n\n &self,\n\n node: &NodeType<H, V>,\n\n (need_proof, proof_nodes): (bool, &mut Vec<NodeType<H, V>>),\n\n hash_db: &dyn HashValueDb<H, S>,\n\n ) -> Result<Vec<(Vec<u8>, V)>, MerkleTreeError> {\n\n // TODO: Return value should be a iterator\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 60, "score": 20.295951855722002 }, { "content": " value: leaf_value,\n\n });\n\n let leaf = self.store_node_in_db_if_needed(leaf_node, hash_db)?;\n\n branch.path[leaf_idx] = Box::new(leaf);\n\n }\n\n Ok(())\n\n }\n\n\n\n /// Store an extension node in a branch node.\n\n fn store_extension_in_branch(\n\n &self,\n\n branch_path: &mut [Box<HashOrNode<H, V>>; 16],\n\n mut ext_path: Vec<u8>,\n\n ext_key: HashOrBranch<H, V>,\n\n hash_db: &mut dyn HashValueDb<H, S>,\n\n ) -> Result<(), MerkleTreeError> {\n\n if ext_path.len() == 1 {\n\n // If existing extension node's path, store the extension's key directly\n\n // in the branch node. 
Since extension will have a branch node only when\n\n // its length is less than hash output size, no need to deserialize it.\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 61, "score": 19.403593565019122 }, { "content": " proof_vec.push(children.0);\n\n }\n\n }\n\n }\n\n match proof {\n\n Some(v) => {\n\n v.append(&mut proof_vec);\n\n }\n\n None => (),\n\n }\n\n Ok(cur_node.clone())\n\n }\n\n\n\n /// Verify a leaf inclusion proof, if `root` is None, use the current root else use given root\n\n pub fn verify_proof(\n\n &self,\n\n idx: &dyn LeafIndex,\n\n val: D,\n\n proof: Vec<H>,\n\n root: Option<&H>,\n", "file_path": "src/vanilla_sparse_merkle_tree.rs", "rank": 62, "score": 19.38996835240599 }, { "content": " (cur_hash, sibling)\n\n } else {\n\n // leaf falls on the right side\n\n (sibling, cur_hash)\n\n };\n\n let val = (l.clone(), r.clone());\n\n cur_hash = self.hasher.hash_tree_nodes(l, r)?;\n\n hash_db.put(cur_hash.clone(), val)?;\n\n }\n\n\n\n self.root = cur_hash;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Get value for a leaf. `proof` when not set to None will be set to the inclusion proof for that leaf.\n\n pub fn get(\n\n &self,\n\n idx: &dyn LeafIndex,\n\n proof: &mut Option<Vec<H>>,\n", "file_path": "src/vanilla_sparse_merkle_tree.rs", "rank": 63, "score": 19.34437890201082 }, { "content": " path.reverse();\n\n let mut cur_hash = self.hasher.hash_leaf_data(val)?;\n\n\n\n // Iterate over the base 4 digits\n\n for d in path {\n\n let (n_0, n_1, n_2, n_3) =\n\n Self::extract_from_siblings(d, siblings.pop().unwrap(), cur_hash);\n\n let val = [n_0.clone(), n_1.clone(), n_2.clone(), n_3.clone()];\n\n cur_hash = self.hasher.hash_tree_nodes(n_0, n_1, n_2, n_3)?;\n\n hash_db.put(cur_hash.clone(), val)?;\n\n }\n\n\n\n self.root = cur_hash;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Get value for a leaf. 
`proof` when not set to None will be set to the inclusion proof for that leaf.\n\n pub fn get(\n\n &self,\n", "file_path": "src/vanilla_sparse_merkle_tree.rs", "rank": 64, "score": 19.314610715461765 }, { "content": " root_hash: &H,\n\n hash_db: &mut dyn HashValueDb<H, S>,\n\n ) -> Result<bool, MerkleTreeError> {\n\n if keys.len() != values.len() {\n\n return Err(MerkleTreeError::from_kind(\n\n MerkleTreeErrorKind::UnequalNoOfKeysAndValues {\n\n num_keys: keys.len(),\n\n num_values: values.len(),\n\n },\n\n ));\n\n }\n\n\n\n let new_trie = Self::initialize_with_given_nodes_and_root_hash(\n\n hasher,\n\n node_serializer,\n\n root_hash,\n\n proof,\n\n hash_db,\n\n )?;\n\n\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 65, "score": 19.234610392180237 }, { "content": " hash_db: &dyn HashValueDb<H, S>,\n\n ) -> Result<NodeType<H, V>, MerkleTreeError> {\n\n let serz_node = hash_db.get(hash)?;\n\n self.deserialize_node(serz_node)\n\n }\n\n\n\n /// Store a leaf node in a branch node.\n\n fn store_leaf_in_branch(\n\n &self,\n\n branch: &mut Branch<H, V>,\n\n mut leaf_path: Vec<u8>,\n\n leaf_value: V,\n\n hash_db: &mut dyn HashValueDb<H, S>,\n\n ) -> Result<(), MerkleTreeError> {\n\n if leaf_path.is_empty() {\n\n branch.value = leaf_value;\n\n } else {\n\n let leaf_idx = leaf_path.remove(0) as usize;\n\n let leaf_node = NodeType::Leaf(Leaf {\n\n path: leaf_path,\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 66, "score": 19.10923548359729 }, { "content": " node_to_start_from: &NodeType<H, V>,\n\n mut prefix_nibbles: Vec<u8>,\n\n seen_prefix_nibbles: &mut Vec<u8>,\n\n (need_proof, proof_nodes): (bool, &mut Vec<NodeType<H, V>>),\n\n hash_db: &dyn HashValueDb<H, S>,\n\n ) -> Result<NodeType<H, V>, MerkleTreeError> {\n\n match node_to_start_from {\n\n NodeType::Empty => Ok(NodeType::Empty),\n\n NodeType::Leaf(leaf) => {\n\n if leaf.path.starts_with(&prefix_nibbles) {\n\n Ok(node_to_start_from.clone())\n\n } else {\n\n return Err(MerkleTreeError::from_kind(\n\n 
MerkleTreeErrorKind::NoKeyWithPrefixInTrie,\n\n ));\n\n }\n\n }\n\n NodeType::Extension(ext) => {\n\n if prefix_nibbles.is_empty() {\n\n return Err(MerkleTreeError::from_kind(\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 67, "score": 19.03150694306192 }, { "content": " use super::*;\n\n use crate::db::InMemoryHashValueDb;\n\n use std::collections::HashMap;\n\n extern crate rand;\n\n use self::rand::{thread_rng, Rng};\n\n\n\n /// Create a new trie and db and return them\n\n fn get_new_sha3_rlp_trie_with_in_memory_db() -> (\n\n MerklePatriciaTrie<Vec<u8>, Vec<u8>, Vec<u8>, RLPSerializer, Sha3Hasher>,\n\n InMemoryHashValueDb<Vec<u8>>,\n\n ) {\n\n let hasher = Sha3Hasher {};\n\n let node_serz = RLPSerializer {};\n\n // Create a new trie\n\n let trie = MerklePatriciaTrie::new(hasher.clone(), node_serz.clone()).unwrap();\n\n // Create a new db\n\n let db = InMemoryHashValueDb::<Vec<u8>>::new();\n\n (trie, db)\n\n }\n\n\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 68, "score": 18.839114475251705 }, { "content": " }\n\n .into());\n\n }\n\n if old_tree_size == new_tree_size {\n\n Ok(vec![])\n\n } else {\n\n let proof = Self::subproof(hasher, old_tree_size, 0, new_tree_size, true, hash_db)?;\n\n Ok(proof)\n\n }\n\n }\n\n\n\n /// Verify the proof generated by `Self::get_leaf_inclusion_proof_*`\n\n pub fn verify_leaf_inclusion_proof(\n\n hasher: &MTH,\n\n leaf_index: TreeSizeType,\n\n leaf_val: D,\n\n tree_size: TreeSizeType,\n\n root: &H,\n\n proof: Vec<H>,\n\n ) -> Result<bool, MerkleTreeError> {\n", "file_path": "src/compact_merkle_tree.rs", "rank": 69, "score": 18.674983399866417 }, { "content": " }\n\n\n\n /// Testing implementation for using sqlite for storing tree data. No error handling. 
Purpose is\n\n /// to demonstrate how a persistent database can be used\n\n\n\n pub struct RusqliteSMTHashValueDb {\n\n db_path: String,\n\n pub table_name: String,\n\n pub db_conn: Connection,\n\n }\n\n\n\n impl RusqliteSMTHashValueDb {\n\n pub fn new(db_path: String, table_name: String) -> Self {\n\n let db_conn = Connection::open(&db_path).unwrap();\n\n let sql = format!(\"create table if not exists {} (key string primary key, value1 blob not null, value2 blob not null)\", table_name);\n\n db_conn.execute(&sql, NO_PARAMS).unwrap();\n\n Self {\n\n db_path,\n\n table_name,\n\n db_conn,\n", "file_path": "src/vanilla_sparse_merkle_tree.rs", "rank": 70, "score": 18.462136918350172 }, { "content": " }\n\n for (mut n, v) in\n\n self.get_key_nibbles_and_values(&node, (need_proof, proof_nodes), hash_db)?\n\n {\n\n n.insert(0, i as u8);\n\n nv.push((n, v));\n\n }\n\n }\n\n if branch.value != V::default() {\n\n nv.push((Vec::<u8>::new(), branch.value.clone()))\n\n }\n\n Ok(nv)\n\n }\n\n }\n\n }\n\n\n\n /// Get node at a particualar index in a branch. 
If there is a hash at the index, get the node\n\n /// for that hash from the db\n\n fn get_node_from_branch(\n\n &self,\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 71, "score": 18.30977304335175 }, { "content": " .hasher\n\n .hash_tree_nodes(left_subtree_hash.clone(), new_right.clone())?;\n\n hash_db.put(\n\n root.clone(),\n\n (NodeType::SubtreeHash(left_subtree_hash), new_right),\n\n )?;\n\n Ok(root)\n\n } else {\n\n // New value lies in left subtree so update left subtree\n\n path.remove(0);\n\n let new_left =\n\n self._update(path, val, left_subtree_hash, depth + 1, hash_db)?;\n\n let root = self\n\n .hasher\n\n .hash_tree_nodes(new_left.clone(), right_child.clone())?;\n\n hash_db\n\n .put(root.clone(), (NodeType::SubtreeHash(new_left), right_child))?;\n\n Ok(root)\n\n }\n\n }\n", "file_path": "src/sparse_merkle_tree.rs", "rank": 72, "score": 18.211028511012966 }, { "content": " }\n\n }\n\n }\n\n\n\n /// Update subtree with 1 non-empty leaf, result will be creation of 2 subtrees, each with 1\n\n /// non-empty leaf unless the same non empty leaf is being updated. 
Save intermediate nodes in the DB\n\n fn update_one_val_subtree(\n\n &mut self,\n\n mut path_for_new_key: Vec<u8>,\n\n val_for_new_key: H,\n\n mut path_for_old_key: Vec<u8>,\n\n val_for_old_key: H,\n\n depth: usize,\n\n hash_db: &mut dyn HashValueDb<H, (NodeType<H>, H)>,\n\n ) -> Result<H, MerkleTreeError> {\n\n if path_for_new_key == path_for_old_key {\n\n // The path being updated is same as the existing path, this is the case of updating value\n\n // of an existing key so the resulting subtree has size a single non empty leaf.\n\n let new_root = self.update_empty_subtree(\n\n path_for_new_key.clone(),\n", "file_path": "src/sparse_merkle_tree.rs", "rank": 73, "score": 18.139370082705646 }, { "content": " pub fn get_consistency_proof(\n\n &self,\n\n old_tree_size: TreeSizeType,\n\n hash_db: &dyn HashDb<H>,\n\n ) -> Result<Vec<H>, MerkleTreeError> {\n\n Self::get_consistency_proof_for_tree_size(&self.hasher, old_tree_size, self.size, hash_db)\n\n }\n\n\n\n /// Get a proof that the a shorter tree with size `old_tree_size` is consistent with tree of size\n\n /// `new_tree_size`, i.e. 
the shorter tree is contained in the new tree\n\n pub fn get_consistency_proof_for_tree_size(\n\n hasher: &MTH,\n\n old_tree_size: TreeSizeType,\n\n new_tree_size: TreeSizeType,\n\n hash_db: &dyn HashDb<H>,\n\n ) -> Result<Vec<H>, MerkleTreeError> {\n\n if old_tree_size > new_tree_size {\n\n return Err(MerkleTreeErrorKind::TreeSmallerThanExpected {\n\n expected: old_tree_size,\n\n given: new_tree_size,\n", "file_path": "src/compact_merkle_tree.rs", "rank": 74, "score": 18.058731575931866 }, { "content": " // of p leaves\n\n idx += (p - 1);\n\n self.added_subtree(p, node_hashes[(idx - 1) as usize].clone());\n\n }\n\n }\n\n\n\n for l in leaf_hashes {\n\n hash_db.add_leaf(l)?;\n\n }\n\n for n in node_hashes {\n\n hash_db.add_full_subtree_root(n)?;\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn get_root_hash(&self) -> Result<H, MerkleTreeError> {\n\n if self.size == 0 {\n\n return Err(MerkleTreeErrorKind::CannotQueryEmptyTree.into());\n\n }\n", "file_path": "src/compact_merkle_tree.rs", "rank": 75, "score": 18.046521176446433 }, { "content": "/// The type `H` is for the hash output\n\n/// The type `S` is for the serialized (node) output\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct MerklePatriciaTrie<V, H, S: Clone + KnownLength, NS, NH>\n\nwhere\n\n NS: PatriciaTrieNodeSerializer<H, V, S>,\n\n NH: NodeHasher<S, H>,\n\n{\n\n pub root_node: NodeType<H, V>,\n\n hasher: NH,\n\n node_serializer: NS,\n\n pub phantom_1: PhantomData<V>,\n\n pub phantom_2: PhantomData<S>,\n\n}\n\n\n\nimpl<V: Clone + Default + Eq, H: Clone, S: Clone + KnownLength, NS, NH>\n\n MerklePatriciaTrie<V, H, S, NS, NH>\n\nwhere\n\n NS: PatriciaTrieNodeSerializer<H, V, S>,\n\n NH: NodeHasher<S, H>,\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 76, "score": 17.88118891777891 }, { "content": " if common_prefix.len() == 0 {\n\n // paths for both nodes (new and existing) have no common prefix, create a branch node with 2 leaf nodes\n\n let branch_path: [Box<HashOrNode<H, V>>; 16] = 
Default::default();\n\n let mut branch = Branch {\n\n path: branch_path,\n\n value: V::default(),\n\n };\n\n\n\n self.store_leaf_in_branch(\n\n &mut branch,\n\n cur_path.to_vec(),\n\n leaf_node.value.clone(),\n\n hash_db,\n\n )?;\n\n self.store_leaf_in_branch(&mut branch, path, value, hash_db)?;\n\n\n\n Ok(NodeType::Branch(branch))\n\n } else {\n\n if common_prefix.len() < cur_path.len() && common_prefix.len() < path.len()\n\n {\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 77, "score": 17.852847385819835 }, { "content": " ext_node.key.clone(),\n\n hash_db,\n\n )?;\n\n let mut branch = Branch {\n\n path: branch_path,\n\n value: V::default(),\n\n };\n\n self.store_leaf_in_branch(&mut branch, path, value, hash_db)?;\n\n\n\n Ok(NodeType::Branch(branch))\n\n } else if common_prefix.len() < cur_path.len()\n\n && common_prefix.len() < path.len()\n\n {\n\n // Some path prefix is common between both new and existing node, create an extension node\n\n // with common prefix path as key and 1 leaf node and 1 extension node, both\n\n // in a new branch node\n\n\n\n // this branch will be the key for the extension node\n\n let mut branch_path: [Box<HashOrNode<H, V>>; 16] = Default::default();\n\n\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 78, "score": 17.742829506575944 }, { "content": " path[common_prefix.len()..].to_vec(),\n\n value,\n\n hash_db,\n\n )?;\n\n\n\n Ok(NodeType::Extension(Extension {\n\n path: common_prefix,\n\n key: HashOrBranch::Branch(branch),\n\n }))\n\n } else if common_prefix == *cur_path {\n\n // Existing node and new node will be moved to a new branch node which will be the key of\n\n // a new extension node with path as the common prefix. 
The value of the existing node will\n\n // be the value of the branch node.\n\n\n\n // this branch will be the key for the extension node\n\n let branch_path: [Box<HashOrNode<H, V>>; 16] = Default::default();\n\n let mut branch = Branch {\n\n path: branch_path,\n\n value: leaf_node.value.clone(),\n\n };\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 79, "score": 17.723358193877793 }, { "content": "impl<V> Leaf<V> {\n\n pub fn new(path: Vec<u8>, value: V) -> Self {\n\n Leaf { path, value }\n\n }\n\n\n\n pub fn has_path(&self, path: &[u8]) -> bool {\n\n self.path == path\n\n }\n\n}\n\n\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 80, "score": 17.629912777940365 }, { "content": "}\n\n\n\n/// When SHA-256 is used for hashing in a binary merkle tree\n\nimpl Arity2Hasher<&str, Vec<u8>> for Sha256Hasher {\n\n fn hash_leaf_data(&self, leaf: &str) -> Result<Vec<u8>, MerkleTreeError> {\n\n ....\n\n }\n\n\n\n fn hash_tree_nodes(\n\n &self,\n\n left_node: Vec<u8>,\n\n right_node: Vec<u8>,\n\n ) -> Result<Vec<u8>, MerkleTreeError> {\n\n ....\n\n }\n\n}\n\n```\n\n\n\nWhen using SHA-256 in a 4-ary tree, similar implementation can be used\n\n```rust\n\n/// When SHA-256 is used for hashing in a 4-ary merkle tree\n\nimpl Arity4Hasher<&str, Vec<u8>> for Sha256Hasher {\n\n fn hash_leaf_data(&self, leaf: &str) -> Result<Vec<u8>, MerkleTreeError> {\n\n ....\n\n }\n\n\n\n fn hash_tree_nodes(\n\n &self,\n\n node_0: Vec<u8>,\n\n node_1: Vec<u8>,\n\n node_2: Vec<u8>,\n\n node_3: Vec<u8>,\n\n ) -> Result<Vec<u8>, MerkleTreeError> {\n\n ....\n\n }\n\n}\n\n```\n\n\n\nSimilarly, other hash functions can be used. For demonstration, an implementation of `Arity2Hasher` with algebraic hash function \n\nMiMC is present as well. 
MiMC is useful when using merkle trees in various SNARKs constructions where the data to hash and the \n\nhash output are big numbers.\n\n```rust\n\n/// When MiMC is used for hashing in a merkle tree\n\npub struct MiMCHasher {\n\n ...\n\n}\n\n\n\n/// When MiMC is used for hashing in a binary merkle tree\n\nimpl Arity2Hasher<BigUint, BigUint> for MiMCHasher {\n\n fn hash_leaf_data(&self, leaf: BigUint) -> Result<BigUint, MerkleTreeError> {\n\n ...\n\n }\n\n\n\n fn hash_tree_nodes(\n\n &self,\n\n left_node: BigUint,\n\n right_node: BigUint,\n\n ) -> Result<BigUint, MerkleTreeError> {\n\n ....\n\n }\n\n}\n\n```\n\n\n", "file_path": "README.md", "rank": 81, "score": 17.59617939888695 }, { "content": " match node {\n\n NodeType::Empty => Ok(vec![]),\n\n NodeType::Leaf(l) => return Ok(vec![(l.path.clone(), l.value.clone())]),\n\n NodeType::Extension(ext_node) => {\n\n let path = ext_node.path.clone();\n\n let nv = match &ext_node.key {\n\n HashOrBranch::Hash(h) => {\n\n let inner_node = self.get_node_from_db(h, hash_db)?;\n\n if need_proof {\n\n proof_nodes.push(inner_node.clone());\n\n }\n\n self.get_key_nibbles_and_values(\n\n &inner_node,\n\n (need_proof, proof_nodes),\n\n hash_db,\n\n )?\n\n }\n\n HashOrBranch::Branch(branch) => self.get_key_nibbles_and_values(\n\n &NodeType::Branch(branch.clone()),\n\n (need_proof, proof_nodes),\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 82, "score": 17.554297182679 }, { "content": " if proof_len == self.depth {\n\n Ok(subtree_root_hash == leaf_hash)\n\n } else {\n\n Ok(self.empty_subtree_hashes[0] == leaf_hash)\n\n }\n\n }\n\n\n\n fn _update(\n\n &mut self,\n\n mut path: Vec<u8>,\n\n val: H,\n\n root: H,\n\n depth: usize,\n\n hash_db: &mut dyn HashValueDb<H, (NodeType<H>, H)>,\n\n ) -> Result<H, MerkleTreeError> {\n\n if depth == self.depth {\n\n return Ok(val);\n\n }\n\n if root == self.empty_subtree_hashes[depth] {\n\n // Update an empty subtree: make a single-val subtree\n", "file_path": "src/sparse_merkle_tree.rs", 
"rank": 83, "score": 17.515149061676482 }, { "content": " let value_1 = vec![1, 2, 9, 98, 10, 230];\n\n trie.insert(&key_1, value_1.clone(), &mut db).unwrap();\n\n\n\n let key_2 = vec![1, 17, 8];\n\n let value_2 = vec![2, 4, 5, 6];\n\n trie.insert(&key_2, value_2.clone(), &mut db).unwrap();\n\n\n\n assert_eq!(trie.get(&key_1, &mut None, &db).unwrap(), value_1);\n\n assert_eq!(trie.get(&key_2, &mut None, &db).unwrap(), value_2);\n\n\n\n match &trie.root_node {\n\n NodeType::<Vec<u8>, Vec<u8>>::Extension(ext_node) => match &ext_node.key {\n\n HashOrBranch::Branch(b) => {\n\n for (i, p) in b.path.to_vec().into_iter().enumerate() {\n\n match *p {\n\n HashOrNode::Node(n) => match n {\n\n NodeType::<Vec<u8>, Vec<u8>>::Empty => {\n\n if i == 0 || i == 1 {\n\n panic!(\"Node should have been a leaf but was empty\")\n\n }\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 84, "score": 17.38582332707005 }, { "content": " let new_root = self.update_empty_subtree(path.clone(), val.clone(), depth)?;\n\n hash_db.put(new_root.clone(), (NodeType::Path(path), val))?;\n\n Ok(new_root)\n\n } else {\n\n let (left_child, right_child) = hash_db.get(&root)?;\n\n match left_child {\n\n NodeType::Path(right_child_path) => self.update_one_val_subtree(\n\n path,\n\n val,\n\n right_child_path,\n\n right_child,\n\n depth,\n\n hash_db,\n\n ),\n\n NodeType::SubtreeHash(left_subtree_hash) => {\n\n if path[0] == 1 {\n\n // New value lies in right subtree so update right subtree\n\n path.remove(0);\n\n let new_right = self._update(path, val, right_child, depth + 1, hash_db)?;\n\n let root = self\n", "file_path": "src/sparse_merkle_tree.rs", "rank": 85, "score": 17.357891987679906 }, { "content": " proof: Vec<[H; 3]>,\n\n root: Option<&H>,\n\n ) -> Result<bool, MerkleTreeError> {\n\n let mut path = idx.to_leaf_path(4, self.depth);\n\n if path.len() != proof.len() {\n\n return Ok(false);\n\n }\n\n path.reverse();\n\n\n\n let mut cur_hash = self.hasher.hash_leaf_data(val)?;\n\n\n\n for (i, 
sibling) in proof.into_iter().rev().enumerate() {\n\n let (n_0, n_1, n_2, n_3) = Self::extract_from_siblings(path[i], sibling, cur_hash);\n\n cur_hash = self.hasher.hash_tree_nodes(n_0, n_1, n_2, n_3)?;\n\n }\n\n\n\n // Check if root is equal to cur_hash\n\n match root {\n\n Some(r) => Ok(cur_hash == *r),\n\n None => Ok(cur_hash == self.root),\n", "file_path": "src/vanilla_sparse_merkle_tree.rs", "rank": 86, "score": 17.23051717138477 }, { "content": " j += 1;\n\n }\n\n }\n\n proof_vec.push(proof_node);\n\n }\n\n }\n\n match proof {\n\n Some(v) => {\n\n v.append(&mut proof_vec);\n\n }\n\n None => (),\n\n }\n\n Ok(cur_node.clone())\n\n }\n\n\n\n /// Verify a merkle proof, if `root` is None, use the current root else use given root\n\n pub fn verify_proof(\n\n &self,\n\n idx: &dyn LeafIndex,\n\n val: D,\n", "file_path": "src/vanilla_sparse_merkle_tree.rs", "rank": 87, "score": 17.03465994021117 }, { "content": " leaf_data: D,\n\n hash_db: &mut dyn HashDb<H>,\n\n ) -> Result<Vec<H>, MerkleTreeError> {\n\n // Inclusion proof (audit path in RFC 6982) of a leaf contains the nodes that when hashed\n\n // together with the leaf result in the root hash. 
Thus the inclusion proof would be the roots\n\n // of all the subtrees from larger to smaller trees.\n\n let mut inclusion_proof = self.full_subtree_roots.clone();\n\n // We need the path from smaller to larger.\n\n inclusion_proof.reverse();\n\n // A single leaf forms a full subtree with 2^0 = 1 leaf\n\n self.push_full_subtree(vec![leaf_data], hash_db)?;\n\n Ok(inclusion_proof)\n\n }\n\n\n\n /// Append multiple leaves\n\n pub fn extend(\n\n &mut self,\n\n mut leaves: Vec<D>,\n\n hash_db: &mut dyn HashDb<H>,\n\n ) -> Result<(), MerkleTreeError> {\n", "file_path": "src/compact_merkle_tree.rs", "rank": 88, "score": 17.003328313591744 }, { "content": " Ok(Self {\n\n depth,\n\n root: cur_hash,\n\n hasher,\n\n phantom: PhantomData,\n\n })\n\n }\n\n\n\n /// Create a new tree with a given root hash\n\n pub fn initialize_with_root_hash(hasher: MTH, depth: usize, root: H) -> Self {\n\n Self {\n\n depth,\n\n root,\n\n hasher,\n\n phantom: PhantomData,\n\n }\n\n }\n\n\n\n /// Set the given `val` at the given leaf index `idx`\n\n pub fn update(\n", "file_path": "src/vanilla_sparse_merkle_tree.rs", "rank": 89, "score": 16.862002918510036 }, { "content": "\n\n path.remove(0);\n\n }\n\n\n\n match proof {\n\n Some(v) => {\n\n v.append(&mut proof_vec);\n\n }\n\n None => (),\n\n }\n\n\n\n Ok(cur_node)\n\n }\n\n\n\n pub fn verify_proof(\n\n &self,\n\n idx: &dyn LeafIndex,\n\n val: D,\n\n proof: Vec<(NodeType<H>, H)>,\n\n ) -> Result<bool, MerkleTreeError> {\n", "file_path": "src/sparse_merkle_tree.rs", "rank": 90, "score": 16.845984894920136 }, { "content": "## Trees\n\n1. Vanilla sparse merkle trees.\n\nThese sparse merkle trees do not perform any optimizations to use the fact that most leaves are empty. They are useful when using SNARKs. \n\n2 variations, binary and 4-ary are present. `VanillaBinarySparseMerkleTree<D: Clone, H: Clone, MTH>` and \n\n`VanillaArity4SparseMerkleTree<D: Clone, H: Clone, MTH>`. 
The types `D`, `H` and `MTH` correspond to the types of data, hash\n\nand merkle tree hasher. Have a look at the tests for their usage.\n\n\n\n1. Sparse merkle tree\n\nThe have several optimizations over the vanilla ones. Only a binary sparse merkle tree is present for now `BinarySparseMerkleTree<D: Clone, H: Clone, MTH>`.\n\nThe types have the same meaning as for the vanilla tree. Look at the tests for usage.\n\n\n\n1. Merkle Patricia trie\n\nEthereum's merkle patricia trie . Apart from the hash function and storage, the node serialization is abstract as well `PatriciaTrieNodeSerializer`.\n\nThere is only one implementation of the serialization which is RLP (same as Ethereum) as of now. Look at the tests for usage.\n\n ```rust\n\n /// The type `V` is for the value of the data being stored in the trie.\n\n /// The type `H` is for the hash output\n\n /// The type `S` is for the serialized (node) output\n\n #[derive(Clone, Debug, Serialize, Deserialize)]\n\n pub struct MerklePatriciaTrie<V, H, S: Clone + KnownLength, NS, NH>\n\n where\n\n NS: PatriciaTrieNodeSerializer<H, V, S>,\n\n NH: NodeHasher<S, H>,\n\n {\n\n ....\n\n }\n\n ```\n\n1. Compact merkle tree.\n\nAppend only merkle tree used in Google's certificate transparency and Hyperledger Indy's ledger. \n\n`CompactMerkleTree<D: Clone, H: Clone, MTH> where MTH: Arity2Hasher<D, H>`\n\n\n\n## TODO\n\n1. 
Make each tree usable as a feature.\n", "file_path": "README.md", "rank": 91, "score": 16.733384573843367 }, { "content": " root,\n\n hasher,\n\n phantom: PhantomData,\n\n }\n\n }\n\n\n\n /// Set the given `val` at the given leaf index `idx`\n\n pub fn update(\n\n &mut self,\n\n idx: &dyn LeafIndex,\n\n val: D,\n\n hash_db: &mut dyn HashValueDb<H, [H; 4]>,\n\n ) -> Result<(), MerkleTreeError> {\n\n // Find path to insert the new key\n\n let mut siblings_wrap = Some(Vec::<[H; 3]>::new());\n\n self.get(idx, &mut siblings_wrap, hash_db)?;\n\n let mut siblings = siblings_wrap.unwrap();\n\n\n\n let mut path = idx.to_leaf_path(4, self.depth);\n\n // Reverse since path was from root to leaf but i am going leaf to root\n", "file_path": "src/vanilla_sparse_merkle_tree.rs", "rank": 92, "score": 16.623597506311064 }, { "content": "use failure::{Backtrace, Context, Fail};\n\nuse std::fmt;\n\n\n\n#[derive(Clone, Eq, PartialEq, Debug, Fail)]\n\npub enum MerkleTreeErrorKind {\n\n /// Occurs when the hash is not found in the database. Relevant to databases implementing `HashValueDb`\n\n /// trait. The hash is usually the merkle tree hash\n\n #[fail(display = \"Expected to find hash {:?} in the database.\", hash)]\n\n HashNotFoundInDB { hash: Vec<u8> },\n\n\n\n #[fail(display = \"Expected to find leaf index {:?} in the database.\", index)]\n\n LeafIndexNotFoundInDB { index: u64 },\n\n\n\n #[fail(display = \"Expected to find node index {:?} in the database.\", index)]\n\n NodeIndexNotFoundInDB { index: u64 },\n\n\n\n #[fail(display = \"Incorrect flag {:?} for RLP node\", flag)]\n\n IncorrectFlagForRLPNode { flag: u8 },\n\n\n\n #[fail(display = \"Cannot deserialize using RLP. 
Error: {:?}\", msg)]\n", "file_path": "src/errors.rs", "rank": 93, "score": 16.619658553426955 }, { "content": " pub depth: usize,\n\n pub root: H,\n\n pub hasher: MTH,\n\n /// `empty_tree_hashes` contains the hashes of empty subtrees at each level.\n\n /// The 1st element is the root hash when all subtrees are empty and last element is the hash\n\n /// of the empty leaf\n\n pub empty_subtree_hashes: Vec<H>,\n\n pub phantom: PhantomData<D>,\n\n}\n\n\n\nimpl<D: Clone, H: Clone + PartialEq, MTH> BinarySparseMerkleTree<D, H, MTH>\n\nwhere\n\n MTH: Arity2Hasher<D, H>,\n\n{\n\n pub fn new(\n\n empty_leaf_val: D,\n\n hasher: MTH,\n\n depth: usize,\n\n ) -> Result<BinarySparseMerkleTree<D, H, MTH>, MerkleTreeError> {\n\n assert!(depth > 0);\n", "file_path": "src/sparse_merkle_tree.rs", "rank": 94, "score": 16.445055678886632 }, { "content": "\n\n #[fail(display = \"New root does not match the root calculated from consistency proof\")]\n\n InconsistentNewRoot,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct MerkleTreeError {\n\n inner: Context<MerkleTreeErrorKind>,\n\n}\n\n\n\nimpl MerkleTreeError {\n\n pub fn kind(&self) -> MerkleTreeErrorKind {\n\n self.inner.get_context().clone()\n\n }\n\n\n\n pub fn from_kind(kind: MerkleTreeErrorKind) -> Self {\n\n Self {\n\n inner: Context::new(\"\").context(kind),\n\n }\n\n }\n", "file_path": "src/errors.rs", "rank": 95, "score": 16.437646043003344 }, { "content": " (\n\n self.update_one_val_subtree(\n\n path_for_new_key,\n\n val_for_new_key,\n\n path_for_old_key,\n\n val_for_old_key,\n\n depth + 1,\n\n hash_db,\n\n )?,\n\n self.empty_subtree_hashes[depth + 1].clone(),\n\n )\n\n }\n\n }\n\n };\n\n let root = self.hasher.hash_tree_nodes(left.clone(), right.clone())?;\n\n hash_db.put(root.clone(), (NodeType::SubtreeHash(left), right))?;\n\n Ok(root)\n\n }\n\n\n\n /// Make a root hash of a (sub)tree with a single key/value pair from empty tree\n", "file_path": "src/sparse_merkle_tree.rs", "rank": 96, "score": 16.2544139553291 }, { 
"content": " fn add_leaf(&mut self, leaf_hash: H) -> Result<(), MerkleTreeError> {\n\n self.leaves.push(leaf_hash);\n\n Ok(())\n\n }\n\n\n\n fn add_full_subtree_root(&mut self, node_hash: H) -> Result<(), MerkleTreeError> {\n\n self.nodes.push(node_hash);\n\n Ok(())\n\n }\n\n\n\n fn get_leaf(&self, leaf_index: TreeSizeType) -> Result<H, MerkleTreeError> {\n\n let i = leaf_index as usize;\n\n if i >= self.leaves.len() {\n\n Err(MerkleTreeError::from_kind(\n\n MerkleTreeErrorKind::LeafIndexNotFoundInDB { index: i as u64 },\n\n ))\n\n } else {\n\n Ok(self.leaves[i].clone())\n\n }\n\n }\n", "file_path": "src/compact_merkle_tree.rs", "rank": 97, "score": 16.222917963379878 }, { "content": " }\n\n\n\n // Since the keys are in nibbles, convert them to a key using the passed function after\n\n // adding the prefix\n\n Ok(nv\n\n .into_iter()\n\n .map(|(mut n, v)| {\n\n let mut key = seen_prefix_nibbles.clone();\n\n key.append(&mut n);\n\n (nibbles_to_key(&key), v)\n\n })\n\n .collect::<Vec<(K, V)>>())\n\n }\n\n\n\n pub fn verify_proof_multiple_keys(\n\n keys: Vec<&dyn Key>,\n\n values: &[V],\n\n proof: Vec<NodeType<H, V>>,\n\n hasher: NH,\n\n node_serializer: NS,\n", "file_path": "src/merkle_patricia_trie.rs", "rank": 98, "score": 16.182884131270978 }, { "content": " // Hash the roots of subtrees, starting from the root of the lower subtree.\n\n // In case of a single subtree (a full tree), the root of the subtree will be the root of\n\n // the full tree\n\n let mut cur_root = self.full_subtree_roots[self.full_subtree_roots.len() - 1].clone();\n\n for i in (0..self.full_subtree_roots.len() - 1).rev() {\n\n cur_root = self\n\n .hasher\n\n .hash_tree_nodes(self.full_subtree_roots[i].clone(), cur_root)?;\n\n }\n\n Ok(cur_root)\n\n }\n\n\n\n /// Get a proof that the leaf at index `leaf_index` is present in the current tree. 
Called `audit path` in RFC 6982\n\n pub fn get_leaf_inclusion_proof(\n\n &self,\n\n leaf_index: TreeSizeType,\n\n hash_db: &dyn HashDb<H>,\n\n ) -> Result<Vec<H>, MerkleTreeError> {\n\n Self::get_leaf_inclusion_proof_for_tree_size(&self.hasher, leaf_index, self.size, hash_db)\n\n }\n", "file_path": "src/compact_merkle_tree.rs", "rank": 99, "score": 15.996286334782873 } ]
Rust
crates/volta-core/src/tool/node/resolve.rs
gregjopa/volta
18f6b061d9fe5205010291f586518b93533a2f6f
use std::fs::File; use std::io::Write; use std::str::FromStr; use std::time::{Duration, SystemTime}; use super::super::registry_fetch_error; use super::metadata::{NodeEntry, NodeIndex, RawNodeIndex}; use crate::error::{Context, ErrorKind, Fallible}; use crate::fs::{create_staging_file, read_file}; use crate::hook::ToolHooks; use crate::layout::volta_home; use crate::session::Session; use crate::style::progress_spinner; use crate::tool::Node; use crate::version::{VersionSpec, VersionTag}; use attohttpc::Response; use cfg_if::cfg_if; use fs_utils::ensure_containing_dir_exists; use hyperx::header::{CacheControl, CacheDirective, Expires, HttpDate, TypedHeaders}; use log::debug; use semver::{Version, VersionReq}; cfg_if! { if #[cfg(feature = "mock-network")] { fn public_node_version_index() -> String { format!("{}/node-dist/index.json", mockito::SERVER_URL) } } else { fn public_node_version_index() -> String { "https://nodejs.org/dist/index.json".to_string() } } } pub fn resolve(matching: VersionSpec, session: &mut Session) -> Fallible<Version> { let hooks = session.hooks()?.node(); match matching { VersionSpec::Semver(requirement) => resolve_semver(requirement, hooks), VersionSpec::Exact(version) => Ok(version), VersionSpec::None | VersionSpec::Tag(VersionTag::Lts) => resolve_lts(hooks), VersionSpec::Tag(VersionTag::Latest) => resolve_latest(hooks), VersionSpec::Tag(VersionTag::LtsRequirement(req)) => resolve_lts_semver(req, hooks), VersionSpec::Tag(VersionTag::Custom(tag)) => { Err(ErrorKind::NodeVersionNotFound { matching: tag }.into()) } } } fn resolve_latest(hooks: Option<&ToolHooks<Node>>) -> Fallible<Version> { let url = match hooks { Some(&ToolHooks { latest: Some(ref hook), .. }) => { debug!("Using node.latest hook to determine node index URL"); hook.resolve("index.json")? 
} _ => public_node_version_index(), }; let version_opt = match_node_version(&url, |_| true)?; match version_opt { Some(version) => { debug!("Found latest node version ({}) from {}", version, url); Ok(version) } None => Err(ErrorKind::NodeVersionNotFound { matching: "latest".into(), } .into()), } } fn resolve_lts(hooks: Option<&ToolHooks<Node>>) -> Fallible<Version> { let url = match hooks { Some(&ToolHooks { index: Some(ref hook), .. }) => { debug!("Using node.index hook to determine node index URL"); hook.resolve("index.json")? } _ => public_node_version_index(), }; let version_opt = match_node_version(&url, |&NodeEntry { lts, .. }| lts)?; match version_opt { Some(version) => { debug!("Found newest LTS node version ({}) from {}", version, url); Ok(version) } None => Err(ErrorKind::NodeVersionNotFound { matching: "lts".into(), } .into()), } } fn resolve_semver(matching: VersionReq, hooks: Option<&ToolHooks<Node>>) -> Fallible<Version> { let url = match hooks { Some(&ToolHooks { index: Some(ref hook), .. }) => { debug!("Using node.index hook to determine node index URL"); hook.resolve("index.json")? } _ => public_node_version_index(), }; let version_opt = match_node_version(&url, |NodeEntry { version, .. }| matching.matches(version))?; match version_opt { Some(version) => { debug!( "Found node@{} matching requirement '{}' from {}", version, matching, url ); Ok(version) } None => Err(ErrorKind::NodeVersionNotFound { matching: matching.to_string(), } .into()), } } fn resolve_lts_semver(matching: VersionReq, hooks: Option<&ToolHooks<Node>>) -> Fallible<Version> { let url = match hooks { Some(&ToolHooks { index: Some(ref hook), .. }) => { debug!("Using node.index hook to determine node index URL"); hook.resolve("index.json")? } _ => public_node_version_index(), }; let first_pass = match_node_version( &url, |&NodeEntry { ref version, lts, .. 
}| { lts && matching.matches(version) }, )?; match first_pass { Some(version) => { debug!( "Found LTS node@{} matching requirement '{}' from {}", version, matching, url ); return Ok(version); } None => debug!( "No LTS version found matching requirement '{}', checking for non-LTS", matching ), }; match match_node_version(&url, |NodeEntry { version, .. }| matching.matches(version))? { Some(version) => { debug!( "Found non-LTS node@{} matching requirement '{}' from {}", version, matching, url ); Ok(version) } None => Err(ErrorKind::NodeVersionNotFound { matching: matching.to_string(), } .into()), } } fn match_node_version( url: &str, predicate: impl Fn(&NodeEntry) -> bool, ) -> Fallible<Option<Version>> { let index: NodeIndex = resolve_node_versions(url)?.into(); let mut entries = index.entries.into_iter(); Ok(entries .find(predicate) .map(|NodeEntry { version, .. }| version)) } fn read_cached_opt(url: &str) -> Fallible<Option<RawNodeIndex>> { let expiry_file = volta_home()?.node_index_expiry_file(); let expiry = read_file(&expiry_file).with_context(|| ErrorKind::ReadNodeIndexExpiryError { file: expiry_file.to_owned(), })?; if let Some(date) = expiry { let expiry_date = HttpDate::from_str(&date).with_context(|| ErrorKind::ParseNodeIndexExpiryError)?; let current_date = HttpDate::from(SystemTime::now()); if current_date < expiry_date { let index_file = volta_home()?.node_index_file(); let cached = read_file(&index_file).with_context(|| ErrorKind::ReadNodeIndexCacheError { file: index_file.to_owned(), })?; if let Some(content) = cached { if content.starts_with(url) { return serde_json::de::from_str(&content[url.len()..]) .with_context(|| ErrorKind::ParseNodeIndexCacheError); } } } } Ok(None) } fn max_age(headers: &attohttpc::header::HeaderMap) -> u32 { if let Ok(cache_control_header) = headers.decode::<CacheControl>() { for cache_directive in cache_control_header.iter() { if let CacheDirective::MaxAge(max_age) = cache_directive { return *max_age; } } } 4 * 60 * 60 } fn 
resolve_node_versions(url: &str) -> Fallible<RawNodeIndex> { match read_cached_opt(url)? { Some(serial) => { debug!("Found valid cache of Node version index"); Ok(serial) } None => { debug!("Node index cache was not found or was invalid"); let spinner = progress_spinner(&format!("Fetching public registry: {}", url)); let (_, headers, response) = attohttpc::get(url) .send() .and_then(Response::error_for_status) .with_context(registry_fetch_error("Node", url))? .split(); let response_text = response .text() .with_context(registry_fetch_error("Node", url))?; let index: RawNodeIndex = serde_json::de::from_str(&response_text).with_context(|| { ErrorKind::ParseNodeIndexError { from_url: url.to_string(), } })?; let cached = create_staging_file()?; let mut cached_file: &File = cached.as_file(); writeln!(cached_file, "{}", url) .and_then(|_| cached_file.write(response_text.as_bytes())) .with_context(|| ErrorKind::WriteNodeIndexCacheError { file: cached.path().to_path_buf(), })?; let index_cache_file = volta_home()?.node_index_file(); ensure_containing_dir_exists(&index_cache_file).with_context(|| { ErrorKind::ContainingDirError { path: index_cache_file.to_owned(), } })?; cached.persist(&index_cache_file).with_context(|| { ErrorKind::WriteNodeIndexCacheError { file: index_cache_file.to_owned(), } })?; let expiry = create_staging_file()?; let mut expiry_file: &File = expiry.as_file(); let result = if let Ok(expires_header) = headers.decode::<Expires>() { write!(expiry_file, "{}", expires_header) } else { let expiry_date = SystemTime::now() + Duration::from_secs(max_age(&headers).into()); write!(expiry_file, "{}", HttpDate::from(expiry_date)) }; result.with_context(|| ErrorKind::WriteNodeIndexExpiryError { file: expiry.path().to_path_buf(), })?; let index_expiry_file = volta_home()?.node_index_expiry_file(); ensure_containing_dir_exists(&index_expiry_file).with_context(|| { ErrorKind::ContainingDirError { path: index_expiry_file.to_owned(), } })?; 
expiry.persist(&index_expiry_file).with_context(|| { ErrorKind::WriteNodeIndexExpiryError { file: index_expiry_file.to_owned(), } })?; spinner.finish_and_clear(); Ok(index) } } }
use std::fs::File; use std::io::Write; use std::str::FromStr; use std::time::{Duration, SystemTime}; use super::super::registry_fetch_error; use super::metadata::{NodeEntry, NodeIndex, RawNodeIndex}; use crate::error::{Context, ErrorKind, Fallible}; use crate::fs::{create_staging_file, read_file}; use crate::hook::ToolHooks; use crate::layout::volta_home; use crate::session::Session; use crate::style::progress_spinner; use crate::tool::Node; use crate::version::{VersionSpec, VersionTag}; use attohttpc::Response; use cfg_if::cfg_if; use fs_utils::ensure_containing_dir_exists; use hyperx::header::{CacheControl, CacheDirective, Expires, HttpDate, TypedHeaders}; use log::debug; use semver::{Version, VersionReq}; cfg_if! { if #[cfg(feature = "mock-network")] { fn public_node_version_index() -> String { format!("{}/node-dist/index.json", mockit
let index: NodeIndex = resolve_node_versions(url)?.into(); let mut entries = index.entries.into_iter(); Ok(entries .find(predicate) .map(|NodeEntry { version, .. }| version)) } fn read_cached_opt(url: &str) -> Fallible<Option<RawNodeIndex>> { let expiry_file = volta_home()?.node_index_expiry_file(); let expiry = read_file(&expiry_file).with_context(|| ErrorKind::ReadNodeIndexExpiryError { file: expiry_file.to_owned(), })?; if let Some(date) = expiry { let expiry_date = HttpDate::from_str(&date).with_context(|| ErrorKind::ParseNodeIndexExpiryError)?; let current_date = HttpDate::from(SystemTime::now()); if current_date < expiry_date { let index_file = volta_home()?.node_index_file(); let cached = read_file(&index_file).with_context(|| ErrorKind::ReadNodeIndexCacheError { file: index_file.to_owned(), })?; if let Some(content) = cached { if content.starts_with(url) { return serde_json::de::from_str(&content[url.len()..]) .with_context(|| ErrorKind::ParseNodeIndexCacheError); } } } } Ok(None) } fn max_age(headers: &attohttpc::header::HeaderMap) -> u32 { if let Ok(cache_control_header) = headers.decode::<CacheControl>() { for cache_directive in cache_control_header.iter() { if let CacheDirective::MaxAge(max_age) = cache_directive { return *max_age; } } } 4 * 60 * 60 } fn resolve_node_versions(url: &str) -> Fallible<RawNodeIndex> { match read_cached_opt(url)? { Some(serial) => { debug!("Found valid cache of Node version index"); Ok(serial) } None => { debug!("Node index cache was not found or was invalid"); let spinner = progress_spinner(&format!("Fetching public registry: {}", url)); let (_, headers, response) = attohttpc::get(url) .send() .and_then(Response::error_for_status) .with_context(registry_fetch_error("Node", url))? 
.split(); let response_text = response .text() .with_context(registry_fetch_error("Node", url))?; let index: RawNodeIndex = serde_json::de::from_str(&response_text).with_context(|| { ErrorKind::ParseNodeIndexError { from_url: url.to_string(), } })?; let cached = create_staging_file()?; let mut cached_file: &File = cached.as_file(); writeln!(cached_file, "{}", url) .and_then(|_| cached_file.write(response_text.as_bytes())) .with_context(|| ErrorKind::WriteNodeIndexCacheError { file: cached.path().to_path_buf(), })?; let index_cache_file = volta_home()?.node_index_file(); ensure_containing_dir_exists(&index_cache_file).with_context(|| { ErrorKind::ContainingDirError { path: index_cache_file.to_owned(), } })?; cached.persist(&index_cache_file).with_context(|| { ErrorKind::WriteNodeIndexCacheError { file: index_cache_file.to_owned(), } })?; let expiry = create_staging_file()?; let mut expiry_file: &File = expiry.as_file(); let result = if let Ok(expires_header) = headers.decode::<Expires>() { write!(expiry_file, "{}", expires_header) } else { let expiry_date = SystemTime::now() + Duration::from_secs(max_age(&headers).into()); write!(expiry_file, "{}", HttpDate::from(expiry_date)) }; result.with_context(|| ErrorKind::WriteNodeIndexExpiryError { file: expiry.path().to_path_buf(), })?; let index_expiry_file = volta_home()?.node_index_expiry_file(); ensure_containing_dir_exists(&index_expiry_file).with_context(|| { ErrorKind::ContainingDirError { path: index_expiry_file.to_owned(), } })?; expiry.persist(&index_expiry_file).with_context(|| { ErrorKind::WriteNodeIndexExpiryError { file: index_expiry_file.to_owned(), } })?; spinner.finish_and_clear(); Ok(index) } } }
o::SERVER_URL) } } else { fn public_node_version_index() -> String { "https://nodejs.org/dist/index.json".to_string() } } } pub fn resolve(matching: VersionSpec, session: &mut Session) -> Fallible<Version> { let hooks = session.hooks()?.node(); match matching { VersionSpec::Semver(requirement) => resolve_semver(requirement, hooks), VersionSpec::Exact(version) => Ok(version), VersionSpec::None | VersionSpec::Tag(VersionTag::Lts) => resolve_lts(hooks), VersionSpec::Tag(VersionTag::Latest) => resolve_latest(hooks), VersionSpec::Tag(VersionTag::LtsRequirement(req)) => resolve_lts_semver(req, hooks), VersionSpec::Tag(VersionTag::Custom(tag)) => { Err(ErrorKind::NodeVersionNotFound { matching: tag }.into()) } } } fn resolve_latest(hooks: Option<&ToolHooks<Node>>) -> Fallible<Version> { let url = match hooks { Some(&ToolHooks { latest: Some(ref hook), .. }) => { debug!("Using node.latest hook to determine node index URL"); hook.resolve("index.json")? } _ => public_node_version_index(), }; let version_opt = match_node_version(&url, |_| true)?; match version_opt { Some(version) => { debug!("Found latest node version ({}) from {}", version, url); Ok(version) } None => Err(ErrorKind::NodeVersionNotFound { matching: "latest".into(), } .into()), } } fn resolve_lts(hooks: Option<&ToolHooks<Node>>) -> Fallible<Version> { let url = match hooks { Some(&ToolHooks { index: Some(ref hook), .. }) => { debug!("Using node.index hook to determine node index URL"); hook.resolve("index.json")? } _ => public_node_version_index(), }; let version_opt = match_node_version(&url, |&NodeEntry { lts, .. }| lts)?; match version_opt { Some(version) => { debug!("Found newest LTS node version ({}) from {}", version, url); Ok(version) } None => Err(ErrorKind::NodeVersionNotFound { matching: "lts".into(), } .into()), } } fn resolve_semver(matching: VersionReq, hooks: Option<&ToolHooks<Node>>) -> Fallible<Version> { let url = match hooks { Some(&ToolHooks { index: Some(ref hook), .. 
}) => { debug!("Using node.index hook to determine node index URL"); hook.resolve("index.json")? } _ => public_node_version_index(), }; let version_opt = match_node_version(&url, |NodeEntry { version, .. }| matching.matches(version))?; match version_opt { Some(version) => { debug!( "Found node@{} matching requirement '{}' from {}", version, matching, url ); Ok(version) } None => Err(ErrorKind::NodeVersionNotFound { matching: matching.to_string(), } .into()), } } fn resolve_lts_semver(matching: VersionReq, hooks: Option<&ToolHooks<Node>>) -> Fallible<Version> { let url = match hooks { Some(&ToolHooks { index: Some(ref hook), .. }) => { debug!("Using node.index hook to determine node index URL"); hook.resolve("index.json")? } _ => public_node_version_index(), }; let first_pass = match_node_version( &url, |&NodeEntry { ref version, lts, .. }| { lts && matching.matches(version) }, )?; match first_pass { Some(version) => { debug!( "Found LTS node@{} matching requirement '{}' from {}", version, matching, url ); return Ok(version); } None => debug!( "No LTS version found matching requirement '{}', checking for non-LTS", matching ), }; match match_node_version(&url, |NodeEntry { version, .. }| matching.matches(version))? { Some(version) => { debug!( "Found non-LTS node@{} matching requirement '{}' from {}", version, matching, url ); Ok(version) } None => Err(ErrorKind::NodeVersionNotFound { matching: matching.to_string(), } .into()), } } fn match_node_version( url: &str, predicate: impl Fn(&NodeEntry) -> bool, ) -> Fallible<Option<Version>> {
random
[ { "content": "fn resolve_semver_legacy(matching: VersionReq, url: String) -> Fallible<Version> {\n\n let spinner = progress_spinner(&format!(\"Fetching public registry: {}\", url));\n\n let releases: RawYarnIndex = attohttpc::get(&url)\n\n .send()\n\n .and_then(Response::error_for_status)\n\n .and_then(Response::json)\n\n .with_context(registry_fetch_error(\"Yarn\", &url))?;\n\n let index = YarnIndex::from(releases);\n\n let releases = index.entries;\n\n spinner.finish_and_clear();\n\n let version_opt = releases.into_iter().rev().find(|v| matching.matches(v));\n\n\n\n match version_opt {\n\n Some(version) => {\n\n debug!(\n\n \"Found yarn@{} matching requirement '{}' from {}\",\n\n version, matching, url\n\n );\n\n Ok(version)\n\n }\n\n None => Err(ErrorKind::YarnVersionNotFound {\n\n matching: matching.to_string(),\n\n }\n\n .into()),\n\n }\n\n}\n", "file_path": "crates/volta-core/src/tool/yarn/resolve.rs", "rank": 0, "score": 184880.9049596996 }, { "content": "/// Read a fetched package and generate a map of all the bins it provides\n\nfn read_bins(name: &str, version: &Version) -> Fallible<HashMap<String, String>> {\n\n let image_dir = volta_home()?.package_image_dir(&name, &version.to_string());\n\n let pkg_info = BinManifest::for_dir(&image_dir)?;\n\n let bin_map = pkg_info.bin;\n\n if bin_map.is_empty() {\n\n return Err(ErrorKind::NoPackageExecutables.into());\n\n }\n\n\n\n for (bin_name, _bin_path) in bin_map.iter() {\n\n // check for conflicts with installed bins\n\n // some packages may install bins with the same name\n\n let bin_config_file = volta_home()?.default_tool_bin_config(&bin_name);\n\n if bin_config_file.exists() {\n\n let bin_config = BinConfig::from_file(bin_config_file)?;\n\n // if the bin was installed by the package that is currently being installed,\n\n // that's ok - otherwise it's an error\n\n if name != bin_config.package {\n\n return Err(ErrorKind::BinaryAlreadyInstalled {\n\n bin_name: bin_name.clone(),\n\n existing_package: 
bin_config.package,\n\n new_package: name.to_string(),\n\n }\n\n .into());\n\n }\n\n }\n\n }\n\n\n\n Ok(bin_map)\n\n}\n\n\n", "file_path": "crates/volta-core/src/tool/package/install.rs", "rank": 1, "score": 171799.26771482287 }, { "content": "fn fetch_yarn_index() -> Fallible<(String, PackageIndex)> {\n\n let url = public_registry_index(\"yarn\");\n\n let spinner = progress_spinner(&format!(\"Fetching public registry: {}\", url));\n\n let metadata: RawPackageMetadata = attohttpc::get(&url)\n\n .header(ACCEPT, NPM_ABBREVIATED_ACCEPT_HEADER)\n\n .send()\n\n .and_then(Response::error_for_status)\n\n .and_then(Response::json)\n\n .with_context(registry_fetch_error(\"Yarn\", &url))?;\n\n\n\n spinner.finish_and_clear();\n\n Ok((url, metadata.into()))\n\n}\n\n\n", "file_path": "crates/volta-core/src/tool/yarn/resolve.rs", "rank": 2, "score": 169537.19480155525 }, { "content": "/// Execute a shell command and return the trimmed stdout from that command\n\nfn execute_binary(bin: &str, base_path: &Path, extra_arg: Option<String>) -> Fallible<String> {\n\n let mut trimmed = bin.trim().to_string();\n\n let mut words = trimmed.parse_cmdline_words();\n\n let cmd = match words.next() {\n\n Some(word) => {\n\n // Treat any path that starts with a './' or '../' as a relative path (using OS separator)\n\n if word.starts_with(REL_PATH.as_str()) || word.starts_with(REL_PATH_PARENT.as_str()) {\n\n canonicalize(base_path.join(word)).with_context(|| ErrorKind::HookPathError {\n\n command: String::from(word),\n\n })?\n\n } else {\n\n PathBuf::from(word)\n\n }\n\n }\n\n None => {\n\n return Err(ErrorKind::InvalidHookCommand {\n\n command: String::from(bin.trim()),\n\n }\n\n .into())\n\n }\n", "file_path": "crates/volta-core/src/hook/tool.rs", "rank": 3, "score": 168317.95626673184 }, { "content": "fn resolve_latest_legacy(url: String) -> Fallible<Version> {\n\n let response_text = attohttpc::get(&url)\n\n .send()\n\n .and_then(Response::error_for_status)\n\n .and_then(Response::text)\n\n 
.with_context(|| ErrorKind::YarnLatestFetchError {\n\n from_url: url.clone(),\n\n })?;\n\n\n\n debug!(\"Found yarn latest version ({}) from {}\", response_text, url);\n\n parse_version(response_text)\n\n}\n\n\n", "file_path": "crates/volta-core/src/tool/yarn/resolve.rs", "rank": 4, "score": 166642.13689306594 }, { "content": "fn resolve_custom_tag(tag: String) -> Fallible<Version> {\n\n let (url, mut index) = fetch_yarn_index()?;\n\n\n\n match index.tags.remove(&tag) {\n\n Some(version) => {\n\n debug!(\"Found yarn@{} matching tag '{}' from {}\", version, tag, url);\n\n Ok(version)\n\n }\n\n None => Err(ErrorKind::YarnVersionNotFound { matching: tag }.into()),\n\n }\n\n}\n\n\n", "file_path": "crates/volta-core/src/tool/yarn/resolve.rs", "rank": 5, "score": 166642.13689306594 }, { "content": "/// Reads the contents of a directory and returns a Vec containing the names of\n\n/// all the binaries installed by the input package.\n\nfn binaries_from_package(package: &str) -> Fallible<Vec<String>> {\n\n let bin_config_dir = volta_home()?.default_bin_dir();\n\n if bin_config_dir.exists() {\n\n dir_entry_match(&bin_config_dir, |entry| {\n\n let path = entry.path();\n\n if let Ok(config) = BinConfig::from_file(path) {\n\n if config.package == package {\n\n return Some(config.name);\n\n }\n\n };\n\n None\n\n })\n\n .with_context(|| ErrorKind::ReadBinConfigDirError {\n\n dir: bin_config_dir.to_owned(),\n\n })\n\n } else {\n\n Ok(vec![])\n\n }\n\n}\n", "file_path": "crates/volta-core/src/tool/package/mod.rs", "rank": 6, "score": 164035.9428557119 }, { "content": "fn get_shim_list_deduped(dir: &Path) -> Fallible<HashSet<String>> {\n\n let contents = read_dir_eager(dir).with_context(|| ErrorKind::ReadDirError {\n\n dir: dir.to_owned(),\n\n })?;\n\n\n\n #[cfg(unix)]\n\n {\n\n let mut shims: HashSet<String> = contents.filter_map(entry_to_shim_name).collect();\n\n shims.insert(\"node\".into());\n\n shims.insert(\"npm\".into());\n\n shims.insert(\"npx\".into());\n\n 
shims.insert(\"yarn\".into());\n\n Ok(shims)\n\n }\n\n\n\n #[cfg(windows)]\n\n {\n\n // On Windows, the default shims are installed in Program Files, so we don't need to generate them here\n\n Ok(contents.filter_map(entry_to_shim_name).collect())\n\n }\n\n}\n\n\n", "file_path": "crates/volta-core/src/shim.rs", "rank": 7, "score": 161997.3281913379 }, { "content": "fn fetch_npm_index(hooks: Option<&ToolHooks<Npm>>) -> Fallible<(String, PackageIndex)> {\n\n let url = match hooks {\n\n Some(&ToolHooks {\n\n index: Some(ref hook),\n\n ..\n\n }) => {\n\n debug!(\"Using npm.index hook to determine npm index URL\");\n\n hook.resolve(\"npm\")?\n\n }\n\n _ => public_registry_index(\"npm\"),\n\n };\n\n\n\n let spinner = progress_spinner(&format!(\"Fetching public registry: {}\", url));\n\n let metadata: RawPackageMetadata = attohttpc::get(&url)\n\n .header(ACCEPT, NPM_ABBREVIATED_ACCEPT_HEADER)\n\n .send()\n\n .and_then(Response::error_for_status)\n\n .and_then(Response::json)\n\n .with_context(registry_fetch_error(\"npm\", &url))?;\n\n\n\n spinner.finish_and_clear();\n\n Ok((url, metadata.into()))\n\n}\n\n\n", "file_path": "crates/volta-core/src/tool/npm/resolve.rs", "rank": 8, "score": 150018.0361030553 }, { "content": "fn workspace_hooks_json() -> String {\n\n format!(\n\n r#\"\n\n{{\n\n \"npm\": {{\n\n \"distro\": {{\n\n \"template\": \"{0}/hook/workspace/npm/{{{{version}}}}\"\n\n }}\n\n }},\n\n \"yarn\": {{\n\n \"distro\": {{\n\n \"template\": \"{0}/hook/workspace/yarn/{{{{version}}}}\"\n\n }}\n\n }}\n\n}}\"#,\n\n mockito::SERVER_URL\n\n )\n\n}\n\n\n", "file_path": "tests/acceptance/hooks.rs", "rank": 9, "score": 148730.43799568102 }, { "content": "fn project_hooks_json() -> String {\n\n format!(\n\n r#\"\n\n{{\n\n \"yarn\": {{\n\n \"distro\": {{\n\n \"template\": \"{0}/hook/project/yarn/{{{{version}}}}\"\n\n }}\n\n }}\n\n}}\"#,\n\n mockito::SERVER_URL\n\n )\n\n}\n\n\n", "file_path": "tests/acceptance/hooks.rs", "rank": 10, "score": 148730.43799568102 }, { "content": 
"fn default_hooks_json() -> String {\n\n format!(\n\n r#\"\n\n{{\n\n \"node\": {{\n\n \"distro\": {{\n\n \"template\": \"{}/hook/default/node/{{{{version}}}}\"\n\n }}\n\n }},\n\n \"npm\": {{\n\n \"distro\": {{\n\n \"template\": \"{0}/hook/default/npm/{{{{version}}}}\"\n\n }}\n\n }},\n\n \"yarn\": {{\n\n \"distro\": {{\n\n \"template\": \"{0}/hook/default/yarn/{{{{version}}}}\"\n\n }}\n\n }}\n\n}}\"#,\n\n mockito::SERVER_URL\n\n )\n\n}\n\n\n", "file_path": "tests/acceptance/hooks.rs", "rank": 11, "score": 148730.43799568102 }, { "content": "fn yarn_hooks_json() -> String {\n\n format!(\n\n r#\"\n\n{{\n\n \"yarn\": {{\n\n \"latest\": {{\n\n \"template\": \"{0}/yarn-old/latest\"\n\n }},\n\n \"index\": {{\n\n \"template\": \"{0}/yarn-old/index\"\n\n }}\n\n }}\n\n}}\"#,\n\n mockito::SERVER_URL\n\n )\n\n}\n\n\n", "file_path": "tests/acceptance/hooks.rs", "rank": 12, "score": 148730.43799568102 }, { "content": "/// Determine the remote URL to download from, using the hooks if avaialble\n\nfn determine_remote_url(version: &Version, hooks: Option<&ToolHooks<Npm>>) -> Fallible<String> {\n\n let version_str = version.to_string();\n\n match hooks {\n\n Some(&ToolHooks {\n\n distro: Some(ref hook),\n\n ..\n\n }) => {\n\n debug!(\"Using npm.distro hook to determine download URL\");\n\n let distro_file_name = Npm::archive_filename(&version_str);\n\n hook.resolve(&version, &distro_file_name)\n\n }\n\n _ => Ok(public_registry_package(\"npm\", &version_str)),\n\n }\n\n}\n\n\n", "file_path": "crates/volta-core/src/tool/npm/fetch.rs", "rank": 13, "score": 148064.47577180507 }, { "content": "/// Determine the remote URL to download from, using the hooks if available\n\nfn determine_remote_url(version: &Version, hooks: Option<&ToolHooks<Yarn>>) -> Fallible<String> {\n\n let version_str = version.to_string();\n\n match hooks {\n\n Some(&ToolHooks {\n\n distro: Some(ref hook),\n\n ..\n\n }) => {\n\n debug!(\"Using yarn.distro hook to determine download URL\");\n\n let distro_file_name = 
Yarn::archive_filename(&version_str);\n\n hook.resolve(&version, &distro_file_name)\n\n }\n\n _ => Ok(public_registry_package(\"yarn\", &version_str)),\n\n }\n\n}\n\n\n", "file_path": "crates/volta-core/src/tool/yarn/fetch.rs", "rank": 14, "score": 148064.47577180507 }, { "content": "/// Determine the remote URL to download from, using the hooks if available\n\nfn determine_remote_url(version: &Version, hooks: Option<&ToolHooks<Node>>) -> Fallible<String> {\n\n let version_str = version.to_string();\n\n let distro_file_name = Node::archive_filename(&version_str);\n\n match hooks {\n\n Some(&ToolHooks {\n\n distro: Some(ref hook),\n\n ..\n\n }) => {\n\n debug!(\"Using node.distro hook to determine download URL\");\n\n hook.resolve(&version, &distro_file_name)\n\n }\n\n _ => Ok(format!(\n\n \"{}/v{}/{}\",\n\n public_node_server_root(),\n\n version,\n\n distro_file_name\n\n )),\n\n }\n\n}\n\n\n", "file_path": "crates/volta-core/src/tool/node/fetch.rs", "rank": 15, "score": 148064.47577180507 }, { "content": "/// Combines all the arguments into a single String\n\nfn collect_arguments() -> String {\n\n // The Debug formatter for OsString properly quotes and escapes each value\n\n args_os()\n\n .map(|arg| format!(\"{:?}\", arg))\n\n .collect::<Vec<String>>()\n\n .join(\" \")\n\n}\n", "file_path": "crates/volta-core/src/error/reporter.rs", "rank": 16, "score": 145274.6639477624 }, { "content": "fn read_file_to_string(file_path: PathBuf) -> String {\n\n let mut contents = String::new();\n\n let mut file = ok_or_panic! { File::open(file_path) };\n\n ok_or_panic! { file.read_to_string(&mut contents) };\n\n contents\n\n}\n", "file_path": "tests/acceptance/support/sandbox.rs", "rank": 17, "score": 144818.6951283982 }, { "content": "fn read_file_to_string(file_path: PathBuf) -> String {\n\n let mut contents = String::new();\n\n let mut file = ok_or_panic! { File::open(file_path) };\n\n ok_or_panic! 
{ file.read_to_string(&mut contents) };\n\n contents\n\n}\n", "file_path": "tests/smoke/support/temp_project.rs", "rank": 18, "score": 143500.3676767373 }, { "content": "fn done(warnings: Vec<String>, errors: Vec<String>) -> Validity {\n\n match (warnings.len(), errors.len()) {\n\n (0, 0) => Validity::Valid,\n\n (_, 0) => Validity::ValidForOldPackages { warnings },\n\n (_, _) => Validity::Invalid { warnings, errors },\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn traditional() {\n\n assert_eq!(validate(\"some-package\"), Validity::Valid);\n\n assert_eq!(validate(\"example.com\"), Validity::Valid);\n\n assert_eq!(validate(\"under_score\"), Validity::Valid);\n\n assert_eq!(validate(\"period.js\"), Validity::Valid);\n\n assert_eq!(validate(\"123numeric\"), Validity::Valid);\n\n assert_eq!(\n", "file_path": "crates/validate-npm-package-name/src/lib.rs", "rank": 19, "score": 141140.76812465087 }, { "content": "fn package_source(package: &Package) -> String {\n\n match package {\n\n Package::Default { .. } => String::from(\" (default)\"),\n\n Package::Project { path, .. } => format!(\" (current @ {})\", path.display()),\n\n Package::Fetched(..) 
=> String::new(),\n\n }\n\n}\n\n\n", "file_path": "src/command/list/plain.rs", "rank": 20, "score": 138396.91491873632 }, { "content": "/// Format a single `Toolchain::Package` and its associated tools.\n\nfn format_package(package: &Package) -> String {\n\n match package {\n\n Package::Default {\n\n details,\n\n node,\n\n tools,\n\n ..\n\n } => {\n\n let tools = match tools.len() {\n\n 0 => String::from(\"\"),\n\n _ => tools.join(\", \"),\n\n };\n\n\n\n let version = format!(\"{}{}\", details.version, list_package_source(package));\n\n let binaries = WRAPPER.fill(&format!(\"binary tools: {}\", tools));\n\n let platform_detail = WRAPPER.fill(&format!(\n\n \"runtime: {}\\npackage manager: {}\",\n\n tool_version(\"node\", &node),\n\n // TODO: Should be updated when we support installing with custom package_managers,\n\n // whether Yarn or non-built-in versions of npm\n", "file_path": "src/command/list/human.rs", "rank": 21, "score": 138396.91491873632 }, { "content": "/// Format a set of `Toolchain::Node`s.\n\nfn display_node(runtimes: &[Node]) -> String {\n\n if runtimes.is_empty() {\n\n NO_RUNTIME.to_string()\n\n } else {\n\n format!(\n\n \"⚡️ Node runtimes in your toolchain:\\n\\n{}\",\n\n format_runtime_list(&runtimes)\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/command/list/human.rs", "rank": 22, "score": 138396.91491873632 }, { "content": "fn display_package(package: &Package) -> String {\n\n match package {\n\n Package::Default {\n\n details,\n\n node,\n\n tools,\n\n ..\n\n } => {\n\n let tools = match tools.len() {\n\n 0 => String::from(\" \"),\n\n _ => format!(\" {} \", tools.join(\", \")),\n\n };\n\n\n\n format!(\n\n \"package {} /{}/ {} {}{}\",\n\n tool_version(&details.name, &details.version),\n\n tools,\n\n tool_version(\"node\", &node),\n\n // Should be updated when we support installing with custom package_managers,\n\n // whether Yarn or non-built-in versions of npm\n", "file_path": "src/command/list/plain.rs", "rank": 23, "score": 138396.91491873632 
}, { "content": "/// Format a single `Toolchain::Package` without detail information\n\nfn format_tool(package: &Package) -> String {\n\n match package {\n\n Package::Default { tools, .. } | Package::Project { tools, .. } => {\n\n let tools = match tools.len() {\n\n 0 => String::from(\"\"),\n\n _ => tools.join(\", \"),\n\n };\n\n WRAPPER.fill(&format!(\"{}{}\", tools, list_package_source(package)))\n\n }\n\n Package::Fetched(..) => String::new(),\n\n }\n\n}\n\n\n", "file_path": "src/command/list/human.rs", "rank": 24, "score": 138396.91491873632 }, { "content": "fn bin_config(name: &str) -> String {\n\n format!(\n\n r#\"{{\n\n \"name\": \"{}\",\n\n \"package\": \"cowsay\",\n\n \"version\": \"1.4.0\",\n\n \"path\": \"./cli.js\",\n\n \"platform\": {{\n\n \"node\": {{\n\n \"runtime\": \"11.10.1\",\n\n \"npm\": \"6.7.0\"\n\n }},\n\n \"yarn\": null\n\n }}\n\n}}\"#,\n\n name\n\n )\n\n}\n\n\n\nconst VOLTA_LOGLEVEL: &str = \"VOLTA_LOGLEVEL\";\n\n\n", "file_path": "tests/acceptance/volta_uninstall.rs", "rank": 25, "score": 138396.91491873632 }, { "content": "/// Format a set of `Toolchain::Package`s and their associated tools.\n\nfn display_packages(packages: &[Package]) -> String {\n\n if packages.is_empty() {\n\n String::from(\n\n \"⚡️ No tools or packages installed.\n\n\n\nYou can safely install packages by running `volta install <package name>`.\n\nSee `volta help install` for details and more options.\",\n\n )\n\n } else {\n\n format!(\n\n \"⚡️ Package versions in your toolchain:\\n\\n{}\",\n\n format_package_list(packages)\n\n )\n\n }\n\n}\n\n\n\n/// Format a single `Toolchain::Tool` with associated `Toolchain::Package`\n\n\n", "file_path": "src/command/list/human.rs", "rank": 26, "score": 138396.91491873632 }, { "content": "fn executable(name: &str) -> String {\n\n format!(\"{}{}\", name, std::env::consts::EXE_SUFFIX)\n\n}\n", "file_path": "crates/volta-layout/src/lib.rs", "rank": 27, "score": 138396.91491873632 }, { "content": "/// format a single version of 
`Toolchain::Node`.\n\nfn format_runtime(runtime: &Node) -> String {\n\n format!(\"v{}{}\", runtime.version, runtime.source)\n\n}\n\n\n", "file_path": "src/command/list/human.rs", "rank": 28, "score": 138396.91491873632 }, { "content": "fn platform_with_node(node: &str) -> String {\n\n format!(\n\n r#\"{{\n\n \"node\": {{\n\n \"runtime\": \"{}\",\n\n \"npm\": null\n\n }},\n\n \"yarn\": null\n\n}}\"#,\n\n node\n\n )\n\n}\n\n\n", "file_path": "tests/acceptance/volta_install.rs", "rank": 29, "score": 138396.91491873632 }, { "content": "/// Format a list of `Toolchain::Package`s without detail information\n\nfn format_tool_list(packages: &[Package]) -> String {\n\n packages\n\n .iter()\n\n .map(format_tool)\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\")\n\n}\n", "file_path": "src/command/list/human.rs", "rank": 30, "score": 136768.08384539577 }, { "content": "/// Format a set of `Toolchain::PackageManager`s for `volta list npm`\n\nfn display_npms(managers: &[PackageManager]) -> String {\n\n if managers.is_empty() {\n\n \"⚡️ No custom npm versions installed (npm is still available bundled with Node).\n\n\n\nYou can install an npm version by running `volta install npm`.\n\nSee `volta help install` for details and more options.\"\n\n .into()\n\n } else {\n\n let versions = WRAPPER.fill(\n\n &managers\n\n .iter()\n\n .map(format_package_manager)\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\"),\n\n );\n\n format!(\"⚡️ Custom npm versions in your toolchain:\\n\\n{}\", versions)\n\n }\n\n}\n\n\n", "file_path": "src/command/list/human.rs", "rank": 31, "score": 136768.08384539577 }, { "content": "/// format a list of `Toolchain::Node`s.\n\nfn format_runtime_list(runtimes: &[Node]) -> String {\n\n WRAPPER.fill(\n\n &runtimes\n\n .iter()\n\n .map(format_runtime)\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\"),\n\n )\n\n}\n\n\n", "file_path": "src/command/list/human.rs", "rank": 32, "score": 136768.08384539577 }, { "content": "fn substitute_macros(input: &str) -> String {\n\n let 
macros = [\n\n (\"[RUNNING]\", \" Running\"),\n\n (\"[COMPILING]\", \" Compiling\"),\n\n (\"[CHECKING]\", \" Checking\"),\n\n (\"[CREATED]\", \" Created\"),\n\n (\"[FINISHED]\", \" Finished\"),\n\n (\"[ERROR]\", \"error:\"),\n\n (\"[WARNING]\", \"warning:\"),\n\n (\"[DOCUMENTING]\", \" Documenting\"),\n\n (\"[FRESH]\", \" Fresh\"),\n\n (\"[UPDATING]\", \" Updating\"),\n\n (\"[ADDING]\", \" Adding\"),\n\n (\"[REMOVING]\", \" Removing\"),\n\n (\"[DOCTEST]\", \" Doc-tests\"),\n\n (\"[PACKAGING]\", \" Packaging\"),\n\n (\"[DOWNLOADING]\", \" Downloading\"),\n\n (\"[UPLOADING]\", \" Uploading\"),\n\n (\"[VERIFYING]\", \" Verifying\"),\n\n (\"[ARCHIVING]\", \" Archiving\"),\n", "file_path": "crates/test-support/src/matchers.rs", "rank": 33, "score": 136768.08384539577 }, { "content": "/// format a list of `Toolchain::Package`s and their associated tools.\n\nfn format_package_list(packages: &[Package]) -> String {\n\n WRAPPER.fill(\n\n &packages\n\n .iter()\n\n .map(format_package)\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\"),\n\n )\n\n}\n\n\n", "file_path": "src/command/list/human.rs", "rank": 34, "score": 136768.08384539577 }, { "content": "/// List a the source from a `Toolchain::Package`.\n\nfn list_package_source(package: &Package) -> String {\n\n match package {\n\n Package::Default { .. } => String::from(\" (default)\"),\n\n Package::Project { path, .. } => format!(\" (current @ {})\", path.display()),\n\n Package::Fetched(..) 
=> String::new(),\n\n }\n\n}\n\n\n\n// These tests are organized by way of the *commands* supplied to `list`, unlike\n\n// in the `plain` module, because the formatting varies by command here, as it\n\n// does not there.\n\n#[cfg(test)]\n\nmod tests {\n\n use std::path::PathBuf;\n\n\n\n use lazy_static::lazy_static;\n\n use semver::Version;\n\n\n\n use super::*;\n\n\n", "file_path": "src/command/list/human.rs", "rank": 35, "score": 136768.08384539577 }, { "content": "fn package_json_with_pinned_node(node: &str) -> String {\n\n format!(\n\n r#\"{{\n\n \"name\": \"test-package\",\n\n \"volta\": {{\n\n \"node\": \"{}\"\n\n }}\n\n}}\"#,\n\n node\n\n )\n\n}\n\n\n", "file_path": "tests/acceptance/volta_pin.rs", "rank": 36, "score": 135201.75860660124 }, { "content": "fn package_json_with_pinned_node(node: &str) -> String {\n\n format!(\n\n r#\"{{\n\n \"name\": \"test-package\",\n\n \"volta\": {{\n\n \"node\": \"{}\"\n\n }}\n\n}}\"#,\n\n node\n\n )\n\n}\n\n\n", "file_path": "tests/acceptance/volta_run.rs", "rank": 37, "score": 135201.75860660124 }, { "content": "fn describe_packages(packages: &[Package]) -> Option<String> {\n\n if packages.is_empty() {\n\n None\n\n } else {\n\n Some(\n\n packages\n\n .iter()\n\n .map(display_package)\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\"),\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/command/list/plain.rs", "rank": 38, "score": 133873.02593690646 }, { "content": "fn describe_runtimes(runtimes: &[Node]) -> Option<String> {\n\n if runtimes.is_empty() {\n\n None\n\n } else {\n\n Some(\n\n runtimes\n\n .iter()\n\n .map(|runtime| display_node(&runtime.source, &runtime.version))\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\"),\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/command/list/plain.rs", "rank": 39, "score": 133873.02593690646 }, { "content": "fn display_package_manager(package_manager: &PackageManager) -> String {\n\n format!(\n\n \"package-manager {}{}\",\n\n tool_version(&package_manager.kind, &package_manager.version),\n\n 
package_manager.source\n\n )\n\n}\n\n\n", "file_path": "src/command/list/plain.rs", "rank": 40, "score": 133694.40897794146 }, { "content": "fn npm_distro_file_name(version: &str) -> String {\n\n package_distro_file_name(\"npm\", version)\n\n}\n", "file_path": "tests/smoke/support/temp_project.rs", "rank": 41, "score": 133694.40897794146 }, { "content": "fn yarn_distro_file_name(version: &str) -> String {\n\n format!(\"yarn-v{}.tar.gz\", version)\n\n}\n", "file_path": "tests/smoke/support/temp_project.rs", "rank": 42, "score": 133694.40897794146 }, { "content": "/// format a single `Toolchain::PackageManager`.\n\nfn format_package_manager(package_manager: &PackageManager) -> String {\n\n format!(\"v{}{}\", package_manager.version, package_manager.source)\n\n}\n\n\n", "file_path": "src/command/list/human.rs", "rank": 43, "score": 133694.40897794146 }, { "content": "/// format the title for a kind of package manager\n\n///\n\n/// This is distinct from the `Display` impl, because we need 'Yarn' to be capitalized for human output\n\nfn format_package_manager_kind(kind: PackageManagerKind) -> String {\n\n match kind {\n\n PackageManagerKind::Npm => \"npm\".into(),\n\n PackageManagerKind::Yarn => \"Yarn\".into(),\n\n }\n\n}\n\n\n", "file_path": "src/command/list/human.rs", "rank": 44, "score": 132242.7656642404 }, { "content": "/// format a list of `Toolchain::PackageManager`s in condensed form\n\nfn format_package_manager_list_condensed(package_managers: &[PackageManager]) -> String {\n\n WRAPPER.fill(\n\n &package_managers\n\n .iter()\n\n .map(|manager| {\n\n format!(\n\n \"{}: {}\",\n\n format_package_manager_kind(manager.kind),\n\n format_package_manager(manager)\n\n )\n\n })\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\"),\n\n )\n\n}\n\n\n", "file_path": "src/command/list/human.rs", "rank": 45, "score": 130843.79662938433 }, { "content": "/// format a list of `Toolchain::PackageManager`s in verbose form\n\nfn format_package_manager_list_verbose(package_managers: 
&[PackageManager]) -> String {\n\n let mut manager_lists = BTreeMap::new();\n\n\n\n for manager in package_managers {\n\n manager_lists\n\n .entry(manager.kind)\n\n .or_insert_with(Vec::new)\n\n .push(format_package_manager(manager));\n\n }\n\n\n\n WRAPPER.fill(\n\n &manager_lists\n\n .iter()\n\n .map(|(kind, list)| {\n\n format!(\n\n \"{}:\\n{}\",\n\n format_package_manager_kind(*kind),\n\n WRAPPER.fill(&list.join(\"\\n\"))\n\n )\n\n })\n\n .collect::<Vec<_>>()\n\n .join(\"\\n\"),\n\n )\n\n}\n\n\n", "file_path": "src/command/list/human.rs", "rank": 46, "score": 130843.79662938433 }, { "content": "fn get_tool_name(args: &mut ArgsOs) -> Fallible<OsString> {\n\n args.next()\n\n .and_then(|arg0| Path::new(&arg0).file_name().map(tool_name_from_file_name))\n\n .ok_or_else(|| ErrorKind::CouldNotDetermineTool.into())\n\n}\n\n\n", "file_path": "crates/volta-core/src/run/mod.rs", "rank": 47, "score": 130176.50396605922 }, { "content": "fn display_node(source: &Source, version: &Version) -> String {\n\n format!(\"runtime {}{}\", tool_version(\"node\", version), source)\n\n}\n\n\n", "file_path": "src/command/list/plain.rs", "rank": 48, "score": 129700.50666075792 }, { "content": "fn describe_package_managers(package_managers: &[PackageManager]) -> Option<String> {\n\n if package_managers.is_empty() {\n\n None\n\n } else {\n\n Some(\n\n package_managers\n\n .iter()\n\n .map(|package_manager| display_package_manager(&package_manager))\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\"),\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/command/list/plain.rs", "rank": 49, "score": 129347.7077557511 }, { "content": "pub fn node_distro_file_name(version: &str) -> String {\n\n format!(\n\n \"node-v{}-{}-{}.tar.gz\",\n\n version, NODE_DISTRO_OS, NODE_DISTRO_ARCH\n\n )\n\n}\n", "file_path": "tests/smoke/support/temp_project.rs", "rank": 50, "score": 129347.7077557511 }, { "content": "fn describe_tool_set(name: &str, hosts: &[Package]) -> String {\n\n hosts\n\n .iter()\n\n .filter_map(|package| 
display_tool(name, package))\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\")\n\n}\n\n\n", "file_path": "src/command/list/plain.rs", "rank": 51, "score": 128193.15703209816 }, { "content": "fn platform_with_node_npm(node: &str, npm: &str) -> String {\n\n format!(\n\n r#\"{{\n\n \"node\": {{\n\n \"runtime\": \"{}\",\n\n \"npm\": \"{}\"\n\n }},\n\n \"yarn\": null\n\n}}\"#,\n\n node, npm\n\n )\n\n}\n\n\n\nconst NODE_VERSION_INFO: &str = r#\"[\n\n{\"version\":\"v10.99.1040\",\"npm\":\"6.2.26\",\"lts\": \"Dubnium\",\"files\":[\"linux-x64\",\"osx-x64-tar\",\"win-x64-zip\",\"win-x86-zip\", \"linux-arm64\"]},\n\n{\"version\":\"v9.27.6\",\"npm\":\"5.6.17\",\"lts\": false,\"files\":[\"linux-x64\",\"osx-x64-tar\",\"win-x64-zip\",\"win-x86-zip\", \"linux-arm64\"]},\n\n{\"version\":\"v8.9.10\",\"npm\":\"5.6.7\",\"lts\": false,\"files\":[\"linux-x64\",\"osx-x64-tar\",\"win-x64-zip\",\"win-x86-zip\", \"linux-arm64\"]},\n\n{\"version\":\"v6.19.62\",\"npm\":\"3.10.1066\",\"lts\": false,\"files\":[\"linux-x64\",\"osx-x64-tar\",\"win-x64-zip\",\"win-x86-zip\", \"linux-arm64\"]}\n\n]\n\n\"#;\n", "file_path": "tests/acceptance/volta_install.rs", "rank": 52, "score": 128193.15703209816 }, { "content": "fn display_tool(tool: &str, host_packages: &[Package]) -> String {\n\n if host_packages.is_empty() {\n\n format!(\n\n \"⚡️ No tools or packages named `{}` installed.\n\n\n\nYou can safely install packages by running `volta install <package name>`.\n\nSee `volta help install` for details and more options.\",\n\n tool\n\n )\n\n } else {\n\n let versions = WRAPPER.fill(\n\n &host_packages\n\n .iter()\n\n .map(format_package)\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\"),\n\n );\n\n format!(\"⚡️ Tool `{}` available from:\\n\\n{}\", tool, versions)\n\n }\n\n}\n\n\n", "file_path": "src/command/list/human.rs", "rank": 53, "score": 128193.15703209816 }, { "content": "fn compose_error_details(err: &VoltaError) -> Option<String> {\n\n // Only compose details if there is an underlying cause for 
the error\n\n let mut current = err.source()?;\n\n let mut details = String::new();\n\n\n\n // Walk up the tree of causes and include all of them\n\n loop {\n\n details.push_str(&format_error_cause(current));\n\n\n\n match current.source() {\n\n Some(cause) => {\n\n details.push_str(\"\\n\\n\");\n\n current = cause;\n\n }\n\n None => {\n\n break;\n\n }\n\n };\n\n }\n\n\n\n Some(details)\n\n}\n\n\n", "file_path": "crates/volta-core/src/error/reporter.rs", "rank": 54, "score": 127948.73872089502 }, { "content": "fn resolve_semver_from_registry(matching: VersionReq) -> Fallible<Version> {\n\n let (url, index) = fetch_yarn_index()?;\n\n\n\n let details_opt = index\n\n .entries\n\n .into_iter()\n\n .find(|PackageDetails { version, .. }| matching.matches(&version));\n\n\n\n match details_opt {\n\n Some(details) => {\n\n debug!(\n\n \"Found yarn@{} matching requirement '{}' from {}\",\n\n details.version, matching, url\n\n );\n\n Ok(details.version)\n\n }\n\n None => Err(ErrorKind::YarnVersionNotFound {\n\n matching: matching.to_string(),\n\n }\n\n .into()),\n\n }\n\n}\n\n\n", "file_path": "crates/volta-core/src/tool/yarn/resolve.rs", "rank": 55, "score": 127299.6377406824 }, { "content": "fn display_tool(name: &str, host: &Package) -> Option<String> {\n\n match host {\n\n Package::Default { details, node, .. 
} => Some(format!(\n\n \"tool {} / {} / {} {}{}\",\n\n name,\n\n tool_version(&details.name, &details.version),\n\n tool_version(\"node\", &node),\n\n \"npm@built-in\",\n\n package_source(&host)\n\n )),\n\n Package::Project {\n\n name: host_name, ..\n\n } => Some(format!(\n\n \"tool {} / {} / {} {}{}\",\n\n name,\n\n tool_version(&host_name, \"project\"),\n\n \"node@project\",\n\n \"npm@project\",\n\n package_source(&host)\n\n )),\n", "file_path": "src/command/list/plain.rs", "rank": 57, "score": 125834.64775014209 }, { "content": "fn package_shasum_file_name(name: &str, version: &str) -> String {\n\n format!(\"{}-{}.shasum\", name, version)\n\n}\n\n\n\npub struct TempProject {\n\n root: PathBuf,\n\n path: OsString,\n\n}\n\n\n\nimpl TempProject {\n\n /// Root of the project, ex: `/path/to/cargo/target/integration_test/t0/foo`\n\n pub fn root(&self) -> PathBuf {\n\n self.root.clone()\n\n }\n\n\n\n /// Create a `ProcessBuilder` to run a program in the project.\n\n /// Example:\n\n /// assert_that(\n\n /// p.process(&p.bin(\"foo\")),\n\n /// execs().with_stdout(\"bar\\n\"),\n", "file_path": "tests/smoke/support/temp_project.rs", "rank": 58, "score": 125342.54468354101 }, { "content": "fn package_json_with_pinned_node_npm(node: &str, npm: &str) -> String {\n\n format!(\n\n r#\"{{\n\n \"name\": \"test-package\",\n\n \"volta\": {{\n\n \"node\": \"{}\",\n\n \"npm\": \"{}\"\n\n }}\n\n}}\"#,\n\n node, npm\n\n )\n\n}\n\n\n", "file_path": "tests/acceptance/volta_pin.rs", "rank": 59, "score": 125342.54468354101 }, { "content": "fn package_json_with_pinned_node_npm(node: &str, npm: &str) -> String {\n\n format!(\n\n r#\"{{\n\n \"name\": \"test-package\",\n\n \"volta\": {{\n\n \"node\": \"{}\",\n\n \"npm\": \"{}\"\n\n }}\n\n}}\"#,\n\n node, npm\n\n )\n\n}\n\n\n", "file_path": "tests/acceptance/volta_run.rs", "rank": 60, "score": 125342.54468354101 }, { "content": "fn package_distro_file_name(name: &str, version: &str) -> String {\n\n format!(\"{}-{}.tgz\", name, 
version)\n\n}\n", "file_path": "tests/smoke/support/temp_project.rs", "rank": 61, "score": 125342.54468354101 }, { "content": "/// Wraps the supplied content to the terminal width, if we are in a terminal.\n\n/// If not, returns the content as a String\n\n///\n\n/// Note: Uses the supplied prefix to calculate the terminal width, but then removes\n\n/// it so that it can be styled (style characters are counted against the wrapped width)\n\nfn wrap_content<D>(prefix: &str, content: &D) -> String\n\nwhere\n\n D: Display,\n\n{\n\n match text_width() {\n\n Some(width) => Wrapper::with_splitter(width, NoHyphenation)\n\n .subsequent_indent(WRAP_INDENT)\n\n .break_words(false)\n\n .fill(&format!(\"{} {}\", prefix, content))\n\n .replace(prefix, \"\"),\n\n None => format!(\" {}\", content),\n\n }\n\n}\n\n\n", "file_path": "crates/volta-core/src/log.rs", "rank": 62, "score": 124391.29168202913 }, { "content": "/// Format a set of `Toolchain::PackageManager`s.\n\nfn display_package_managers(kind: PackageManagerKind, managers: &[PackageManager]) -> String {\n\n match kind {\n\n PackageManagerKind::Npm => display_npms(managers),\n\n _ => {\n\n if managers.is_empty() {\n\n // Note: Using `format_package_manager_kind` to get the properly capitalized version of the tool\n\n // Then using the `Display` impl on the kind to get the version to show in the command\n\n format!(\n\n \"⚡️ No {} versions installed.\n\n\n\nYou can install a {0} version by running `volta install {}`.\n\nSee `volta help install` for details and more options.\",\n\n format_package_manager_kind(kind),\n\n kind\n\n )\n\n } else {\n\n let versions = WRAPPER.fill(\n\n &managers\n\n .iter()\n\n .map(format_package_manager)\n", "file_path": "src/command/list/human.rs", "rank": 63, "score": 123993.43401107522 }, { "content": "fn package_json_with_pinned_node_yarn(node_version: &str, yarn_version: &str) -> String {\n\n format!(\n\n r#\"{{\n\n \"name\": \"test-package\",\n\n \"volta\": {{\n\n \"node\": \"{}\",\n\n 
\"yarn\": \"{}\"\n\n }}\n\n}}\"#,\n\n node_version, yarn_version\n\n )\n\n}\n\n\n", "file_path": "tests/acceptance/volta_pin.rs", "rank": 64, "score": 122691.56298541429 }, { "content": "fn package_json_with_pinned_node_yarn(node_version: &str, yarn_version: &str) -> String {\n\n format!(\n\n r#\"{{\n\n \"name\": \"test-package\",\n\n \"volta\": {{\n\n \"node\": \"{}\",\n\n \"yarn\": \"{}\"\n\n }}\n\n}}\"#,\n\n node_version, yarn_version\n\n )\n\n}\n\n\n\nconst NODE_VERSION_INFO: &str = r#\"[\n\n{\"version\":\"v10.99.1040\",\"npm\":\"6.2.26\",\"lts\": \"Dubnium\",\"files\":[\"linux-x64\",\"osx-x64-tar\",\"win-x64-zip\",\"win-x86-zip\", \"linux-arm64\"]},\n\n{\"version\":\"v9.27.6\",\"npm\":\"5.6.17\",\"lts\": false,\"files\":[\"linux-x64\",\"osx-x64-tar\",\"win-x64-zip\",\"win-x86-zip\", \"linux-arm64\"]},\n\n{\"version\":\"v8.9.10\",\"npm\":\"5.6.7\",\"lts\": false,\"files\":[\"linux-x64\",\"osx-x64-tar\",\"win-x64-zip\",\"win-x86-zip\", \"linux-arm64\"]},\n\n{\"version\":\"v6.19.62\",\"npm\":\"3.10.1066\",\"lts\": false,\"files\":[\"linux-x64\",\"osx-x64-tar\",\"win-x64-zip\",\"win-x86-zip\", \"linux-arm64\"]}\n\n]\n\n\"#;\n", "file_path": "tests/acceptance/volta_run.rs", "rank": 65, "score": 122691.56298541429 }, { "content": "pub fn public_registry_package(package: &str, version: &str) -> String {\n\n format!(\n\n \"{}/-/{}-{}.tgz\",\n\n public_registry_index(package),\n\n package,\n\n version\n\n )\n\n}\n\n\n", "file_path": "crates/volta-core/src/tool/registry.rs", "rank": 67, "score": 121634.92472911916 }, { "content": "fn entry_to_shim_name((entry, metadata): (DirEntry, Metadata)) -> Option<String> {\n\n if metadata.file_type().is_symlink() {\n\n entry\n\n .path()\n\n .file_stem()\n\n .and_then(|stem| stem.to_str())\n\n .map(|stem| stem.to_string())\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n#[derive(PartialEq)]\n\npub enum ShimResult {\n\n Created,\n\n AlreadyExists,\n\n Deleted,\n\n DoesntExist,\n\n}\n\n\n", "file_path": "crates/volta-core/src/shim.rs", 
"rank": 68, "score": 121634.92472911916 }, { "content": "pub fn parse_requirements(s: impl AsRef<str>) -> Fallible<VersionReq> {\n\n let s = s.as_ref();\n\n serial::parse_requirements(s)\n\n .with_context(|| ErrorKind::VersionParseError { version: s.into() })\n\n}\n\n\n", "file_path": "crates/volta-core/src/version/mod.rs", "rank": 69, "score": 120089.068017468 }, { "content": "pub fn tool_version<N, V>(name: N, version: V) -> String\n\nwhere\n\n N: std::fmt::Display + Sized,\n\n V: std::fmt::Display + Sized,\n\n{\n\n format!(\"{:}@{:}\", name, version)\n\n}\n\n\n", "file_path": "crates/volta-core/src/style.rs", "rank": 70, "score": 117531.98283456879 }, { "content": "fn resolve_tag(tag: VersionTag, hooks: Option<&ToolHooks<Yarn>>) -> Fallible<Version> {\n\n // This triage is complicated because we need to maintain the legacy behavior of hooks\n\n // First, if the tag is 'latest' and we have a 'latest' hook, we use the old behavior\n\n // Next, if the tag is 'latest' and we _do not_ have a 'latest' hook, we use the new behavior\n\n // Next, if the tag is _not_ 'latest' and we have an 'index' hook, we show an error since\n\n // the previous behavior did not support generic tags\n\n // Finally, we don't have any relevant hooks, so we can use the new behavior\n\n match (tag, hooks) {\n\n (\n\n VersionTag::Latest,\n\n Some(&ToolHooks {\n\n latest: Some(ref hook),\n\n ..\n\n }),\n\n ) => {\n\n debug!(\"Using yarn.latest hook to determine latest-version URL\");\n\n resolve_latest_legacy(hook.resolve(\"latest-version\")?)\n\n }\n\n (VersionTag::Latest, _) => resolve_custom_tag(VersionTag::Latest.to_string()),\n\n (tag, Some(&ToolHooks { index: Some(_), .. 
})) => Err(ErrorKind::YarnVersionNotFound {\n\n matching: tag.to_string(),\n\n }\n\n .into()),\n\n (tag, _) => resolve_custom_tag(tag.to_string()),\n\n }\n\n}\n\n\n", "file_path": "crates/volta-core/src/tool/yarn/resolve.rs", "rank": 71, "score": 113228.21784392165 }, { "content": "fn resolve_semver(matching: VersionReq, hooks: Option<&ToolHooks<Npm>>) -> Fallible<Version> {\n\n let (url, index) = fetch_npm_index(hooks)?;\n\n\n\n let details_opt = index\n\n .entries\n\n .into_iter()\n\n .find(|PackageDetails { version, .. }| matching.matches(&version));\n\n\n\n match details_opt {\n\n Some(details) => {\n\n debug!(\n\n \"Found npm@{} matching requirement '{}' from {}\",\n\n details.version, matching, url\n\n );\n\n Ok(details.version)\n\n }\n\n None => Err(ErrorKind::NpmVersionNotFound {\n\n matching: matching.to_string(),\n\n }\n\n .into()),\n\n }\n\n}\n", "file_path": "crates/volta-core/src/tool/npm/resolve.rs", "rank": 72, "score": 113158.09124232881 }, { "content": "fn resolve_semver(matching: VersionReq, hooks: Option<&ToolHooks<Yarn>>) -> Fallible<Version> {\n\n // For semver, the triage is less complicated: The previous behavior _always_ used\n\n // the 'index' hook, so we can check for that to decide which behavior to use.\n\n if let Some(&ToolHooks {\n\n index: Some(ref hook),\n\n ..\n\n }) = hooks\n\n {\n\n debug!(\"Using yarn.index hook to determine yarn index URL\");\n\n resolve_semver_legacy(matching, hook.resolve(\"releases\")?)\n\n } else {\n\n resolve_semver_from_registry(matching)\n\n }\n\n}\n\n\n", "file_path": "crates/volta-core/src/tool/yarn/resolve.rs", "rank": 73, "score": 113158.09124232881 }, { "content": "#[test]\n\nfn uses_project_yarn_if_available() {\n\n let s = sandbox()\n\n .platform(PLATFORM_WITH_YARN)\n\n .package_json(PACKAGE_JSON_WITH_YARN)\n\n .env(\"VOLTA_LOGLEVEL\", \"debug\")\n\n .build();\n\n\n\n assert_that!(\n\n s.yarn(\"--version\"),\n\n execs()\n\n .with_status(ExitCode::ExecutionFailure as i32)\n\n 
.with_stderr_does_not_contain(\"[..]Yarn is not available.\")\n\n .with_stderr_does_not_contain(\"[..]No Yarn version found in this project.\")\n\n .with_stderr_contains(\"[..]Using [email protected] from project configuration\")\n\n );\n\n}\n\n\n", "file_path": "tests/acceptance/merged_platform.rs", "rank": 76, "score": 111627.90740278535 }, { "content": "/// Reads a file, if it exists.\n\npub fn read_file<P: AsRef<Path>>(path: P) -> io::Result<Option<String>> {\n\n let result: io::Result<String> = fs::read_to_string(path);\n\n\n\n match result {\n\n Ok(string) => Ok(Some(string)),\n\n Err(error) => match error.kind() {\n\n io::ErrorKind::NotFound => Ok(None),\n\n _ => Err(error),\n\n },\n\n }\n\n}\n\n\n", "file_path": "crates/volta-core/src/fs.rs", "rank": 77, "score": 111525.4057703375 }, { "content": "#[test]\n\nfn uses_default_yarn_outside_project() {\n\n let s = sandbox()\n\n .platform(PLATFORM_WITH_YARN)\n\n .env(\"VOLTA_LOGLEVEL\", \"debug\")\n\n .build();\n\n\n\n assert_that!(\n\n s.yarn(\"--version\"),\n\n execs()\n\n .with_status(ExitCode::ExecutionFailure as i32)\n\n .with_stderr_does_not_contain(\"[..]Yarn is not available.\")\n\n .with_stderr_does_not_contain(\"[..]No Yarn version found in this project.\")\n\n .with_stderr_contains(\"[..]Using [email protected] from default configuration\")\n\n );\n\n}\n\n\n", "file_path": "tests/acceptance/merged_platform.rs", "rank": 78, "score": 109926.43062719418 }, { "content": "#[test]\n\nfn yarn_semver_with_hook_uses_old_format() {\n\n let s = sandbox()\n\n .default_hooks(&yarn_hooks_json())\n\n .env(\"VOLTA_LOGLEVEL\", \"debug\")\n\n .build();\n\n let _mock = mock(\"GET\", \"/yarn-old/index\")\n\n .with_status(200)\n\n .with_header(\"Content-Type\", \"application/json\")\n\n .with_body(\n\n // Yarn Index hook expects the \"old\" (Github API) format\n\n r#\"[\n\n {\"tag_name\":\"v1.22.4\",\"assets\":[{\"name\":\"yarn-v1.22.4.tar.gz\"}]},\n\n 
{\"tag_name\":\"v2.0.0\",\"assets\":[{\"name\":\"yarn-v2.0.0.tar.gz\"}]},\n\n {\"tag_name\":\"v3.9.2\",\"assets\":[{\"name\":\"yarn-v3.9.2.tar.gz\"}]},\n\n {\"tag_name\":\"v4.1.1\",\"assets\":[{\"name\":\"yarn-v4.1.1.tar.gz\"}]}\n\n]\"#,\n\n )\n\n .create();\n\n\n\n assert_that!(\n\n s.volta(\"install yarn@3\"),\n\n execs()\n\n .with_status(ExitCode::NetworkError as i32)\n\n .with_stderr_contains(\"[..]Using yarn.index hook to determine yarn index URL\")\n\n .with_stderr_contains(\"[..]Found [email protected] matching requirement[..]\")\n\n .with_stderr_contains(\"[..]Could not download [email protected]\")\n\n );\n\n}\n", "file_path": "tests/acceptance/hooks.rs", "rank": 79, "score": 109926.43062719418 }, { "content": "fn detect_and_migrate() -> Fallible<()> {\n\n let mut state = MigrationState::current()?;\n\n\n\n // To keep the complexity of writing a new migration from continuously increasing, each new\n\n // layout version only needs to implement a migration from 2 states: Empty and the previously\n\n // latest version. We then apply the migrations sequentially here: V0 -> V1 -> ... 
-> VX\n\n loop {\n\n state = match state {\n\n MigrationState::Empty(e) => MigrationState::V1(Box::new(e.try_into()?)),\n\n MigrationState::V0(zero) => MigrationState::V1(Box::new((*zero).try_into()?)),\n\n MigrationState::V1(one) => MigrationState::V2(Box::new((*one).try_into()?)),\n\n MigrationState::V2(_) => {\n\n break;\n\n }\n\n };\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "crates/volta-migrate/src/lib.rs", "rank": 80, "score": 109617.53707364494 }, { "content": "#[test]\n\nfn uses_default_yarn_in_project_without_yarn() {\n\n let s = sandbox()\n\n .platform(PLATFORM_WITH_YARN)\n\n .package_json(PACKAGE_JSON_NO_YARN)\n\n .env(\"VOLTA_LOGLEVEL\", \"debug\")\n\n .build();\n\n\n\n assert_that!(\n\n s.yarn(\"--version\"),\n\n execs()\n\n .with_status(ExitCode::ExecutionFailure as i32)\n\n .with_stderr_does_not_contain(\"[..]Yarn is not available.\")\n\n .with_stderr_does_not_contain(\"[..]No Yarn version found in this project.\")\n\n .with_stderr_contains(\"[..]Using [email protected] from default configuration\")\n\n );\n\n}\n\n\n", "file_path": "tests/acceptance/merged_platform.rs", "rank": 81, "score": 108291.52475795586 }, { "content": "pub fn run_migration() -> Fallible<()> {\n\n // Acquire an exclusive lock on the Volta directory, to ensure that no other migrations are running.\n\n // If this fails, however, we still need to run the migration\n\n match VoltaLock::acquire() {\n\n Ok(_lock) => {\n\n // The lock was acquired, so we can be confident that no other migrations are running\n\n detect_and_migrate()\n\n }\n\n Err(_) => {\n\n debug!(\"Unable to acquire lock on Volta directory! 
Running migration anyway.\");\n\n detect_and_migrate()\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/volta-migrate/src/lib.rs", "rank": 82, "score": 106473.58934406114 }, { "content": "fn build_path_error() -> ErrorKind {\n\n ErrorKind::BuildPathError\n\n}\n", "file_path": "crates/volta-core/src/platform/mod.rs", "rank": 83, "score": 102074.79877712613 }, { "content": "/// Determine the binary install directory from the currently running executable\n\n///\n\n/// The volta-shim and volta binaries will be installed in the same location, so we can use the\n\n/// currently running executable to find the binary install directory. Note that we need to\n\n/// canonicalize the path we get from current_exe to make sure we resolve symlinks and find the\n\n/// actual binary files\n\nfn default_install_dir() -> Fallible<PathBuf> {\n\n env::current_exe()\n\n .map(|mut path| {\n\n path.pop(); // Remove the executable name from the path\n\n path\n\n })\n\n .and_then(canonicalize)\n\n .with_context(|| ErrorKind::NoInstallDir)\n\n}\n", "file_path": "crates/volta-core/src/layout/mod.rs", "rank": 84, "score": 101777.60817580878 }, { "content": "/// Clear npm from the default `platform.json` file if it is set to the same value as that bundled with Node\n\n///\n\n/// This will ensure that we don't treat the default npm from a prior version of Volta as a \"custom\" npm that\n\n/// the user explicitly requested\n\nfn clear_default_npm(platform_file: &Path) -> Fallible<()> {\n\n if platform_file.exists() {\n\n let platform_json =\n\n read_to_string(platform_file).with_context(|| ErrorKind::ReadPlatformError {\n\n file: platform_file.to_owned(),\n\n })?;\n\n let mut existing_platform = Platform::from_json(platform_json)?;\n\n\n\n if let Some(ref mut node_version) = &mut existing_platform.node {\n\n if let Some(npm) = &node_version.npm {\n\n if let Ok(default_npm) = load_default_npm_version(&node_version.runtime) {\n\n if *npm == default_npm {\n\n node_version.npm = None;\n\n 
write(platform_file, existing_platform.into_json()?).with_context(\n\n || ErrorKind::WritePlatformError {\n\n file: platform_file.to_owned(),\n\n },\n\n )?;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/volta-migrate/src/v2.rs", "rank": 85, "score": 100484.78784128912 }, { "content": "/// Creates a staging directory in the Volta tmp directory\n\npub fn create_staging_dir() -> Fallible<TempDir> {\n\n let tmp_root = volta_home()?.tmp_dir();\n\n tempdir_in(&tmp_root).with_context(|| ErrorKind::CreateTempDirError {\n\n in_dir: tmp_root.to_owned(),\n\n })\n\n}\n\n\n", "file_path": "crates/volta-core/src/fs.rs", "rank": 86, "score": 100484.78784128912 }, { "content": "pub fn regenerate_shims_for_dir(dir: &Path) -> Fallible<()> {\n\n // Acquire a lock on the Volta directory, if possible, to prevent concurrent changes\n\n let _lock = VoltaLock::acquire();\n\n debug!(\"Rebuilding shims for directory: {}\", dir.display());\n\n for shim_name in get_shim_list_deduped(dir)?.iter() {\n\n delete(shim_name)?;\n\n create(shim_name)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/volta-core/src/shim.rs", "rank": 87, "score": 99331.05078242898 }, { "content": "/// Uninstall the specified package.\n\n///\n\n/// This removes:\n\n/// * the json config files\n\n/// * the shims\n\n/// * the unpacked and initialized package\n\npub fn uninstall(name: &str) -> Fallible<()> {\n\n let home = volta_home()?;\n\n // Acquire a lock on the Volta directory, if possible, to prevent concurrent changes\n\n let _lock = VoltaLock::acquire();\n\n\n\n // if the package config file exists, use that to remove any installed bins and shims\n\n let package_config_file = home.default_package_config_file(name);\n\n let package_found = if package_config_file.exists() {\n\n let package_config = PackageConfig::from_file(&package_config_file)?;\n\n\n\n for bin_name in package_config.bins {\n\n remove_config_and_shim(&bin_name, name)?;\n\n }\n\n\n\n 
remove_file_if_exists(package_config_file)?;\n\n true\n\n } else {\n\n // there is no package config - check for orphaned binaries\n\n let package_binary_list = binaries_from_package(name)?;\n\n if !package_binary_list.is_empty() {\n", "file_path": "crates/volta-core/src/tool/package/mod.rs", "rank": 88, "score": 99331.05078242898 }, { "content": "/// Creates a NamedTempFile in the Volta tmp directory\n\npub fn create_staging_file() -> Fallible<NamedTempFile> {\n\n let tmp_dir = volta_home()?.tmp_dir();\n\n NamedTempFile::new_in(&tmp_dir).with_context(|| ErrorKind::CreateTempFileError {\n\n in_dir: tmp_dir.to_owned(),\n\n })\n\n}\n\n\n", "file_path": "crates/volta-core/src/fs.rs", "rank": 89, "score": 99034.01546132445 }, { "content": "/// Checks if a given npm version image is available on the local machine\n\npub fn npm_available(version: &Version) -> Fallible<bool> {\n\n volta_home().map(|home| home.npm_image_dir(&version.to_string()).exists())\n\n}\n\n\n", "file_path": "crates/volta-core/src/inventory.rs", "rank": 90, "score": 98292.43161192517 }, { "content": "pub fn volta_home<'a>() -> Fallible<&'a VoltaHome> {\n\n VOLTA_HOME.get_or_try_init(|| {\n\n let home_dir = match env::var_os(\"VOLTA_HOME\") {\n\n Some(home) => PathBuf::from(home),\n\n None => default_home_dir()?,\n\n };\n\n\n\n Ok(VoltaHome::new(home_dir))\n\n })\n\n}\n\n\n", "file_path": "crates/volta-core/src/layout/mod.rs", "rank": 91, "score": 98292.43161192517 }, { "content": "/// Checks if a given Node version image is available on the local machine\n\npub fn node_available(version: &Version) -> Fallible<bool> {\n\n volta_home().map(|home| {\n\n home.node_image_root_dir()\n\n .join(version.to_string())\n\n .exists()\n\n })\n\n}\n\n\n", "file_path": "crates/volta-core/src/inventory.rs", "rank": 92, "score": 98292.43161192517 }, { "content": "pub fn volta_install<'a>() -> Fallible<&'a VoltaInstall> {\n\n VOLTA_INSTALL.get_or_try_init(|| {\n\n let install_dir = match 
env::var_os(\"VOLTA_INSTALL_DIR\") {\n\n Some(install) => PathBuf::from(install),\n\n None => default_install_dir()?,\n\n };\n\n\n\n Ok(VoltaInstall::new(install_dir))\n\n })\n\n}\n\n\n", "file_path": "crates/volta-core/src/layout/mod.rs", "rank": 93, "score": 98292.43161192517 }, { "content": "/// Checks if a given Yarn version image is available on the local machine\n\npub fn yarn_available(version: &Version) -> Fallible<bool> {\n\n volta_home().map(|home| home.yarn_image_dir(&version.to_string()).exists())\n\n}\n\n\n", "file_path": "crates/volta-core/src/inventory.rs", "rank": 94, "score": 98292.43161192517 }, { "content": "/// Collects a set of all Node versions fetched on the local machine\n\npub fn node_versions() -> Fallible<BTreeSet<Version>> {\n\n volta_home().and_then(|home| read_versions(home.node_image_root_dir()))\n\n}\n\n\n", "file_path": "crates/volta-core/src/inventory.rs", "rank": 95, "score": 97880.27840246432 }, { "content": "pub fn env_paths() -> Fallible<Vec<PathBuf>> {\n\n let home = volta_home()?;\n\n let install = volta_install()?;\n\n\n\n Ok(vec![home.shim_dir().to_owned(), install.root().to_owned()])\n\n}\n", "file_path": "crates/volta-core/src/layout/windows.rs", "rank": 96, "score": 97880.27840246432 }, { "content": "pub fn env_paths() -> Fallible<Vec<PathBuf>> {\n\n let home = volta_home()?;\n\n Ok(vec![home.shim_dir().to_owned()])\n\n}\n", "file_path": "crates/volta-core/src/layout/unix.rs", "rank": 97, "score": 97880.27840246432 }, { "content": "/// Collects a set of all Yarn versions fetched on the local machine\n\npub fn yarn_versions() -> Fallible<BTreeSet<Version>> {\n\n volta_home().and_then(|home| read_versions(home.yarn_image_root_dir()))\n\n}\n\n\n", "file_path": "crates/volta-core/src/inventory.rs", "rank": 98, "score": 97880.27840246432 }, { "content": "/// Collects a set of all npm versions fetched on the local machine\n\npub fn npm_versions() -> Fallible<BTreeSet<Version>> {\n\n volta_home().and_then(|home| 
read_versions(home.npm_image_root_dir()))\n\n}\n\n\n", "file_path": "crates/volta-core/src/inventory.rs", "rank": 99, "score": 97880.27840246432 } ]
Rust
ethers-solc/src/hh.rs
alxiong/ethers-rs
133c32d64a53a8ae38e0c8c8a10753e423127782
use crate::{ artifacts::{ Bytecode, BytecodeObject, CompactContract, CompactContractBytecode, Contract, ContractBytecode, DeployedBytecode, LosslessAbi, Offsets, }, ArtifactOutput, }; use serde::{Deserialize, Serialize}; use std::collections::btree_map::BTreeMap; const HH_ARTIFACT_VERSION: &str = "hh-sol-artifact-1"; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct HardhatArtifact { #[serde(rename = "_format")] pub format: String, pub contract_name: String, pub source_name: String, pub abi: LosslessAbi, pub bytecode: Option<BytecodeObject>, pub deployed_bytecode: Option<BytecodeObject>, #[serde(default)] pub link_references: BTreeMap<String, BTreeMap<String, Vec<Offsets>>>, #[serde(default)] pub deployed_link_references: BTreeMap<String, BTreeMap<String, Vec<Offsets>>>, } impl From<HardhatArtifact> for CompactContract { fn from(artifact: HardhatArtifact) -> Self { CompactContract { abi: Some(artifact.abi.abi), bin: artifact.bytecode, bin_runtime: artifact.deployed_bytecode, } } } impl From<HardhatArtifact> for ContractBytecode { fn from(artifact: HardhatArtifact) -> Self { let bytecode: Option<Bytecode> = artifact.bytecode.as_ref().map(|t| { let mut bcode: Bytecode = t.clone().into(); bcode.link_references = artifact.link_references.clone(); bcode }); let deployed_bytecode: Option<DeployedBytecode> = artifact.bytecode.as_ref().map(|t| { let mut bcode: Bytecode = t.clone().into(); bcode.link_references = artifact.deployed_link_references.clone(); bcode.into() }); ContractBytecode { abi: Some(artifact.abi.abi), bytecode, deployed_bytecode } } } impl From<HardhatArtifact> for CompactContractBytecode { fn from(artifact: HardhatArtifact) -> Self { let c: ContractBytecode = artifact.into(); c.into() } } #[derive(Debug, Copy, Clone, Eq, PartialEq, Default)] pub struct HardhatArtifacts { _priv: (), } impl ArtifactOutput for HardhatArtifacts { type Artifact = HardhatArtifact; fn contract_to_artifact(&self, file: &str, 
name: &str, contract: Contract) -> Self::Artifact { let (bytecode, link_references, deployed_bytecode, deployed_link_references) = if let Some(evm) = contract.evm { let (deployed_bytecode, deployed_link_references) = if let Some(code) = evm.deployed_bytecode.and_then(|code| code.bytecode) { (Some(code.object), code.link_references) } else { (None, Default::default()) }; let (bytecode, link_ref) = if let Some(bc) = evm.bytecode { (Some(bc.object), bc.link_references) } else { (None, Default::default()) }; (bytecode, link_ref, deployed_bytecode, deployed_link_references) } else { (Default::default(), Default::default(), None, Default::default()) }; HardhatArtifact { format: HH_ARTIFACT_VERSION.to_string(), contract_name: name.to_string(), source_name: file.to_string(), abi: contract.abi.unwrap_or_default(), bytecode, deployed_bytecode, link_references, deployed_link_references, } } } #[cfg(test)] mod tests { use super::*; use crate::Artifact; #[test] fn can_parse_hh_artifact() { let s = include_str!("../test-data/hh-greeter-artifact.json"); let artifact = serde_json::from_str::<HardhatArtifact>(s).unwrap(); let compact = artifact.into_compact_contract(); assert!(compact.abi.is_some()); assert!(compact.bin.is_some()); assert!(compact.bin_runtime.is_some()); } }
use crate::{ artifacts::{ Bytecode, BytecodeObject, CompactContract, CompactContractBytecode, Contract, ContractBytecode, DeployedBytecode, LosslessAbi, Offsets, }, ArtifactOutput, }; use serde::{Deserialize, Serialize}; use std::collections::btree_map::BTreeMap; const HH_ARTIFACT_VERSION: &str = "hh-sol-artifact-1"; #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct HardhatArtifact { #[serde(rename = "_format")] pub format: String, pub contract_name: String, pub source_name: String, pub abi: LosslessAbi, pub bytecode: Option<BytecodeObject>, pub deployed_bytecode: Option<BytecodeObject>, #[serde(default)] pub link_references: BTreeMap<String, BTreeMap<String, Vec<Offsets>>>, #[serde(default)] pub deployed_link_references: BTreeMap<String, BTreeMap<String, Vec<Offsets>>>, } impl From<HardhatArtifact> for CompactContract { fn from(artifact: HardhatArtifact) -> Self { CompactContract { abi: Some(artifact.abi.abi), bin: artifact.bytecode, bin_runtime: artifact.deployed_bytecode, } } } impl From<HardhatArtifact> for ContractBytecode { fn from(artifact: HardhatArtifact) -> Self { let bytecode: Option<Bytecode> = artifact.bytecode.as_ref().map(|t| { let mut bcode: Bytecode = t.clone().into(); bcode.link_references = artifact.link_references.clone(); bcode }); let deployed_bytecode: Option<DeployedBytecode> = artifact.bytecode.as_ref().map(|t| { let mut bcode: Bytecode = t.clone().into(); bcode.link_references = artifact.deployed_link_references.clone(); bcode.into() }); ContractBytecode { abi: Some(artifact.abi.abi), bytecode, deployed_bytecode } } } impl From<HardhatArtifact> for CompactContractBytecode { fn from(artifact: HardhatArtifact) -> Self { let c: ContractBytecode = artifact.into(); c.into() } } #[derive(Debug, Copy, Clone, Eq, PartialEq, Default)] pub struct HardhatArtifacts { _priv: (), } impl ArtifactOutput for HardhatArtifacts { type Artifact = HardhatArtifact;
} #[cfg(test)] mod tests { use super::*; use crate::Artifact; #[test] fn can_parse_hh_artifact() { let s = include_str!("../test-data/hh-greeter-artifact.json"); let artifact = serde_json::from_str::<HardhatArtifact>(s).unwrap(); let compact = artifact.into_compact_contract(); assert!(compact.abi.is_some()); assert!(compact.bin.is_some()); assert!(compact.bin_runtime.is_some()); } }
fn contract_to_artifact(&self, file: &str, name: &str, contract: Contract) -> Self::Artifact { let (bytecode, link_references, deployed_bytecode, deployed_link_references) = if let Some(evm) = contract.evm { let (deployed_bytecode, deployed_link_references) = if let Some(code) = evm.deployed_bytecode.and_then(|code| code.bytecode) { (Some(code.object), code.link_references) } else { (None, Default::default()) }; let (bytecode, link_ref) = if let Some(bc) = evm.bytecode { (Some(bc.object), bc.link_references) } else { (None, Default::default()) }; (bytecode, link_ref, deployed_bytecode, deployed_link_references) } else { (Default::default(), Default::default(), None, Default::default()) }; HardhatArtifact { format: HH_ARTIFACT_VERSION.to_string(), contract_name: name.to_string(), source_name: file.to_string(), abi: contract.abi.unwrap_or_default(), bytecode, deployed_bytecode, link_references, deployed_link_references, } }
function_block-full_function
[ { "content": "/// Strips the identifier of field declaration from the input and returns it\n\nfn strip_field_identifier(input: &mut &str) -> Result<String> {\n\n let mut iter = input.trim_end().rsplitn(2, is_whitespace);\n\n let name = iter\n\n .next()\n\n .ok_or_else(|| format_err!(\"Expected field identifier\"))\n\n .map(|mut s| parse_identifier(&mut s))??;\n\n *input =\n\n iter.next().ok_or_else(|| format_err!(\"Expected field type in `{}`\", input))?.trim_end();\n\n Ok(name)\n\n}\n\n\n", "file_path": "ethers-core/src/abi/struct_def.rs", "rank": 0, "score": 374188.38964124734 }, { "content": "/// `struct Pairing.Nested.G2Point[]` -> `[Pairing, Nested]`\n\nfn struct_type_projections(name: &str) -> Vec<String> {\n\n let id = struct_type_identifier(name);\n\n let mut iter = id.rsplit('.');\n\n iter.next();\n\n iter.rev().map(str::to_string).collect()\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/contract/structs.rs", "rank": 1, "score": 371778.23069988075 }, { "content": "fn take_while(s: &str, mut predicate: impl FnMut(char) -> bool) -> (&str, &str) {\n\n let mut index = 0;\n\n for c in s.chars() {\n\n if predicate(c) {\n\n index += c.len_utf8();\n\n } else {\n\n break\n\n }\n\n }\n\n s.split_at(index)\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/util.rs", "rank": 2, "score": 366963.8001931706 }, { "content": "/// Returns the 36 char (deprecated) fully qualified name placeholder\n\n///\n\n/// If the name is longer than 36 char, then the name gets truncated,\n\n/// If the name is shorter than 36 char, then the name is filled with trailing `_`\n\npub fn library_fully_qualified_placeholder(name: impl AsRef<str>) -> String {\n\n name.as_ref().chars().chain(std::iter::repeat('_')).take(36).collect()\n\n}\n\n\n", "file_path": "ethers-solc/src/utils.rs", "rank": 3, "score": 353056.20286166953 }, { "content": "/// compiles the given contract and returns the ABI and Bytecode\n\npub fn compile_contract(name: &str, filename: 
&str) -> (Abi, Bytes) {\n\n let path = format!(\"./tests/solidity-contracts/{}\", filename);\n\n let compiled = Solc::default().compile_source(&path).unwrap();\n\n let contract = compiled.get(&path, name).expect(\"could not find contract\");\n\n let (abi, bin, _) = contract.into_parts_or_default();\n\n (abi, bin)\n\n}\n\n\n", "file_path": "ethers-contract/tests/common/mod.rs", "rank": 4, "score": 352398.06023179804 }, { "content": "/// Reapplies leading and trailing underscore chars to the ident\n\n/// Example `ident = \"pascalCase\"; alias = __pascalcase__` -> `__pascalCase__`\n\npub fn preserve_underscore_delim(ident: &str, alias: &str) -> String {\n\n alias\n\n .chars()\n\n .take_while(|c| *c == '_')\n\n .chain(ident.chars())\n\n .chain(alias.chars().rev().take_while(|c| *c == '_'))\n\n .collect()\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/util.rs", "rank": 5, "score": 350046.86161761073 }, { "content": "/// Returns all path parts from any solidity import statement in a string,\n\n/// `import \"./contracts/Contract.sol\";` -> `\"./contracts/Contract.sol\"`.\n\n///\n\n/// See also https://docs.soliditylang.org/en/v0.8.9/grammar.html\n\npub fn find_import_paths(contract: &str) -> impl Iterator<Item = Match> {\n\n RE_SOL_IMPORT.captures_iter(contract).filter_map(|cap| {\n\n cap.name(\"p1\")\n\n .or_else(|| cap.name(\"p2\"))\n\n .or_else(|| cap.name(\"p3\"))\n\n .or_else(|| cap.name(\"p4\"))\n\n })\n\n}\n\n\n", "file_path": "ethers-solc/src/utils.rs", "rank": 6, "score": 346108.0698931995 }, { "content": "fn parse_event(abi: &str) -> Result<Event, String> {\n\n let abi = if !abi.trim_start().starts_with(\"event \") {\n\n format!(\"event {}\", abi)\n\n } else {\n\n abi.to_string()\n\n };\n\n AbiParser::default()\n\n .parse_event(&abi)\n\n .map_err(|err| format!(\"Failed to parse the event ABI: {:?}\", err))\n\n}\n\n\n\n/// All the attributes the `EthEvent` macro supports\n", "file_path": 
"ethers-contract/ethers-contract-derive/src/event.rs", "rank": 7, "score": 341829.1123792042 }, { "content": "fn parse_function(abi: &str) -> Result<Function, String> {\n\n let abi = if !abi.trim_start().starts_with(\"function \") {\n\n format!(\"function {}\", abi)\n\n } else {\n\n abi.to_string()\n\n };\n\n\n\n AbiParser::default()\n\n .parse_function(&abi)\n\n .map_err(|err| format!(\"Failed to parse the function ABI: {:?}\", err))\n\n}\n", "file_path": "ethers-contract/ethers-contract-derive/src/call.rs", "rank": 8, "score": 341829.1123792042 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\npub fn http_get(_url: &str) -> Result<String> {\n\n cfg_if::cfg_if! {\n\n if #[cfg(feature = \"reqwest\")]{\n\n Ok(reqwest::blocking::get(_url)?.text()?)\n\n } else {\n\n eyre::bail!(\"HTTP is unsupported\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/util.rs", "rank": 9, "score": 339768.2301843182 }, { "content": "/// Format the raw input source string and return formatted output.\n\npub fn format<S>(source: S) -> Result<String>\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n let mut rustfmt =\n\n Command::new(\"rustfmt\").stdin(Stdio::piped()).stdout(Stdio::piped()).spawn()?;\n\n\n\n {\n\n let stdin = rustfmt\n\n .stdin\n\n .as_mut()\n\n .ok_or_else(|| eyre!(\"stdin was not created for `rustfmt` child process\"))?;\n\n stdin.write_all(source.as_ref().as_bytes())?;\n\n }\n\n\n\n let output = rustfmt.wait_with_output()?;\n\n\n\n eyre::ensure!(\n\n output.status.success(),\n\n \"`rustfmt` exited with code {}:\\n{}\",\n\n output.status,\n\n String::from_utf8_lossy(&output.stderr),\n\n );\n\n\n\n let stdout = String::from_utf8(output.stdout)?;\n\n Ok(stdout)\n\n}\n", "file_path": "ethers-contract/ethers-contract-abigen/src/rustfmt.rs", "rank": 10, "score": 335580.84169454133 }, { "content": "/// Reads an artifact JSON file from the local filesystem.\n\n///\n\n/// The given path can be relative or absolute and can contain env vars like\n\n/// 
`\"$CARGO_MANIFEST_DIR/contracts/a.json\"`\n\n/// If the path is relative after all env vars have been resolved then we assume the root is either\n\n/// `CARGO_MANIFEST_DIR` or the current working directory.\n\nfn get_local_contract(path: impl AsRef<str>) -> Result<String> {\n\n let path = resolve_path(path.as_ref())?;\n\n let path = if path.is_relative() {\n\n let manifest_path = env::var(\"CARGO_MANIFEST_DIR\")?;\n\n let root = Path::new(&manifest_path);\n\n let mut contract_path = root.join(&path);\n\n if !contract_path.exists() {\n\n contract_path = dunce::canonicalize(&path)?;\n\n }\n\n if !contract_path.exists() {\n\n eyre::bail!(\"Unable to find local contract \\\"{}\\\"\", path.display())\n\n }\n\n contract_path\n\n } else {\n\n path\n\n };\n\n\n\n let json = fs::read_to_string(&path)\n\n .context(format!(\"failed to read artifact JSON file with path {}\", &path.display()))?;\n\n Ok(json)\n\n}\n\n\n\n/// Retrieves a Truffle artifact or ABI from an HTTP URL.\n", "file_path": "ethers-contract/ethers-contract-abigen/src/source.rs", "rank": 11, "score": 335552.43566421914 }, { "content": "fn contract_file_name(name: impl AsRef<str>) -> String {\n\n let name = name.as_ref();\n\n if name.ends_with(\".sol\") {\n\n name.to_string()\n\n } else {\n\n format!(\"{}.sol\", name)\n\n }\n\n}\n\n\n\nimpl TempProject<HardhatArtifacts> {\n\n /// Creates an empty new hardhat style workspace in a new temporary dir\n\n pub fn hardhat() -> Result<Self> {\n\n let tmp_dir = tempdir(\"tmp_hh\")?;\n\n\n\n let paths = ProjectPathsConfig::hardhat(tmp_dir.path())?;\n\n\n\n let inner =\n\n Project::builder().artifacts(HardhatArtifacts::default()).paths(paths).build()?;\n\n Ok(Self::create_new(tmp_dir, inner)?)\n\n }\n", "file_path": "ethers-solc/src/project_util.rs", "rank": 12, "score": 333579.9900958131 }, { "content": "/// `struct Pairing.G2Point[]` -> `G2Point`\n\nfn struct_type_name(name: &str) -> &str {\n\n struct_type_identifier(name).rsplit('.').next().unwrap()\n\n}\n\n\n", 
"file_path": "ethers-contract/ethers-contract-abigen/src/contract/structs.rs", "rank": 13, "score": 325273.90034174646 }, { "content": "/// `Pairing.G2Point` -> `Pairing.G2Point`\n\nfn struct_type_identifier(name: &str) -> &str {\n\n name.trim_start_matches(\"struct \").split('[').next().unwrap()\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/contract/structs.rs", "rank": 14, "score": 325268.18599554844 }, { "content": "/// turns the tuple component into a struct if it's still missing in the map, including all inner\n\n/// structs\n\nfn insert_structs(structs: &mut HashMap<String, SolStruct>, tuple: &Component) {\n\n if let Some(internal_ty) = tuple.internal_type.as_ref() {\n\n let ident = struct_type_identifier(internal_ty);\n\n if structs.contains_key(ident) {\n\n return\n\n }\n\n if let Some(fields) = tuple\n\n .components\n\n .iter()\n\n .map(|f| Reader::read(&f.type_field).ok().and_then(|kind| field(structs, f, kind)))\n\n .collect::<Option<Vec<_>>>()\n\n {\n\n let s = SolStruct { name: ident.to_string(), fields };\n\n structs.insert(ident.to_string(), s);\n\n }\n\n }\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/contract/structs.rs", "rank": 15, "score": 317304.0994270533 }, { "content": "/// Parses an int type from its string representation\n\npub fn parse_int_param_type(s: &str) -> Option<ParamType> {\n\n let size = s.chars().skip(1).collect::<String>().parse::<usize>().ok()?;\n\n if s.starts_with('u') {\n\n Some(ParamType::Uint(size))\n\n } else if s.starts_with('i') {\n\n Some(ParamType::Int(size))\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-derive/src/utils.rs", "rank": 16, "score": 313701.45403201145 }, { "content": "/// Parses a \"human readable abi\" string\n\n///\n\n/// See also `AbiParser::parse_str`\n\npub fn parse_str(input: &str) -> Result<Abi> {\n\n AbiParser::default().parse_str(input)\n\n}\n\n\n\n/// Parses an identifier like event or function 
name\n\npub(crate) fn parse_identifier(input: &mut &str) -> Result<String> {\n\n let mut chars = input.trim_start().chars();\n\n let mut name = String::new();\n\n let c = chars.next().ok_or_else(|| format_err!(\"Empty identifier in `{}`\", input))?;\n\n if is_first_ident_char(c) {\n\n name.push(c);\n\n loop {\n\n match chars.clone().next() {\n\n Some(c) if is_ident_char(c) => {\n\n chars.next();\n\n name.push(c);\n\n }\n\n _ => break,\n\n }\n\n }\n\n }\n\n if name.is_empty() {\n\n return Err(ParseError::ParseError(super::Error::InvalidName(input.to_string())))\n\n }\n\n *input = chars.as_str();\n\n Ok(name)\n\n}\n\n\n", "file_path": "ethers-core/src/abi/human_readable.rs", "rank": 17, "score": 313618.1049106255 }, { "content": "/// Use `AbiType::param_type` fo each field to construct the whole signature `<name>(<params,>*)` as\n\n/// `String`\n\npub fn derive_abi_signature_with_abi_type(\n\n input: &DeriveInput,\n\n function_name: &str,\n\n trait_name: &str,\n\n) -> Result<proc_macro2::TokenStream, Error> {\n\n let params = derive_abi_parameters_array(input, trait_name)?;\n\n Ok(quote! {\n\n {\n\n let params: String = #params\n\n .iter()\n\n .map(|p| p.to_string())\n\n .collect::<::std::vec::Vec<_>>()\n\n .join(\",\");\n\n let function_name = #function_name;\n\n format!(\"{}({})\", function_name, params)\n\n }\n\n })\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-derive/src/utils.rs", "rank": 18, "score": 306261.24821068486 }, { "content": "/// Use `AbiType::param_type` fo each field to construct the input types own param type\n\npub fn derive_param_type_with_abi_type(\n\n input: &DeriveInput,\n\n trait_name: &str,\n\n) -> Result<proc_macro2::TokenStream, Error> {\n\n let core_crate = ethers_core_crate();\n\n let params = derive_abi_parameters_array(input, trait_name)?;\n\n Ok(quote! 
{\n\n #core_crate::abi::ParamType::Tuple(::std::vec!#params)\n\n })\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-derive/src/utils.rs", "rank": 19, "score": 306060.0844773128 }, { "content": "/// Parses a \"human readable abi\" string vector\n\n///\n\n/// ```\n\n/// use ethers_core::abi::parse_abi;\n\n///\n\n/// let abi = parse_abi(&[\n\n/// \"function x() external view returns (uint256)\",\n\n/// ]).unwrap();\n\n/// ```\n\npub fn parse(input: &[&str]) -> Result<Abi> {\n\n AbiParser::default().parse(input)\n\n}\n\n\n", "file_path": "ethers-core/src/abi/human_readable.rs", "rank": 20, "score": 304330.34755392245 }, { "content": "/// Tries to determine the `ParamType::Tuple` for every struct.\n\n///\n\n/// If a structure has nested structures, these must be determined first, essentially starting with\n\n/// structures consisting of only elementary types before moving on to higher level structures, for\n\n/// example `Proof {point: Point}, Point {x:int, y:int}` start by converting Point into a tuple of\n\n/// `x` and `y` and then substituting `point` with this within `Proof`.\n\nfn resolve_struct_tuples(all_structs: &HashMap<String, SolStruct>) -> HashMap<String, ParamType> {\n\n let mut params = HashMap::new();\n\n let mut structs: VecDeque<_> = all_structs.iter().collect();\n\n\n\n // keep track of how often we retried nested structs\n\n let mut sequential_retries = 0;\n\n 'outer: while let Some((id, ty)) = structs.pop_front() {\n\n if sequential_retries > structs.len() {\n\n break\n\n }\n\n if let Some(tuple) = ty.as_tuple() {\n\n params.insert(id.to_string(), tuple);\n\n } else {\n\n // try to substitute all nested struct types with their `ParamTypes`\n\n let mut struct_params = Vec::with_capacity(ty.fields.len());\n\n for field in ty.fields() {\n\n match field.ty {\n\n FieldType::Elementary(ref param) => {\n\n struct_params.push(param.clone());\n\n }\n", "file_path": "ethers-contract/ethers-contract-abigen/src/contract/structs.rs", "rank": 21, 
"score": 303335.9837232885 }, { "content": "/// Fetch the addressbook for a contract by its name. If the contract name is not a part of\n\n/// [ethers-addressbook](https://github.com/gakonst/ethers-rs/tree/master/ethers-addressbook) we return None.\n\npub fn contract<S: Into<String>>(name: S) -> Option<Contract> {\n\n ADDRESSBOOK.get(&name.into()).cloned()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_tokens() {\n\n assert!(contract(\"dai\").is_some());\n\n assert!(contract(\"usdc\").is_some());\n\n assert!(contract(\"rand\").is_none());\n\n }\n\n\n\n #[test]\n\n fn test_addrs() {\n\n assert!(contract(\"dai\").unwrap().address(Chain::Mainnet).is_some());\n\n assert!(contract(\"dai\").unwrap().address(Chain::MoonbeamDev).is_none());\n\n }\n\n}\n", "file_path": "ethers-addressbook/src/lib.rs", "rank": 22, "score": 299485.73136693815 }, { "content": "/// Parse the abi via `Source::parse` and return if the abi defined as human readable\n\nfn parse_abi(abi_str: &str) -> Result<(Abi, bool, AbiParser)> {\n\n let mut abi_parser = AbiParser::default();\n\n let res = if let Ok(abi) = abi_parser.parse_str(abi_str) {\n\n (abi, true, abi_parser)\n\n } else {\n\n #[derive(Deserialize)]\n\n struct Contract {\n\n abi: Abi,\n\n }\n\n // a best-effort coercion of an ABI or an artifact JSON into an artifact JSON.\n\n let contract: Contract = if abi_str.trim_start().starts_with('[') {\n\n serde_json::from_str(&format!(r#\"{{\"abi\":{}}}\"#, abi_str.trim()))?\n\n } else {\n\n serde_json::from_str::<Contract>(abi_str)?\n\n };\n\n\n\n (contract.abi, false, abi_parser)\n\n };\n\n Ok(res)\n\n}\n", "file_path": "ethers-contract/ethers-contract-abigen/src/contract.rs", "rank": 23, "score": 297703.65858182625 }, { "content": "/// Returns the solidity version pragma from the given input:\n\n/// `pragma solidity ^0.5.2;` => `^0.5.2`\n\npub fn find_version_pragma(contract: &str) -> Option<Match> {\n\n 
RE_SOL_PRAGMA_VERSION.captures(contract)?.name(\"version\")\n\n}\n\n\n", "file_path": "ethers-solc/src/utils.rs", "rank": 24, "score": 291056.40452080563 }, { "content": "/// The crates name to use when deriving macros: (`core`, `contract`)\n\n///\n\n/// We try to determine which crate ident to use based on the dependencies of\n\n/// the project in which the macro is used. This is useful because the macros,\n\n/// like `EthEvent` are provided by the `ethers-contract` crate which depends on\n\n/// `ethers_core`. Most commonly `ethers` will be used as dependency which\n\n/// reexports all the different crates, essentially `ethers::core` is\n\n/// `ethers_core` So depending on the dependency used `ethers` ors `ethers_core\n\n/// | ethers_contract`, we need to use the fitting crate ident when expand the\n\n/// macros This will attempt to parse the current `Cargo.toml` and check the\n\n/// ethers related dependencies.\n\n///\n\n/// This process is a bit hacky, we run `cargo metadata` internally which\n\n/// resolves the current package but creates a new `Cargo.lock` file in the\n\n/// process. This is not a problem for regular workspaces but becomes an issue\n\n/// during publishing with `cargo publish` if the project does not ignore\n\n/// `Cargo.lock` in `.gitignore`, because then cargo can't proceed with\n\n/// publishing the crate because the created `Cargo.lock` leads to a modified\n\n/// workspace, not the `CARGO_MANIFEST_DIR` but the workspace `cargo publish`\n\n/// created in `./target/package/..`. 
Therefore we check prior to executing\n\n/// `cargo metadata` if a `Cargo.lock` file exists and delete it afterwards if\n\n/// it was created by `cargo metadata`.\n\npub fn determine_ethers_crates() -> (&'static str, &'static str, &'static str) {\n\n let manifest_dir = std::env::var(\"CARGO_MANIFEST_DIR\");\n\n\n\n // if there is no cargo manifest, default to `ethers::`-style imports.\n\n let manifest_dir = if let Ok(manifest_dir) = manifest_dir {\n\n manifest_dir\n\n } else {\n\n return (\"ethers::core\", \"ethers::contract\", \"ethers::providers\")\n\n };\n\n\n\n // check if the lock file exists, if it's missing we need to clean up afterward\n\n let lock_file = format!(\"{}/Cargo.lock\", manifest_dir);\n\n let needs_lock_file_cleanup = !std::path::Path::new(&lock_file).exists();\n\n\n\n let res = MetadataCommand::new()\n\n .manifest_path(&format!(\"{}/Cargo.toml\", manifest_dir))\n\n .exec()\n\n .ok()\n\n .and_then(|metadata| {\n\n metadata.root_package().and_then(|pkg| {\n", "file_path": "ethers-core/src/macros/ethers_crate.rs", "rank": 25, "score": 289007.21005964675 }, { "content": "/// Returns a bytes32 string representation of text. 
If the length of text exceeds 32 bytes,\n\n/// an error is returned.\n\npub fn format_bytes32_string(text: &str) -> Result<[u8; 32], ConversionError> {\n\n let str_bytes: &[u8] = text.as_bytes();\n\n if str_bytes.len() > 32 {\n\n return Err(ConversionError::TextTooLong)\n\n }\n\n\n\n let mut bytes32: [u8; 32] = [0u8; 32];\n\n bytes32[..str_bytes.len()].copy_from_slice(str_bytes);\n\n\n\n Ok(bytes32)\n\n}\n\n\n", "file_path": "ethers-core/src/utils/mod.rs", "rank": 26, "score": 287725.8749609519 }, { "content": "fn parse_field_type(s: &str) -> Result<FieldType> {\n\n let mut input = s.trim_start();\n\n if input.starts_with(\"mapping\") {\n\n return Ok(FieldType::Mapping(Box::new(parse_mapping(input)?)))\n\n }\n\n if input.ends_with(\" payable\") {\n\n // special case for `address payable`\n\n input = input[..input.len() - 7].trim_end();\n\n }\n\n if let Ok(ty) = Reader::read(input) {\n\n // See `AbiParser::parse_type`\n\n if is_likely_tuple_not_uint8(&ty, s) {\n\n // likely that an unknown type was resolved as `uint8`\n\n StructFieldType::parse(input.trim_end())\n\n } else {\n\n Ok(FieldType::Elementary(ty))\n\n }\n\n } else {\n\n // parsing elementary datatype failed, try struct\n\n StructFieldType::parse(input.trim_end())\n\n }\n\n}\n\n\n", "file_path": "ethers-core/src/abi/struct_def.rs", "rank": 27, "score": 287366.5893015449 }, { "content": "/// Returns the library hash placeholder as `$hex(library_hash(name))$`\n\npub fn library_hash_placeholder(name: impl AsRef<[u8]>) -> String {\n\n let hash = library_hash(name);\n\n let placeholder = hex::encode(hash);\n\n format!(\"${}$\", placeholder)\n\n}\n\n\n", "file_path": "ethers-solc/src/utils.rs", "rank": 28, "score": 284283.1924915327 }, { "content": "/// Generate the shared prefix of the `lib.rs` or `mod.rs`\n\nfn generate_prefix(mut buf: impl Write, is_crate: bool, single_file: bool) -> Result<()> {\n\n writeln!(buf, \"#![allow(clippy::all)]\")?;\n\n writeln!(\n\n buf,\n\n \"//! This {} contains abigen! 
generated bindings for solidity contracts.\",\n\n if is_crate { \"lib\" } else { \"module\" }\n\n )?;\n\n writeln!(buf, \"//! This is autogenerated code.\")?;\n\n writeln!(buf, \"//! Do not manually edit these files.\")?;\n\n writeln!(\n\n buf,\n\n \"//! {} may be overwritten by the codegen system at any time.\",\n\n if single_file && !is_crate { \"This file\" } else { \"These files\" }\n\n )?;\n\n Ok(())\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/multi.rs", "rank": 29, "score": 281677.02154911263 }, { "content": "// compiles the given contract and returns the ABI and Bytecode\n\nfn compile_contract(path: &str, name: &str) -> (Abi, Bytes) {\n\n let path = format!(\"./tests/solidity-contracts/{}\", path);\n\n let compiled = Solc::default().compile_source(&path).unwrap();\n\n let contract = compiled.get(&path, name).expect(\"could not find contract\");\n\n let (abi, bin, _) = contract.into_parts_or_default();\n\n (abi, bin)\n\n}\n\n\n\n#[tokio::test]\n\n#[cfg(not(feature = \"celo\"))]\n\nasync fn ds_proxy_transformer() {\n\n // randomness\n\n let mut rng = rand::thread_rng();\n\n\n\n // spawn ganache and instantiate a signer middleware.\n\n let ganache = Ganache::new().spawn();\n\n let wallet: LocalWallet = ganache.keys()[0].clone().into();\n\n let provider = Provider::<Http>::try_from(ganache.endpoint())\n\n .unwrap()\n\n .interval(Duration::from_millis(10u64));\n", "file_path": "ethers-middleware/tests/transformer.rs", "rank": 30, "score": 280459.64326386875 }, { "content": "/// parse a mapping declaration\n\nfn parse_mapping(s: &str) -> Result<MappingType> {\n\n let mut input = s.trim();\n\n if !input.starts_with(\"mapping\") {\n\n bail!(\"Not a mapping `{}`\", input)\n\n }\n\n input = input[7..].trim_start();\n\n let mut iter = input.trim_start_matches('(').trim_end_matches(')').splitn(2, \"=>\");\n\n let key_type = iter\n\n .next()\n\n .ok_or_else(|| format_err!(\"Expected mapping key type at `{}`\", input))\n\n .map(str::trim)\n\n 
.map(Reader::read)??;\n\n\n\n let is_illegal_ty = if let ParamType::Array(_) |\n\n ParamType::FixedArray(_, _) |\n\n ParamType::Tuple(_) = &key_type\n\n {\n\n true\n\n } else {\n\n is_likely_tuple_not_uint8(&key_type, s)\n", "file_path": "ethers-core/src/abi/struct_def.rs", "rank": 31, "score": 279259.80765422987 }, { "content": "/// Convert hash map of field names and types into a type hash corresponding to enc types;\n\npub fn make_type_hash(primary_type: String, fields: &[(String, ParamType)]) -> [u8; 32] {\n\n let parameters =\n\n fields.iter().map(|(k, v)| format!(\"{} {}\", v, k)).collect::<Vec<String>>().join(\",\");\n\n\n\n let sig = format!(\"{}({})\", primary_type, parameters);\n\n\n\n keccak256(sig)\n\n}\n\n\n", "file_path": "ethers-core/src/types/transaction/eip712.rs", "rank": 32, "score": 278790.81526402046 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\nfn get_npm_contract(package: &str) -> Result<String> {\n\n let unpkg_url = format!(\"https://unpkg.io/{}\", package);\n\n let json = util::http_get(&unpkg_url)\n\n .with_context(|| format!(\"failed to retrieve JSON from for npm package {}\", package))?;\n\n\n\n Ok(json)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn parse_source() {\n\n let root = \"/rooted\";\n\n for (url, expected) in &[\n\n (\"relative/Contract.json\", Source::local(\"/rooted/relative/Contract.json\")),\n\n (\"/absolute/Contract.json\", Source::local(\"/absolute/Contract.json\")),\n\n (\n\n \"https://my.domain.eth/path/to/Contract.json\",\n", "file_path": "ethers-contract/ethers-contract-abigen/src/source.rs", "rank": 33, "score": 278098.4231047501 }, { "content": "/// Expands a identifier string into a token.\n\npub fn ident(name: &str) -> Ident {\n\n Ident::new(name, Span::call_site())\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/util.rs", "rank": 34, "score": 276187.73051344557 }, { "content": "#[proc_macro_derive(EthAbiType)]\n\npub fn derive_abi_type(input: 
TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n TokenStream::from(abi_ty::derive_tokenizeable_impl(&input))\n\n}\n\n\n\n/// Derives the `AbiEncode`, `AbiDecode` and traits for the labeled type.\n\n///\n\n/// This is an addition to `EthAbiType` that lacks the `AbiEncode`, `AbiDecode` implementation.\n\n///\n\n/// The reason why this is a separate macro is the `AbiEncode` / `AbiDecode` are `ethers`\n\n/// generalized codec traits used for types, calls, etc. However, encoding/decoding a call differs\n\n/// from the basic encoding/decoding, (`[selector + encode(self)]`)\n\n///\n\n/// # Example\n\n///\n\n/// ```ignore\n\n/// use ethers_contract::{EthAbiCodec, EthAbiType};\n\n/// use ethers_core::types::*;\n\n///\n\n/// #[derive(Debug, Clone, EthAbiType, EthAbiCodec)]\n\n/// struct MyStruct {\n\n/// addr: Address,\n\n/// old_value: String,\n\n/// new_value: String,\n\n/// }\n\n/// let val = MyStruct {..};\n\n/// let bytes = val.encode();\n\n/// let val = MyStruct::decode(&bytes).unwrap();\n\n/// ```\n", "file_path": "ethers-contract/ethers-contract-derive/src/lib.rs", "rank": 35, "score": 273485.607325279 }, { "content": "/// Expands an identifier string into a token and appending `_` if the\n\n/// identifier is for a reserved keyword.\n\n///\n\n/// Parsing keywords like `self` can fail, in this case we add an underscore.\n\npub fn safe_ident(name: &str) -> Ident {\n\n syn::parse_str::<SynIdent>(name).unwrap_or_else(|_| ident(&format!(\"{}_\", name)))\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/util.rs", "rank": 36, "score": 273125.70600739715 }, { "content": "/// Expands a doc string into an attribute token stream.\n\npub fn expand_doc(s: &str) -> TokenStream {\n\n let doc = Literal::string(s);\n\n quote! 
{\n\n #[doc = #doc]\n\n }\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/util.rs", "rank": 37, "score": 273120.9872598273 }, { "content": "/// Tries to find the corresponding `ParamType` used for tokenization for the\n\n/// given type\n\npub fn find_parameter_type(ty: &Type) -> Result<ParamType, Error> {\n\n match ty {\n\n Type::Array(ty) => {\n\n let param = find_parameter_type(ty.elem.as_ref())?;\n\n if let Expr::Lit(ref expr) = ty.len {\n\n if let Lit::Int(ref len) = expr.lit {\n\n if let Ok(size) = len.base10_parse::<usize>() {\n\n return Ok(ParamType::FixedArray(Box::new(param), size))\n\n }\n\n }\n\n }\n\n Err(Error::new(ty.span(), \"Failed to derive proper ABI from array field\"))\n\n }\n\n Type::Path(ty) => {\n\n if let Some(ident) = ty.path.get_ident() {\n\n let ident = ident.to_string().to_lowercase();\n\n return match ident.as_str() {\n\n \"address\" => Ok(ParamType::Address),\n\n \"string\" => Ok(ParamType::String),\n\n \"bool\" => Ok(ParamType::Bool),\n", "file_path": "ethers-contract/ethers-contract-derive/src/utils.rs", "rank": 38, "score": 272382.1702873477 }, { "content": "/// Expands an identifier as pascal case and preserve any leading or trailing underscores\n\npub fn safe_pascal_case_ident(name: &str) -> Ident {\n\n let i = name.to_pascal_case();\n\n ident(&preserve_underscore_delim(&i, name))\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/util.rs", "rank": 39, "score": 267280.6811416738 }, { "content": "/// Expands an identifier as snakecase and preserve any leading or trailing underscores\n\npub fn safe_snake_case_ident(name: &str) -> Ident {\n\n let i = name.to_snake_case();\n\n ident(&preserve_underscore_delim(&i, name))\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/util.rs", "rank": 40, "score": 267280.6811416738 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\nfn get_etherscan_contract(address: Address, domain: &str) -> Result<String> {\n\n // NOTE: We do not 
retrieve the bytecode since deploying contracts with the\n\n // same bytecode is unreliable as the libraries have already linked and\n\n // probably don't reference anything when deploying on other networks.\n\n let api_key = {\n\n let key_res = match domain {\n\n \"etherscan.io\" => env::var(\"ETHERSCAN_API_KEY\").ok(),\n\n \"polygonscan.com\" => env::var(\"POLYGONSCAN_API_KEY\").ok(),\n\n \"snowtrace.io\" => env::var(\"SNOWTRACE_API_KEY\").ok(),\n\n _ => None,\n\n };\n\n key_res.map(|key| format!(\"&apikey={}\", key)).unwrap_or_default()\n\n };\n\n\n\n let abi_url = format!(\n\n \"http://api.{}/api?module=contract&action=getabi&address={:?}&format=raw{}\",\n\n domain, address, api_key,\n\n );\n\n let abi =\n\n util::http_get(&abi_url).context(format!(\"failed to retrieve ABI from {}\", domain))?;\n\n\n\n if abi.starts_with(\"Contract source code not verified\") {\n\n eyre::bail!(\"Contract source code not verified: {:?}\", address);\n\n }\n\n\n\n Ok(abi)\n\n}\n\n\n\n/// Retrieves a Truffle artifact or ABI from an npm package through `unpkg.io`.\n", "file_path": "ethers-contract/ethers-contract-abigen/src/source.rs", "rank": 41, "score": 264684.4208252034 }, { "content": "/// Use the `AbiType` trait to determine the correct `ParamType` and signature at runtime\n\nfn derive_trait_impls_with_abi_type(\n\n input: &DeriveInput,\n\n function_call_name: &str,\n\n) -> Result<TokenStream, Error> {\n\n let abi_signature =\n\n utils::derive_abi_signature_with_abi_type(input, function_call_name, \"EthCall\")?;\n\n let abi_signature = quote! 
{\n\n ::std::borrow::Cow::Owned(#abi_signature)\n\n };\n\n let decode_impl = derive_decode_impl_with_abi_type(input)?;\n\n Ok(derive_trait_impls(input, function_call_name, abi_signature, None, decode_impl))\n\n}\n\n\n\n/// All the attributes the `EthCall` macro supports\n", "file_path": "ethers-contract/ethers-contract-derive/src/call.rs", "rank": 42, "score": 264644.08060188254 }, { "content": "/// Replaces any occurrences of env vars in the `raw` str with their value\n\npub fn resolve_path(raw: &str) -> Result<PathBuf> {\n\n let mut unprocessed = raw;\n\n let mut resolved = String::new();\n\n\n\n while let Some(dollar_sign) = unprocessed.find('$') {\n\n let (head, tail) = unprocessed.split_at(dollar_sign);\n\n resolved.push_str(head);\n\n\n\n match parse_identifier(&tail[1..]) {\n\n Some((variable, rest)) => {\n\n let value = std::env::var(variable)?;\n\n resolved.push_str(&value);\n\n unprocessed = rest;\n\n }\n\n None => {\n\n eyre::bail!(\"Unable to parse a variable from \\\"{}\\\"\", tail)\n\n }\n\n }\n\n }\n\n resolved.push_str(unprocessed);\n\n\n\n Ok(PathBuf::from(resolved))\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/util.rs", "rank": 43, "score": 263202.91427326674 }, { "content": "/// Generates the tokenize implementation\n\npub fn derive_tokenizeable_impl(input: &DeriveInput) -> proc_macro2::TokenStream {\n\n let core_crate = ethers_core_crate();\n\n let name = &input.ident;\n\n let generic_params = input.generics.params.iter().map(|p| quote! { #p });\n\n let generic_params = quote! { #(#generic_params,)* };\n\n\n\n let generic_args = input.generics.type_params().map(|p| {\n\n let name = &p.ident;\n\n quote_spanned! { p.ident.span() => #name }\n\n });\n\n\n\n let generic_args = quote! { #(#generic_args,)* };\n\n\n\n let generic_predicates = match input.generics.where_clause {\n\n Some(ref clause) => {\n\n let predicates = clause.predicates.iter().map(|p| quote! { #p });\n\n quote! { #(#predicates,)* }\n\n }\n\n None => quote! 
{},\n\n };\n", "file_path": "ethers-contract/ethers-contract-derive/src/abi_ty.rs", "rank": 44, "score": 261959.87168285428 }, { "content": "/// Generates the EthCall implementation\n\npub fn derive_trait_impls(\n\n input: &DeriveInput,\n\n function_call_name: &str,\n\n abi_signature: TokenStream,\n\n selector: Option<TokenStream>,\n\n decode_impl: TokenStream,\n\n) -> TokenStream {\n\n // the ethers crates to use\n\n let core_crate = ethers_core_crate();\n\n let contract_crate = ethers_contract_crate();\n\n let struct_name = &input.ident;\n\n\n\n let selector = selector.unwrap_or_else(|| {\n\n quote! {\n\n #core_crate::utils::id(Self::abi_signature())\n\n }\n\n });\n\n\n\n let ethcall_impl = quote! {\n\n impl #contract_crate::EthCall for #struct_name {\n", "file_path": "ethers-contract/ethers-contract-derive/src/call.rs", "rank": 45, "score": 257473.0164556438 }, { "content": "#[test]\n\nfn can_derive_abi_type_empty_struct() {\n\n #[derive(Debug, Clone, PartialEq, EthAbiType)]\n\n struct Call();\n\n\n\n #[derive(Debug, Clone, PartialEq, EthAbiType)]\n\n struct Call2 {};\n\n\n\n #[derive(Debug, Clone, PartialEq, EthAbiType)]\n\n struct Call3;\n\n\n\n assert_tokenizeable::<Call>();\n\n assert_tokenizeable::<Call2>();\n\n assert_tokenizeable::<Call3>();\n\n}\n\n\n", "file_path": "ethers-contract/tests/common/derive.rs", "rank": 46, "score": 256746.50170335968 }, { "content": "/// The address for an Ethereum contract is deterministically computed from the\n\n/// address of its creator (sender) and how many transactions the creator has\n\n/// sent (nonce). 
The sender and nonce are RLP encoded and then hashed with Keccak-256.\n\npub fn get_contract_address(sender: impl Into<Address>, nonce: impl Into<U256>) -> Address {\n\n let mut stream = rlp::RlpStream::new();\n\n stream.begin_list(2);\n\n stream.append(&sender.into());\n\n stream.append(&nonce.into());\n\n\n\n let hash = keccak256(&stream.out());\n\n\n\n let mut bytes = [0u8; 20];\n\n bytes.copy_from_slice(&hash[12..]);\n\n Address::from(bytes)\n\n}\n\n\n", "file_path": "ethers-core/src/utils/mod.rs", "rank": 47, "score": 255561.1658534772 }, { "content": "/// Convenience function to turn the `ethers_contract` name in `ETHERS_CRATE` into an `Path`\n\npub fn ethers_contract_crate() -> Path {\n\n syn::parse_str(ETHERS_CRATES.1).expect(\"valid path; qed\")\n\n}\n", "file_path": "ethers-core/src/macros/ethers_crate.rs", "rank": 48, "score": 254407.7565409401 }, { "content": "/// Expands a positional identifier string that may be empty.\n\n///\n\n/// Note that this expands the parameter name with `safe_ident`, meaning that\n\n/// identifiers that are reserved keywords get `_` appended to them.\n\npub fn expand_input_name(index: usize, name: &str) -> TokenStream {\n\n let name_str = match name {\n\n \"\" => format!(\"p{}\", index),\n\n n => n.to_snake_case(),\n\n };\n\n let name = safe_ident(&name_str);\n\n\n\n quote! 
{ #name }\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/util.rs", "rank": 49, "score": 254014.9942904838 }, { "content": "/// Use `AbiType::param_type` fo each field to construct the signature's parameters as runtime array\n\n/// `[param1, param2,...]`\n\npub fn derive_abi_parameters_array(\n\n input: &DeriveInput,\n\n trait_name: &str,\n\n) -> Result<proc_macro2::TokenStream, Error> {\n\n let core_crate = ethers_core_crate();\n\n\n\n let param_types: Vec<_> = match input.data {\n\n Data::Struct(ref data) => match data.fields {\n\n Fields::Named(ref fields) => fields\n\n .named\n\n .iter()\n\n .map(|f| {\n\n let ty = &f.ty;\n\n quote_spanned! { f.span() => <#ty as #core_crate::abi::AbiType>::param_type() }\n\n })\n\n .collect(),\n\n Fields::Unnamed(ref fields) => fields\n\n .unnamed\n\n .iter()\n\n .map(|f| {\n", "file_path": "ethers-contract/ethers-contract-derive/src/utils.rs", "rank": 50, "score": 253992.6214384798 }, { "content": "/// Attempts to determine the ABI Paramtypes from the type's AST\n\npub fn derive_abi_inputs_from_fields(\n\n input: &DeriveInput,\n\n trait_name: &str,\n\n) -> Result<Vec<(String, ParamType)>, Error> {\n\n let fields: Vec<_> = match input.data {\n\n Data::Struct(ref data) => match data.fields {\n\n Fields::Named(ref fields) => fields.named.iter().collect(),\n\n Fields::Unnamed(ref fields) => fields.unnamed.iter().collect(),\n\n Fields::Unit => {\n\n return Err(Error::new(\n\n input.span(),\n\n format!(\"{} cannot be derived for empty structs and unit\", trait_name),\n\n ))\n\n }\n\n },\n\n Data::Enum(_) => {\n\n return Err(Error::new(\n\n input.span(),\n\n format!(\"{} cannot be derived for enums\", trait_name),\n\n ))\n", "file_path": "ethers-contract/ethers-contract-derive/src/utils.rs", "rank": 51, "score": 253987.1241894472 }, { "content": "/// Copies a single file into the given dir\n\npub fn copy_file(source: impl AsRef<Path>, target_dir: impl AsRef<Path>) -> Result<()> {\n\n let source = 
source.as_ref();\n\n let target = target_dir.as_ref().join(\n\n source\n\n .file_name()\n\n .ok_or_else(|| SolcError::msg(format!(\"No file name for {}\", source.display())))?,\n\n );\n\n\n\n fs_extra::file::copy(source, target, &file_copy_options())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "ethers-solc/src/project_util.rs", "rank": 52, "score": 251628.3672322883 }, { "content": "/// Copies all content of the source dir into the target dir\n\npub fn copy_dir(source: impl AsRef<Path>, target_dir: impl AsRef<Path>) -> Result<()> {\n\n fs_extra::dir::copy(source, target_dir, &dir_copy_options())?;\n\n Ok(())\n\n}\n", "file_path": "ethers-solc/src/project_util.rs", "rank": 53, "score": 251628.32388406747 }, { "content": "/// Returns the decoded string represented by the bytes32 encoded data.\n\npub fn parse_bytes32_string(bytes: &[u8; 32]) -> Result<&str, ConversionError> {\n\n let mut length = 0;\n\n while length < 32 && bytes[length] != 0 {\n\n length += 1;\n\n }\n\n\n\n Ok(std::str::from_utf8(&bytes[..length])?)\n\n}\n\n\n", "file_path": "ethers-core/src/utils/mod.rs", "rank": 54, "score": 245191.40250366466 }, { "content": "/// Parses a field definition such as `<type> <storageLocation>? <name>`\n\nfn parse_struct_field(s: &str) -> Result<FieldDeclaration> {\n\n let mut input = s.trim_start();\n\n\n\n if !input.starts_with(\"mapping\") {\n\n // strip potential defaults\n\n input = input\n\n .split('=')\n\n .next()\n\n .ok_or_else(|| format_err!(\"Expected field definition `{}`\", s))?\n\n .trim_end();\n\n }\n\n let name = strip_field_identifier(&mut input)?;\n\n Ok(FieldDeclaration { name, ty: parse_field_type(input)? 
})\n\n}\n\n\n", "file_path": "ethers-core/src/abi/struct_def.rs", "rank": 55, "score": 244617.04606277327 }, { "content": "pub fn default_for_null<'de, D, T>(deserializer: D) -> Result<T, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n T: Deserialize<'de> + Default,\n\n{\n\n Ok(Option::<T>::deserialize(deserializer)?.unwrap_or_default())\n\n}\n\n\n\npub mod json_string_opt {\n\n use serde::{\n\n de::{self, DeserializeOwned},\n\n ser, Deserialize, Deserializer, Serialize, Serializer,\n\n };\n\n\n\n pub fn serialize<T, S>(value: &Option<T>, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n T: Serialize,\n\n {\n\n if let Some(value) = value {\n", "file_path": "ethers-solc/src/artifacts/serde_helpers.rs", "rank": 56, "score": 243918.75182807646 }, { "content": "/// Return HashMap of the field name and the field type;\n\npub fn parse_fields(ast: &DeriveInput) -> Result<Vec<(String, ParamType)>, TokenStream> {\n\n let mut fields = Vec::new();\n\n\n\n let data = match &ast.data {\n\n Data::Struct(s) => s,\n\n _ => {\n\n return Err(Error::new(\n\n ast.span(),\n\n \"invalid data type. can only derive Eip712 for a struct\",\n\n )\n\n .to_compile_error())\n\n }\n\n };\n\n\n\n let named_fields = match &data.fields {\n\n Fields::Named(name) => name,\n\n _ => {\n\n return Err(Error::new(ast.span(), \"unnamed fields are not supported\").to_compile_error())\n\n }\n\n };\n", "file_path": "ethers-core/src/types/transaction/eip712.rs", "rank": 57, "score": 243251.10629825958 }, { "content": "/// Expands to the rust struct type\n\nfn expand_struct_type(struct_ty: &StructFieldType) -> TokenStream {\n\n match struct_ty {\n\n StructFieldType::Type(ty) => {\n\n let ty = util::ident(ty.name());\n\n quote! {#ty}\n\n }\n\n StructFieldType::Array(ty) => {\n\n let ty = expand_struct_type(&*ty);\n\n quote! {::std::vec::Vec<#ty>}\n\n }\n\n StructFieldType::FixedArray(ty, size) => {\n\n let ty = expand_struct_type(&*ty);\n\n quote! 
{ [#ty; #size]}\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "ethers-contract/ethers-contract-abigen/src/contract/structs.rs", "rank": 58, "score": 240129.21764052435 }, { "content": "/// Returns a list of absolute paths to all the json files under the root\n\npub fn json_files(root: impl AsRef<std::path::Path>) -> Vec<PathBuf> {\n\n walkdir::WalkDir::new(root)\n\n .into_iter()\n\n .filter_map(Result::ok)\n\n .filter(|e| e.file_type().is_file())\n\n .filter(|e| e.path().extension().map(|ext| ext == \"json\").unwrap_or_default())\n\n .map(|e| e.path().into())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn can_resolve_path() {\n\n let raw = \"./$ENV_VAR\";\n\n std::env::set_var(\"ENV_VAR\", \"file.txt\");\n\n let resolved = resolve_path(raw).unwrap();\n\n assert_eq!(resolved.to_str().unwrap(), \"./file.txt\");\n", "file_path": "ethers-contract/ethers-contract-abigen/src/util.rs", "rank": 59, "score": 239875.00034423135 }, { "content": "/// Returns the rust type for the given parameter\n\npub fn param_type_quote(kind: &ParamType) -> proc_macro2::TokenStream {\n\n let core_crate = ethers_core_crate();\n\n match kind {\n\n ParamType::Address => {\n\n quote! {#core_crate::abi::ParamType::Address}\n\n }\n\n ParamType::Bytes => {\n\n quote! {#core_crate::abi::ParamType::Bytes}\n\n }\n\n ParamType::Int(size) => {\n\n let size = Literal::usize_suffixed(*size);\n\n quote! {#core_crate::abi::ParamType::Int(#size)}\n\n }\n\n ParamType::Uint(size) => {\n\n let size = Literal::usize_suffixed(*size);\n\n quote! {#core_crate::abi::ParamType::Uint(#size)}\n\n }\n\n ParamType::Bool => {\n\n quote! 
{#core_crate::abi::ParamType::Bool}\n\n }\n", "file_path": "ethers-contract/ethers-contract-derive/src/utils.rs", "rank": 60, "score": 236847.8527775941 }, { "content": "// Converts param types for indexed parameters to bytes32 where appropriate\n\n// This applies to strings, arrays, structs and bytes to follow the encoding of\n\n// these indexed param types according to\n\n// https://solidity.readthedocs.io/en/develop/abi-spec.html#encoding-of-indexed-event-parameters\n\npub fn topic_param_type_quote(kind: &ParamType) -> proc_macro2::TokenStream {\n\n let core_crate = ethers_core_crate();\n\n match kind {\n\n ParamType::String |\n\n ParamType::Bytes |\n\n ParamType::Array(_) |\n\n ParamType::FixedArray(_, _) |\n\n ParamType::Tuple(_) => quote! {#core_crate::abi::ParamType::FixedBytes(32)},\n\n ty => param_type_quote(ty),\n\n }\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-derive/src/utils.rs", "rank": 61, "score": 234424.85861969696 }, { "content": "fn create_contract_file(path: PathBuf, content: impl AsRef<str>) -> Result<PathBuf> {\n\n if let Some(parent) = path.parent() {\n\n std::fs::create_dir_all(parent)\n\n .map_err(|err| SolcIoError::new(err, parent.to_path_buf()))?;\n\n }\n\n std::fs::write(&path, content.as_ref()).map_err(|err| SolcIoError::new(err, path.clone()))?;\n\n Ok(path)\n\n}\n\n\n", "file_path": "ethers-solc/src/project_util.rs", "rank": 62, "score": 234152.32947829954 }, { "content": "#[proc_macro_derive(EthAbiCodec)]\n\npub fn derive_abi_codec(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n TokenStream::from(codec::derive_codec_impl(&input))\n\n}\n\n\n\n/// Derives `fmt::Display` trait and generates a convenient format for all the\n\n/// underlying primitive types/tokens.\n\n///\n\n/// The fields of the structure are formatted comma separated, like `self.0,\n\n/// self.1, self.2,...`\n\n///\n\n/// # Example\n\n///\n\n/// ```ignore\n\n/// use ethers_contract::{EthDisplay, 
EthAbiType};\n\n/// use ethers_core::types::*;\n\n///\n\n/// #[derive(Debug, Clone, EthAbiType, EthDisplay)]\n\n/// struct MyStruct {\n\n/// addr: Address,\n\n/// old_value: String,\n\n/// new_value: String,\n\n/// h: H256,\n\n/// arr_u8: [u8; 32],\n\n/// arr_u16: [u16; 32],\n\n/// v: Vec<u8>,\n\n/// }\n\n/// let val = MyStruct {..};\n\n/// format!(\"{}\", val);\n\n/// ```\n", "file_path": "ethers-contract/ethers-contract-derive/src/lib.rs", "rank": 63, "score": 232704.1813019279 }, { "content": "#[proc_macro_derive(EthEvent, attributes(ethevent))]\n\npub fn derive_abi_event(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n TokenStream::from(event::derive_eth_event_impl(input))\n\n}\n\n\n\n/// Derives the `EthCall` and `Tokenizeable` trait for the labeled type.\n\n///\n\n/// Additional arguments can be specified using the `#[ethcall(...)]`\n\n/// attribute:\n\n///\n\n/// For the struct:\n\n///\n\n/// - `name`, `name = \"...\"`: Overrides the generated `EthCall` function name, default is the\n\n/// struct's name.\n\n/// - `abi`, `abi = \"...\"`: The ABI signature for the function this call's data corresponds to.\n\n///\n\n/// NOTE: in order to successfully parse the `abi` (`<name>(<args>,...)`) the `<name`>\n\n/// must match either the struct name or the name attribute: `#[ethcall(name =\"<name>\"]`\n\n///\n\n/// # Example\n", "file_path": "ethers-contract/ethers-contract-derive/src/lib.rs", "rank": 64, "score": 232698.55714297795 }, { "content": "#[proc_macro_derive(EthCall, attributes(ethcall))]\n\npub fn derive_abi_call(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n TokenStream::from(call::derive_eth_call_impl(input))\n\n}\n", "file_path": "ethers-contract/ethers-contract-derive/src/lib.rs", "rank": 65, "score": 232698.55714297795 }, { "content": "/// Generates the decode implementation based on the function's runtime `AbiType` impl\n\nfn 
derive_decode_impl_with_abi_type(input: &DeriveInput) -> Result<TokenStream, Error> {\n\n let datatypes_array = utils::derive_abi_parameters_array(input, \"EthCall\")?;\n\n Ok(derive_decode_impl(datatypes_array))\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-derive/src/call.rs", "rank": 66, "score": 232639.04128614176 }, { "content": "/// Returns true if the `ParamType` contains an `uint8`\n\npub fn contains_uint8(kind: &ParamType) -> bool {\n\n match kind {\n\n ParamType::Uint(8) => true,\n\n ParamType::Array(kind) => contains_uint8(&*kind),\n\n ParamType::FixedArray(kind, _) => contains_uint8(&*kind),\n\n ParamType::Tuple(tuple) => tuple.iter().any(contains_uint8),\n\n _ => false,\n\n }\n\n}\n\n\n\npub(crate) fn is_first_ident_char(c: char) -> bool {\n\n matches!(c, 'a'..='z' | 'A'..='Z' | '_')\n\n}\n\n\n\npub(crate) fn is_ident_char(c: char) -> bool {\n\n matches!(c, 'a'..='z' | 'A'..='Z' | '0'..='9' | '_')\n\n}\n\n\n\npub(crate) fn is_whitespace(c: char) -> bool {\n\n matches!(c, ' ' | '\\t')\n\n}\n\n\n", "file_path": "ethers-core/src/abi/human_readable.rs", "rank": 67, "score": 229696.5382657597 }, { "content": "/// Returns the ENS namehash as specified in [EIP-137](https://eips.ethereum.org/EIPS/eip-137)\n\npub fn namehash(name: &str) -> H256 {\n\n if name.is_empty() {\n\n return H256::zero()\n\n }\n\n\n\n // iterate in reverse\n\n name.rsplit('.')\n\n .fold([0u8; 32], |node, label| keccak256(&[node, keccak256(label.as_bytes())].concat()))\n\n .into()\n\n}\n\n\n", "file_path": "ethers-providers/src/ens.rs", "rank": 68, "score": 228771.45101144759 }, { "content": "/// Divides the provided amount with 10^{units} provided.\n\n///\n\n/// ```\n\n/// use ethers_core::{types::U256, utils::format_units};\n\n///\n\n/// let eth = format_units(1395633240123456000_u128, \"ether\").unwrap();\n\n/// assert_eq!(eth.parse::<f64>().unwrap(), 1.395633240123456);\n\n///\n\n/// let eth = format_units(U256::from_dec_str(\"1395633240123456000\").unwrap(), 
\"ether\").unwrap();\n\n/// assert_eq!(eth.parse::<f64>().unwrap(), 1.395633240123456);\n\n///\n\n/// let eth = format_units(U256::from_dec_str(\"1395633240123456789\").unwrap(), \"ether\").unwrap();\n\n/// assert_eq!(eth, \"1.395633240123456789\");\n\n/// ```\n\npub fn format_units<T, K>(amount: T, units: K) -> Result<String, ConversionError>\n\nwhere\n\n T: Into<U256>,\n\n K: TryInto<Units, Error = ConversionError>,\n\n{\n\n let units = units.try_into()?;\n\n let amount = amount.into();\n\n let amount_decimals = amount % U256::from(10_u128.pow(units.as_num()));\n\n let amount_integer = amount / U256::from(10_u128.pow(units.as_num()));\n\n Ok(format!(\n\n \"{}.{:0width$}\",\n\n amount_integer,\n\n amount_decimals.as_u128(),\n\n width = units.as_num() as usize\n\n ))\n\n}\n\n\n", "file_path": "ethers-core/src/utils/mod.rs", "rank": 69, "score": 228571.51190638414 }, { "content": "/// Parses the given address string\n\npub fn parse_address<S>(address_str: S) -> Result<Address>\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n let address_str = address_str.as_ref();\n\n eyre::ensure!(address_str.starts_with(\"0x\"), \"address must start with '0x'\");\n\n Ok(address_str[2..].parse()?)\n\n}\n\n\n\n/// Perform an HTTP GET request and return the contents of the response.\n", "file_path": "ethers-contract/ethers-contract-abigen/src/util.rs", "rank": 70, "score": 227879.2257766427 }, { "content": "fn parse_identifier(text: &str) -> Option<(&str, &str)> {\n\n let mut calls = 0;\n\n\n\n let (head, tail) = take_while(text, |c| {\n\n calls += 1;\n\n match c {\n\n '_' => true,\n\n letter if letter.is_ascii_alphabetic() => true,\n\n digit if digit.is_ascii_digit() && calls > 1 => true,\n\n _ => false,\n\n }\n\n });\n\n\n\n if head.is_empty() {\n\n None\n\n } else {\n\n Some((head, tail))\n\n }\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/util.rs", "rank": 71, "score": 227207.30665765988 }, { "content": "/// Returns the reverse-registrar name of an address.\n\npub fn 
reverse_address(addr: Address) -> String {\n\n format!(\"{:?}.{}\", addr, ENS_REVERSE_REGISTRAR_DOMAIN)[2..].to_string()\n\n}\n\n\n", "file_path": "ethers-providers/src/ens.rs", "rank": 72, "score": 226541.5051846282 }, { "content": "/// Reads the json file and deserialize it into the provided type\n\npub fn read_json_file<T: DeserializeOwned>(path: impl AsRef<Path>) -> Result<T, SolcError> {\n\n let path = path.as_ref();\n\n let file = std::fs::File::open(path).map_err(|err| SolcError::io(err, path))?;\n\n let file = std::io::BufReader::new(file);\n\n let val: T = serde_json::from_reader(file)?;\n\n Ok(val)\n\n}\n\n\n", "file_path": "ethers-solc/src/utils.rs", "rank": 73, "score": 225917.5113131425 }, { "content": "/// This will determine the name of the rust type and will make sure that possible collisions are\n\n/// resolved by adjusting the actual Rust name of the structure, e.g. `LibraryA.Point` and\n\n/// `LibraryB.Point` to `LibraryAPoint` and `LibraryBPoint`.\n\nfn insert_rust_type_name(\n\n type_names: &mut HashMap<String, (String, Vec<String>)>,\n\n mut name: String,\n\n mut projections: Vec<String>,\n\n id: String,\n\n) {\n\n if let Some((other_id, mut other_projections)) = type_names.remove(&name) {\n\n let mut other_name = name.clone();\n\n // name collision `A.name` `B.name`, rename to `AName`, `BName`\n\n if !other_projections.is_empty() {\n\n other_name = format!(\"{}{}\", other_projections.remove(0).to_pascal_case(), other_name);\n\n }\n\n insert_rust_type_name(type_names, other_name, other_projections, other_id);\n\n\n\n if !projections.is_empty() {\n\n name = format!(\"{}{}\", projections.remove(0).to_pascal_case(), name);\n\n }\n\n insert_rust_type_name(type_names, name, projections, id);\n\n } else {\n\n type_names.insert(name, (id, projections));\n\n }\n\n}\n\n\n", "file_path": "ethers-contract/ethers-contract-abigen/src/contract/structs.rs", "rank": 74, "score": 225519.195725531 }, { "content": "/// Generates the `AbiEncode` + `AbiDecode` 
implementation\n\npub fn derive_codec_impl(input: &DeriveInput) -> proc_macro2::TokenStream {\n\n let name = &input.ident;\n\n let core_crate = ethers_core_crate();\n\n\n\n quote! {\n\n impl #core_crate::abi::AbiDecode for #name {\n\n fn decode(bytes: impl AsRef<[u8]>) -> Result<Self, #core_crate::abi::AbiError> {\n\n if let #core_crate::abi::ParamType::Tuple(params) = <Self as #core_crate::abi::AbiType>::param_type() {\n\n let tokens = #core_crate::abi::decode(&params, bytes.as_ref())?;\n\n Ok(<Self as #core_crate::abi::Tokenizable>::from_token(#core_crate::abi::Token::Tuple(tokens))?)\n\n } else {\n\n Err(\n\n #core_crate::abi::InvalidOutputType(\"Expected tuple\".to_string()).into()\n\n )\n\n }\n\n }\n\n }\n\n impl #core_crate::abi::AbiEncode for #name {\n\n fn encode(self) -> ::std::vec::Vec<u8> {\n\n let tokens = #core_crate::abi::Tokenize::into_tokens(self);\n\n #core_crate::abi::encode(&tokens)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "ethers-contract/ethers-contract-derive/src/codec.rs", "rank": 75, "score": 225095.78645428014 }, { "content": "/// Parse the eth abi parameter type based on the syntax type;\n\n/// this method is copied from https://github.com/gakonst/ethers-rs/blob/master/ethers-contract/ethers-contract-derive/src/lib.rs#L600\n\n/// with additional modifications for finding byte arrays\n\npub fn find_parameter_type(ty: &Type) -> Result<ParamType, TokenStream> {\n\n match ty {\n\n Type::Array(ty) => {\n\n let param = find_parameter_type(ty.elem.as_ref())?;\n\n if let Expr::Lit(ref expr) = ty.len {\n\n if let Lit::Int(ref len) = expr.lit {\n\n if let Ok(size) = len.base10_parse::<usize>() {\n\n if let ParamType::Uint(_) = param {\n\n return Ok(ParamType::FixedBytes(size))\n\n }\n\n\n\n return Ok(ParamType::FixedArray(Box::new(param), size))\n\n }\n\n }\n\n }\n\n Err(Error::new(ty.span(), \"Failed to derive proper ABI from array field\")\n\n .to_compile_error())\n\n }\n\n Type::Path(ty) => {\n\n if let Some(ident) = ty.path.get_ident() {\n", 
"file_path": "ethers-core/src/types/transaction/eip712.rs", "rank": 76, "score": 223866.8515978827 }, { "content": "/// Returns the ENS record key hash [EIP-634](https://eips.ethereum.org/EIPS/eip-634)\n\npub fn parameterhash(name: &str) -> Vec<u8> {\n\n let bytes = name.as_bytes();\n\n let key_bytes =\n\n [&bytes_32ify(64), &bytes_32ify(bytes.len().try_into().unwrap()), bytes].concat();\n\n match key_bytes.len() % 32 {\n\n 0 => key_bytes,\n\n n => [key_bytes, [0; 32][n..].to_vec()].concat(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn assert_hex(hash: H256, val: &str) {\n\n let v = if let Some(stripped) = val.strip_prefix(\"0x\") { stripped } else { val };\n\n\n\n assert_eq!(hash.0.to_vec(), hex::decode(v).unwrap());\n\n }\n\n\n", "file_path": "ethers-providers/src/ens.rs", "rank": 77, "score": 221974.29618427227 }, { "content": "#[derive(Debug, Clone, PartialEq, EthAbiType)]\n\nstruct ValueChangedTuple(Address, Address, String, String);\n\n\n", "file_path": "ethers-contract/tests/common/derive.rs", "rank": 78, "score": 218514.90752282648 }, { "content": "fn escape_quotes(input: &str) -> &str {\n\n input.trim_matches(is_whitespace).trim_matches('\\\"')\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn parses_approve() {\n\n let fn_str = \"function approve(address _spender, uint256 value) external returns(bool)\";\n\n let parsed = AbiParser::default().parse_function(fn_str).unwrap();\n\n assert_eq!(parsed.name, \"approve\");\n\n assert_eq!(parsed.inputs[0].name, \"_spender\");\n\n assert_eq!(parsed.inputs[0].kind, ParamType::Address,);\n\n assert_eq!(parsed.inputs[1].name, \"value\");\n\n assert_eq!(parsed.inputs[1].kind, ParamType::Uint(256),);\n\n assert_eq!(parsed.outputs[0].name, \"\");\n\n assert_eq!(parsed.outputs[0].kind, ParamType::Bool);\n\n }\n", "file_path": "ethers-core/src/abi/human_readable.rs", "rank": 79, "score": 217430.474808681 }, { "content": "/// Executes a closure with a reference 
to the `Reporter`.\n\npub fn with_global<T>(f: impl FnOnce(&Report) -> T) -> Option<T> {\n\n let dispatch = get_global()?;\n\n Some(f(dispatch))\n\n}\n\n\n\n/// A no-op [`Reporter`] that does nothing.\n\n#[derive(Copy, Clone, Debug, Default)]\n\npub struct NoReporter(());\n\n\n\nimpl Reporter for NoReporter {}\n\n\n\n/// A [`Reporter`] that emits some general information to `stdout`\n\n#[derive(Copy, Clone, Debug, Default)]\n\npub struct BasicStdoutReporter(());\n\n\n\nimpl Reporter for BasicStdoutReporter {\n\n /// Callback invoked right before [`Solc::compile()`] is called\n\n fn on_solc_spawn(&self, _solc: &Solc, version: &Version, input: &CompilerInput) {\n\n println!(\n\n \"Compiling {} files with {}.{}.{}\",\n", "file_path": "ethers-solc/src/report.rs", "rank": 80, "score": 213989.8824089379 }, { "content": "/// Returns the same path config but with canonicalized paths.\n\n///\n\n/// This will take care of potential symbolic linked directories.\n\n/// For example, the tempdir library is creating directories hosted under `/var/`, which in OS X\n\n/// is a symbolic link to `/private/var/`. 
So if when we try to resolve imports and a path is\n\n/// rooted in a symbolic directory we might end up with different paths for the same file, like\n\n/// `private/var/.../Dapp.sol` and `/var/.../Dapp.sol`\n\n///\n\n/// This canonicalizes all the paths but does not treat non existing dirs as an error\n\npub fn canonicalized(path: impl Into<PathBuf>) -> PathBuf {\n\n let path = path.into();\n\n canonicalize(&path).unwrap_or(path)\n\n}\n\n\n", "file_path": "ethers-solc/src/utils.rs", "rank": 81, "score": 213583.5458833982 }, { "content": "/// Attempts to determine if the given source is a local, relative import\n\npub fn is_local_source_name(libs: &[impl AsRef<Path>], source: impl AsRef<Path>) -> bool {\n\n resolve_library(libs, source).is_none()\n\n}\n\n\n", "file_path": "ethers-solc/src/utils.rs", "rank": 82, "score": 213159.98651136731 }, { "content": "/// Finds the common ancestor of both paths\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use std::path::{PathBuf, Path};\n\n///\n\n/// # fn main() {\n\n/// use ethers_solc::utils::common_ancestor;\n\n/// let foo = Path::new(\"/foo/bar/foo\");\n\n/// let bar = Path::new(\"/foo/bar/bar\");\n\n/// let ancestor = common_ancestor(foo, bar).unwrap();\n\n/// assert_eq!(ancestor, Path::new(\"/foo/bar\").to_path_buf());\n\n/// # }\n\n/// ```\n\npub fn common_ancestor(a: impl AsRef<Path>, b: impl AsRef<Path>) -> Option<PathBuf> {\n\n let a = a.as_ref().components();\n\n let b = b.as_ref().components();\n\n let mut ret = PathBuf::new();\n\n let mut found = false;\n\n for (c1, c2) in a.zip(b) {\n\n if c1 == c2 {\n\n ret.push(c1);\n\n found = true;\n\n } else {\n\n break\n\n }\n\n }\n\n if found {\n\n Some(ret)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "ethers-solc/src/utils.rs", "rank": 83, "score": 212709.96549058298 }, { "content": "#[test]\n\nfn can_detect_various_event_abi_types() {\n\n #[derive(Debug, PartialEq, EthEvent)]\n\n struct ValueChangedEvent {\n\n old_author: Address,\n\n s: String,\n\n h1: 
H256,\n\n i256: I256,\n\n u256: U256,\n\n b: bool,\n\n v: Vec<Address>,\n\n bs: Vec<bool>,\n\n h160: H160,\n\n u128: U128,\n\n int8: i8,\n\n int16: i16,\n\n int32: i32,\n\n int64: i64,\n\n int128: i128,\n\n uint8: u8,\n\n uint16: u16,\n", "file_path": "ethers-contract/tests/common/derive.rs", "rank": 84, "score": 211244.49079026503 }, { "content": "/// Returns a HTTP url for an IPFS object.\n\npub fn http_link_ipfs(url: Url) -> Result<Url, String> {\n\n Url::parse(IPFS_GATEWAY)\n\n .unwrap()\n\n .join(url.to_string().trim_start_matches(\"ipfs://\").trim_start_matches(\"ipfs/\"))\n\n .map_err(|e| e.to_string())\n\n}\n", "file_path": "ethers-providers/src/erc.rs", "rank": 85, "score": 211151.84942986834 }, { "content": "/// Parses a source map\n\npub fn parse(input: &str) -> Result<SourceMap, SyntaxError> {\n\n Parser::new(input).collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[allow(unused)]\n\n fn tokenize(s: &str) -> Vec<Spanned<Token, usize, SyntaxError>> {\n\n TokenStream::new(s).collect()\n\n }\n\n\n\n #[test]\n\n fn can_parse_source_maps() {\n\n // all source maps from the compiler output test data\n\n let source_maps = include_str!(\"../test-data/out-source-maps.txt\");\n\n\n\n for (line, s) in source_maps.lines().enumerate() {\n\n parse(s).unwrap_or_else(|_| panic!(\"Failed to parse line {}\", line));\n", "file_path": "ethers-solc/src/sourcemap.rs", "rank": 86, "score": 210705.14627357436 }, { "content": "/// Convenience function to turn the `ethers_core` name in `ETHERS_CRATE` into a `Path`\n\npub fn ethers_core_crate() -> Path {\n\n syn::parse_str(ETHERS_CRATES.0).expect(\"valid path; qed\")\n\n}\n", "file_path": "ethers-core/src/macros/ethers_crate.rs", "rank": 87, "score": 210209.20948532817 }, { "content": "pub fn ethers_providers_crate() -> Path {\n\n syn::parse_str(ETHERS_CRATES.2).expect(\"valid path; qed\")\n\n}\n\n\n", "file_path": "ethers-core/src/macros/ethers_crate.rs", "rank": 88, "score": 210203.66777299557 }, { 
"content": "/// Returns the library placeholder for the given name\n\n/// The placeholder is a 34 character prefix of the hex encoding of the keccak256 hash of the fully\n\n/// qualified library name.\n\n///\n\n/// See also https://docs.soliditylang.org/en/develop/using-the-compiler.html#library-linking\n\npub fn library_hash(name: impl AsRef<[u8]>) -> [u8; 17] {\n\n let mut output = [0u8; 17];\n\n let mut hasher = Keccak::v256();\n\n hasher.update(name.as_ref());\n\n hasher.finalize(&mut output);\n\n output\n\n}\n\n\n", "file_path": "ethers-solc/src/utils.rs", "rank": 89, "score": 210174.53469939262 }, { "content": "/// Returns the path to the library if the source path is in fact determined to be a library path,\n\n/// and it exists.\n\n/// Note: this does not handle relative imports or remappings.\n\npub fn resolve_library(libs: &[impl AsRef<Path>], source: impl AsRef<Path>) -> Option<PathBuf> {\n\n let source = source.as_ref();\n\n let comp = source.components().next()?;\n\n match comp {\n\n Component::Normal(first_dir) => {\n\n // attempt to verify that the root component of this source exists under a library\n\n // folder\n\n for lib in libs {\n\n let lib = lib.as_ref();\n\n let contract = lib.join(source);\n\n if contract.exists() {\n\n // contract exists in <lib>/<source>\n\n return Some(contract)\n\n }\n\n // check for <lib>/<first_dir>/src/name.sol\n\n let contract = lib\n\n .join(first_dir)\n\n .join(\"src\")\n\n .join(source.strip_prefix(first_dir).expect(\"is first component\"));\n\n if contract.exists() {\n\n return Some(contract)\n\n }\n\n }\n\n None\n\n }\n\n Component::RootDir => Some(source.into()),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "ethers-solc/src/utils.rs", "rank": 90, "score": 208736.71767958958 }, { "content": "pub fn deserialize_bytes<'de, D>(d: D) -> Result<bytes::Bytes, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let value = String::deserialize(d)?;\n\n if value.len() >= 2 && &value[0..2] == \"0x\" {\n\n let bytes: 
Vec<u8> =\n\n hex::decode(&value[2..]).map_err(|e| Error::custom(format!(\"Invalid hex: {}\", e)))?;\n\n Ok(bytes.into())\n\n } else {\n\n Err(Error::invalid_value(Unexpected::Str(&value), &\"0x prefix\"))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn hex_formatting() {\n\n let b = Bytes::from(vec![1, 35, 69, 103, 137, 171, 205, 239]);\n\n let expected = String::from(\"0x0123456789abcdef\");\n\n assert_eq!(format!(\"{:x}\", b), expected);\n\n assert_eq!(format!(\"{}\", b), expected);\n\n }\n\n}\n", "file_path": "ethers-core/src/types/bytes.rs", "rank": 91, "score": 208400.55694578035 }, { "content": "fn tokenize_unit_type(name: &Ident) -> TokenStream {\n\n let ethers_core = ethers_core_crate();\n\n quote! {\n\n impl #ethers_core::abi::Tokenizable for #name {\n\n fn from_token(token: #ethers_core::abi::Token) -> Result<Self, #ethers_core::abi::InvalidOutputType> where\n\n Self: Sized {\n\n if let #ethers_core::abi::Token::Tuple(tokens) = token {\n\n if !tokens.is_empty() {\n\n Err(#ethers_core::abi::InvalidOutputType(::std::format!(\n\n \"Expected empty tuple, got {:?}\",\n\n tokens\n\n )))\n\n } else {\n\n Ok(#name{})\n\n }\n\n } else {\n\n Err(#ethers_core::abi::InvalidOutputType(::std::format!(\n\n \"Expected Tuple, got {:?}\",\n\n token\n\n )))\n", "file_path": "ethers-contract/ethers-contract-derive/src/abi_ty.rs", "rank": 92, "score": 206009.07728036668 }, { "content": "/// Returns a list of absolute paths to all the solidity files under the root, or the file itself,\n\n/// if the path is a solidity file.\n\n///\n\n/// NOTE: this does not resolve imports from other locations\n\n///\n\n/// # Example\n\n///\n\n/// ```no_run\n\n/// use ethers_solc::utils;\n\n/// let sources = utils::source_files(\"./contracts\");\n\n/// ```\n\npub fn source_files(root: impl AsRef<Path>) -> Vec<PathBuf> {\n\n WalkDir::new(root)\n\n .into_iter()\n\n .filter_map(Result::ok)\n\n .filter(|e| e.file_type().is_file())\n\n .filter(|e| 
e.path().extension().map(|ext| ext == \"sol\").unwrap_or_default())\n\n .map(|e| e.path().into())\n\n .collect()\n\n}\n\n\n", "file_path": "ethers-solc/src/utils.rs", "rank": 93, "score": 205417.68546347943 }, { "content": "/// Returns a list of _unique_ paths to all folders under `root` that contain at least one solidity\n\n/// file (`*.sol`).\n\n///\n\n/// # Example\n\n///\n\n/// ```no_run\n\n/// use ethers_solc::utils;\n\n/// let dirs = utils::solidity_dirs(\"./lib\");\n\n/// ```\n\n///\n\n/// for following layout will return\n\n/// `[\"lib/ds-token/src\", \"lib/ds-token/src/test\", \"lib/ds-token/lib/ds-math/src\", ...]`\n\n///\n\n/// ```text\n\n/// lib\n\n/// └── ds-token\n\n/// ├── lib\n\n/// │ ├── ds-math\n\n/// │ │ └── src/Contract.sol\n\n/// │ ├── ds-stop\n\n/// │ │ └── src/Contract.sol\n\n/// │ ├── ds-test\n\n/// │ └── src//Contract.sol\n\n/// └── src\n\n/// ├── base.sol\n\n/// ├── test\n\n/// │ ├── base.t.sol\n\n/// └── token.sol\n\n/// ```\n\npub fn solidity_dirs(root: impl AsRef<Path>) -> Vec<PathBuf> {\n\n let sources = source_files(root);\n\n sources\n\n .iter()\n\n .filter_map(|p| p.parent())\n\n .collect::<HashSet<_>>()\n\n .into_iter()\n\n .map(|p| p.to_path_buf())\n\n .collect()\n\n}\n\n\n", "file_path": "ethers-solc/src/utils.rs", "rank": 94, "score": 205416.3177897894 }, { "content": "/// Converts an Ethereum address to the checksum encoding\n\n/// Ref: https://github.com/ethereum/EIPs/blob/master/EIPS/eip-55.md\n\npub fn to_checksum(addr: &Address, chain_id: Option<u8>) -> String {\n\n let prefixed_addr = match chain_id {\n\n Some(chain_id) => format!(\"{}0x{:x}\", chain_id, addr),\n\n None => format!(\"{:x}\", addr),\n\n };\n\n let hash = hex::encode(keccak256(&prefixed_addr));\n\n let hash = hash.as_bytes();\n\n\n\n let addr_hex = hex::encode(addr.as_bytes());\n\n let addr_hex = addr_hex.as_bytes();\n\n\n\n addr_hex.iter().zip(hash).fold(\"0x\".to_owned(), |mut encoded, (addr, hash)| {\n\n encoded.push(if *hash >= 56 {\n\n 
addr.to_ascii_uppercase() as char\n\n } else {\n\n addr.to_ascii_lowercase() as char\n\n });\n\n encoded\n\n })\n\n}\n\n\n", "file_path": "ethers-core/src/utils/mod.rs", "rank": 95, "score": 205398.51649732026 }, { "content": "// https://github.com/tomusdrw/rust-web3/blob/befcb2fb8f3ca0a43e3081f68886fa327e64c8e6/src/api/eth_filter.rs#L20\n\npub fn interval(duration: Duration) -> impl Stream<Item = ()> + Send + Unpin {\n\n stream::unfold((), move |_| Delay::new(duration).map(|_| Some(((), ())))).map(drop)\n\n}\n\n\n\n/// The default polling interval for filters and pending transactions\n\npub const DEFAULT_POLL_INTERVAL: Duration = Duration::from_millis(7000);\n\n\n", "file_path": "ethers-providers/src/stream.rs", "rank": 96, "score": 204609.3438979464 }, { "content": "/// Calculate the function selector as per the contract ABI specification. This\n\n/// is defined as the first 4 bytes of the Keccak256 hash of the function\n\n/// signature.\n\npub fn id<S: AsRef<str>>(signature: S) -> [u8; 4] {\n\n let mut output = [0u8; 4];\n\n\n\n let mut hasher = Keccak::v256();\n\n hasher.update(signature.as_ref().as_bytes());\n\n hasher.finalize(&mut output);\n\n\n\n output\n\n}\n\n\n", "file_path": "ethers-core/src/utils/hash.rs", "rank": 97, "score": 204192.38423549326 }, { "content": "fn parse_int_param_type(s: &str) -> Option<ParamType> {\n\n let size = s.chars().skip(1).collect::<String>().parse::<usize>().ok()?;\n\n if s.starts_with('u') {\n\n Some(ParamType::Uint(size))\n\n } else if s.starts_with('i') {\n\n Some(ParamType::Int(size))\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "ethers-core/src/types/transaction/eip712.rs", "rank": 98, "score": 203475.04012837267 }, { "content": "#[cfg(not(target_arch = \"wasm32\"))]\n\nfn get_http_contract(url: &Url) -> Result<String> {\n\n let json = util::http_get(url.as_str())\n\n .with_context(|| format!(\"failed to retrieve JSON from {}\", url))?;\n\n Ok(json)\n\n}\n\n\n\n/// Retrieves a contract ABI from the Etherscan 
HTTP API and wraps it in an\n\n/// artifact JSON for compatibility with the code generation facilities.\n", "file_path": "ethers-contract/ethers-contract-abigen/src/source.rs", "rank": 99, "score": 203374.94564724874 } ]
Rust
language/bytecode-verifier/src/abstract_state.rs
meenxio/libra
da94f03225f93709f18d614a72337e41ba54b3fd
use crate::{ absint::{AbstractDomain, JoinResult}, borrow_graph::BorrowGraph, nonce::Nonce, }; use mirai_annotations::{checked_postcondition, checked_precondition, checked_verify}; use std::collections::{BTreeMap, BTreeSet}; use vm::{ file_format::{ CompiledModule, FieldDefinitionIndex, Kind, LocalIndex, SignatureToken, StructDefinitionIndex, }, views::{FunctionDefinitionView, ViewInternals}, }; #[derive(Clone, Debug, Eq, PartialEq)] pub struct TypedAbstractValue { pub signature: SignatureToken, pub value: AbstractValue, } #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum AbstractValue { Reference(Nonce), Value(Kind), } impl AbstractValue { pub fn is_reference(&self) -> bool { match self { AbstractValue::Reference(_) => true, AbstractValue::Value(_) => false, } } pub fn is_value(&self) -> bool { !self.is_reference() } pub fn is_unrestricted_value(&self) -> bool { match self { AbstractValue::Reference(_) => false, AbstractValue::Value(Kind::Unrestricted) => true, AbstractValue::Value(Kind::All) | AbstractValue::Value(Kind::Resource) => false, } } pub fn extract_nonce(&self) -> Option<Nonce> { match self { AbstractValue::Reference(nonce) => Some(*nonce), AbstractValue::Value(_) => None, } } } #[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)] pub enum LabelElem { Local(LocalIndex), Global(StructDefinitionIndex), Field(FieldDefinitionIndex), } impl Default for LabelElem { fn default() -> Self { LabelElem::Local(0) } } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct AbstractState { locals: BTreeMap<LocalIndex, TypedAbstractValue>, borrow_graph: BorrowGraph<LabelElem>, frame_root: Nonce, next_id: usize, } impl AbstractState { pub fn new(function_definition_view: FunctionDefinitionView<CompiledModule>) -> Self { let function_signature_view = function_definition_view.signature(); let mut locals = BTreeMap::new(); let mut borrow_graph = BorrowGraph::new(); for (arg_idx, arg_type_view) in function_signature_view.arg_tokens().enumerate() { if 
arg_type_view.is_reference() { let nonce = Nonce::new(arg_idx); borrow_graph.add_nonce(nonce); locals.insert( arg_idx as LocalIndex, TypedAbstractValue { signature: arg_type_view.as_inner().clone(), value: AbstractValue::Reference(nonce), }, ); } else { let arg_kind = arg_type_view .kind(&function_definition_view.signature().as_inner().type_formals); locals.insert( arg_idx as LocalIndex, TypedAbstractValue { signature: arg_type_view.as_inner().clone(), value: AbstractValue::Value(arg_kind), }, ); } } let frame_root = Nonce::new(function_definition_view.locals_signature().len()); borrow_graph.add_nonce(frame_root); let next_id = frame_root.inner() + 1; AbstractState { locals, borrow_graph, frame_root, next_id, } } pub fn is_available(&self, idx: LocalIndex) -> bool { self.locals.contains_key(&idx) } pub fn local(&self, idx: LocalIndex) -> &TypedAbstractValue { &self.locals[&idx] } pub fn remove_local(&mut self, idx: LocalIndex) -> TypedAbstractValue { self.locals.remove(&idx).unwrap() } pub fn insert_local(&mut self, idx: LocalIndex, abs_type: TypedAbstractValue) { self.locals.insert(idx, abs_type); } pub fn is_local_safe_to_destroy(&self, idx: LocalIndex) -> bool { match self.locals[&idx].value { AbstractValue::Reference(_) => true, AbstractValue::Value(Kind::All) | AbstractValue::Value(Kind::Resource) => false, AbstractValue::Value(Kind::Unrestricted) => !self.is_local_borrowed(idx), } } pub fn is_frame_safe_to_destroy(&self) -> bool { self.locals .values() .all(|x| x.value.is_unrestricted_value()) && !self.is_nonce_borrowed(self.frame_root) } pub fn destroy_local(&mut self, idx: LocalIndex) { checked_precondition!(self.is_local_safe_to_destroy(idx)); let local = self.locals.remove(&idx).unwrap(); match local.value { AbstractValue::Reference(nonce) => self.remove_nonce(nonce), AbstractValue::Value(kind) => { checked_verify!(kind == Kind::Unrestricted); } } } pub fn add_nonce(&mut self) -> Nonce { let nonce = Nonce::new(self.next_id); 
self.borrow_graph.add_nonce(nonce); self.next_id += 1; nonce } pub fn remove_nonce(&mut self, nonce: Nonce) { self.borrow_graph.remove_nonce(nonce); } pub fn is_nonce_borrowed(&self, nonce: Nonce) -> bool { !self.borrow_graph.all_borrows(nonce).is_empty() } pub fn is_local_borrowed(&self, idx: LocalIndex) -> bool { !self .borrow_graph .consistent_borrows(self.frame_root, LabelElem::Local(idx)) .is_empty() } pub fn is_global_borrowed(&self, idx: StructDefinitionIndex) -> bool { !self .borrow_graph .consistent_borrows(self.frame_root, LabelElem::Global(idx)) .is_empty() } pub fn is_nonce_freezable(&self, nonce: Nonce) -> bool { let borrows = self.borrow_graph.all_borrows(nonce); self.all_nonces_immutable(borrows) } pub fn borrow_global_value(&mut self, mut_: bool, idx: StructDefinitionIndex) -> Option<Nonce> { if mut_ { if self.is_global_borrowed(idx) { return None; } } else { let borrowed_nonces = self .borrow_graph .consistent_borrows(self.frame_root, LabelElem::Global(idx)); if !self.all_nonces_immutable(borrowed_nonces) { return None; } } let new_nonce = self.add_nonce(); self.borrow_graph .add_weak_edge(self.frame_root, vec![LabelElem::Global(idx)], new_nonce); Some(new_nonce) } pub fn borrow_field_from_nonce( &mut self, operand: &TypedAbstractValue, mut_: bool, idx: FieldDefinitionIndex, ) -> Option<Nonce> { let nonce = operand.value.extract_nonce().unwrap(); if mut_ { if !self.borrow_graph.nil_borrows(nonce).is_empty() { return None; } } else if operand.signature.is_mutable_reference() { let borrowed_nonces = self .borrow_graph .consistent_borrows(nonce, LabelElem::Field(idx)); if !self.all_nonces_immutable(borrowed_nonces) { return None; } } let new_nonce = self.add_nonce(); self.borrow_graph .add_strong_edge(nonce, vec![LabelElem::Field(idx)], new_nonce); Some(new_nonce) } pub fn borrow_local_value(&mut self, mut_: bool, idx: LocalIndex) -> Option<Nonce> { checked_precondition!(self.locals[&idx].value.is_value()); if !mut_ { let borrowed_nonces = self 
.borrow_graph .consistent_borrows(self.frame_root, LabelElem::Local(idx)); if !self.all_nonces_immutable(borrowed_nonces) { return None; } } let new_nonce = self.add_nonce(); self.borrow_graph .add_strong_edge(self.frame_root, vec![LabelElem::Local(idx)], new_nonce); Some(new_nonce) } pub fn borrow_from_local_reference(&mut self, idx: LocalIndex) -> Nonce { checked_precondition!(self.locals[&idx].value.is_reference()); let new_nonce = self.add_nonce(); self.borrow_graph.add_strong_edge( self.locals[&idx].value.extract_nonce().unwrap(), vec![], new_nonce, ); new_nonce } pub fn borrow_from_nonces(&mut self, to_borrow_from: &BTreeSet<Nonce>) -> Nonce { let new_nonce = self.add_nonce(); for nonce in to_borrow_from { self.borrow_graph.add_weak_edge(*nonce, vec![], new_nonce); } new_nonce } pub fn construct_canonical_state(&self) -> Self { let mut new_locals = BTreeMap::new(); let mut nonce_map = BTreeMap::new(); for (idx, abs_type) in &self.locals { if let AbstractValue::Reference(nonce) = abs_type.value { let new_nonce = Nonce::new(*idx as usize); new_locals.insert( *idx, TypedAbstractValue { signature: abs_type.signature.clone(), value: AbstractValue::Reference(new_nonce), }, ); nonce_map.insert(nonce, new_nonce); } else { new_locals.insert(*idx, abs_type.clone()); } } nonce_map.insert(self.frame_root, self.frame_root); let canonical_state = AbstractState { locals: new_locals, borrow_graph: self.borrow_graph.rename_nonces(nonce_map), frame_root: self.frame_root, next_id: self.frame_root.inner() + 1, }; checked_postcondition!(canonical_state.is_canonical()); canonical_state } fn all_nonces_immutable(&self, borrows: BTreeSet<Nonce>) -> bool { !self.locals.values().any(|abs_type| { abs_type.signature.is_mutable_reference() && borrows.contains(&abs_type.value.extract_nonce().unwrap()) }) } fn is_canonical(&self) -> bool { self.locals.iter().all(|(x, y)| { !y.value.is_reference() || Nonce::new(*x as usize) == y.value.extract_nonce().unwrap() }) } fn 
borrowed_value_unavailable(state1: &AbstractState, state2: &AbstractState) -> bool { state1.locals.keys().any(|idx| { state1.locals[idx].value.is_value() && state1.is_local_borrowed(*idx) && !state2.locals.contains_key(idx) }) } fn split_locals( locals: &BTreeMap<LocalIndex, TypedAbstractValue>, values: &mut BTreeMap<LocalIndex, Kind>, references: &mut BTreeMap<LocalIndex, Nonce>, ) { for (idx, abs_type) in locals { match abs_type.value { AbstractValue::Reference(nonce) => { references.insert(idx.clone(), nonce); } AbstractValue::Value(kind) => { values.insert(idx.clone(), kind); } } } } } impl AbstractDomain for AbstractState { fn join(&mut self, state: &AbstractState) -> JoinResult { checked_precondition!(self.is_canonical() && state.is_canonical()); if self .locals .keys() .filter(|idx| !self.locals[idx].value.is_unrestricted_value()) .collect::<BTreeSet<_>>() != state .locals .keys() .filter(|idx| !state.locals[idx].value.is_unrestricted_value()) .collect::<BTreeSet<_>>() { return JoinResult::Error; } if Self::borrowed_value_unavailable(self, state) || Self::borrowed_value_unavailable(state, self) { return JoinResult::Error; } let mut values1 = BTreeMap::new(); let mut references1 = BTreeMap::new(); Self::split_locals(&self.locals, &mut values1, &mut references1); let mut values2 = BTreeMap::new(); let mut references2 = BTreeMap::new(); Self::split_locals(&state.locals, &mut values2, &mut references2); checked_verify!(references1 == references2); let mut locals = BTreeMap::new(); for (idx, nonce) in &references1 { locals.insert( idx.clone(), TypedAbstractValue { signature: self.locals[idx].signature.clone(), value: AbstractValue::Reference(*nonce), }, ); } for (idx, kind1) in &values1 { if let Some(kind2) = values2.get(idx) { checked_verify!(kind1 == kind2); locals.insert( idx.clone(), TypedAbstractValue { signature: self.locals[idx].signature.clone(), value: AbstractValue::Value(*kind1), }, ); } } let locals_unchanged = self.locals.keys().all(|idx| 
locals.contains_key(idx)); let borrow_graph_unchanged = self.borrow_graph.abstracts(&state.borrow_graph); if locals_unchanged && borrow_graph_unchanged { JoinResult::Unchanged } else { self.locals = locals; self.borrow_graph.join(&state.borrow_graph); JoinResult::Changed } } }
use crate::{ absint::{AbstractDomain, JoinResult}, borrow_graph::BorrowGraph, nonce::Nonce, }; use mirai_annotations::{checked_postcondition, checked_precondition, checked_verify}; use std::collections::{BTreeMap, BTreeSet}; use vm::{ file_format::{ CompiledModule, FieldDefinitionIndex, Kind, LocalIndex, SignatureToken, StructDefinitionIndex, }, views::{FunctionDefinitionView, ViewInternals}, }; #[derive(Clone, Debug, Eq, PartialEq)] pub struct TypedAbstractValue { pub signature: SignatureToken, pub value: AbstractValue, } #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum AbstractValue { Reference(Nonce), Value(Kind), } impl AbstractValue { pub fn is_reference(&self) -> bool { match self { AbstractValue::Reference(_) => true, AbstractValue::Value(_) => false, } } pub fn is_value(&self) -> bool { !self.is_reference() } pub fn is_unrestricted_value(&self) -> bool { match self { AbstractValue::Reference(_) => false, AbstractValue::Value(Kind::Unrestricted) => true, AbstractValue::Value(Kind::All) | AbstractValue::Value(Kind::Resource) => false, } } pub fn extract_nonce(&self) -> Option<Nonce> { match self { AbstractValue::Reference(nonce) => Some(*nonce), AbstractValue::Value(_) => None, } } } #[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)] pub enum LabelElem { Local(LocalIndex), Global(StructDefinitionIndex), Field(FieldDefinitionIndex), } impl Default for LabelElem { fn default() -> Self { LabelElem::Local(0) } } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct AbstractState { locals: BTreeMap<LocalIndex, TypedAbstractValue>, borrow_graph: BorrowGraph<LabelElem>, frame_root: Nonce, next_id: usize, } impl AbstractState { pub fn new(function_definition_view: FunctionDefinitionView<CompiledModule>) -> Self { let function_signature_view = function_definition_view.signature(); let mut locals = BTreeMap::new(); let mut borrow_graph = BorrowGraph::new(); for (arg_idx, arg_type_view) in function_signature_view.arg_tokens().enumerate() { if 
arg_type_view.is_reference() { let nonce = Nonce::new(arg_idx); borrow_graph.add_nonce(nonce); locals.insert( arg_idx as LocalIndex, TypedAbstractValue { signature: arg_type_view.as_inner().clone(), value: AbstractValue::Reference(nonce), }, ); } else { let arg_kind = arg_type_view .kind(&function_definition_view.signature().as_inner().type_formals); locals.insert( arg_idx as LocalIndex, TypedAbstractValue { signature: arg_type_view.as_inner().clone(), value: AbstractValue::Value(arg_kind), }, ); } } let frame_root = Nonce::new(function_definition_view.locals_signature().len()); borrow_graph.add_nonce(frame_root); let next_id = frame_root.inner() + 1; AbstractState { locals, borrow_graph, frame_root, next_id, } } pub fn is_available(&self, idx: LocalIndex) -> bool { self.locals.contains_key(&idx) } pub fn local(&self, idx: LocalIndex) -> &TypedAbstractValue { &self.locals[&idx] } pub fn remove_local(&mut self, idx: LocalIndex) -> TypedAbstractValue { self.locals.remove(&idx).unwrap() } pub fn insert_local(&mut self, idx: LocalIndex, abs_type: TypedAbstractValue) { self.locals.insert(idx, abs_type); } pub fn is_local_safe_to_destroy(&self, idx: LocalIndex) -> bool { match self.locals[&idx].value { AbstractValue::Reference(_) => true, AbstractValue::Value(Kind::All) | AbstractValue::Value(Kind::Resource) => false, AbstractValue::Value(Kind::Unrestricted) => !self.is_local_borrowed(idx), } } pub fn is_frame_safe_to_destroy(&self) -> bool { self.locals .values() .all(|x| x.value.is_unrestricted_value()) && !self.is_nonce_borrowed(self.frame_root) } pub fn destroy_local(&mut self, idx: LocalIndex) { checked_precondition!(self.is_local_safe_to_destroy(idx)); let local = self.locals.remove(&idx).unwrap(); match local.value { AbstractValue::Reference(nonce) => self.remove_nonce(nonce), AbstractValue::Value(kind) => { checked_verify!(kind == Kind::Unrestricted); } } } pub fn add_nonce(&mut self) -> Nonce { let nonce = Nonce::new(self.next_id); 
self.borrow_graph.add_nonce(nonce); self.next_id += 1; nonce } pub fn remove_nonce(&mut self, nonce: Nonce) { self.borrow_graph.remove_nonce(nonce); } pub fn is_nonce_borrowed(&self, nonce: Nonce) -> bool { !self.borrow_graph.all_borrows(nonce).is_empty() } pub fn is_local_borrowed(&self, idx: LocalIndex) -> bool { !self .borrow_graph .consistent_borrows(self.frame_root, LabelElem::Local(idx)) .is_empty() } pub fn is_global_borrowed(&self, idx: StructDefinitionIndex) -> bool { !self .borrow_graph .consistent_borrows(self.frame_root, LabelElem::Global(idx)) .is_empty() } pub fn is_nonce_freezable(&self, nonce: Nonce) -> bool { let borrows = self.borrow_graph.all_borrows(nonce); self.all_nonces_immutable(borrows) } pub fn borrow_global_value(&mut self, mut_: bool, idx: StructDefinitionIndex) -> Option<Nonce> { if mut_ { if self.is_global_borrowed(idx) { return None; } } else { let borrowed_nonces = self .borrow_graph .consistent_borrows(self.frame_root, LabelElem::Global(idx)); if !self.all_nonces_immutable(borrowed_nonces) { return None; } } let new_nonce = self.add_nonce(); self.borrow_graph .add_weak_edge(self.frame_root, vec![LabelElem::Global(idx)], new_nonce); Some(new_nonce) } pub fn borrow_field_from_nonce( &mut self, operand: &TypedAbstractValue, mut_: bool, idx: FieldDefinitionIndex, ) -> Option<Nonce> { let nonce = operand.value.extract_nonce().unwrap(); if mut_ { if !self.borrow_graph.nil_borrows(nonce).is_empty() { return None; } } else if operand.signature.is_mutable_reference() { let borrowed_nonces = self .borrow_graph .consistent_borrows(nonce, LabelElem::Field(idx)); if !self.all_nonces_immutable(borrowed_nonces) { return None; } } let new_nonce = self.add_nonce(); self.borrow_graph .add_strong_edge(nonce, vec![LabelElem::Field(idx)], new_nonce); Some(new_nonce) } pub fn borrow_local_value(&mut self, mut_: bool, idx: LocalIndex) -> Option<Nonce> { checked_precondition!(self.locals[&idx].value.is_value()); if !mut_ { let borrowed_nonces = self 
.borrow_graph .consistent_borrows(self.frame_root, LabelElem::Local(idx)); if !self.all_nonces_immutable(borrowed_nonces) { return None; } } let new_nonce = self.add_nonce(); self.borrow_graph .add_strong_edge(self.frame_root, vec![LabelElem::Local(idx)], new_nonce); Some(new_nonce) } pub fn borrow_from_local_reference(&mut self, idx: LocalIndex) -> Nonce { checked_precondition!(self.locals[&idx].value.is_reference()); let new_nonce = self.add_nonce(); self.borrow_graph.add_strong_edge( self.locals[&idx].value.extract_nonce().unwrap(), vec![], new_nonce, ); new_nonce } pub fn borrow_from_nonces(&mut self, to_borrow_from: &BTreeSet<Nonce>) -> Nonce { let new_nonce = self.add_nonce(); for nonce in to_borrow_from { self.borrow_graph.add_weak_edge(*nonce, vec![], new_nonce); } new_nonce } pub fn construct_canonical_state(&self) -> Self { let mut new_locals = BTreeMap::new(); let mut nonce_map = BTreeMap::new(); for (idx, abs_type) in &self.locals { if let AbstractValue::Reference(nonce) = abs_type.value { let new_nonce = Nonce::new(*idx as usize); new_locals.insert( *idx, TypedAbstractValue { signature: abs_type.signature.clone(), value: AbstractValue::Reference(new_nonce), }, ); nonce_map.insert(nonce, new_nonce); } else { new_locals.insert(*idx, abs_type.clone()); } } nonce_map.insert(self.frame_root, self.frame_root); let canonical_state = AbstractState { locals: new_locals, borrow_graph: self.borrow_graph.rename_nonces(nonce_map), frame_root: self.frame_root, next_id: self.frame_root.inner() + 1, }; checked_postcondition!(canonical_state.is_canonical()); canonical_state } fn all_nonces_immutable(&self, borrows: BTreeSet<Nonce>) -> bool { !self.locals.values().any(|abs_type| { abs_type.signature.is_mutable_reference() && borrows.contains(&abs_type.value.extract_nonce().unwrap()) }) } fn is_canonical(&self) -> bool { self.locals.iter().all(|(x, y)| { !y.value.is_reference() || Nonce::new(*x as usize) == y.value.extract_nonce().unwrap() }) } fn 
borrowed_value_unavailable(state1: &AbstractState, state2: &AbstractState) -> bool {
fn split_locals( locals: &BTreeMap<LocalIndex, TypedAbstractValue>, values: &mut BTreeMap<LocalIndex, Kind>, references: &mut BTreeMap<LocalIndex, Nonce>, ) { for (idx, abs_type) in locals { match abs_type.value { AbstractValue::Reference(nonce) => { references.insert(idx.clone(), nonce); } AbstractValue::Value(kind) => { values.insert(idx.clone(), kind); } } } } } impl AbstractDomain for AbstractState { fn join(&mut self, state: &AbstractState) -> JoinResult { checked_precondition!(self.is_canonical() && state.is_canonical()); if self .locals .keys() .filter(|idx| !self.locals[idx].value.is_unrestricted_value()) .collect::<BTreeSet<_>>() != state .locals .keys() .filter(|idx| !state.locals[idx].value.is_unrestricted_value()) .collect::<BTreeSet<_>>() { return JoinResult::Error; } if Self::borrowed_value_unavailable(self, state) || Self::borrowed_value_unavailable(state, self) { return JoinResult::Error; } let mut values1 = BTreeMap::new(); let mut references1 = BTreeMap::new(); Self::split_locals(&self.locals, &mut values1, &mut references1); let mut values2 = BTreeMap::new(); let mut references2 = BTreeMap::new(); Self::split_locals(&state.locals, &mut values2, &mut references2); checked_verify!(references1 == references2); let mut locals = BTreeMap::new(); for (idx, nonce) in &references1 { locals.insert( idx.clone(), TypedAbstractValue { signature: self.locals[idx].signature.clone(), value: AbstractValue::Reference(*nonce), }, ); } for (idx, kind1) in &values1 { if let Some(kind2) = values2.get(idx) { checked_verify!(kind1 == kind2); locals.insert( idx.clone(), TypedAbstractValue { signature: self.locals[idx].signature.clone(), value: AbstractValue::Value(*kind1), }, ); } } let locals_unchanged = self.locals.keys().all(|idx| locals.contains_key(idx)); let borrow_graph_unchanged = self.borrow_graph.abstracts(&state.borrow_graph); if locals_unchanged && borrow_graph_unchanged { JoinResult::Unchanged } else { self.locals = locals; 
self.borrow_graph.join(&state.borrow_graph); JoinResult::Changed } } }
state1.locals.keys().any(|idx| { state1.locals[idx].value.is_value() && state1.is_local_borrowed(*idx) && !state2.locals.contains_key(idx) }) }
function_block-function_prefix_line
[ { "content": "/// Get the StructTag for a StructDefinition defined in a published module.\n\npub fn resource_storage_key(module: &impl ModuleAccess, idx: StructDefinitionIndex) -> StructTag {\n\n let resource = module.struct_def_at(idx);\n\n let res_handle = module.struct_handle_at(resource.struct_handle);\n\n let res_module = module.module_handle_at(res_handle.module);\n\n let res_name = module.identifier_at(res_handle.name);\n\n let res_mod_addr = module.address_at(res_module.address);\n\n let res_mod_name = module.identifier_at(res_module.name);\n\n StructTag {\n\n module: res_mod_name.into(),\n\n address: *res_mod_addr,\n\n name: res_name.into(),\n\n type_params: vec![],\n\n }\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/src/identifier.rs", "rank": 0, "score": 436244.24141005817 }, { "content": "pub fn append_err_info(status: VMStatus, kind: IndexKind, idx: usize) -> VMStatus {\n\n let msg = format!(\"at index {} while indexing {}\", idx, kind);\n\n status.append_message_with_separator(' ', msg)\n\n}\n\n\n", "file_path": "language/vm/src/errors.rs", "rank": 1, "score": 413552.3784244198 }, { "content": "pub fn verification_error(kind: IndexKind, idx: usize, err: StatusCode) -> VMStatus {\n\n let msg = format!(\"at index {} while indexing {}\", idx, kind);\n\n VMStatus::new(err).with_message(msg)\n\n}\n\n\n", "file_path": "language/vm/src/errors.rs", "rank": 2, "score": 410630.56885825354 }, { "content": "/// Checks if the given type is well defined in the given context. 
References are only permitted\n\n/// at the top level.\n\nfn check_signature(context: (&[StructHandle], &[Kind]), ty: &SignatureToken) -> Vec<VMStatus> {\n\n use SignatureToken::*;\n\n\n\n match ty {\n\n Reference(inner) | MutableReference(inner) => check_signature_no_refs(context, inner),\n\n _ => check_signature_no_refs(context, ty),\n\n }\n\n}\n", "file_path": "language/bytecode-verifier/src/signature.rs", "rank": 3, "score": 410019.0681894509 }, { "content": "pub fn bounds_error(kind: IndexKind, idx: usize, len: usize, err: StatusCode) -> VMStatus {\n\n let msg = format!(\n\n \"Index {} out of bounds for {} while indexing {}\",\n\n idx, len, kind\n\n );\n\n VMStatus::new(err).with_message(msg)\n\n}\n\n\n", "file_path": "language/vm/src/errors.rs", "rank": 4, "score": 406309.8515558716 }, { "content": "/// Offers the genesis block.\n\npub fn leaf_strategy() -> impl Strategy<Value = Block<Vec<usize>>> {\n\n genesis_strategy().boxed()\n\n}\n\n\n\nprop_compose! {\n\n /// This produces a block with an invalid id (and therefore signature)\n\n /// given a valid block\n\n pub fn fake_id(block_strategy: impl Strategy<Value = Block<Vec<usize>>>)\n\n (fake_id in HashValue::arbitrary(),\n\n block in block_strategy) -> Block<Vec<usize>> {\n\n Block {\n\n id: fake_id,\n\n block_data: BlockData::new_proposal(\n\n block.payload().unwrap().clone(),\n\n block.author().unwrap(),\n\n block.round(),\n\n get_current_timestamp().as_micros() as u64,\n\n block.quorum_cert().clone(),\n\n ),\n\n signature: Some(block.signature().unwrap().clone()),\n", "file_path": "consensus/consensus-types/src/block_test_utils.rs", "rank": 5, "score": 391403.8761480023 }, { "content": "/// This produces the genesis block\n\npub fn genesis_strategy() -> impl Strategy<Value = Block<Vec<usize>>> {\n\n Just(Block::make_genesis_block())\n\n}\n\n\n\nprop_compose! 
{\n\n /// This produces an unmoored block, with arbitrary parent & QC ancestor\n\n pub fn unmoored_block(ancestor_id_strategy: impl Strategy<Value = HashValue>)(\n\n ancestor_id in ancestor_id_strategy,\n\n )(\n\n block in new_proposal(\n\n ancestor_id,\n\n Round::arbitrary(),\n\n proptests::arb_signer(),\n\n certificate_for_genesis(),\n\n )\n\n ) -> Block<Vec<usize>> {\n\n block\n\n }\n\n}\n\n\n", "file_path": "consensus/consensus-types/src/block_test_utils.rs", "rank": 6, "score": 391403.8761480023 }, { "content": "#[inline]\n\npub fn pick_slice_idxs(max: usize, indexes: &[impl AsRef<PropIndex>]) -> Vec<usize> {\n\n pick_idxs(max, indexes, indexes.len())\n\n}\n\n\n\n/// Wrapper for `proptest`'s [`Index`][proptest::sample::Index] that allows `AsRef` to work.\n\n///\n\n/// There is no blanket `impl<T> AsRef<T> for T`, so `&[PropIndex]` doesn't work with\n\n/// `&[impl AsRef<PropIndex>]` (unless an impl gets added upstream). `Index` does.\n\n#[derive(Arbitrary, Clone, Copy, Debug)]\n\npub struct Index(PropIndex);\n\n\n\nimpl AsRef<PropIndex> for Index {\n\n fn as_ref(&self) -> &PropIndex {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Deref for Index {\n\n type Target = PropIndex;\n\n\n\n fn deref(&self) -> &PropIndex {\n\n &self.0\n\n }\n\n}\n", "file_path": "common/proptest-helpers/src/lib.rs", "rank": 7, "score": 385531.8609458755 }, { "content": "/// Determine the abstract value at `index` is of the given kind, if it exists.\n\n/// If it does not exist, return `false`.\n\npub fn stack_kind_is(state: &AbstractState, index: usize, kind: Kind) -> bool {\n\n if index < state.stack_len() {\n\n match state.stack_peek(index) {\n\n Some(abstract_value) => {\n\n return abstract_value.kind == kind;\n\n }\n\n None => return false,\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 8, "score": 373694.8964119103 }, { "content": "/// Check whether the local at `index` is of the given kind\n\npub fn local_kind_is(state: 
&AbstractState, index: u8, kind: Kind) -> bool {\n\n state\n\n .local_kind_is(index as usize, kind)\n\n .unwrap_or_else(|_| false)\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 9, "score": 373691.40563469904 }, { "content": "fn load_kinds(cursor: &mut Cursor<&[u8]>) -> BinaryLoaderResult<Vec<Kind>> {\n\n let len = read_uleb_u16_internal(cursor)?;\n\n let mut kinds = vec![];\n\n for _ in 0..len {\n\n kinds.push(load_kind(cursor)?);\n\n }\n\n Ok(kinds)\n\n}\n\n\n", "file_path": "language/vm/src/deserializer.rs", "rank": 10, "score": 367154.4241861114 }, { "content": "// Generate some random, well-formed, unsigned-varint length-prefixed byte arrays\n\n// for our fuzzer corpus to act as serialized inbound rpc calls.\n\npub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> {\n\n let small_data_strat = vec(any::<u8>(), 0..MAX_SMALL_MSG_BYTES);\n\n let medium_data_strat = vec(any::<u8>(), 0..MAX_MEDIUM_MSG_BYTES);\n\n\n\n // bias corpus generation to prefer small message sizes\n\n let data_strat = prop_oneof![small_data_strat, medium_data_strat];\n\n\n\n let length_prefixed_data_strat = data_strat.prop_map(|data| {\n\n let max_len = data.len() + MAX_UVI_PREFIX_BYTES;\n\n let mut buf = bytes05::BytesMut::with_capacity(max_len);\n\n let mut codec = LengthDelimitedCodec::new();\n\n codec\n\n .encode(bytes05::Bytes::from(data), &mut buf)\n\n .expect(\"Failed to create uvi-prefixed data for corpus\");\n\n buf.freeze().to_vec()\n\n });\n\n\n\n gen.generate(length_prefixed_data_strat)\n\n}\n\n\n", "file_path": "network/src/protocols/rpc/fuzzing.rs", "rank": 11, "score": 362230.1838314627 }, { "content": "pub fn is_struct_vector(module: &VerifiedModule, idx: StructHandleIndex) -> bool {\n\n let struct_handle = module.struct_handle_at(idx);\n\n let struct_handle_view = StructHandleView::new(module, struct_handle);\n\n let module_name = module.identifier_at(struct_handle_view.module_handle().name);\n\n let module_address = 
module.address_at(struct_handle_view.module_handle().address);\n\n module_name.to_string() == \"Vector\"\n\n && *module_address == AccountAddress::from_hex_literal(\"0x0\").unwrap()\n\n}\n\n\n", "file_path": "language/move-prover/bytecode-to-boogie/src/translator.rs", "rank": 12, "score": 360355.51370521163 }, { "content": "fn load_signature_tokens(cursor: &mut Cursor<&[u8]>) -> BinaryLoaderResult<Vec<SignatureToken>> {\n\n let len = read_uleb_u16_internal(cursor)?;\n\n let mut tokens = vec![];\n\n for _ in 0..len {\n\n tokens.push(load_signature_token(cursor)?);\n\n }\n\n Ok(tokens)\n\n}\n\n\n", "file_path": "language/vm/src/deserializer.rs", "rank": 13, "score": 358326.56321259885 }, { "content": "pub fn remap_set<T: Copy + Ord>(set: &mut BTreeSet<T>, id_map: &BTreeMap<T, T>) {\n\n for (old, new) in id_map {\n\n if set.remove(&old) {\n\n set.insert(*new);\n\n }\n\n }\n\n}\n", "file_path": "language/borrow-graph/src/shared.rs", "rank": 14, "score": 357647.4173259168 }, { "content": "/// Convert the transaction arguments into move values.\n\npub fn convert_txn_args(args: Vec<TransactionArgument>) -> Vec<Value> {\n\n args.into_iter()\n\n .map(|arg| match arg {\n\n TransactionArgument::U64(i) => Value::u64(i),\n\n TransactionArgument::Address(a) => Value::address(a),\n\n TransactionArgument::Bool(b) => Value::bool(b),\n\n TransactionArgument::ByteArray(b) => Value::byte_array(b),\n\n TransactionArgument::String(s) => Value::string(VMString::new(s)),\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/src/txn_executor.rs", "rank": 15, "score": 356104.5312227955 }, { "content": "pub fn assert_single_value(mut values: Values) -> Value {\n\n assert!(values.len() == 1);\n\n values.pop().unwrap()\n\n}\n\n\n\nimpl Value {\n\n pub fn is_ref(&self) -> bool {\n\n match self {\n\n Value::Ref(_) => true,\n\n Value::NonRef => false,\n\n }\n\n }\n\n\n\n fn remap_refs(&mut self, id_map: &BTreeMap<RefID, RefID>) {\n\n match self {\n\n Value::Ref(id) if 
id_map.contains_key(id) => *id = id_map[id],\n\n _ => (),\n\n }\n\n }\n\n}\n", "file_path": "language/move-lang/src/cfgir/borrows/state.rs", "rank": 16, "score": 354224.9171394574 }, { "content": "/// Determine whether an abstract value on the stack and a abstract value in the locals have the\n\n/// same type\n\npub fn stack_local_polymorphic_eq(state: &AbstractState, index1: usize, index2: usize) -> bool {\n\n if stack_has(state, index1, None) {\n\n if let Some((abstract_value, _)) = state.local_get(index2) {\n\n return state.stack_peek(index1) == Some(abstract_value.clone());\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 17, "score": 353534.7349949696 }, { "content": "/// generate_corpus produces an arbitrary SubmitTransactionRequest for admission control\n\npub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> {\n\n // use proptest to generate a SignedTransaction\n\n let signed_txn = gen.generate(proptest::arbitrary::any::<SignedTransaction>());\n\n // wrap it in a SubmitTransactionRequest\n\n let mut req = SubmitTransactionRequest::default();\n\n req.transaction = Some(signed_txn.into());\n\n\n\n let mut bytes = bytes::BytesMut::with_capacity(req.encoded_len());\n\n req.encode(&mut bytes).unwrap();\n\n bytes.to_vec()\n\n}\n\n\n", "file_path": "admission_control/admission-control-service/src/admission_control_fuzzing.rs", "rank": 18, "score": 344333.6842593653 }, { "content": "#[inline]\n\nfn check_code_unit_bounds_impl<T, I>(pool: &[T], bytecode_offset: usize, idx: I) -> Vec<VMStatus>\n\nwhere\n\n I: ModuleIndex,\n\n{\n\n let idx = idx.into_index();\n\n let len = pool.len();\n\n if idx >= len {\n\n let status = bytecode_offset_err(\n\n I::KIND,\n\n idx,\n\n len,\n\n bytecode_offset,\n\n StatusCode::INDEX_OUT_OF_BOUNDS,\n\n );\n\n vec![status]\n\n } else {\n\n vec![]\n\n }\n\n}\n\n\n", "file_path": "language/vm/src/check_bounds.rs", "rank": 19, "score": 341809.1349055265 }, { "content": "pub fn 
impl_enum_signature(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n private_key_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let priv_kt: syn::Type = private_key_type.parse().unwrap();\n\n let pub_kt: syn::Type = public_key_type.parse().unwrap();\n\n let mut res = impl_enum_tryfrom(name, variants);\n\n let to_bytes_arms = match_enum_to_bytes(name, variants);\n\n\n\n let mut match_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n match_arms.extend(quote! {\n\n (#name::#variant_ident(sig), #pub_kt::#variant_ident(pk)) => {\n\n sig.verify_arbitrary_msg(message, pk)\n\n }\n\n })\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 20, "score": 338921.03942189063 }, { "content": "#[allow(dead_code)]\n\nfn get_mut_vector(v: &mut NativeStructValue) -> VMResult<&mut NativeVector> {\n\n match v {\n\n NativeStructValue::Vector(v) => Ok(v),\n\n }\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/vm-runtime-types/src/native_structs/vector.rs", "rank": 21, "score": 336185.8510267305 }, { "content": "fn type_parameters(_context: &mut Context, pty_params: Vec<(Name, Kind)>) -> Vec<(Name, Kind)> {\n\n // TODO aliasing will need to happen here at some point\n\n pty_params\n\n}\n\n\n", "file_path": "language/move-lang/src/expansion/translate.rs", "rank": 22, "score": 335055.58250557864 }, { "content": "pub fn assert_status_eq(s1: &VMStatus, s2: &VMStatus) -> bool {\n\n assert_eq!(s1.major_status, s2.major_status);\n\n assert_eq!(s1.sub_status, s2.sub_status);\n\n true\n\n}\n\n\n", "file_path": "language/e2e-tests/src/lib.rs", "rank": 23, "score": 332159.95872621145 }, { "content": "fn compute_root_hash_impl(kvs: Vec<(&[bool], HashValue)>) -> HashValue {\n\n assert!(!kvs.is_empty());\n\n\n\n // If there is only one entry, it is the root.\n\n if kvs.len() == 1 {\n\n return kvs[0].1;\n\n }\n\n\n\n // Otherwise the tree has more than one leaves, which means we can find which ones 
are in the\n\n // left subtree and which ones are in the right subtree. So we find the first key that starts\n\n // with a 1-bit.\n\n let left_hash;\n\n let right_hash;\n\n match kvs.iter().position(|(key, _value)| key[0]) {\n\n Some(0) => {\n\n // Every key starts with a 1-bit, i.e., they are all in the right subtree.\n\n left_hash = *SPARSE_MERKLE_PLACEHOLDER_HASH;\n\n right_hash = compute_root_hash_impl(reduce(&kvs));\n\n }\n\n Some(index) => {\n", "file_path": "storage/jellyfish-merkle/src/jellyfish_merkle_test.rs", "rank": 24, "score": 330793.69717654807 }, { "content": "pub fn code(blocks: &mut Vec<Vec<F::Bytecode>>) {\n\n let mut changed = true;\n\n while changed {\n\n let fall_through_removed = remove_fall_through(blocks);\n\n let block_removed = remove_empty_blocks(blocks);\n\n changed = fall_through_removed || block_removed;\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/to_bytecode/remove_fallthrough_jumps.rs", "rank": 25, "score": 330752.30581211555 }, { "content": "fn remove_fall_through(blocks: &mut Vec<Vec<F::Bytecode>>) -> bool {\n\n use F::Bytecode as B;\n\n let mut changed = false;\n\n for (label, block) in blocks.iter_mut().enumerate() {\n\n let next_block: u16 = (label + 1).try_into().unwrap();\n\n let remove_last = match block.last().unwrap() {\n\n B::Branch(lbl) if lbl == &next_block => true,\n\n _ => false,\n\n };\n\n if remove_last {\n\n changed = true;\n\n block.pop();\n\n }\n\n }\n\n changed\n\n}\n\n\n", "file_path": "language/move-lang/src/to_bytecode/remove_fallthrough_jumps.rs", "rank": 26, "score": 328620.8650560328 }, { "content": "fn kinds<'a>(ks: impl Iterator<Item = &'a Kind>) -> Vec<F::Kind> {\n\n ks.map(kind).collect()\n\n}\n\n\n", "file_path": "language/move-lang/src/to_bytecode/translate.rs", "rank": 27, "score": 327829.0379366485 }, { "content": "/// This function checks the extra requirements on the signature of the main function of a script.\n\npub fn verify_main_signature(script: &CompiledScript) -> Vec<VMStatus> 
{\n\n let function_handle = &script.function_handle_at(script.main().function);\n\n let function_signature = &script.function_signature_at(function_handle.signature);\n\n if !function_signature.return_types.is_empty() {\n\n return vec![VMStatus::new(StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE)];\n\n }\n\n for arg_type in &function_signature.arg_types {\n\n if !arg_type.is_primitive() {\n\n return vec![VMStatus::new(StatusCode::INVALID_MAIN_FUNCTION_SIGNATURE)];\n\n }\n\n }\n\n vec![]\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/verifier.rs", "rank": 28, "score": 326551.2800434367 }, { "content": "/// Determine if a struct at the given index is a resource\n\npub fn struct_is_resource(state: &AbstractState, struct_index: StructDefinitionIndex) -> bool {\n\n let struct_def = state.module.struct_def_at(struct_index);\n\n StructDefinitionView::new(&state.module, struct_def).is_nominal_resource()\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 29, "score": 325617.69122481823 }, { "content": "fn remove_empty_blocks(blocks: &mut Vec<Vec<F::Bytecode>>) -> bool {\n\n let mut label_map = HashMap::new();\n\n let mut num_removed = 0;\n\n\n\n let mut removed = false;\n\n let old_blocks = std::mem::replace(blocks, vec![]);\n\n for (label, block) in old_blocks.into_iter().enumerate() {\n\n let lbl = label as u16;\n\n label_map.insert(lbl, lbl - num_removed);\n\n\n\n if block.is_empty() {\n\n num_removed += 1;\n\n removed = true;\n\n } else {\n\n blocks.push(block)\n\n }\n\n }\n\n\n\n if removed {\n\n super::remap_offsets(blocks, &label_map);\n\n }\n\n\n\n removed\n\n}\n", "file_path": "language/move-lang/src/to_bytecode/remove_fallthrough_jumps.rs", "rank": 30, "score": 325322.67366009124 }, { "content": "/// Serializes a `LocalsSignature`.\n\n///\n\n/// A `LocalsSignature` gets serialized as follows:\n\n/// - `SignatureType::LOCAL_SIGNATURE` as 1 byte\n\n/// - The vector of `SignatureToken`s for locals\n\nfn 
serialize_locals_signature(binary: &mut BinaryData, signature: &LocalsSignature) -> Result<()> {\n\n binary.push(SignatureType::LOCAL_SIGNATURE as u8)?;\n\n serialize_signature_tokens(binary, &signature.0)\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 31, "score": 324687.7900498019 }, { "content": "pub fn code(mut blocks: Vec<Vec<F::Bytecode>>) -> Vec<F::Bytecode> {\n\n let mut offset = 0;\n\n let mut label_to_offset = HashMap::new();\n\n for (lbl, block) in blocks.iter().enumerate() {\n\n label_to_offset.insert(lbl as u16, offset.try_into().unwrap());\n\n offset += block.len();\n\n }\n\n\n\n super::remap_offsets(&mut blocks, &label_to_offset);\n\n\n\n blocks.into_iter().flatten().collect()\n\n}\n", "file_path": "language/move-lang/src/to_bytecode/labels_to_offsets.rs", "rank": 32, "score": 323615.4928500207 }, { "content": "pub fn leq<Lbl: Eq>(lhs: &PathSlice<Lbl>, rhs: &PathSlice<Lbl>) -> bool {\n\n lhs.len() <= rhs.len() && lhs.iter().zip(rhs).all(|(l, r)| l == r)\n\n}\n\n\n", "file_path": "language/borrow-graph/src/paths.rs", "rank": 33, "score": 322793.4470680283 }, { "content": "fn serialize_kind(binary: &mut BinaryData, kind: Kind) -> Result<()> {\n\n binary.push(match kind {\n\n Kind::All => SerializedKind::ALL,\n\n Kind::Resource => SerializedKind::RESOURCE,\n\n Kind::Unrestricted => SerializedKind::UNRESTRICTED,\n\n } as u8)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 34, "score": 320693.1340527944 }, { "content": "/// Determine whether two tokens on the stack have the same type\n\npub fn stack_has_polymorphic_eq(state: &AbstractState, index1: usize, index2: usize) -> bool {\n\n if stack_has(state, index2, None) {\n\n state.stack_peek(index1) == state.stack_peek(index2)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 35, "score": 315678.7866355963 }, { "content": "// next continuously mutates a state and returns a u64-index\n\npub 
fn next(state: &mut Vec<u8>) -> u64 {\n\n // state = SHA-3-256(state)\n\n std::mem::replace(\n\n state,\n\n libra_crypto::HashValue::from_sha3_256(state).to_vec(),\n\n );\n\n let mut temp = [0u8; 8];\n\n temp.copy_from_slice(&state[..8]);\n\n // return state[0..8]\n\n u64::from_le_bytes(temp)\n\n}\n\n\n\n/// The MultiProposer maps a round to an ordered list of authors.\n\n/// The primary proposer is determined by an index of hash(round) % num_proposers.\n\n/// The secondary proposer is determined by hash(hash(round)) % (num_proposers - 1), etc.\n\n/// In order to ensure the required number of proposers, a set of the proposers to choose from\n\n/// is updated after each hash: a chosen candidate is removed to avoid duplication.\n\n///\n\n/// Note the hash doesn't have to be cryptographic. The goal is to make sure that different\n\n/// combinations of consecutive leaders are going to appear with equal probability.\n", "file_path": "consensus/src/chained_bft/liveness/multi_proposer_election.rs", "rank": 36, "score": 312241.98861951224 }, { "content": "/// Write a `u64` in Little Endian format.\n\npub fn write_u64(binary: &mut BinaryData, value: u64) -> Result<()> {\n\n binary.extend(&value.to_le_bytes())\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 37, "score": 312033.67327666434 }, { "content": "/// Write a `u16` in Little Endian format.\n\npub fn write_u16(binary: &mut BinaryData, value: u16) -> Result<()> {\n\n binary.extend(&value.to_le_bytes())\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 38, "score": 312033.67327666434 }, { "content": "/// Write a `u32` in Little Endian format.\n\npub fn write_u32(binary: &mut BinaryData, value: u32) -> Result<()> {\n\n binary.extend(&value.to_le_bytes())\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 39, "score": 312033.67327666434 }, { "content": "pub fn function_signature(context: &mut Context, sig: &mut FunctionSignature) {\n\n for (_, st) 
in &mut sig.parameters {\n\n single_type(context, st);\n\n }\n\n type_(context, &mut sig.return_type)\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/expand.rs", "rank": 40, "score": 311962.5863768581 }, { "content": "/// Determine whether an abstract value on the stack that is a reference points to something of the\n\n/// same type as another abstract value on the stack\n\npub fn stack_ref_polymorphic_eq(state: &AbstractState, index1: usize, index2: usize) -> bool {\n\n if stack_has(state, index2, None) {\n\n if let Some(abstract_value) = state.stack_peek(index1) {\n\n match abstract_value.token {\n\n SignatureToken::MutableReference(token) | SignatureToken::Reference(token) => {\n\n let abstract_value_inner = AbstractValue {\n\n token: (*token).clone(),\n\n kind: SignatureTokenView::new(&state.module, &*token).kind(&[]),\n\n };\n\n return Some(abstract_value_inner) == state.stack_peek(index2);\n\n }\n\n _ => return false,\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 41, "score": 311690.8913329326 }, { "content": "/// Create type value for a struct specified by a definition index and actuals\n\n/// specified by LocalsSignatureIndex.\n\npub fn format_struct_type_value_from_def_idx(\n\n module: &VerifiedModule,\n\n struct_def_index: &StructDefinitionIndex,\n\n type_actuals_idx: &LocalsSignatureIndex,\n\n) -> String {\n\n format_struct_type_value(\n\n module,\n\n &module.struct_def_at(*struct_def_index).struct_handle,\n\n &module.locals_signature_at(*type_actuals_idx).0,\n\n )\n\n}\n\n\n", "file_path": "language/move-prover/bytecode-to-boogie/src/translator.rs", "rank": 42, "score": 311682.47222130944 }, { "content": "/// Computes the root hash of a sparse Merkle tree. 
`kvs` consists of the entire set of key-value\n\n/// pairs stored in the tree.\n\nfn compute_root_hash(kvs: Vec<(Vec<bool>, HashValue)>) -> HashValue {\n\n let mut kv_ref = vec![];\n\n for (key, value) in &kvs {\n\n kv_ref.push((&key[..], *value));\n\n }\n\n compute_root_hash_impl(kv_ref)\n\n}\n\n\n", "file_path": "storage/jellyfish-merkle/src/jellyfish_merkle_test.rs", "rank": 43, "score": 311511.98617104336 }, { "content": "fn arb_ledger_infos_with_sigs() -> impl Strategy<Value = Vec<LedgerInfoWithSignatures>> {\n\n (\n\n any_with::<AccountInfoUniverse>(3),\n\n vec((any::<LedgerInfoWithSignaturesGen>(), 0..10usize), 1..100),\n\n )\n\n .prop_map(|(mut universe, gens)| {\n\n gens.into_iter()\n\n .map(|(ledger_info_gen, block_size)| {\n\n ledger_info_gen.materialize(&mut universe, block_size)\n\n })\n\n .collect()\n\n })\n\n}\n\n\n\nproptest! {\n\n #![proptest_config(ProptestConfig::with_cases(10))]\n\n\n\n #[test]\n\n fn test_ledger_info_put_get_verify(\n\n ledger_infos_with_sigs in arb_ledger_infos_with_sigs()\n", "file_path": "storage/libradb/src/ledger_store/ledger_info_test.rs", "rank": 44, "score": 311405.15167982975 }, { "content": "// Workaround for https://github.com/serde-rs/serde/issues/368\n\nfn default_as_true() -> bool {\n\n true\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]\n\n#[serde(rename_all = \"kebab-case\")]\n\npub struct Clippy {\n\n allowed: Vec<String>,\n\n}\n\n\n\nimpl Config {\n\n pub fn from_file(f: impl AsRef<Path>) -> Result<Self> {\n\n let contents = fs::read(f)?;\n\n Self::from_toml(&contents).map_err(Into::into)\n\n }\n\n\n\n pub fn from_toml(bytes: &[u8]) -> Result<Self> {\n\n toml::from_slice(bytes).map_err(Into::into)\n\n }\n\n\n", "file_path": "x/src/config.rs", "rank": 45, "score": 310511.2861810734 }, { "content": "/// The operand stack.\n\nstruct Stack(Vec<Value>);\n\n\n\nimpl Stack {\n\n /// Create a new empty operand stack.\n\n fn new() -> Self {\n\n Stack(vec![])\n\n }\n\n\n\n /// Push a `Value` 
on the stack if the max stack size has not been reached. Abort execution\n\n /// otherwise.\n\n fn push(&mut self, value: Value) -> VMResult<()> {\n\n if self.0.len() < OPERAND_STACK_SIZE_LIMIT {\n\n self.0.push(value);\n\n Ok(())\n\n } else {\n\n Err(VMStatus::new(StatusCode::EXECUTION_STACK_OVERFLOW))\n\n }\n\n }\n\n\n\n /// Pop a `Value` off the stack or abort execution if the stack is empty.\n", "file_path": "language/vm/vm-runtime/src/interpreter.rs", "rank": 46, "score": 309200.3984130739 }, { "content": "/// Given a maximum value `max` and a list of [`Index`](proptest::sample::Index) instances, picks\n\n/// integers in the range `[0, max)` uniformly randomly and without duplication.\n\n///\n\n/// If `indexes_len` is greater than `max`, all indexes will be returned.\n\n///\n\n/// This function implements [Robert Floyd's F2\n\n/// algorithm](https://blog.acolyer.org/2018/01/30/a-sample-of-brilliance/) for sampling without\n\n/// replacement.\n\npub fn pick_idxs<T, P>(max: usize, indexes: &T, indexes_len: usize) -> Vec<usize>\n\nwhere\n\n T: OpsIndex<usize, Output = P> + ?Sized,\n\n P: AsRef<PropIndex>,\n\n{\n\n // See https://blog.acolyer.org/2018/01/30/a-sample-of-brilliance/ (the F2 algorithm)\n\n // for a longer explanation. This is a variant that works with zero-indexing.\n\n let mut selected = BTreeSet::new();\n\n let to_select = indexes_len.min(max);\n\n for (iter_idx, choice) in ((max - to_select)..max).enumerate() {\n\n // \"RandInt(1, J)\" in the original algorithm means a number between 1\n\n // and choice, both inclusive. `PropIndex::index` picks a number between 0 and\n\n // whatever's passed in, with the latter exclusive. Pass in \"+1\" to ensure the same\n\n // range of values is picked from. 
(This also ensures that if choice is 0 then `index`\n\n // doesn't panic.\n\n let idx = indexes[iter_idx].as_ref().index(choice + 1);\n\n if !selected.insert(idx) {\n\n selected.insert(choice);\n\n }\n\n }\n", "file_path": "common/proptest-helpers/src/lib.rs", "rank": 47, "score": 308958.94673758 }, { "content": "/// Check whether a command is debugging command.\n\npub fn debug_format_cmd(cmd: &str) -> bool {\n\n cmd.ends_with('?')\n\n}\n\n\n", "file_path": "client/src/commands.rs", "rank": 48, "score": 308585.64244297997 }, { "content": "/// Take a `Vec<u8>` and a value to write to that vector and applies LEB128 logic to\n\n/// compress the u16.\n\npub fn write_u16_as_uleb128(binary: &mut BinaryData, value: u16) -> Result<()> {\n\n write_u32_as_uleb128(binary, u32::from(value))\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 49, "score": 307679.0996291856 }, { "content": "/// Take a `Vec<u8>` and a value to write to that vector and applies LEB128 logic to\n\n/// compress the u32.\n\npub fn write_u32_as_uleb128(binary: &mut BinaryData, value: u32) -> Result<()> {\n\n let mut val = value;\n\n loop {\n\n let v: u8 = (val & 0x7f) as u8;\n\n if u32::from(v) != val {\n\n binary.push(v | 0x80)?;\n\n val >>= 7;\n\n } else {\n\n binary.push(v)?;\n\n break;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/vm/src/file_format_common.rs", "rank": 50, "score": 307679.0996291856 }, { "content": "fn serialize_kinds(binary: &mut BinaryData, kinds: &[Kind]) -> Result<()> {\n\n write_u32_as_uleb128(binary, kinds.len() as u32)?;\n\n for kind in kinds {\n\n serialize_kind(binary, *kind)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 51, "score": 307439.45550194493 }, { "content": "/// Reduces the problem by removing the first bit of every key.\n\nfn reduce<'a>(kvs: &'a [(&[bool], HashValue)]) -> Vec<(&'a [bool], HashValue)> {\n\n kvs.iter().map(|(key, value)| (&key[1..], *value)).collect()\n\n}\n\n\n", 
"file_path": "storage/jellyfish-merkle/src/jellyfish_merkle_test.rs", "rank": 52, "score": 305403.20880483394 }, { "content": "fn type_parameters(context: &mut Context, type_parameters: Vec<(Name, Kind)>) -> Vec<N::TParam> {\n\n type_parameters\n\n .into_iter()\n\n .map(|(name, kind)| {\n\n let id = N::TParamID::next();\n\n let debug = name.clone();\n\n let tp = N::TParam { id, debug, kind };\n\n if let Err(old_loc) = context.tparams.add(name.clone(), tp.clone()) {\n\n context.error(vec![\n\n (\n\n name.loc,\n\n format!(\"Duplicate type parameter declared with name '{}'\", name),\n\n ),\n\n (old_loc, \"Previously defined here\".to_string()),\n\n ])\n\n }\n\n tp\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "language/move-lang/src/naming/translate.rs", "rank": 53, "score": 303195.77400978154 }, { "content": "fn make_tparams(context: &mut Context, loc: Loc, tparam_constraints: Vec<Kind>) -> Vec<BaseType> {\n\n tparam_constraints\n\n .into_iter()\n\n .map(|constraint| {\n\n let tvar = sp(loc, BaseType_::Var(TVar::next()));\n\n context.add_kind_constraint(loc, tvar.clone(), constraint);\n\n tvar\n\n })\n\n .collect()\n\n}\n\n\n\n//**************************************************************************************************\n\n// Subtype and joining\n\n//**************************************************************************************************\n\n\n\npub enum TypingError {\n\n SubtypeError(Box<SingleType>, Box<SingleType>),\n\n Incompatible(Box<Type>, Box<Type>),\n\n RecursiveType(Loc),\n\n}\n\n\n", "file_path": "language/move-lang/src/typing/core.rs", "rank": 54, "score": 303195.77400978154 }, { "content": "/// Creates and sets default global logger.\n\n/// Caller must keep the returned guard alive.\n\npub fn set_default_global_logger(async_drain: bool, chan_size: Option<usize>) -> GlobalLoggerGuard {\n\n let logger = create_default_root_logger(async_drain, chan_size);\n\n set_global_logger(logger)\n\n}\n\n\n", "file_path": "common/logger/src/lib.rs", 
"rank": 55, "score": 300458.32949094137 }, { "content": "/// Check whether the local at `index` is of the given availability\n\npub fn local_availability_is(state: &AbstractState, index: u8, availability: BorrowState) -> bool {\n\n state\n\n .local_availability_is(index as usize, availability)\n\n .unwrap_or_else(|_| false)\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 56, "score": 300227.84361733333 }, { "content": "pub fn stack_struct_has_field(state: &AbstractState, field_index: FieldDefinitionIndex) -> bool {\n\n if let Some(struct_handle_index) = state.stack_peek(0).clone().and_then(|abstract_value| {\n\n SignatureToken::get_struct_handle_from_reference(&abstract_value.token)\n\n }) {\n\n return state\n\n .module\n\n .is_field_in_struct(field_index, struct_handle_index);\n\n }\n\n false\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 57, "score": 299081.8387821616 }, { "content": "/// Looks up the expected native struct definition from the module id (address and module) and\n\n/// function name where it was expected to be declared\n\npub fn resolve_native_struct(\n\n module: &ModuleId,\n\n struct_name: &IdentStr,\n\n) -> Option<&'static NativeStruct> {\n\n NATIVE_STRUCT_MAP.get(module)?.get(struct_name)\n\n}\n\n\n\nmacro_rules! 
add {\n\n ($m:ident, $addr:expr, $module:expr, $name:expr, $resource: expr, $ty_kinds: expr, $tag: expr) => {{\n\n let ty_args = $ty_kinds\n\n .iter()\n\n .enumerate()\n\n .map(|(id, _)| Type::TypeVariable(id as u16))\n\n .collect();\n\n let id = ModuleId::new($addr, Identifier::new($module).unwrap());\n\n let struct_table = $m.entry(id).or_insert_with(HashMap::new);\n\n let expected_index = StructHandleIndex(struct_table.len() as u16);\n\n\n\n let s = NativeStruct {\n\n expected_nominal_resource: $resource,\n\n expected_type_formals: $ty_kinds,\n\n expected_index,\n\n struct_type: NativeStructType::new($tag, ty_args),\n\n };\n\n let old = struct_table.insert(Identifier::new($name).unwrap(), s);\n\n assert!(old.is_none());\n\n }};\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/vm-runtime-types/src/native_structs/dispatch.rs", "rank": 58, "score": 298568.97395810776 }, { "content": "/// Get up to MAX_EVENTS_IN_QUEUE last events and clears the queue\n\npub fn pop_last_entries() -> Vec<JsonLogEntry> {\n\n let mut queue = JSON_LOG_ENTRY_QUEUE.lock().unwrap();\n\n queue.drain(..).collect()\n\n}\n", "file_path": "common/debug-interface/src/json_log.rs", "rank": 59, "score": 297806.9969452154 }, { "content": "fn load_kind(cursor: &mut Cursor<&[u8]>) -> BinaryLoaderResult<Kind> {\n\n if let Ok(byte) = cursor.read_u8() {\n\n Ok(match SerializedKind::from_u8(byte)? 
{\n\n SerializedKind::ALL => Kind::All,\n\n SerializedKind::UNRESTRICTED => Kind::Unrestricted,\n\n SerializedKind::RESOURCE => Kind::Resource,\n\n })\n\n } else {\n\n Err(VMStatus::new(StatusCode::MALFORMED))\n\n }\n\n}\n\n\n", "file_path": "language/vm/src/deserializer.rs", "rank": 60, "score": 296293.1990946453 }, { "content": "fn base_types(context: &mut Context, bs: Vec<G::BaseType>) -> Result<Vec<F::SignatureToken>> {\n\n bs.into_iter().map(|b| base_type(context, b)).collect()\n\n}\n\n\n", "file_path": "language/move-lang/src/to_bytecode/translate.rs", "rank": 61, "score": 295640.7599722013 }, { "content": "pub fn native_ed25519_signature_verification(\n\n mut arguments: VecDeque<Value>,\n\n cost_table: &CostTable,\n\n) -> VMResult<NativeResult> {\n\n if arguments.len() != 3 {\n\n let msg = format!(\n\n \"wrong number of arguments for ed25519_signature_verification expected 3 found {}\",\n\n arguments.len()\n\n );\n\n return Err(VMStatus::new(StatusCode::UNREACHABLE).with_message(msg));\n\n }\n\n let msg = pop_arg!(arguments, ByteArray);\n\n let pubkey = pop_arg!(arguments, ByteArray);\n\n let signature = pop_arg!(arguments, ByteArray);\n\n\n\n let cost = native_gas(cost_table, NativeCostIndex::ED25519_VERIFY, msg.len());\n\n\n\n let sig = match ed25519::Ed25519Signature::try_from(signature.as_bytes()) {\n\n Ok(sig) => sig,\n\n Err(_) => {\n", "file_path": "language/vm/vm-runtime/vm-runtime-types/src/native_functions/signature.rs", "rank": 62, "score": 295333.0693116861 }, { "content": "pub fn factor<Lbl: Eq>(lhs: &PathSlice<Lbl>, mut rhs: Path<Lbl>) -> (Path<Lbl>, Path<Lbl>) {\n\n assert!(leq(lhs, &rhs));\n\n let suffix = rhs.split_off(lhs.len());\n\n (rhs, suffix)\n\n}\n\n\n", "file_path": "language/borrow-graph/src/paths.rs", "rank": 63, "score": 294929.0026417918 }, { "content": "fn compile_types(context: &mut Context, tys: &[Type]) -> Result<Vec<SignatureToken>> {\n\n tys.iter()\n\n .map(|ty| compile_type(context, ty))\n\n 
.collect::<Result<_>>()\n\n}\n\n\n", "file_path": "language/compiler/ir-to-bytecode/src/compiler.rs", "rank": 64, "score": 294300.7181223566 }, { "content": "fn use_local(context: &mut Context, loc: &Loc, local: &Var) {\n\n use LocalState as L;\n\n let state = context.get_state(local);\n\n match state {\n\n L::Available(_) => (),\n\n L::Unavailable(unavailable) | L::MaybeUnavailable { unavailable, .. } => {\n\n let verb = match state {\n\n LocalState::Available(_) => unreachable!(),\n\n LocalState::Unavailable(_) => \"has been moved\",\n\n LocalState::MaybeUnavailable { .. } => \"might have been moved\",\n\n };\n\n let unavailable = *unavailable;\n\n let vstr = match display_var(local.value()) {\n\n DisplayVar::Tmp => panic!(\"ICE invalid use tmp local\"),\n\n DisplayVar::Orig(s) => s,\n\n };\n\n context.error(vec![\n\n (*loc, format!(\"Invalid usage of local '{}'\", vstr)),\n\n (unavailable, format!(\"The value {} out of the local. The local must be assigned a new value before being used\", verb))\n\n ])\n\n }\n\n }\n\n}\n", "file_path": "language/move-lang/src/cfgir/locals/mod.rs", "rank": 65, "score": 292775.19189783465 }, { "content": "/// Serializes a `SignatureToken`.\n\n///\n\n/// A `SignatureToken` gets serialized as a variable size blob depending on composition.\n\n/// Values for types are defined in `SerializedType`.\n\nfn serialize_signature_token(binary: &mut BinaryData, token: &SignatureToken) -> Result<()> {\n\n match token {\n\n SignatureToken::Bool => binary.push(SerializedType::BOOL as u8)?,\n\n SignatureToken::U64 => binary.push(SerializedType::INTEGER as u8)?,\n\n SignatureToken::String => binary.push(SerializedType::STRING as u8)?,\n\n SignatureToken::ByteArray => binary.push(SerializedType::BYTEARRAY as u8)?,\n\n SignatureToken::Address => binary.push(SerializedType::ADDRESS as u8)?,\n\n SignatureToken::Struct(idx, types) => {\n\n binary.push(SerializedType::STRUCT as u8)?;\n\n write_u16_as_uleb128(binary, idx.0)?;\n\n 
serialize_signature_tokens(binary, types)?;\n\n }\n\n SignatureToken::Reference(boxed_token) => {\n\n binary.push(SerializedType::REFERENCE as u8)?;\n\n serialize_signature_token(binary, boxed_token.deref())?;\n\n }\n\n SignatureToken::MutableReference(boxed_token) => {\n\n binary.push(SerializedType::MUTABLE_REFERENCE as u8)?;\n\n serialize_signature_token(binary, boxed_token.deref())?;\n\n }\n\n SignatureToken::TypeParameter(idx) => {\n\n binary.push(SerializedType::TYPE_PARAMETER as u8)?;\n\n write_u16_as_uleb128(binary, *idx)?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 66, "score": 292548.41272739787 }, { "content": "/// Serializes a slice of `SignatureToken`s.\n\nfn serialize_signature_tokens(binary: &mut BinaryData, tokens: &[SignatureToken]) -> Result<()> {\n\n let len = tokens.len();\n\n if len > u8::max_value() as usize {\n\n bail!(\n\n \"arguments/locals size ({}) cannot exceed {}\",\n\n len,\n\n u8::max_value(),\n\n )\n\n }\n\n binary.push(len as u8)?;\n\n for token in tokens {\n\n serialize_signature_token(binary, token)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 67, "score": 292542.2175334974 }, { "content": "/// Batch verify a collection of signatures using a bitmap for matching signatures to keys.\n\npub fn native_ed25519_threshold_signature_verification(\n\n mut arguments: VecDeque<Value>,\n\n cost_table: &CostTable,\n\n) -> VMResult<NativeResult> {\n\n if arguments.len() != 4 {\n\n let msg = format!(\n\n \"wrong number of arguments for ed25519_signature_verification expected 4 found {}\",\n\n arguments.len()\n\n );\n\n return Err(VMStatus::new(StatusCode::UNREACHABLE).with_message(msg));\n\n }\n\n let message = pop_arg!(arguments, ByteArray);\n\n let public_keys = pop_arg!(arguments, ByteArray);\n\n let signatures = pop_arg!(arguments, ByteArray);\n\n let bitmap = pop_arg!(arguments, ByteArray);\n\n\n\n Ok(ed25519_threshold_signature_verification(\n\n 
&bitmap,\n\n &signatures,\n\n &public_keys,\n\n &message,\n\n cost_table,\n\n ))\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/vm-runtime-types/src/native_functions/signature.rs", "rank": 68, "score": 292142.388916932 }, { "content": "/// Create a number of accounts without keypair from a wallet.\n\npub fn gen_accounts_from_wallet(wallet: &mut WalletLibrary, num_accounts: u64) -> Vec<AccountData> {\n\n (0..num_accounts)\n\n .map(|_| gen_next_account(wallet))\n\n .collect()\n\n}\n\n\n\n/// ---------------------------------------------------------------------------------- ///\n\n/// Helper functions and APIs to generate different types of transaction request(s). ///\n\n/// ---------------------------------------------------------------------------------- ///\n\n\n", "file_path": "benchmark/src/load_generator.rs", "rank": 69, "score": 289643.05071992404 }, { "content": "/// Deserializes a `SignatureToken`.\n\nfn load_signature_token(cursor: &mut Cursor<&[u8]>) -> BinaryLoaderResult<SignatureToken> {\n\n if let Ok(byte) = cursor.read_u8() {\n\n match SerializedType::from_u8(byte)? 
{\n\n SerializedType::BOOL => Ok(SignatureToken::Bool),\n\n SerializedType::INTEGER => Ok(SignatureToken::U64),\n\n SerializedType::STRING => Ok(SignatureToken::String),\n\n SerializedType::BYTEARRAY => Ok(SignatureToken::ByteArray),\n\n SerializedType::ADDRESS => Ok(SignatureToken::Address),\n\n SerializedType::REFERENCE => {\n\n let ref_token = load_signature_token(cursor)?;\n\n Ok(SignatureToken::Reference(Box::new(ref_token)))\n\n }\n\n SerializedType::MUTABLE_REFERENCE => {\n\n let ref_token = load_signature_token(cursor)?;\n\n Ok(SignatureToken::MutableReference(Box::new(ref_token)))\n\n }\n\n SerializedType::STRUCT => {\n\n let sh_idx = read_uleb_u16_internal(cursor)?;\n\n let types = load_signature_tokens(cursor)?;\n\n Ok(SignatureToken::Struct(StructHandleIndex(sh_idx), types))\n", "file_path": "language/vm/src/deserializer.rs", "rank": 70, "score": 289002.48781135876 }, { "content": "pub fn impl_enum_verifyingkey(\n\n name: &Ident,\n\n private_key_type: syn::LitStr,\n\n signature_type: syn::LitStr,\n\n _variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = private_key_type.parse().unwrap();\n\n let st: syn::Type = signature_type.parse().unwrap();\n\n let res = quote! {\n\n impl libra_crypto::VerifyingKey for #name {\n\n type SigningKeyMaterial = #pkt;\n\n type SignatureMaterial = #st;\n\n }\n\n };\n\n res.into()\n\n}\n\n\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 71, "score": 287238.6227188047 }, { "content": "pub fn impl_enum_privatekey(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n _variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = public_key_type.parse().unwrap();\n\n let res = quote! 
{\n\n impl libra_crypto::PrivateKey for #name {\n\n type PublicKeyMaterial = #pkt;\n\n }\n\n };\n\n res.into()\n\n}\n\n\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 72, "score": 287238.6227188047 }, { "content": "pub fn impl_enum_signingkey(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n signature_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = public_key_type.parse().unwrap();\n\n let st: syn::Type = signature_type.parse().unwrap();\n\n\n\n let mut match_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n match_arms.extend(quote! {\n\n #name::#variant_ident(key) => Self::SignatureMaterial::#variant_ident(key.sign_message(message)),\n\n });\n\n }\n\n let res = quote! {\n\n impl libra_crypto::SigningKey for #name {\n\n type VerifyingKeyMaterial = #pkt;\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 73, "score": 287238.6227188047 }, { "content": "pub fn impl_enum_publickey(\n\n name: &Ident,\n\n private_key_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = private_key_type.parse().unwrap();\n\n let mut from_match_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n from_match_arms.extend(quote! {\n\n #pkt::#variant_ident(key) => #name::#variant_ident(key.into()),\n\n });\n\n }\n\n let mut res = quote! 
{\n\n impl From<&#pkt> for #name {\n\n fn from(public_key: &#pkt) -> Self {\n\n match public_key {\n\n #from_match_arms\n\n }\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 74, "score": 287238.6227188047 }, { "content": "/// This trait defines a type that can serve as a [`Schema::Value`].\n\npub trait ValueCodec<S: Schema + ?Sized>: Sized + PartialEq + Debug {\n\n /// Converts `self` to bytes to be stored in DB.\n\n fn encode_value(&self) -> Result<Vec<u8>>;\n\n /// Converts bytes fetched from DB to `Self`.\n\n fn decode_value(data: &[u8]) -> Result<Self>;\n\n}\n\n\n", "file_path": "storage/schemadb/src/schema.rs", "rank": 75, "score": 286285.45836422616 }, { "content": "pub fn is_allowed_script(publishing_option: &VMPublishingOption, program: &[u8]) -> bool {\n\n match publishing_option {\n\n VMPublishingOption::Open | VMPublishingOption::CustomScripts => true,\n\n VMPublishingOption::Locked(whitelist) => {\n\n let hash_value = HashValue::from_sha3_256(program);\n\n whitelist.contains(hash_value.as_ref())\n\n }\n\n }\n\n}\n\n\n\n/// Represents a [`SignedTransaction`] that has been *validated*. This includes all the steps\n\n/// required to ensure that a transaction is valid, other than verifying the submitted program.\n\npub struct ValidatedTransaction<'alloc, 'txn, P>\n\nwhere\n\n 'alloc: 'txn,\n\n P: ModuleCache<'alloc>,\n\n{\n\n txn: SignatureCheckedTransaction,\n\n txn_state: Option<ValidatedTransactionState<'alloc, 'txn, P>>,\n\n}\n", "file_path": "language/vm/vm-runtime/src/process_txn/validate.rs", "rank": 76, "score": 285827.52909281757 }, { "content": "/// Deserializes a code stream (`Bytecode`s).\n\nfn load_code(cursor: &mut Cursor<&[u8]>, code: &mut Vec<Bytecode>) -> BinaryLoaderResult<()> {\n\n let bytecode_count = read_u16_internal(cursor)?;\n\n while code.len() < bytecode_count as usize {\n\n let byte = cursor\n\n .read_u8()\n\n .map_err(|_| VMStatus::new(StatusCode::MALFORMED))?;\n\n let bytecode = match Opcodes::from_u8(byte)? 
{\n\n Opcodes::POP => Bytecode::Pop,\n\n Opcodes::RET => Bytecode::Ret,\n\n Opcodes::BR_TRUE => {\n\n let jump = read_u16_internal(cursor)?;\n\n Bytecode::BrTrue(jump)\n\n }\n\n Opcodes::BR_FALSE => {\n\n let jump = read_u16_internal(cursor)?;\n\n Bytecode::BrFalse(jump)\n\n }\n\n Opcodes::BRANCH => {\n\n let jump = read_u16_internal(cursor)?;\n\n Bytecode::Branch(jump)\n", "file_path": "language/vm/src/deserializer.rs", "rank": 77, "score": 285733.11398592097 }, { "content": "/// Create a dummy module to wrap the bytecode program in local@code\n\npub fn dummy_procedure_module(code: Vec<Bytecode>) -> CompiledModule {\n\n let mut module = empty_module();\n\n let mut code_unit = CodeUnit::default();\n\n code_unit.code = code;\n\n let mut fun_def = FunctionDefinition::default();\n\n fun_def.code = code_unit;\n\n\n\n module.function_signatures.push(FunctionSignature {\n\n arg_types: vec![],\n\n return_types: vec![],\n\n type_formals: vec![],\n\n });\n\n let fun_handle = FunctionHandle {\n\n module: ModuleHandleIndex(0),\n\n name: IdentifierIndex(0),\n\n signature: FunctionSignatureIndex(0),\n\n };\n\n\n\n module.function_handles.push(fun_handle);\n\n module.function_defs.push(fun_def);\n\n module.freeze().unwrap()\n\n}\n", "file_path": "language/vm/src/file_format.rs", "rank": 78, "score": 285074.3780027355 }, { "content": "#[derive(PartialEq, Eq, Debug, Clone)]\n\nenum ValueImpl {\n\n /// Locals are invalid on entry of a function and when moved out.\n\n Invalid,\n\n // Primitive types\n\n U64(u64),\n\n Address(AccountAddress),\n\n Bool(bool),\n\n ByteArray(ByteArray),\n\n String(VMString),\n\n\n\n /// A struct in Move.\n\n Struct(Struct),\n\n\n\n /// A native struct\n\n NativeStruct(NativeStructValue),\n\n\n\n /// Reference to a local.\n\n Reference(Reference),\n\n /// Global reference into storage.\n\n GlobalRef(GlobalRef),\n", "file_path": "language/vm/vm-runtime/vm-runtime-types/src/value.rs", "rank": 79, "score": 284038.089428939 }, { "content": "/// Serialize the 
given data structure as a `Vec<u8>` of LCS.\n\n///\n\n/// Serialization can fail if `T`'s implementation of `Serialize` decides to\n\n/// fail, if `T` contains sequences which are longer than `MAX_SEQUENCE_LENGTH`,\n\n/// or if `T` attempts to serialize an unsupported datatype such as a f32,\n\n/// f64, or char.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use libra_canonical_serialization::to_bytes;\n\n/// use serde::Serialize;\n\n///\n\n/// #[derive(Serialize)]\n\n/// struct Ip([u8; 4]);\n\n///\n\n/// #[derive(Serialize)]\n\n/// struct Port(u16);\n\n///\n\n/// #[derive(Serialize)]\n\n/// struct Service {\n\n/// ip: Ip,\n\n/// port: Vec<Port>,\n\n/// connection_max: Option<u32>,\n\n/// enabled: bool,\n\n/// }\n\n///\n\n/// let service = Service {\n\n/// ip: Ip([192, 168, 1, 1]),\n\n/// port: vec![Port(8001), Port(8002), Port(8003)],\n\n/// connection_max: Some(5000),\n\n/// enabled: false,\n\n/// };\n\n///\n\n/// let bytes = to_bytes(&service).unwrap();\n\n/// let expected = vec![\n\n/// 0xc0, 0xa8, 0x01, 0x01, 0x03, 0x00, 0x00, 0x00,\n\n/// 0x41, 0x1f, 0x42, 0x1f, 0x43, 0x1f, 0x01, 0x88,\n\n/// 0x13, 0x00, 0x00, 0x00,\n\n/// ];\n\n/// assert_eq!(bytes, expected);\n\n/// ```\n\npub fn to_bytes<T>(value: &T) -> Result<Vec<u8>>\n\nwhere\n\n T: ?Sized + Serialize,\n\n{\n\n let mut serializer = Serializer::new();\n\n value.serialize(&mut serializer)?;\n\n Ok(serializer.end())\n\n}\n\n\n", "file_path": "common/lcs/src/ser.rs", "rank": 80, "score": 284008.58956868586 }, { "content": "/// Put reference to local at `index` in register\n\npub fn local_take_borrow(\n\n state: &AbstractState,\n\n index: u8,\n\n mutability: Mutability,\n\n) -> Result<AbstractState, VMError> {\n\n let mut state = state.clone();\n\n state.local_take_borrow(index as usize, mutability)?;\n\n Ok(state)\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 81, "score": 282460.15814515937 }, { "content": "fn parse_and_compile_modules(s: impl AsRef<str>) -> 
Vec<CompiledModule> {\n\n let compiler = Compiler {\n\n skip_stdlib_deps: true,\n\n ..Compiler::default()\n\n };\n\n compiler\n\n .into_compiled_program(s.as_ref())\n\n .expect(\"Failed to compile program\")\n\n .modules\n\n}\n\n\n", "file_path": "language/vm/vm-runtime/src/unit_tests/module_cache_tests.rs", "rank": 82, "score": 279131.0900254624 }, { "content": "/// Reports the number of transactions in a block.\n\npub fn report_block_count(count: usize) {\n\n match i64::try_from(count) {\n\n Ok(val) => BLOCK_TRANSACTION_COUNT.set(val),\n\n Err(_) => BLOCK_TRANSACTION_COUNT.set(std::i64::MAX),\n\n }\n\n}\n\n\n\n// All statistics gather operations for the time taken/gas usage should go through this macro. This\n\n// gives us the ability to turn these metrics on and off easily from one place.\n\n#[macro_export]\n\nmacro_rules! record_stats {\n\n // Gather some information that is only needed in relation to recording statistics\n\n (info | $($stmt:stmt);+;) => {\n\n $($stmt);+;\n\n };\n\n // Set the $ident gauge to $amount\n\n (gauge set | $ident:ident | $amount:expr) => {\n\n VM_COUNTERS.set($ident, $amount as f64)\n\n };\n\n // Increment the $ident gauge by $amount\n", "file_path": "language/vm/vm-runtime/src/counters.rs", "rank": 83, "score": 277841.9474610517 }, { "content": "/// Push the field at `field_index` of a struct as an `AbstractValue` to the stack\n\npub fn stack_struct_borrow_field(\n\n state: &AbstractState,\n\n field_index: FieldDefinitionIndex,\n\n) -> Result<AbstractState, VMError> {\n\n let mut state = state.clone();\n\n state.register_move();\n\n let field_signature = state.module.get_field_signature(field_index).0.clone();\n\n let abstract_value = AbstractValue {\n\n token: SignatureToken::MutableReference(Box::new(field_signature.clone())),\n\n kind: SignatureTokenView::new(&state.module, &field_signature).kind(&[]),\n\n };\n\n state = stack_push(&state, abstract_value)?;\n\n Ok(state)\n\n}\n\n\n", "file_path": 
"language/tools/test-generation/src/transitions.rs", "rank": 84, "score": 277824.35165736265 }, { "content": "/// Determine whether the struct at the given index can be constructed from the values on\n\n/// the stack.\n\npub fn stack_satisfies_struct_signature(\n\n state: &AbstractState,\n\n struct_index: StructDefinitionIndex,\n\n) -> bool {\n\n let struct_def = state.module.struct_def_at(struct_index);\n\n let struct_def = StructDefinitionView::new(&state.module, struct_def);\n\n let field_token_views = struct_def\n\n .fields()\n\n .into_iter()\n\n .flatten()\n\n .map(|field| field.type_signature().token());\n\n let mut satisfied = true;\n\n for (i, token_view) in field_token_views.enumerate() {\n\n let abstract_value = AbstractValue {\n\n token: token_view.as_inner().clone(),\n\n kind: token_view.kind(&[]),\n\n };\n\n if !stack_has(state, i, Some(abstract_value)) {\n\n satisfied = false;\n\n }\n\n }\n\n satisfied\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 85, "score": 277810.0232778342 }, { "content": "/// Check whether the local at `index` exists\n\npub fn local_exists(state: &AbstractState, index: u8) -> bool {\n\n state.local_exists(index as usize)\n\n}\n\n\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 86, "score": 276977.79549590807 }, { "content": "pub fn default_config() -> VMConfig {\n\n VMConfig {\n\n publishing_options: VMPublishingOption::Locked(HashSet::from_iter(\n\n allowing_script_hashes().into_iter(),\n\n )),\n\n }\n\n}\n", "file_path": "language/transaction-builder/src/lib.rs", "rank": 87, "score": 276661.1575905369 }, { "content": "fn idents(names: impl IntoIterator<Item = &'static str>) -> Vec<Identifier> {\n\n names.into_iter().map(ident).collect()\n\n}\n", "file_path": "language/vm/vm-runtime/src/unit_tests/mod.rs", "rank": 88, "score": 276630.7364873827 }, { "content": "pub fn struct_name_from_handle_index(module: &VerifiedModule, idx: StructHandleIndex) -> String 
{\n\n let struct_handle = module.struct_handle_at(idx);\n\n let struct_handle_view = StructHandleView::new(module, struct_handle);\n\n let module_name = module.identifier_at(struct_handle_view.module_handle().name);\n\n let struct_name = struct_handle_view.name();\n\n format!(\"{}_{}\", module_name, struct_name)\n\n}\n\n\n", "file_path": "language/move-prover/bytecode-to-boogie/src/translator.rs", "rank": 89, "score": 275647.8333908548 }, { "content": "pub fn project_is_root() -> Result<bool> {\n\n let mut project = locate_project()?;\n\n project.pop();\n\n\n\n Ok(project == project_root())\n\n}\n\n\n", "file_path": "x/src/utils.rs", "rank": 90, "score": 275565.73602161277 }, { "content": "fn load_nominal_resource_flag(cursor: &mut Cursor<&[u8]>) -> BinaryLoaderResult<bool> {\n\n if let Ok(byte) = cursor.read_u8() {\n\n Ok(match SerializedNominalResourceFlag::from_u8(byte)? {\n\n SerializedNominalResourceFlag::NOMINAL_RESOURCE => true,\n\n SerializedNominalResourceFlag::NORMAL_STRUCT => false,\n\n })\n\n } else {\n\n Err(VMStatus::new(StatusCode::MALFORMED))\n\n }\n\n}\n\n\n", "file_path": "language/vm/src/deserializer.rs", "rank": 91, "score": 273682.9593158922 }, { "content": "pub fn function_acquires(context: &mut Context, bs: &mut BTreeSet<BaseType>) {\n\n let old = std::mem::replace(bs, BTreeSet::new());\n\n for mut bt in old {\n\n base_type(context, &mut bt);\n\n assert!(\n\n bs.insert(bt),\n\n \"ICE the acquires set was already a set. 
Expand should not change that\"\n\n );\n\n }\n\n}\n\n\n\n//**************************************************************************************************\n\n// Types\n\n//**************************************************************************************************\n\n\n", "file_path": "language/move-lang/src/typing/expand.rs", "rank": 92, "score": 273525.6013877666 }, { "content": "/// Create type value for a struct with given type actuals.\n\npub fn format_struct_type_value(\n\n module: &VerifiedModule,\n\n struct_handle_idx: &StructHandleIndex,\n\n args: &[SignatureToken],\n\n) -> String {\n\n let struct_name = struct_name_from_handle_index(module, *struct_handle_idx);\n\n format!(\n\n \"{}_type_value({})\",\n\n struct_name,\n\n format_type_values(module, args)\n\n )\n\n}\n\n\n", "file_path": "language/move-prover/bytecode-to-boogie/src/translator.rs", "rank": 93, "score": 273470.1557422256 }, { "content": "pub fn transaction_status_eq(t1: &TransactionStatus, t2: &TransactionStatus) -> bool {\n\n match (t1, t2) {\n\n (TransactionStatus::Discard(s1), TransactionStatus::Discard(s2))\n\n | (TransactionStatus::Keep(s1), TransactionStatus::Keep(s2)) => assert_status_eq(s1, s2),\n\n _ => false,\n\n }\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! assert_prologue_parity {\n\n ($e1:expr, $e2:expr, $e3:expr) => {\n\n assert_status_eq(&$e1.unwrap(), &$e3);\n\n assert!(transaction_status_eq($e2, &TransactionStatus::Discard($e3)));\n\n };\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
assert_prologue_disparity {\n\n ($e1:expr => $e2:expr, $e3:expr => $e4:expr) => {\n\n assert_eq!($e1, $e2);\n\n assert!(transaction_status_eq($e3, &$e4));\n\n };\n\n}\n", "file_path": "language/e2e-tests/src/lib.rs", "rank": 94, "score": 273353.5378494484 }, { "content": "pub fn default_reply_error_logger<T: std::fmt::Debug>(e: T) {\n\n error!(\"Failed to reply error due to {:?}\", e)\n\n}\n\n\n", "file_path": "common/grpc-helpers/src/lib.rs", "rank": 95, "score": 272488.3690507503 }, { "content": "fn types(context: &mut Context, sp!(_, t_): G::Type) -> Result<Vec<F::SignatureToken>> {\n\n use G::Type_ as T;\n\n match t_ {\n\n T::Unit => Ok(vec![]),\n\n T::Single(st) => Ok(vec![single_type(context, st)?]),\n\n T::Multiple(ss) => ss.into_iter().map(|st| single_type(context, st)).collect(),\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/to_bytecode/translate.rs", "rank": 96, "score": 272175.1781806974 }, { "content": "/// Return the simplest module that will pass the bounds checker\n\npub fn empty_module() -> CompiledModuleMut {\n\n CompiledModuleMut {\n\n module_handles: vec![ModuleHandle {\n\n address: AddressPoolIndex::new(0),\n\n name: IdentifierIndex::new(0),\n\n }],\n\n address_pool: vec![AccountAddress::default()],\n\n identifiers: vec![self_module_name().to_owned()],\n\n user_strings: vec![],\n\n function_defs: vec![],\n\n struct_defs: vec![],\n\n field_defs: vec![],\n\n struct_handles: vec![],\n\n function_handles: vec![],\n\n type_signatures: vec![],\n\n function_signatures: vec![],\n\n locals_signatures: vec![LocalsSignature(vec![])],\n\n byte_array_pool: vec![],\n\n }\n\n}\n\n\n", "file_path": "language/vm/src/file_format.rs", "rank": 97, "score": 272032.83566477033 }, { "content": "/// This produces a round that is often higher than the parent, but not\n\n/// too high\n\npub fn some_round(initial_round: Round) -> impl Strategy<Value = Round> {\n\n prop_oneof![\n\n 9 => Just(1 + initial_round),\n\n 1 => bigger_round(initial_round),\n\n 
]\n\n}\n\n\n\nprop_compose! {\n\n /// This creates a child with a parent on its left, and a QC on the left\n\n /// of the parent. This, depending on branching, does not require the\n\n /// QC to always be an ancestor or the parent to always be the highest QC\n\n fn child(\n\n signer_strategy: impl Strategy<Value = ValidatorSigner>,\n\n block_forest_strategy: impl Strategy<Value = LinearizedBlockForest<Vec<usize>>>,\n\n )(\n\n signer in signer_strategy,\n\n (forest_vec, parent_idx, qc_idx) in block_forest_strategy\n\n .prop_flat_map(|forest_vec| {\n\n let len = forest_vec.len();\n\n (Just(forest_vec), 0..len)\n", "file_path": "consensus/consensus-types/src/block_test_utils.rs", "rank": 98, "score": 268711.76259740547 }, { "content": "/// TODO: This is a temporary function that represents memory\n\n/// safety for a reference. This should be removed and replaced\n\n/// with appropriate memory safety premises when the borrow checking\n\n/// infrastructure is fully implemented.\n\n/// `index` is `Some(i)` if the instruction can be memory safe when operating\n\n/// on non-reference types.\n\npub fn memory_safe(state: &AbstractState, index: Option<usize>) -> bool {\n\n match index {\n\n Some(index) => {\n\n if stack_has_reference(state, index, Mutability::Either) {\n\n ALLOW_MEMORY_UNSAFE\n\n } else {\n\n true\n\n }\n\n }\n\n None => ALLOW_MEMORY_UNSAFE,\n\n }\n\n}\n\n\n\n/// Wrapper for enclosing the arguments of `stack_has` so that only the `state` needs\n\n/// to be given.\n\n#[macro_export]\n\nmacro_rules! state_stack_has {\n\n ($e1: expr, $e2: expr) => {\n\n Box::new(move |state| stack_has(state, $e1, $e2))\n\n };\n", "file_path": "language/tools/test-generation/src/transitions.rs", "rank": 99, "score": 268684.0202935172 } ]
Rust
dynomite/src/ext.rs
sbruton/dynomite
0f3e5a045a0bbd6fd5ae6b1f83c66ccbb0ac6376
use crate::dynamodb::{ AttributeValue, BackupSummary, DynamoDb, ListBackupsError, ListBackupsInput, ListTablesError, ListTablesInput, QueryError, QueryInput, ScanError, ScanInput, }; use futures::{stream, Stream, TryStreamExt}; #[cfg(feature = "default")] use rusoto_core_default::RusotoError; #[cfg(feature = "rustls")] use rusoto_core_rustls::RusotoError; use std::{collections::HashMap, pin::Pin}; type DynomiteStream<I, E> = Pin<Box<dyn Stream<Item = Result<I, RusotoError<E>>> + Send>>; type DynomiteUserStream<I, E> = Pin<Box<dyn Stream<Item = Result<I, E>> + Send>>; pub trait DynamoDbExt { fn list_backups_pages( self, input: ListBackupsInput, ) -> DynomiteStream<BackupSummary, ListBackupsError>; fn list_tables_pages( self, input: ListTablesInput, ) -> DynomiteStream<String, ListTablesError>; fn query_pages( self, input: QueryInput, ) -> DynomiteStream<HashMap<String, AttributeValue>, QueryError>; fn scan_pages<E: From<RusotoError<ScanError>> + Sized + Sync + Send>( self, input: ScanInput, ) -> DynomiteUserStream<HashMap<String, AttributeValue>, E>; } impl<D> DynamoDbExt for D where D: DynamoDb + Clone + Send + Sync + 'static, { fn list_backups_pages( self, input: ListBackupsInput, ) -> DynomiteStream<BackupSummary, ListBackupsError> { enum PageState { Next(Option<String>, ListBackupsInput), End, } Box::pin( stream::try_unfold( PageState::Next(input.exclusive_start_backup_arn.clone(), input), move |state| { let clone = self.clone(); async move { let (exclusive_start_backup_arn, input) = match state { PageState::Next(start, input) => (start, input), PageState::End => { return Ok(None) as Result<_, RusotoError<ListBackupsError>> } }; let resp = clone .list_backups(ListBackupsInput { exclusive_start_backup_arn, ..input.clone() }) .await?; let next_state = match resp .last_evaluated_backup_arn .filter(|next| !next.is_empty()) { Some(next) => PageState::Next(Some(next), input), _ => PageState::End, }; Ok(Some(( stream::iter( resp.backup_summaries .unwrap_or_default() 
.into_iter() .map(Ok), ), next_state, ))) } }, ) .try_flatten(), ) } fn list_tables_pages( self, input: ListTablesInput, ) -> DynomiteStream<String, ListTablesError> { enum PageState { Next(Option<String>, ListTablesInput), End, } Box::pin( stream::try_unfold( PageState::Next(input.exclusive_start_table_name.clone(), input), move |state| { let clone = self.clone(); async move { let (exclusive_start_table_name, input) = match state { PageState::Next(start, input) => (start, input), PageState::End => { return Ok(None) as Result<_, RusotoError<ListTablesError>> } }; let resp = clone .list_tables(ListTablesInput { exclusive_start_table_name, ..input.clone() }) .await?; let next_state = match resp .last_evaluated_table_name .filter(|next| !next.is_empty()) { Some(next) => PageState::Next(Some(next), input), _ => PageState::End, }; Ok(Some(( stream::iter(resp.table_names.unwrap_or_default().into_iter().map(Ok)), next_state, ))) } }, ) .try_flatten(), ) } fn query_pages( self, input: QueryInput, ) -> DynomiteStream<HashMap<String, AttributeValue>, QueryError> { #[allow(clippy::large_enum_variant)] enum PageState { Next(Option<HashMap<String, AttributeValue>>, QueryInput), End, } Box::pin( stream::try_unfold( PageState::Next(input.exclusive_start_key.clone(), input), move |state| { let clone = self.clone(); async move { let (exclusive_start_key, input) = match state { PageState::Next(start, input) => (start, input), PageState::End => { return Ok(None) as Result<_, RusotoError<QueryError>> } }; let resp = clone .query(QueryInput { exclusive_start_key, ..input.clone() }) .await?; let next_state = match resp.last_evaluated_key.filter(|next| !next.is_empty()) { Some(next) => PageState::Next(Some(next), input), _ => PageState::End, }; Ok(Some(( stream::iter(resp.items.unwrap_or_default().into_iter().map(Ok)), next_state, ))) } }, ) .try_flatten(), ) } fn scan_pages<E>( self, input: ScanInput, ) -> DynomiteUserStream<HashMap<String, AttributeValue>, E> where E: 
From<RusotoError<ScanError>> + Sized + Sync + Send, { #[allow(clippy::large_enum_variant)] enum PageState { Next(Option<HashMap<String, AttributeValue>>, ScanInput), End, } Box::pin( stream::try_unfold( PageState::Next(input.exclusive_start_key.clone(), input), move |state| { let clone = self.clone(); async move { let (exclusive_start_key, input) = match state { PageState::Next(start, input) => (start, input), PageState::End => return Ok(None) as Result<_, E>, }; let resp = clone .scan(ScanInput { exclusive_start_key, ..input.clone() }) .await?; let next_state = match resp.last_evaluated_key.filter(|next| !next.is_empty()) { Some(next) => PageState::Next(Some(next), input), _ => PageState::End, }; Ok(Some(( stream::iter(resp.items.unwrap_or_default().into_iter().map(Ok)), next_state, ))) } }, ) .try_flatten(), ) } }
use crate::dynamodb::{ AttributeValue, BackupSummary, DynamoDb, ListBackupsError, ListBackupsInput, ListTablesError, ListTablesInput, QueryError, QueryInput, ScanError, ScanInput, }; use futures::{stream, Stream, TryStreamExt}; #[cfg(feature = "default")] use rusoto_core_default::RusotoError; #[cfg(feature = "rustls")] use rusoto_core_rustls::RusotoError; use std::{collections::HashMap, pin::Pin}; type DynomiteStream<I, E> = Pin<Box<dyn Stream<Item = Result<I, RusotoError<E>>> + Send>>; type DynomiteUserStream<I, E> = Pin<Box<dyn Stream<Item = Result<I, E>> + Send>>; pub trait DynamoDbExt { fn list_backups_pages( self, input: ListBackupsInput, ) -> DynomiteStream<BackupSummary, ListBackupsError>; fn list_tables_pages( self, input: ListTablesInput, ) -> DynomiteStream<String, ListTablesError>; fn query_pages( self, input: QueryInput, ) -> DynomiteStream<HashMap<String, AttributeValue>, QueryError>; fn scan_pages<E: From<RusotoError<ScanError>> + Sized + Sync + Send>( self, input: ScanInput, ) -> DynomiteUserStream<HashMap<String, AttributeValue>, E>; } impl<D> DynamoDbExt for D where D: DynamoDb + Clone + Send + Sync + 'static, { fn list_backups_pages( self, input: ListBackupsInput, ) -> DynomiteStream<BackupSummary, ListBackupsError> { enum PageState { Next(Option<String>, ListBackupsInput), End, } Box::pin( stream::try_unfold( PageState::Next(input.exclusive_start_backup_arn.clone(), input), move |state| { let clone = self.clone(); async move { let (exclusive_start_backup_arn, input) = match state { PageState::Next(start, input) => (start, input), PageState::End => { return Ok(None) as Result<_, RusotoError<ListBackupsError>> } }; let resp = clone .list_backups(ListBackupsInput { exclusive_start_backup_arn, ..input.clone() }) .await?; let next_state = match resp .last_evaluated_backup_arn .filter(|next| !next.is_empty()) { Some(next) => PageState::Next(Some(next), input), _ => PageState::End, }; Ok(Some(( stream::iter( resp.backup_summaries .unwrap_or_default() 
.into_iter() .map(Ok), ), next_state, ))) } }, ) .try_flatten(), ) } fn list_tables_pages( self, input: ListTablesInput, ) -> DynomiteStream<String, ListTablesError> { enum PageState { Next(Option<String>, ListTablesInput), End, } Box::pin( stream::try_unfold( PageState::Next(input.exclusive_start_table_name.clone(), input), move |state| { let clone = self.clone(); async move { let (exclusive_start_table_name, input) = match state { PageState::Next(start, input) => (start, input), PageState::End => { return Ok(None) as Result<_, RusotoError<ListTablesError>> } }; let resp = clone .list_tables(ListTablesInput { exclusive_start_table_name, ..input.clone() }) .await?; let next_state = match resp .last_evaluated_table_name .filter(|next| !next.is_empty()) { Some(next) => PageState::Next(Some(next), input), _ => PageState::End, }; Ok(Some(( stream::iter(resp.table_names.unwrap_or_default().into_iter().map(Ok)), next_state, ))) } }, ) .try_flatten(), ) } fn query_pages( self, input: QueryInput, ) -> DynomiteStream<HashMap<String, AttributeValue>, QueryError> { #[allow(clippy::large_enum_variant)] enum PageState { Next(Option<HashMap<String, AttributeValue>>, QueryInput), End, } Box::pin( stream::try_unfold( PageState::Next(input.exclusive_start_key.clone(), input), move |state| { let clone = self.clone(); async move { let (exclusive_start_key, input) = match state { PageState::Next(start, input) => (start, input), PageState::End => { return Ok(None) as Result<_, RusotoError<QueryError>> } }; let resp = clone .query(QueryInput { exclusive_start_key, ..input.clone() }) .await?; let next_state = match resp.last_evaluated_key.filter(|next| !next.is_empty()) { Some(next) => PageState::Next(Some(next), input), _ => PageState::End, }; Ok(Some(( stream::iter(resp.items.unwrap_or_default().into_iter().map(Ok)), next_state, ))) } }, ) .try_flatten(), ) } fn scan_pages<E>( self, input: ScanInput, ) -> DynomiteUserStream<HashMap<String, AttributeValue>, E> where E: 
From<RusotoError<ScanError>> + Sized + Sync + Send, { #[allow(clippy::large_enum_variant)] enum PageState { Next(Option<HashMap<String, AttributeValue>>, ScanInput), End, } Box::pin( stream::try_unfold( PageState::Next(input.exclusive_start_key.clone(), input), move |state| { let clone = self.clone(); async move { let (exclusive_start_key, input) = match state { PageState::Next(start, input) => (start, input), PageState::End => return Ok(None) as Result<_, E>, }; let resp = clone .scan(ScanInput { exclusive_start_key, ..input.clone() }) .await?; let next_state =
; Ok(Some(( stream::iter(resp.items.unwrap_or_default().into_iter().map(Ok)), next_state, ))) } }, ) .try_flatten(), ) } }
match resp.last_evaluated_key.filter(|next| !next.is_empty()) { Some(next) => PageState::Next(Some(next), input), _ => PageState::End, }
if_condition
[ { "content": "#[proc_macro_error::proc_macro_error]\n\n#[proc_macro_derive(Attributes, attributes(dynomite))]\n\npub fn derive_attributes(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse_macro_input!(input);\n\n\n\n let gen = match expand_attributes(ast) {\n\n Ok(g) => g,\n\n Err(e) => return e.to_compile_error().into(),\n\n };\n\n\n\n gen.into_token_stream().into()\n\n}\n\n\n\n/// Derives `dynomite::Attribute` for enum types\n\n///\n\n/// # Panics\n\n///\n\n/// This proc macro will panic when applied to other types\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 2, "score": 139263.89962742254 }, { "content": "#[proc_macro_error::proc_macro_error]\n\n#[proc_macro_derive(Item, attributes(partition_key, sort_key, dynomite))]\n\npub fn derive_item(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse_macro_input!(input);\n\n\n\n let gen = match expand_item(ast) {\n\n Ok(g) => g,\n\n Err(e) => return e.to_compile_error().into(),\n\n };\n\n\n\n gen.into_token_stream().into()\n\n}\n\n\n\n/// similar in spirit to `#[derive(Item)]` except these are exempt from declaring\n\n/// partition and sort keys\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 3, "score": 139263.89962742254 }, { "content": "#[proc_macro_error::proc_macro_error]\n\n#[proc_macro_derive(Attribute)]\n\npub fn derive_attribute(input: TokenStream) -> TokenStream {\n\n let ast = syn::parse_macro_input!(input);\n\n let gen = expand_attribute(ast);\n\n gen.into_token_stream().into()\n\n}\n\n\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 4, "score": 139263.89962742254 }, { "content": "type Error = Box<dyn std::error::Error + Send + Sync + 'static>;\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Error> {\n\n let client = DynamoDbClient::new(Default::default()).with_retries(Policy::default());\n\n\n\n lambda::run(handler(move |_, _| {\n\n let client = client.clone();\n\n async move {\n\n let tables = client\n\n .list_tables(Default::default())\n\n .await?\n\n 
.table_names\n\n .unwrap_or_default();\n\n Ok::<_, Error>(tables.join(\"\\n\"))\n\n }\n\n }))\n\n .await?;\n\n\n\n Ok(())\n\n}\n", "file_path": "dynomite/examples/lambda.rs", "rank": 5, "score": 134512.97350677426 }, { "content": "/// A type capable of being converted into an or from and AWS `AttributeValue`\n\n///\n\n/// Default implementations of this are provided for each type of `AttributeValue` field\n\n/// which map to naturally fitting native Rustlang types.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use dynomite::{dynamodb::AttributeValue, Attribute};\n\n///\n\n/// assert_eq!(\n\n/// \"test\".to_string().into_attr().s,\n\n/// AttributeValue {\n\n/// s: Some(\"test\".to_string()),\n\n/// ..AttributeValue::default()\n\n/// }\n\n/// .s\n\n/// );\n\n/// ```\n\npub trait Attribute: Sized {\n\n /// Returns a conversion into an `AttributeValue`\n\n fn into_attr(self: Self) -> AttributeValue;\n\n /// Returns a fallible conversion from an `AttributeValue`\n\n fn from_attr(value: AttributeValue) -> Result<Self, AttributeError>;\n\n}\n\n\n", "file_path": "dynomite/src/lib.rs", "rank": 6, "score": 117938.00245147853 }, { "content": "/// A type capable of being produced from\n\n/// a set of string keys and `AttributeValues`\n\npub trait FromAttributes: Sized {\n\n /// Returns an instance of of a type resolved at runtime from a collection\n\n /// of a `String` keys and `AttributeValues`. 
If\n\n /// a instance can not be resolved and `AttributeError` will be returned.\n\n fn from_attrs(attrs: Attributes) -> Result<Self, AttributeError>;\n\n}\n\n\n\n/// Coerces a homogenious HashMap of attribute values into a homogeneous Map of types\n\n/// that implement Attribute\n\n#[allow(clippy::implicit_hasher)]\n\nimpl<A: Attribute> FromAttributes for HashMap<String, A> {\n\n fn from_attrs(attrs: Attributes) -> Result<Self, AttributeError> {\n\n attrs\n\n .into_iter()\n\n .try_fold(HashMap::new(), |mut result, (k, v)| {\n\n result.insert(k, A::from_attr(v)?);\n\n Ok(result)\n\n })\n\n }\n\n}\n", "file_path": "dynomite/src/lib.rs", "rank": 7, "score": 117930.11253734812 }, { "content": "/// An interface for adapting a `DynamoDb` impl\n\n/// to a `RetryingDynamoDb` impl\n\npub trait Retries<D>\n\nwhere\n\n D: DynamoDb + 'static,\n\n{\n\n /// Consumes a `DynamoDb` impl and produces\n\n /// a `DynamoDb` which retries its operations when appropriate\n\n fn with_retries(\n\n self,\n\n policy: Policy,\n\n ) -> RetryingDynamoDb<D>;\n\n}\n\n\n\nimpl<D> Retries<D> for D\n\nwhere\n\n D: DynamoDb + 'static,\n\n{\n\n fn with_retries(\n\n self,\n\n policy: Policy,\n\n ) -> RetryingDynamoDb<D> {\n", "file_path": "dynomite/src/retry.rs", "rank": 9, "score": 89702.77333948752 }, { "content": "/// A type which can be converted to and from a set of String keys and\n\n/// `AttributeValues`.\n\n///\n\n/// # Examples\n\n///\n\n/// Below is an example of doing this manually for demonstration.\n\n///\n\n/// ```\n\n/// use dynomite::{\n\n/// dynamodb::AttributeValue, Attribute, AttributeError, Attributes, FromAttributes, Item,\n\n/// };\n\n/// use std::collections::HashMap;\n\n///\n\n/// #[derive(PartialEq, Debug, Clone)]\n\n/// struct Person {\n\n/// id: String,\n\n/// }\n\n///\n\n/// impl Item for Person {\n\n/// fn key(&self) -> Attributes {\n\n/// let mut attrs = HashMap::new();\n\n/// attrs.insert(\"id\".into(), \"123\".to_string().into_attr());\n\n/// attrs\n\n/// }\n\n/// 
}\n\n///\n\n/// impl FromAttributes for Person {\n\n/// fn from_attrs(attrs: Attributes) -> Result<Self, AttributeError> {\n\n/// Ok(Self {\n\n/// id: attrs\n\n/// .get(\"id\")\n\n/// .and_then(|val| val.s.clone())\n\n/// .ok_or(AttributeError::MissingField { name: \"id\".into() })?,\n\n/// })\n\n/// }\n\n/// }\n\n///\n\n/// impl Into<Attributes> for Person {\n\n/// fn into(self: Self) -> Attributes {\n\n/// let mut attrs = HashMap::new();\n\n/// attrs.insert(\"id\".into(), \"123\".to_string().into_attr());\n\n/// attrs\n\n/// }\n\n/// }\n\n/// let person = Person { id: \"123\".into() };\n\n/// let attrs: Attributes = person.clone().into();\n\n/// assert_eq!(Ok(person), FromAttributes::from_attrs(attrs))\n\n/// ```\n\n///\n\n/// You can get this all for free automatically using `#[derive(Item)]` on your structs. This is the recommended approach.\n\n///\n\n/// ```\n\n/// use dynomite::Item;\n\n/// #[derive(Item)]\n\n/// struct Book {\n\n/// #[dynomite(partition_key)]\n\n/// id: String,\n\n/// }\n\n/// ```\n\n///\n\n/// ## Renaming fields\n\n///\n\n/// In some cases you may be dealing with a DynamoDB table whose\n\n/// fields are named using conventions that do not align with Rust's conventions.\n\n/// You can leverage the `rename` attribute to map Rust's fields back to its source name\n\n/// explicitly\n\n///\n\n/// ```\n\n/// use dynomite::Item;\n\n///\n\n/// #[derive(Item)]\n\n/// struct Book {\n\n/// #[dynomite(partition_key)]\n\n/// id: String,\n\n/// #[dynomite(rename = \"notConventional\")]\n\n/// not_conventional: String,\n\n/// }\n\n/// ```\n\n///\n\n/// ## Accommodating sparse data\n\n///\n\n/// In some cases you may be dealing with a DynamoDB table whose\n\n/// fields are absent for some records. This is different than fields whose records\n\n/// have `NULL` attribute type values. 
In these cases you can use the `default` field\n\n/// attribute to communicate that the `std::default::Default::default()` value for the fields\n\n/// type will be used in the absence of data.\n\n///\n\n/// ```\n\n/// use dynomite::Item;\n\n///\n\n/// #[derive(Item)]\n\n/// struct Book {\n\n/// #[dynomite(partition_key)]\n\n/// id: String,\n\n/// #[dynomite(default)]\n\n/// summary: Option<String>,\n\n/// }\n\n/// ```\n\n///\n\n/// ## Item attribute projections\n\n///\n\n/// DynamoDB `Item`s are a set of attributes with a uniquely identifying\n\n/// partition key. At times, you may wish to project over these attributes into a type\n\n/// that does not include a partition_key. For that specific purpose, instead of\n\n/// deriving an `Item` type you'll want to derive `Attributes`\n\n///\n\n/// ```\n\n/// use dynomite::Attributes;\n\n///\n\n/// #[derive(Attributes)]\n\n/// struct BookProjection {\n\n/// author: String,\n\n/// #[dynomite(default)]\n\n/// summary: Option<String>\n\n/// }\n\npub trait Item: Into<Attributes> + FromAttributes {\n\n /// Returns the set of attributes which make up this item's primary key\n\n ///\n\n /// This is often used in item look ups\n\n fn key(&self) -> Attributes;\n\n}\n\n\n", "file_path": "dynomite/src/lib.rs", "rank": 10, "score": 81909.57209195156 }, { "content": "/// ```rust,ignore\n\n/// impl ::dynomite::Item for Name {\n\n/// fn key(&self) -> ::std::collections::HashMap<String, ::dynomite::dynamodb::AttributeValue> {\n\n/// let mut keys = ::std::collections::HashMap::new();\n\n/// keys.insert(\"field_deser_name\", to_attribute_value(field));\n\n/// keys\n\n/// }\n\n/// }\n\n/// ```\n\nfn get_item_trait(\n\n name: &Ident,\n\n fields: &[ItemField],\n\n) -> syn::Result<impl ToTokens> {\n\n let item = quote!(::dynomite::Item);\n\n let attribute_map = quote!(\n\n ::std::collections::HashMap<String, ::dynomite::dynamodb::AttributeValue>\n\n );\n\n let partition_key_field = fields.iter().find(|f| f.is_partition_key());\n\n let 
sort_key_field = fields.iter().find(|f| f.is_sort_key());\n\n let partition_key_insert = partition_key_field.map(get_key_inserter).transpose()?;\n\n let sort_key_insert = sort_key_field.map(get_key_inserter).transpose()?;\n\n\n\n Ok(partition_key_field\n\n .map(|_| {\n\n quote! {\n\n impl #item for #name {\n\n fn key(&self) -> #attribute_map {\n\n let mut keys = ::std::collections::HashMap::new();\n\n #partition_key_insert\n\n #sort_key_insert\n\n keys\n\n }\n\n }\n\n }\n\n })\n\n .unwrap_or_else(proc_macro2::TokenStream::new))\n\n}\n\n\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 11, "score": 70623.58427638594 }, { "content": "/// ```rust,ignore\n\n/// impl ::dynomite::FromAttributes for Name {\n\n/// fn from_attrs(mut item: ::dynomite::Attributes) -> Result<Self, ::dynomite::Error> {\n\n/// Ok(Self {\n\n/// field_name: ::dynomite::Attribute::from_attr(\n\n/// item.remove(\"field_deser_name\").ok_or(Error::MissingField { name: \"field_deser_name\".into() })?\n\n/// )\n\n/// })\n\n/// }\n\n/// }\n\n/// ```\n\nfn get_from_attributes_trait(\n\n name: &Ident,\n\n fields: &[ItemField],\n\n) -> syn::Result<impl ToTokens> {\n\n let from_attrs = quote!(::dynomite::FromAttributes);\n\n let from_attribute_map = get_from_attributes_function(fields)?;\n\n\n\n Ok(quote! {\n\n impl #from_attrs for #name {\n\n #from_attribute_map\n\n }\n\n })\n\n}\n\n\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 12, "score": 70619.58849024109 }, { "content": "// impl From<Name> for ::dynomite::Attributes {\n\n// fn from(n: Name) -> Self {\n\n// ...\n\n// }\n\n// }\n\n//\n\nfn get_to_attribute_map_trait(\n\n name: &Ident,\n\n fields: &[ItemField],\n\n) -> syn::Result<impl ToTokens> {\n\n let attributes = quote!(::dynomite::Attributes);\n\n let from = quote!(::std::convert::From);\n\n let to_attribute_map = get_to_attribute_map_function(name, fields)?;\n\n\n\n Ok(quote! 
{\n\n impl #from<#name> for #attributes {\n\n #to_attribute_map\n\n }\n\n })\n\n}\n\n\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 13, "score": 68679.07840111415 }, { "content": "fn get_dynomite_item_traits(\n\n vis: &Visibility,\n\n name: &Ident,\n\n fields: &[ItemField],\n\n) -> syn::Result<impl ToTokens> {\n\n let impls = get_item_impls(vis, name, fields)?;\n\n\n\n Ok(quote! {\n\n #impls\n\n })\n\n}\n\n\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 14, "score": 68672.08739469794 }, { "content": "fn expand_attribute(ast: DeriveInput) -> impl ToTokens {\n\n let name = &ast.ident;\n\n match ast.data {\n\n Enum(variants) => {\n\n make_dynomite_attr(name, &variants.variants.into_iter().collect::<Vec<_>>())\n\n }\n\n _ => panic!(\"Dynomite Attributes can only be generated for enum types\"),\n\n }\n\n}\n\n\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 15, "score": 57888.997940583205 }, { "content": "fn expand_attributes(ast: DeriveInput) -> syn::Result<impl ToTokens> {\n\n use syn::spanned::Spanned as _;\n\n let name = &ast.ident;\n\n match ast.data {\n\n Struct(DataStruct { fields, .. }) => match fields {\n\n Fields::Named(named) => {\n\n make_dynomite_attributes(name, &named.named.into_iter().collect::<Vec<_>>())\n\n }\n\n fields => Err(syn::Error::new(\n\n fields.span(),\n\n \"Dynomite Attributes require named fields\",\n\n )),\n\n },\n\n _ => panic!(\"Dynomite Attributes can only be generated for structs\"),\n\n }\n\n}\n\n\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 16, "score": 53385.00481681969 }, { "content": "fn expand_item(ast: DeriveInput) -> syn::Result<impl ToTokens> {\n\n use syn::spanned::Spanned as _;\n\n let name = &ast.ident;\n\n let vis = &ast.vis;\n\n match ast.data {\n\n Struct(DataStruct { fields, .. 
}) => match fields {\n\n Fields::Named(named) => {\n\n make_dynomite_item(vis, name, &named.named.into_iter().collect::<Vec<_>>())\n\n }\n\n fields => Err(syn::Error::new(\n\n fields.span(),\n\n \"Dynomite Items require named fields\",\n\n )),\n\n },\n\n _ => panic!(\"Dynomite Items can only be generated for structs\"),\n\n }\n\n}\n\n\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 17, "score": 53385.00481681969 }, { "content": "/// Predicate trait that determines if an impl\n\n/// type is retryable\n\ntrait Retry {\n\n /// Return true if type is retryable\n\n fn retryable(&self) -> bool;\n\n}\n\n\n", "file_path": "dynomite/src/retry.rs", "rank": 18, "score": 51662.62526481011 }, { "content": "/// ```rust,ignore\n\n/// impl ::dynomite::Attribute for Name {\n\n/// fn into_attr(self) -> ::dynomite::dynamodb::AttributeValue {\n\n/// let arm = match self {\n\n/// Name::Variant => \"Variant\".to_string()\n\n/// };\n\n/// ::dynomite::dynamodb::AttributeValue {\n\n/// s: Some(arm),\n\n/// ..Default::default()\n\n/// }\n\n/// }\n\n/// fn from_attr(value: ::dynomite::dynamodb::AttributeValue) -> Result<Self, ::dynomite::AttributeError> {\n\n/// value.s.ok_or(::dynomite::AttributeError::InvalidType)\n\n/// .and_then(|value| match &value[..] {\n\n/// \"Variant\" => Ok(Name::Variant),\n\n/// _ => Err(::dynomite::AttributeError::InvalidFormat)\n\n/// })\n\n/// }\n\n/// }\n\n/// ```\n\nfn make_dynomite_attr(\n\n name: &Ident,\n\n variants: &[Variant],\n\n) -> impl ToTokens {\n\n let attr = quote!(::dynomite::Attribute);\n\n let err = quote!(::dynomite::AttributeError);\n\n let into_match_arms = variants.iter().map(|var| {\n\n let vname = &var.ident;\n\n quote! {\n\n #name::#vname => stringify!(#vname).to_string(),\n\n }\n\n });\n\n let from_match_arms = variants.iter().map(|var| {\n\n let vname = &var.ident;\n\n quote! {\n\n stringify!(#vname) => ::std::result::Result::Ok(#name::#vname),\n\n }\n\n });\n\n\n\n quote! 
{\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 19, "score": 43674.13422988412 }, { "content": "/// ```rust,ignore\n\n/// #[derive(Item, Debug, Clone, PartialEq)]\n\n/// pub struct NameKey {\n\n/// partition_key_field,\n\n/// range_key\n\n/// }\n\n/// ```\n\nfn get_key_struct(\n\n vis: &Visibility,\n\n name: &Ident,\n\n fields: &[ItemField],\n\n) -> syn::Result<impl ToTokens> {\n\n let name = Ident::new(&format!(\"{}Key\", name), Span::call_site());\n\n\n\n let partition_key_field = fields\n\n .iter()\n\n .find(|field| field.is_partition_key())\n\n .cloned()\n\n .map(|field| {\n\n // clone because this is a new struct\n\n // note: this in inherits field attrs so that\n\n // we retain dynomite(rename = \"xxx\")\n\n let field = field.field.clone();\n\n quote! {\n\n #field\n\n }\n\n });\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 20, "score": 43661.64271110669 }, { "content": "fn main() {}", "file_path": "dynomite/trybuild-tests/item-has-no-partition-key.rs", "rank": 21, "score": 43655.80872431716 }, { "content": "fn make_dynomite_item(\n\n vis: &Visibility,\n\n name: &Ident,\n\n fields: &[Field],\n\n) -> syn::Result<impl ToTokens> {\n\n let item_fields = fields.iter().map(ItemField::new).collect::<Vec<_>>();\n\n // all items must have 1 primary_key\n\n let partition_key_count = item_fields.iter().filter(|f| f.is_partition_key()).count();\n\n if partition_key_count != 1 {\n\n return Err(syn::Error::new(\n\n name.span(),\n\n format!(\n\n \"All Item's must declare one and only one partition_key. 
The `{}` Item declared {}\",\n\n name, partition_key_count\n\n ),\n\n ));\n\n }\n\n // impl Item for Name + NameKey struct\n\n let dynamodb_traits = get_dynomite_item_traits(vis, name, &item_fields)?;\n\n // impl ::dynomite::FromAttributes for Name\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 22, "score": 43655.80872431716 }, { "content": "fn main() {\n\n\n\n fail();\n\n\n\n}\n\n\n", "file_path": "dynomite/trybuild-tests/item-not-on-struct-fail.rs", "rank": 23, "score": 43655.80872431716 }, { "content": "fn get_item_impls(\n\n vis: &Visibility,\n\n name: &Ident,\n\n fields: &[ItemField],\n\n) -> syn::Result<impl ToTokens> {\n\n // impl ::dynomite::Item for Name ...\n\n let item_trait = get_item_trait(name, fields)?;\n\n // pub struct NameKey ...\n\n let key_struct = get_key_struct(vis, name, fields)?;\n\n\n\n Ok(quote! {\n\n #item_trait\n\n /// #name Key\n\n #key_struct\n\n })\n\n}\n\n\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 24, "score": 43655.80872431716 }, { "content": "fn main() {}", "file_path": "dynomite/trybuild-tests/dynomite-unknown-attr.rs", "rank": 25, "score": 43655.80872431716 }, { "content": "fn make_dynomite_attributes(\n\n name: &Ident,\n\n fields: &[Field],\n\n) -> syn::Result<impl ToTokens> {\n\n let item_fields = fields.iter().map(ItemField::new).collect::<Vec<_>>();\n\n // impl ::dynomite::FromAttributes for Name\n\n let from_attribute_map = get_from_attributes_trait(name, &item_fields)?;\n\n // impl From<Name> for ::dynomite::Attributes\n\n let to_attribute_map = get_to_attribute_map_trait(name, &item_fields)?;\n\n // impl Attribute for Name (these are essentially just a map)\n\n let attribute = quote!(::dynomite::Attribute);\n\n let impl_attribute = quote! 
{\n\n impl #attribute for #name {\n\n fn into_attr(self: Self) -> ::dynomite::AttributeValue {\n\n ::dynomite::AttributeValue {\n\n m: Some(self.into()),\n\n ..::dynomite::AttributeValue::default()\n\n }\n\n }\n\n fn from_attr(value: ::dynomite::AttributeValue) -> Result<Self, ::dynomite::AttributeError> {\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 26, "score": 43655.80872431716 }, { "content": "#[derive(Item)]\n\nfn fail() {\n\n println!(\"This should fail\");\n\n}\n", "file_path": "dynomite/trybuild-tests/item-not-on-struct-fail.rs", "rank": 27, "score": 43655.80872431716 }, { "content": "// generates the `from(...)` method for attribute map From conversion\n\n//\n\n// fn from(item: Foo) -> Self {\n\n// let mut values = Self::new();\n\n// values.insert(\n\n// \"foo\".to_string(),\n\n// ::dynomite::Attribute::into_attr(item.field)\n\n// );\n\n// ...\n\n// values\n\n// }\n\nfn get_to_attribute_map_function(\n\n name: &Ident,\n\n fields: &[ItemField],\n\n) -> syn::Result<impl ToTokens> {\n\n let to_attribute_value = quote!(::dynomite::Attribute::into_attr);\n\n\n\n let field_conversions = fields\n\n .iter()\n\n .map(|field| {\n\n let field_deser_name = field.deser_name();\n\n\n\n let field_ident = &field.field.ident;\n\n Ok(quote! {\n\n values.insert(\n\n #field_deser_name.to_string(),\n\n #to_attribute_value(item.#field_ident)\n\n );\n\n })\n\n })\n\n .collect::<syn::Result<Vec<_>>>()?;\n\n\n\n Ok(quote! 
{\n\n fn from(item: #name) -> Self {\n\n let mut values = Self::new();\n\n #(#field_conversions)*\n\n values\n\n }\n\n })\n\n}\n\n\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 28, "score": 42702.06264739879 }, { "content": "fn main() {}", "file_path": "dynomite/trybuild-tests/item-has-multiple-partition-keys.rs", "rank": 29, "score": 42693.01860815056 }, { "content": "#[test]\n\nfn try_build_tests() {\n\n let t = trybuild::TestCases::new();\n\n t.compile_fail(\"trybuild-tests/*.rs\");\n\n}\n", "file_path": "dynomite/tests/try_build_test.rs", "rank": 30, "score": 42693.01860815056 }, { "content": "fn main() {}", "file_path": "dynomite/trybuild-tests/attributes-derived-unamed-fields-struct.rs", "rank": 31, "score": 41797.71503191961 }, { "content": "fn main() {}", "file_path": "dynomite/trybuild-tests/item-derived-unamed-field-struct.rs", "rank": 32, "score": 41797.71503191961 }, { "content": "fn parse_attrs(all_attrs: &[Attribute]) -> Vec<Attr> {\n\n all_attrs\n\n .iter()\n\n .filter(|attr| attr.path.is_ident(\"dynomite\"))\n\n .flat_map(|attr| {\n\n attr.parse_args_with(Punctuated::<Attr, Token![,]>::parse_terminated)\n\n .unwrap_or_abort()\n\n })\n\n .collect()\n\n}\n\n\n\n/// Derives `dynomite::Item` type for struts with named fields\n\n///\n\n/// # Attributes\n\n///\n\n/// * `#[dynomite(partition_key)]` - required attribute, expected to be applied the target [partition attribute](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.CoreComponents.html#HowItWorks.CoreComponents.PrimaryKey) field with an derivable DynamoDB attribute value of String, Number or Binary\n\n/// * `#[dynomite(sort_key)]` - optional attribute, may be applied to one target [sort attribute](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.CoreComponents.html#HowItWorks.CoreComponents.SecondaryIndexes) field with an derivable DynamoDB attribute value of String, Number or Binary\n\n/// * `#[dynomite(rename = \"actualName\")]` - 
optional attribute, may be applied any item attribute field, useful when the DynamoDB table you're interfacing with has attributes whose names don't following Rust's naming conventions\n\n///\n\n/// # Panics\n\n///\n\n/// This proc macro will panic when applied to other types\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 33, "score": 35154.84971320423 }, { "content": "/// ```rust,ignore\n\n/// keys.insert(\n\n/// \"field_deser_name\", to_attribute_value(field)\n\n/// );\n\n/// ```\n\nfn get_key_inserter(field: &ItemField) -> syn::Result<impl ToTokens> {\n\n let to_attribute_value = quote!(::dynomite::Attribute::into_attr);\n\n\n\n let field_deser_name = field.deser_name();\n\n let field_ident = &field.field.ident;\n\n Ok(quote! {\n\n keys.insert(\n\n #field_deser_name.to_string(),\n\n #to_attribute_value(self.#field_ident.clone())\n\n );\n\n })\n\n}\n\n\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 34, "score": 30671.69926083122 }, { "content": "fn get_from_attributes_function(fields: &[ItemField]) -> syn::Result<impl ToTokens> {\n\n let attributes = quote!(::dynomite::Attributes);\n\n let from_attribute_value = quote!(::dynomite::Attribute::from_attr);\n\n let err = quote!(::dynomite::AttributeError);\n\n\n\n let field_conversions = fields.iter().map(|field| {\n\n // field has #[dynomite(renameField = \"...\")] attribute\n\n let field_deser_name = field.deser_name();\n\n\n\n let field_ident = &field.field.ident;\n\n if field.is_default_when_absent() {\n\n Ok(quote! {\n\n #field_ident: match attrs.remove(#field_deser_name) {\n\n Some(field) => #from_attribute_value(field)?,\n\n _ => ::std::default::Default::default()\n\n }\n\n })\n\n } else {\n\n Ok(quote! 
{\n\n #field_ident: #from_attribute_value(\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 35, "score": 30671.69926083122 }, { "content": "}\n\n\n\n#[async_trait::async_trait]\n\nimpl<D> DynamoDb for RetryingDynamoDb<D>\n\nwhere\n\n D: DynamoDb + Sync + Send + Clone + 'static,\n\n{\n\n async fn batch_get_item(\n\n &self,\n\n input: BatchGetItemInput,\n\n ) -> Result<BatchGetItemOutput, RusotoError<BatchGetItemError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.batch_get_item(input).await }\n\n },\n\n Counter(0),\n", "file_path": "dynomite/src/retry.rs", "rank": 43, "score": 23.335034322302636 }, { "content": " let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.delete_backup(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn delete_item(\n\n &self,\n\n input: DeleteItemInput,\n\n ) -> Result<DeleteItemOutput, RusotoError<DeleteItemError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.delete_item(input).await }\n", "file_path": "dynomite/src/retry.rs", "rank": 45, "score": 17.745097318331172 }, { "content": "use dynomite::{\n\n attr_map,\n\n dynamodb::{\n\n AttributeDefinition, CreateTableInput, DynamoDb, DynamoDbClient, GetItemInput,\n\n KeySchemaElement, ProvisionedThroughput, PutItemInput, ScanInput,\n\n },\n\n retry::Policy,\n\n Attributes, DynamoDbExt, FromAttributes, Item, Retries,\n\n};\n\nuse futures::{future, TryStreamExt};\n\n#[cfg(feature = \"default\")]\n\nuse rusoto_core_default::Region;\n\n#[cfg(feature = \"rustls\")]\n\nuse rusoto_core_rustls::Region;\n\nuse std::error::Error;\n\nuse uuid::Uuid;\n\n\n\n#[derive(Attributes, Debug, Clone)]\n\npub struct Author {\n\n id: Uuid,\n", "file_path": "dynomite/examples/demo.rs", "rank": 46, 
"score": 17.57469705172328 }, { "content": " let input = input.clone();\n\n async move { client.query(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn restore_table_from_backup(\n\n &self,\n\n input: RestoreTableFromBackupInput,\n\n ) -> Result<RestoreTableFromBackupOutput, RusotoError<RestoreTableFromBackupError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.restore_table_from_backup(input).await }\n\n },\n", "file_path": "dynomite/src/retry.rs", "rank": 47, "score": 17.464646775847328 }, { "content": " }\n\n\n\n async fn scan(\n\n &self,\n\n input: ScanInput,\n\n ) -> Result<ScanOutput, RusotoError<ScanError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.scan(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn tag_resource(\n", "file_path": "dynomite/src/retry.rs", "rank": 48, "score": 17.392254287227686 }, { "content": " .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.put_item(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn query(\n\n &self,\n\n input: QueryInput,\n\n ) -> Result<QueryOutput, RusotoError<QueryError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n", "file_path": "dynomite/src/retry.rs", "rank": 49, "score": 17.33915035253233 }, { "content": " .await\n\n }\n\n\n\n async fn describe_backup(\n\n &self,\n\n input: DescribeBackupInput,\n\n ) -> Result<DescribeBackupOutput, RusotoError<DescribeBackupError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { 
client.describe_backup(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n", "file_path": "dynomite/src/retry.rs", "rank": 50, "score": 17.076876806370496 }, { "content": " .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.create_table(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn delete_backup(\n\n &self,\n\n input: DeleteBackupInput,\n\n ) -> Result<DeleteBackupOutput, RusotoError<DeleteBackupError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n", "file_path": "dynomite/src/retry.rs", "rank": 52, "score": 16.880482553352422 }, { "content": " }\n\n\n\n async fn get_item(\n\n &self,\n\n input: GetItemInput,\n\n ) -> Result<GetItemOutput, RusotoError<GetItemError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.get_item(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn list_backups(\n", "file_path": "dynomite/src/retry.rs", "rank": 53, "score": 16.8762425967146 }, { "content": "\n\n async fn create_backup(\n\n &self,\n\n input: CreateBackupInput,\n\n ) -> Result<CreateBackupOutput, RusotoError<CreateBackupError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.create_backup(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn create_global_table(\n\n &self,\n", "file_path": "dynomite/src/retry.rs", "rank": 54, "score": 16.77687902429279 }, { "content": " },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn delete_table(\n\n &self,\n\n input: DeleteTableInput,\n\n ) -> Result<DeleteTableOutput, RusotoError<DeleteTableError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = 
self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.delete_table(input).await }\n\n },\n\n Counter(0),\n\n )\n", "file_path": "dynomite/src/retry.rs", "rank": 55, "score": 16.62962401415472 }, { "content": " )\n\n .await\n\n }\n\n\n\n async fn batch_write_item(\n\n &self,\n\n input: BatchWriteItemInput,\n\n ) -> Result<BatchWriteItemOutput, RusotoError<BatchWriteItemError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.batch_write_item(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n", "file_path": "dynomite/src/retry.rs", "rank": 56, "score": 16.57988425390848 }, { "content": " async fn list_tables(\n\n &self,\n\n input: ListTablesInput,\n\n ) -> Result<ListTablesOutput, RusotoError<ListTablesError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.list_tables(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn list_tags_of_resource(\n\n &self,\n\n input: ListTagsOfResourceInput,\n", "file_path": "dynomite/src/retry.rs", "rank": 57, "score": 16.554929813190604 }, { "content": " move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.update_continuous_backups(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn update_contributor_insights(\n\n &self,\n\n input: UpdateContributorInsightsInput,\n\n ) -> Result<UpdateContributorInsightsOutput, RusotoError<UpdateContributorInsightsError>> {\n\n // todo: retry\n\n self.inner\n\n .clone()\n\n .client\n\n .update_contributor_insights(input)\n\n .await\n", "file_path": "dynomite/src/retry.rs", "rank": 58, "score": 16.46416772977923 }, { "content": " self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = 
self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.update_item(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn update_table(\n\n &self,\n\n input: UpdateTableInput,\n\n ) -> Result<UpdateTableOutput, RusotoError<UpdateTableError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n", "file_path": "dynomite/src/retry.rs", "rank": 59, "score": 16.421488228860653 }, { "content": "\n\n async fn update_time_to_live(\n\n &self,\n\n input: UpdateTimeToLiveInput,\n\n ) -> Result<UpdateTimeToLiveOutput, RusotoError<UpdateTimeToLiveError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.update_time_to_live(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn describe_endpoints(\n\n &self\n", "file_path": "dynomite/src/retry.rs", "rank": 60, "score": 16.391416279020994 }, { "content": " &self,\n\n input: TagResourceInput,\n\n ) -> Result<(), RusotoError<TagResourceError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.tag_resource(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn untag_resource(\n\n &self,\n\n input: UntagResourceInput,\n\n ) -> Result<(), RusotoError<UntagResourceError>> {\n", "file_path": "dynomite/src/retry.rs", "rank": 62, "score": 16.272812350419407 }, { "content": " }\n\n\n\n async fn transact_write_items(\n\n &self,\n\n input: TransactWriteItemsInput,\n\n ) -> Result<TransactWriteItemsOutput, RusotoError<TransactWriteItemsError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.transact_write_items(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n}\n\n\n", "file_path": 
"dynomite/src/retry.rs", "rank": 63, "score": 16.218778245895766 }, { "content": " }\n\n\n\n async fn update_global_table(\n\n &self,\n\n input: UpdateGlobalTableInput,\n\n ) -> Result<UpdateGlobalTableOutput, RusotoError<UpdateGlobalTableError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.update_global_table(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn update_global_table_settings(\n", "file_path": "dynomite/src/retry.rs", "rank": 64, "score": 16.205567927258215 }, { "content": " self.inner.client.list_contributor_insights(input).await\n\n }\n\n\n\n async fn list_global_tables(\n\n &self,\n\n input: ListGlobalTablesInput,\n\n ) -> Result<ListGlobalTablesOutput, RusotoError<ListGlobalTablesError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.list_global_tables(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n", "file_path": "dynomite/src/retry.rs", "rank": 65, "score": 16.1722192215347 }, { "content": " async fn describe_continuous_backups(\n\n &self,\n\n input: DescribeContinuousBackupsInput,\n\n ) -> Result<DescribeContinuousBackupsOutput, RusotoError<DescribeContinuousBackupsError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.describe_continuous_backups(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn describe_contributor_insights(\n\n &self,\n\n input: DescribeContributorInsightsInput,\n", "file_path": "dynomite/src/retry.rs", "rank": 66, "score": 16.11672658927436 }, { "content": " .client\n\n .describe_table_replica_auto_scaling(input)\n\n .await\n\n }\n\n\n\n async fn describe_time_to_live(\n\n &self,\n\n input: 
DescribeTimeToLiveInput,\n\n ) -> Result<DescribeTimeToLiveOutput, RusotoError<DescribeTimeToLiveError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.describe_time_to_live(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n", "file_path": "dynomite/src/retry.rs", "rank": 67, "score": 16.084441096384065 }, { "content": " Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn restore_table_to_point_in_time(\n\n &self,\n\n input: RestoreTableToPointInTimeInput,\n\n ) -> Result<RestoreTableToPointInTimeOutput, RusotoError<RestoreTableToPointInTimeError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.restore_table_to_point_in_time(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n", "file_path": "dynomite/src/retry.rs", "rank": 68, "score": 16.022120828666804 }, { "content": " self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.untag_resource(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn update_continuous_backups(\n\n &self,\n\n input: UpdateContinuousBackupsInput,\n\n ) -> Result<UpdateContinuousBackupsOutput, RusotoError<UpdateContinuousBackupsError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n", "file_path": "dynomite/src/retry.rs", "rank": 69, "score": 16.021276761837104 }, { "content": "//!\n\n//! To disable any of these features\n\n//!\n\n//! ```toml\n\n//! [dependencies.dynomite]\n\n//! version = \"xxx\"\n\n//! default-features = false\n\n//! features = [\"feature-you-want\"]\n\n//! ```\n\n\n\n#![deny(missing_docs)]\n\n// reexported\n\n// note: this is used inside the attr_map! 
macro\n\n#[cfg(feature = \"default\")]\n\npub use rusoto_dynamodb_default as dynamodb;\n\n\n\n#[cfg(feature = \"rustls\")]\n\npub use rusoto_dynamodb_rustls as dynamodb;\n\n\n\nuse bytes::Bytes;\n", "file_path": "dynomite/src/lib.rs", "rank": 70, "score": 15.832020405955134 }, { "content": " }\n\n\n\n async fn describe_global_table_settings(\n\n &self,\n\n input: DescribeGlobalTableSettingsInput,\n\n ) -> Result<DescribeGlobalTableSettingsOutput, RusotoError<DescribeGlobalTableSettingsError>>\n\n {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.describe_global_table_settings(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n", "file_path": "dynomite/src/retry.rs", "rank": 71, "score": 15.73434380511356 }, { "content": " move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.update_table(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn update_table_replica_auto_scaling(\n\n &self,\n\n input: UpdateTableReplicaAutoScalingInput,\n\n ) -> Result<UpdateTableReplicaAutoScalingOutput, RusotoError<UpdateTableReplicaAutoScalingError>>\n\n {\n\n self.inner\n\n .client\n\n .update_table_replica_auto_scaling(input)\n\n .await\n\n }\n", "file_path": "dynomite/src/retry.rs", "rank": 72, "score": 15.66034488300149 }, { "content": " .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.describe_table(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn describe_table_replica_auto_scaling(\n\n &self,\n\n input: DescribeTableReplicaAutoScalingInput,\n\n ) -> Result<\n\n DescribeTableReplicaAutoScalingOutput,\n\n RusotoError<DescribeTableReplicaAutoScalingError>,\n\n > {\n\n self.inner\n", "file_path": "dynomite/src/retry.rs", "rank": 73, "score": 
15.548987945330087 }, { "content": "/// Assumes a you are running the following `dynamodb-local`\n\n/// on your host machine\n\n///\n\n/// ```bash\n\n/// $ docker run -p 8000:8000 amazon/dynamodb-local\n\n/// ```\n\nuse dynomite::{\n\n attr_map,\n\n dynamodb::{\n\n AttributeDefinition, CreateTableInput, DynamoDb, DynamoDbClient, GetItemInput,\n\n KeySchemaElement, ProvisionedThroughput, PutItemInput, ScanInput,\n\n },\n\n retry::Policy,\n\n DynamoDbExt, FromAttributes, Item, Retries,\n\n};\n\nuse futures::{future, TryStreamExt};\n\n#[cfg(feature = \"default\")]\n\nuse rusoto_core_default::Region;\n\n#[cfg(feature = \"rustls\")]\n\nuse rusoto_core_rustls::Region;\n", "file_path": "dynomite/examples/local.rs", "rank": 74, "score": 15.543063919236134 }, { "content": " &self,\n\n input: ListBackupsInput,\n\n ) -> Result<ListBackupsOutput, RusotoError<ListBackupsError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.list_backups(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn list_contributor_insights(\n\n &self,\n\n input: ListContributorInsightsInput,\n\n ) -> Result<ListContributorInsightsOutput, RusotoError<ListContributorInsightsError>> {\n", "file_path": "dynomite/src/retry.rs", "rank": 75, "score": 15.382277593932514 }, { "content": " ) -> Result<ListTagsOfResourceOutput, RusotoError<ListTagsOfResourceError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.list_tags_of_resource(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn put_item(\n\n &self,\n\n input: PutItemInput,\n\n ) -> Result<PutItemOutput, RusotoError<PutItemError>> {\n\n self.inner\n\n .policy\n", "file_path": "dynomite/src/retry.rs", "rank": 76, "score": 15.368167387955474 }, { "content": " input: 
CreateGlobalTableInput,\n\n ) -> Result<CreateGlobalTableOutput, RusotoError<CreateGlobalTableError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.create_global_table(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn create_table(\n\n &self,\n\n input: CreateTableInput,\n\n ) -> Result<CreateTableOutput, RusotoError<CreateTableError>> {\n\n self.inner\n", "file_path": "dynomite/src/retry.rs", "rank": 77, "score": 15.299022059103232 }, { "content": " ) -> Result<DescribeContributorInsightsOutput, RusotoError<DescribeContributorInsightsError>>\n\n {\n\n self.inner.client.describe_contributor_insights(input).await\n\n }\n\n\n\n async fn describe_global_table(\n\n &self,\n\n input: DescribeGlobalTableInput,\n\n ) -> Result<DescribeGlobalTableOutput, RusotoError<DescribeGlobalTableError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.describe_global_table(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n", "file_path": "dynomite/src/retry.rs", "rank": 78, "score": 15.259359752510061 }, { "content": " ) -> Result<DescribeEndpointsResponse, RusotoError<DescribeEndpointsError>> {\n\n // no apparent retryable errors\n\n self.inner.client.describe_endpoints().await\n\n }\n\n\n\n async fn transact_get_items(\n\n &self,\n\n input: TransactGetItemsInput,\n\n ) -> Result<TransactGetItemsOutput, RusotoError<TransactGetItemsError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.transact_get_items(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n", "file_path": "dynomite/src/retry.rs", "rank": 79, "score": 15.174849535277161 }, { "content": " &self,\n\n input: 
UpdateGlobalTableSettingsInput,\n\n ) -> Result<UpdateGlobalTableSettingsOutput, RusotoError<UpdateGlobalTableSettingsError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n let input = input.clone();\n\n async move { client.update_global_table_settings(input).await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn update_item(\n\n &self,\n\n input: UpdateItemInput,\n\n ) -> Result<UpdateItemOutput, RusotoError<UpdateItemError>> {\n", "file_path": "dynomite/src/retry.rs", "rank": 80, "score": 15.055040612010838 }, { "content": " async fn describe_limits(\n\n &self\n\n ) -> Result<DescribeLimitsOutput, RusotoError<DescribeLimitsError>> {\n\n self.inner\n\n .policy\n\n .retry_if(\n\n move || {\n\n let client = self.inner.clone().client.clone();\n\n async move { client.describe_limits().await }\n\n },\n\n Counter(0),\n\n )\n\n .await\n\n }\n\n\n\n async fn describe_table(\n\n &self,\n\n input: DescribeTableInput,\n\n ) -> Result<DescribeTableOutput, RusotoError<DescribeTableError>> {\n\n self.inner\n", "file_path": "dynomite/src/retry.rs", "rank": 81, "score": 14.896389399488195 }, { "content": "use dynomite_derive::{Attribute, Item};\n\n\n\n#[derive(Item, Default, PartialEq, Debug, Clone)]\n\npub struct Author {\n\n #[dynomite(partition_key)]\n\n name: String,\n\n}\n\n\n\n#[derive(Attribute, PartialEq, Debug, Clone)]\n\npub enum Category {\n\n Foo,\n\n}\n\n\n\nimpl Default for Category {\n\n fn default() -> Self {\n\n Category::Foo\n\n }\n\n}\n\n\n\n#[derive(Item, Default, PartialEq, Debug, Clone)]\n\npub struct Book {\n\n #[dynomite(partition_key)]\n\n title: String,\n\n category: Category,\n\n authors: Option<Vec<Author>>,\n\n}\n\n\n\n#[derive(Item, PartialEq, Debug, Clone)]\n", "file_path": "dynomite/tests/derived.rs", "rank": 82, "score": 13.359955286244093 }, { "content": "use again::{Condition, RetryPolicy};\n\nuse log::debug;\n\n#[cfg(feature = \"default\")]\n\nuse 
rusoto_core_default::RusotoError;\n\n#[cfg(feature = \"rustls\")]\n\nuse rusoto_core_rustls::RusotoError;\n\nuse std::{sync::Arc, time::Duration};\n\n\n\n/// Pre-configured retry policies for fallible operations\n\n///\n\n/// A `Default` impl of retrying 5 times with an exponential backoff of 100 milliseconds\n\n#[derive(Clone, PartialEq, Debug)]\n\npub enum Policy {\n\n /// Limited number of times to retry\n\n Limit(usize),\n\n /// Limited number of times to retry with fixed pause between retries\n\n Pause(usize, Duration),\n\n /// Limited number of times to retry with an exponential pause between retries\n\n Exponential(usize, Duration),\n\n}\n", "file_path": "dynomite/src/retry.rs", "rank": 83, "score": 13.181763423942824 }, { "content": " impl #attr for #name {\n\n fn into_attr(self) -> ::dynomite::dynamodb::AttributeValue {\n\n let arm = match self {\n\n #(#into_match_arms)*\n\n };\n\n ::dynomite::dynamodb::AttributeValue {\n\n s: ::std::option::Option::Some(arm),\n\n ..::std::default::Default::default()\n\n }\n\n }\n\n fn from_attr(value: ::dynomite::dynamodb::AttributeValue) -> ::std::result::Result<Self, #err> {\n\n value.s.ok_or(::dynomite::AttributeError::InvalidType)\n\n .and_then(|value| match &value[..] 
{\n\n #(#from_match_arms)*\n\n _ => ::std::result::Result::Err(::dynomite::AttributeError::InvalidFormat)\n\n })\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 85, "score": 12.616679094129006 }, { "content": "# 0.2.1\n\n\n\n* Add support for configuring policies for retrying requests [based on DynamoDB recommendations](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Programming.Errors.html)\n\n\n\n\n\n```rust\n\nuse dynomite::{Retries, retry::Policy};\n\nuse dynomite::dynamodb::{DynamoDb, DynamoDbClient};\n\n\n\nfn main() {\n\n let client =\n\n DynamoDbClient::new(Default::default())\n\n .with_retries(Policy::default());\n\n\n\n // any client operation will now be retried when\n\n // appropriate\n\n let tables = client.list_tables(Default::default());\n\n // other important work...\n\n}\n\n```\n\n\n\n* update documentation to highlight more concisely areas of focus\n\n\n\n# 0.2.0\n\n\n\n* upgraded to 2018 edition\n\n * a side effect of this is that an interaction with 2018-style imports caused a name conflict with `dynomite::Item` and now `dynomite_derive::Item`. As a result the dynomite crate now has a\n\n compiler feature flag called \"derive\" which is no by default that resolves this. If you do not wish to have the feature enabled by default add the following to your Cargo.toml\n\n\n\n ```toml\n\n [dependencies.dynomite]\n\n version = \"0.2\"\n\n default-features = false\n\n features = [\"uuid\"]\n\n ```\n\n* updates to supported Attribute type conversions\n\n\n\n * numeric sets (NS) no longer support vec type conversions, only sets types!\n\n * list types (L) now support any type that implements `Attribute`, previously this only\n\n supported lists of types that implemented `Item` (a complex time). 
This means lists of scalars are now supported by default\n\n * `Cow<str>` is now supported for String Attributes\n\n * `FromAttributes` is now implemented for `XXXMap` types of `String` to `Attribute` types.\n\n This means you now get free, Item-link integration for homogenious maps\n\n * much needed unit tests now cover the correctness of implementations!\n\n* (breaking change) the `DynamoDbExt.stream_xxx` methods which produced auto-paginating streams have been renamed to `DynamoDbExt.xxx_pages` to be more intention-revealing and inline with naming conventions of other language sdk's methods that implement similar functionality.\n\n\n", "file_path": "CHANGELOG.md", "rank": 86, "score": 11.857589079727266 }, { "content": "impl Parse for Attr {\n\n fn parse(input: ParseStream) -> syn::Result<Self> {\n\n use self::Attr::*;\n\n let name: Ident = input.parse()?;\n\n let name_str = name.to_string();\n\n if input.peek(Token![=]) {\n\n // `name = value` attributes.\n\n let assign = input.parse::<Token![=]>()?; // skip '='\n\n if input.peek(LitStr) {\n\n let lit: LitStr = input.parse()?;\n\n match &*name_str {\n\n \"rename\" => Ok(Rename(name, lit)),\n\n unsupported => abort! {\n\n name,\n\n \"unsupported dynomite {} attribute\",\n\n unsupported\n\n },\n\n }\n\n } else {\n\n abort! {\n", "file_path": "dynomite-derive/src/attr.rs", "rank": 87, "score": 11.431880918304874 }, { "content": "//! 
dynomite field attributes\n\n\n\nuse proc_macro_error::abort;\n\nuse syn::{\n\n parse::{Parse, ParseStream},\n\n Ident, LitStr, Token,\n\n};\n\n\n\n#[derive(Clone)]\n\npub enum Attr {\n\n /// Denotes field should be replaced with Default impl when absent in ddb\n\n Default(Ident),\n\n /// Denotes field should be renamed to value of ListStr\n\n Rename(Ident, LitStr),\n\n /// Denotes Item partition (primary) key\n\n PartitionKey(Ident),\n\n /// Denotes Item sort key\n\n SortKey(Ident),\n\n}\n\n\n", "file_path": "dynomite-derive/src/attr.rs", "rank": 88, "score": 11.151787464610868 }, { "content": "//! // dynamodb types require only primary key attributes and may contain\n\n//! // other fields. when looking up items only those key attributes are required\n\n//! // dynomite derives a new {Name}Key struct for your which contains\n\n//! // only those and also implements Item\n\n//! let key = PersonKey { id: \"123\".into() };\n\n//! let key_attributes: Attributes = key.clone().into();\n\n//! // convert attributes into person type\n\n//! assert_eq!(key, PersonKey::from_attrs(key_attributes).unwrap());\n\n//! 
```\n\n\n\nextern crate proc_macro;\n\n\n\nmod attr;\n\nuse attr::Attr;\n\n\n\nuse proc_macro::TokenStream;\n\nuse proc_macro2::Span;\n\nuse proc_macro_error::ResultExt;\n\nuse quote::{quote, ToTokens};\n\nuse syn::{\n\n punctuated::Punctuated,\n\n Attribute,\n\n Data::{Enum, Struct},\n\n DataStruct, DeriveInput, Field, Fields, Ident, Token, Variant, Visibility,\n\n};\n\n\n\n/// A Field and all its extracted dynomite derive attrs\n\n#[derive(Clone)]\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 89, "score": 11.116576311984543 }, { "content": "pub mod retry;\n\n\n\npub use crate::{ext::DynamoDbExt, retry::Retries};\n\n\n\npub use crate::error::AttributeError;\n\n/// Type alias for map of named attribute values\n\npub type Attributes = HashMap<String, AttributeValue>;\n\n\n\n/// A type which can be converted to and from a set of String keys and\n\n/// `AttributeValues`.\n\n///\n\n/// # Examples\n\n///\n\n/// Below is an example of doing this manually for demonstration.\n\n///\n\n/// ```\n\n/// use dynomite::{\n\n/// dynamodb::AttributeValue, Attribute, AttributeError, Attributes, FromAttributes, Item,\n\n/// };\n\n/// use std::collections::HashMap;\n", "file_path": "dynomite/src/lib.rs", "rank": 90, "score": 10.783955621777821 }, { "content": " RetryingDynamoDb::new(self, policy)\n\n }\n\n}\n\n\n\nimpl<D> RetryingDynamoDb<D>\n\nwhere\n\n D: DynamoDb + 'static,\n\n{\n\n /// Return a new instance with a configured retry policy\n\n pub fn new(\n\n client: D,\n\n policy: Policy,\n\n ) -> Self {\n\n Self {\n\n inner: Arc::new(Inner {\n\n client,\n\n policy: policy.into(),\n\n }),\n\n }\n\n }\n", "file_path": "dynomite/src/retry.rs", "rank": 91, "score": 10.378291151717733 }, { "content": "///\n\n/// This provides some convenience for some interfaces,\n\n/// like [query](../rusoto_dynamodb/struct.QueryInput.html#structfield.expression_attribute_values)\n\n/// where a map of this type is required.\n\n///\n\n/// This syntax for this macro is the same as 
[maplit](https://crates.io/crates/maplit).\n\n///\n\n/// A avoid using `&str` slices for values when creating a mapping for a `String` `AttributeValue`.\n\n/// Instead use a `String`.\n\n///\n\n/// ## Example\n\n///\n\n/// ```\n\n/// use dynomite::dynamodb::QueryInput;\n\n/// use dynomite::attr_map;\n\n///\n\n/// let query = QueryInput {\n\n/// table_name: \"some_table\".into(),\n\n/// key_condition_expression: Some(\n\n/// \"partitionKeyName = :partitionkeyval\".into()\n", "file_path": "dynomite/src/lib.rs", "rank": 92, "score": 9.3013475336763 }, { "content": "#[cfg(feature = \"chrono\")]\n\nuse chrono::{\n\n offset::{FixedOffset, Local},\n\n DateTime, Utc,\n\n};\n\n\n\n// we re-export this because we\n\n// refer to it with in derive macros\n\n#[doc(hidden)]\n\npub use dynamodb::AttributeValue;\n\nuse std::{\n\n borrow::Cow,\n\n collections::{BTreeMap, BTreeSet, HashMap, HashSet},\n\n time::SystemTime,\n\n};\n\n#[cfg(feature = \"uuid\")]\n\nuse uuid::Uuid;\n\n\n\npub mod error;\n\nmod ext;\n", "file_path": "dynomite/src/lib.rs", "rank": 93, "score": 8.531796440483859 }, { "content": "# 0.1.5\n\n\n\n* updated dependencies\n\n\n\n * `Rusoto-*` 0.34 -> 0.36\n\n\n\n# 0.1.4\n\n\n\n* add Stream oriented extension interfaces for paginated apis\n\n\n\nBy default, the `DyanomoDb` apis `list_backups`, `list_tables`, `query`, `scan`\n\nall require application management of pagination using inconsistent api's.\n\nThis release brings a consistent interface for each with extension methods prefixed with `stream_`\n\nwhich return a consistent interface for retrieving a `futures::Stream` of their\n\nrespective values.\n\n\n\n* add `maplit!` inspired `attr_map!` helper macro useful in query contexts when providing `expression_attribute_values`\n\n\n\n* pin rusoto crate versioning to minor release `0.34`\n\n\n\nIn the past this crate was pinned to a major version of rusoto. 
It will be pinned to a minor\n\nversion going forward.\n\n\n\nSee the [demo application](https://github.com/softprops/dynomite/blob/5ed3444a46a02bd560644fed35adb553ffb8a0f0/dynomite-derive/examples/demo.rs) for examples of updated interfaces.\n\n\n\n# 0.1.3\n\n\n\n* fix examples for rusoto breaking changes in 0.32, async release\n\n\n\n# 0.1.2\n\n\n\n* fix `dynomite-derive` `dynomite` dependency version\n\n\n\n# 0.1.1\n\n\n\n* initial release\n", "file_path": "CHANGELOG.md", "rank": 94, "score": 8.489339297705044 }, { "content": " use ::dynomite::FromAttributes;\n\n value\n\n .m\n\n .ok_or(::dynomite::AttributeError::InvalidType)\n\n .and_then(Self::from_attrs)\n\n }\n\n }\n\n };\n\n\n\n Ok(quote! {\n\n #from_attribute_map\n\n #to_attribute_map\n\n #impl_attribute\n\n })\n\n}\n\n\n", "file_path": "dynomite-derive/src/lib.rs", "rank": 95, "score": 8.420967379303747 }, { "content": " .into_iter()\n\n .map(Attribute::from_attr)\n\n .collect()\n\n }\n\n}\n\n\n\nimpl<T: Attribute> Attribute for Option<T> {\n\n fn into_attr(self: Self) -> AttributeValue {\n\n match self {\n\n Some(value) => value.into_attr(),\n\n _ => AttributeValue {\n\n null: Some(true),\n\n ..Default::default()\n\n },\n\n }\n\n }\n\n fn from_attr(value: AttributeValue) -> Result<Self, AttributeError> {\n\n match value.null {\n\n Some(true) => Ok(None),\n\n _ => Ok(Some(Attribute::from_attr(value)?)),\n", "file_path": "dynomite/src/lib.rs", "rank": 96, "score": 8.259487572811313 }, { "content": " })\n\n }\n\n}\n\n\n\n/// An `rfc3339` formatted version of `DateTime<FixedOffset>`, represented by the `S` AttributeValue type\n\n#[cfg(feature = \"chrono\")]\n\nimpl Attribute for DateTime<FixedOffset> {\n\n fn into_attr(self: Self) -> AttributeValue {\n\n AttributeValue {\n\n s: Some(self.to_rfc3339()),\n\n ..Default::default()\n\n }\n\n }\n\n fn from_attr(value: AttributeValue) -> Result<Self, AttributeError> {\n\n value\n\n .s\n\n .ok_or(AttributeError::InvalidType)\n\n .and_then(|s| match 
DateTime::parse_from_rfc3339(&s) {\n\n Ok(date_time) => Ok(date_time),\n\n Err(_) => Err(AttributeError::InvalidFormat),\n", "file_path": "dynomite/src/lib.rs", "rank": 97, "score": 8.203260959016447 }, { "content": " client\n\n .put_item(PutItemInput {\n\n table_name: table_name.clone(),\n\n // convert book into it's attribute map representation\n\n item: Book {\n\n id: Uuid::new_v4(),\n\n title: \"rust and beyond\".into(),\n\n }\n\n .into(),\n\n ..PutItemInput::default()\n\n })\n\n .await?\n\n );\n\n\n\n // scan through all pages of results in the books table for books who's title is \"rust\"\n\n println!(\n\n \"scan result {:#?}\",\n\n client\n\n .clone()\n\n .scan_pages(ScanInput {\n", "file_path": "dynomite/examples/local.rs", "rank": 98, "score": 8.118057567380369 }, { "content": "use std::error::Error;\n\nuse uuid::Uuid;\n\n\n\n#[derive(Item, Debug, Clone)]\n\npub struct Book {\n\n #[dynomite(partition_key, rename = \"Id\")]\n\n id: Uuid,\n\n #[dynomite(rename = \"bookTitle\", default)]\n\n title: String,\n\n}\n\n\n\n/// create a book table with a single string (S) primary key.\n\n/// if this table does not already exists\n\n/// this may take a second or two to provision.\n\n/// it will fail if this table already exists but that's okay,\n\n/// this is just an example :)\n\nasync fn bootstrap<D>(\n\n client: &D,\n\n table_name: String,\n\n) where\n", "file_path": "dynomite/examples/local.rs", "rank": 99, "score": 8.064633586664845 } ]
Rust
language/bytecode-verifier/src/control_flow.rs
GreyHyphen/dijets
8c94c5c079fd025929dbd0455fc0e76d62e418c2
use move_binary_format::{ errors::{PartialVMError, PartialVMResult}, file_format::{Bytecode, CodeOffset, CodeUnit, FunctionDefinitionIndex}, }; use move_core_types::vm_status::StatusCode; use std::{collections::HashSet, convert::TryInto}; pub fn verify( current_function_opt: Option<FunctionDefinitionIndex>, code: &CodeUnit, ) -> PartialVMResult<()> { let current_function = current_function_opt.unwrap_or(FunctionDefinitionIndex(0)); match code.code.last() { None => return Err(PartialVMError::new(StatusCode::EMPTY_CODE_UNIT)), Some(last) if !last.is_unconditional_branch() => { return Err(PartialVMError::new(StatusCode::INVALID_FALL_THROUGH) .at_code_offset(current_function, (code.code.len() - 1) as CodeOffset)) } Some(_) => (), } let context = &ControlFlowVerifier { current_function, code: &code.code, }; let labels = instruction_labels(context); check_jumps(context, labels) } #[derive(Clone, Copy)] enum Label { Loop { last_continue: u16 }, Code, } struct ControlFlowVerifier<'a> { current_function: FunctionDefinitionIndex, code: &'a Vec<Bytecode>, } impl<'a> ControlFlowVerifier<'a> { fn code(&self) -> impl Iterator<Item = (CodeOffset, &'a Bytecode)> { self.code .iter() .enumerate() .map(|(idx, instr)| (idx.try_into().unwrap(), instr)) } fn labeled_code<'b: 'a>( &self, labels: &'b [Label], ) -> impl Iterator<Item = (CodeOffset, &'a Bytecode, &'b Label)> { self.code() .zip(labels) .map(|((i, instr), lbl)| (i, instr, lbl)) } fn error(&self, status: StatusCode, offset: CodeOffset) -> PartialVMError { PartialVMError::new(status).at_code_offset(self.current_function, offset) } } fn instruction_labels(context: &ControlFlowVerifier) -> Vec<Label> { let mut labels: Vec<Label> = (0..context.code.len()).map(|_| Label::Code).collect(); let mut loop_continue = |loop_idx: CodeOffset, last_continue: CodeOffset| { labels[loop_idx as usize] = Label::Loop { last_continue } }; for (i, instr) in context.code() { match instr { Bytecode::Branch(prev) | Bytecode::BrTrue(prev) | 
Bytecode::BrFalse(prev) if *prev <= i => { loop_continue(*prev, i) } _ => (), } } labels } fn check_jumps(context: &ControlFlowVerifier, labels: Vec<Label>) -> PartialVMResult<()> { check_continues(context, &labels)?; check_breaks(context, &labels)?; check_no_loop_splits(context, &labels) } fn check_code< F: FnMut(&Vec<(CodeOffset, CodeOffset)>, CodeOffset, &Bytecode) -> PartialVMResult<()>, >( context: &ControlFlowVerifier, labels: &[Label], mut check: F, ) -> PartialVMResult<()> { let mut loop_stack: Vec<(CodeOffset, CodeOffset)> = vec![]; for (i, instr, label) in context.labeled_code(labels) { if let Label::Loop { last_continue } = label { loop_stack.push((i, *last_continue)); } check(&loop_stack, i, instr)?; match instr { Bytecode::Branch(j) | Bytecode::BrTrue(j) | Bytecode::BrFalse(j) if *j <= i => { let (_cur_loop, last_continue) = loop_stack.last().unwrap(); if i == *last_continue { loop_stack.pop(); } } _ => (), } } Ok(()) } fn check_continues(context: &ControlFlowVerifier, labels: &[Label]) -> PartialVMResult<()> { check_code(context, labels, |loop_stack, i, instr| { match instr { Bytecode::Branch(j) | Bytecode::BrTrue(j) | Bytecode::BrFalse(j) if *j <= i => { let (cur_loop, _last_continue) = loop_stack.last().unwrap(); let is_continue = *j <= i; if is_continue && j != cur_loop { Err(context.error(StatusCode::INVALID_LOOP_CONTINUE, i)) } else { Ok(()) } } _ => Ok(()), } }) } fn check_breaks(context: &ControlFlowVerifier, labels: &[Label]) -> PartialVMResult<()> { check_code(context, labels, |loop_stack, i, instr| { match instr { Bytecode::Branch(j) | Bytecode::BrTrue(j) | Bytecode::BrFalse(j) if *j > i => { match loop_stack.last() { Some((_cur_loop, last_continue)) if j > last_continue && *j != last_continue + 1 => { Err(context.error(StatusCode::INVALID_LOOP_BREAK, i)) } _ => Ok(()), } } _ => Ok(()), } }) } fn check_no_loop_splits(context: &ControlFlowVerifier, labels: &[Label]) -> PartialVMResult<()> { let is_break = |loop_stack: &Vec<(CodeOffset, 
CodeOffset)>, jump_target: CodeOffset| -> bool { match loop_stack.last() { None => false, Some((_cur_loop, last_continue)) => jump_target > *last_continue, } }; let loop_depth = count_loop_depth(labels); check_code(context, labels, |loop_stack, i, instr| { match instr { Bytecode::Branch(j) | Bytecode::BrTrue(j) | Bytecode::BrFalse(j) if *j > i && !is_break(loop_stack, *j) => { let j = *j; let before_depth = loop_depth[i as usize]; let after_depth = match &labels[j as usize] { Label::Loop { .. } => loop_depth[j as usize] - 1, Label::Code => loop_depth[j as usize], }; if before_depth != after_depth { Err(context.error(StatusCode::INVALID_LOOP_SPLIT, i)) } else { Ok(()) } } _ => Ok(()), } }) } fn count_loop_depth(labels: &[Label]) -> Vec<usize> { let last_continues: HashSet<CodeOffset> = labels .iter() .filter_map(|label| match label { Label::Loop { last_continue } => Some(*last_continue), Label::Code => None, }) .collect(); let mut count = 0; let mut counts = vec![]; for (idx, label) in labels.iter().enumerate() { if let Label::Loop { .. } = label { count += 1 } counts.push(count); if last_continues.contains(&idx.try_into().unwrap()) { count -= 1; } } counts }
use move_binary_format::{ errors::{PartialVMError, PartialVMResult}, file_format::{Bytecode, CodeOffset, CodeUnit, FunctionDefinitionIndex}, }; use move_core_types::vm_status::StatusCode; use std::{collections::HashSet, convert::TryInto}; pub fn verify( current_function_opt: Option<FunctionDefinitionIndex>, code: &CodeUnit, ) -> PartialVMResult<()> { let current_function = current_function_opt.unwrap_or(FunctionDefinitionIndex(0)); match code.code.last() { None => return Err(PartialVMError::new(StatusCode::EMPTY_CODE_UNIT)), Some(last) if !last.is_unconditional_branch() => { return Err(PartialVMError::new(StatusCode::INVALID_FALL_THROUGH) .at_code_offset(current_function, (code.code.len() - 1) as CodeOffset)) } Some(_) => (), } let context = &ControlFlowVerifier { current_function, code: &code.code, }; let labels = instruction_labels(context); check_jumps(context, labels) } #[derive(Clone, Copy)] enum Label { Loop { last_continue: u16 }, Code, } struct ControlFlowVerifier<'a> { current_function: FunctionDefinitionIndex, code: &'a Vec<Bytecode>, } impl<'a> ControlFlowVerifier<'a> { fn code(&self) -> impl Iterator<Item = (CodeOffset, &'a Bytecode)> { self.code .iter() .enumerate() .map(|(idx, instr)| (idx.try_into().unwrap(), instr)) } fn labeled_code<'b: 'a>( &self, labels: &'b [Label], ) -> impl Iterator<Item = (CodeOffset, &'a Bytecode, &'b Label)> { self.code() .zip(labels) .map(|((i, instr), lbl)| (i, instr, lbl)) } fn error(&self, status: StatusCode, offset: CodeOffset) -> PartialVMError { PartialVMError::new(status).at_code_offset(self.current_function, offset) } } fn instruction_labels(context: &ControlFlowVerifier) -> Vec<Label> { let mut labels: Vec<Label> = (0..context.code.len()).map(|_| Label::Code).collect(); let mut loop_continue = |loop_idx: CodeOffset, last_continue: CodeOffset| { labels[loop_idx as usize] = Label::Loop { last_continue } }; for (i, instr) in context.code() { match instr { Bytecode::Branch(prev) | Bytecode::BrTrue(prev) | 
Bytecode::BrFalse(prev) if *prev <= i => { loop_continue(*prev, i) } _ => (), } } labels } fn check_jumps(context: &ControlFlowVerifier, labels: Vec<Label>) -> PartialVMResult<()> { check_continues(context, &labels)?; check_breaks(context, &labels)?; check_no_loop_splits(context, &labels) } fn check_code< F: FnMut(&Vec<(CodeOffset, CodeOffset)>, CodeOffset, &Bytecode) -> PartialVMResult<()>, >( context: &ControlFlowVerifier, labels: &[Label], mut check: F, ) -> PartialVMResult<()> { let mut loop_stack: Vec<(CodeOffset, CodeOffset)> = vec![]; for (i, instr, label) in context.labeled_code(labels) { if let Label::Loop { last_continue } = label { loop_stack.push((i, *last_continue)); } check(&loop_stack, i, instr)?; match instr { Bytecode::Branch(j) | Bytecode::BrTrue(j) | Bytecode::BrFalse(j) if *j <= i => { let (_cur_loop, last_continue) = loop_stack.last().unwrap(); if i == *last_continue { loop_stack.pop(); } } _ => (), } } Ok(()) } fn check_continues(context: &ControlFlowVerifier, labels: &[Label]) -> PartialVMResult<()> { check_code(context, labels, |loop_stack, i, instr| { match instr { Bytecode::Branch(j) | Bytecode::BrTrue(j) | Bytecode::BrFalse(j) if *j <= i => { let (cur_loop, _last_continue) = loop_stack.last().unwrap(); let is_continue = *j <= i; if is_continue && j != cur_loop { Err(context.error(StatusCode::INVALID_LOOP_CONTINUE, i)) } else { Ok(()) } } _ => Ok(()), } }) } fn check_breaks(context: &ControlFlowVerifier, labels: &[Label]) -> PartialVMResult<()> { check_code(context, labels, |lo
Err(context.error(StatusCode::INVALID_LOOP_BREAK, i)) } _ => Ok(()), } } _ => Ok(()), } }) } fn check_no_loop_splits(context: &ControlFlowVerifier, labels: &[Label]) -> PartialVMResult<()> { let is_break = |loop_stack: &Vec<(CodeOffset, CodeOffset)>, jump_target: CodeOffset| -> bool { match loop_stack.last() { None => false, Some((_cur_loop, last_continue)) => jump_target > *last_continue, } }; let loop_depth = count_loop_depth(labels); check_code(context, labels, |loop_stack, i, instr| { match instr { Bytecode::Branch(j) | Bytecode::BrTrue(j) | Bytecode::BrFalse(j) if *j > i && !is_break(loop_stack, *j) => { let j = *j; let before_depth = loop_depth[i as usize]; let after_depth = match &labels[j as usize] { Label::Loop { .. } => loop_depth[j as usize] - 1, Label::Code => loop_depth[j as usize], }; if before_depth != after_depth { Err(context.error(StatusCode::INVALID_LOOP_SPLIT, i)) } else { Ok(()) } } _ => Ok(()), } }) } fn count_loop_depth(labels: &[Label]) -> Vec<usize> { let last_continues: HashSet<CodeOffset> = labels .iter() .filter_map(|label| match label { Label::Loop { last_continue } => Some(*last_continue), Label::Code => None, }) .collect(); let mut count = 0; let mut counts = vec![]; for (idx, label) in labels.iter().enumerate() { if let Label::Loop { .. } = label { count += 1 } counts.push(count); if last_continues.contains(&idx.try_into().unwrap()) { count -= 1; } } counts }
op_stack, i, instr| { match instr { Bytecode::Branch(j) | Bytecode::BrTrue(j) | Bytecode::BrFalse(j) if *j > i => { match loop_stack.last() { Some((_cur_loop, last_continue)) if j > last_continue && *j != last_continue + 1 => {
function_block-random_span
[ { "content": "fn remap_branch_offsets(code: &mut Vec<Bytecode>, fake_to_actual: &HashMap<u16, u16>) {\n\n for instr in code {\n\n match instr {\n\n Bytecode::BrTrue(offset) | Bytecode::BrFalse(offset) | Bytecode::Branch(offset) => {\n\n *offset = fake_to_actual[offset]\n\n }\n\n _ => (),\n\n }\n\n }\n\n}\n", "file_path": "language/compiler/ir-to-bytecode/src/compiler.rs", "rank": 1, "score": 466556.1979261938 }, { "content": "/// Create a dummy module to wrap the bytecode program in local@code\n\npub fn dummy_procedure_module(code: Vec<Bytecode>) -> CompiledModule {\n\n let mut module = empty_module();\n\n let code_unit = CodeUnit {\n\n code,\n\n ..Default::default()\n\n };\n\n let fun_def = FunctionDefinition {\n\n code: Some(code_unit),\n\n ..Default::default()\n\n };\n\n\n\n let fun_handle = FunctionHandle {\n\n module: ModuleHandleIndex(0),\n\n name: IdentifierIndex(0),\n\n parameters: SignatureIndex(0),\n\n return_: SignatureIndex(0),\n\n type_parameters: vec![],\n\n };\n\n\n\n module.function_handles.push(fun_handle);\n\n module.function_defs.push(fun_def);\n\n module\n\n}\n", "file_path": "language/bytecode-verifier/bytecode-verifier-tests/src/support/mod.rs", "rank": 5, "score": 417655.6631664055 }, { "content": "fn verify_imported_structs(context: &Context) -> PartialVMResult<()> {\n\n let self_module = context.resolver.self_handle_idx();\n\n for (idx, struct_handle) in context.resolver.struct_handles().iter().enumerate() {\n\n if Some(struct_handle.module) == self_module {\n\n continue;\n\n }\n\n let owner_module_id = context\n\n .resolver\n\n .module_id_for_handle(context.resolver.module_handle_at(struct_handle.module));\n\n // TODO: remove unwrap\n\n let owner_module = context.dependency_map.get(&owner_module_id).unwrap();\n\n let struct_name = context.resolver.identifier_at(struct_handle.name);\n\n match context\n\n .struct_id_to_handle_map\n\n .get(&(owner_module_id, struct_name.to_owned()))\n\n {\n\n Some(def_idx) => {\n\n let def_handle = 
owner_module.struct_handle_at(*def_idx);\n\n if !compatible_struct_abilities(struct_handle.abilities, def_handle.abilities)\n\n || !compatible_struct_type_parameters(\n", "file_path": "language/bytecode-verifier/src/dependencies.rs", "rank": 8, "score": 385561.19891121855 }, { "content": "#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]\n\nenum Label {\n\n Local(LocalIndex),\n\n Global(StructDefinitionIndex),\n\n Field(FieldHandleIndex),\n\n}\n\n\n\n// Needed for debugging with the borrow graph\n\nimpl std::fmt::Display for Label {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Label::Local(i) => write!(f, \"local#{}\", i),\n\n Label::Global(i) => write!(f, \"resource@{}\", i),\n\n Label::Field(i) => write!(f, \"field#{}\", i),\n\n }\n\n }\n\n}\n\n\n\n/// AbstractState is the analysis state over which abstract interpretation is performed.\n\n#[derive(Clone, Debug, PartialEq)]\n\npub(crate) struct AbstractState {\n", "file_path": "language/bytecode-verifier/src/reference_safety/abstract_state.rs", "rank": 10, "score": 360426.54552121594 }, { "content": "/// Deserializes a code stream (`Bytecode`s).\n\nfn load_code(cursor: &mut VersionedCursor, code: &mut Vec<Bytecode>) -> BinaryLoaderResult<()> {\n\n let bytecode_count = load_bytecode_count(cursor)?;\n\n\n\n while code.len() < bytecode_count {\n\n let byte = cursor.read_u8().map_err(|_| {\n\n PartialVMError::new(StatusCode::MALFORMED).with_message(\"Unexpected EOF\".to_string())\n\n })?;\n\n let opcode = Opcodes::from_u8(byte)?;\n\n // version checking\n\n match opcode {\n\n Opcodes::VEC_PACK\n\n | Opcodes::VEC_LEN\n\n | Opcodes::VEC_IMM_BORROW\n\n | Opcodes::VEC_MUT_BORROW\n\n | Opcodes::VEC_PUSH_BACK\n\n | Opcodes::VEC_POP_BACK\n\n | Opcodes::VEC_UNPACK\n\n | Opcodes::VEC_SWAP => {\n\n if cursor.version() < VERSION_3 {\n\n return Err(\n", "file_path": "language/move-binary-format/src/deserializer.rs", "rank": 11, "score": 355025.60403478105 }, { "content": "fn 
serialize_bytecode_offset(binary: &mut BinaryData, offset: u16) -> Result<()> {\n\n write_as_uleb128(binary, offset, BYTECODE_INDEX_MAX)\n\n}\n\n\n", "file_path": "language/move-binary-format/src/serializer.rs", "rank": 12, "score": 352013.14609987545 }, { "content": "pub fn verify_account_balance<F>(\n\n account_state_with_proof: &AccountStateWithProof,\n\n f: F,\n\n) -> Result<()>\n\nwhere\n\n F: Fn(u64) -> bool,\n\n{\n\n let balance = if let Some(blob) = &account_state_with_proof.blob {\n\n AccountState::try_from(blob)?\n\n .get_balance_resources()?\n\n .get(&from_currency_code_string(XUS_NAME).unwrap())\n\n .map(|b| b.coin())\n\n .unwrap_or(0)\n\n } else {\n\n 0\n\n };\n\n ensure!(\n\n f(balance),\n\n \"balance {} doesn't satisfy the condition passed in\",\n\n balance\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "execution/executor-test-helpers/src/integration_test_impl.rs", "rank": 13, "score": 350623.4721738008 }, { "content": "// This function performs a DFS in the module graph starting from each node in `items_to_explore`\n\n// and explores the neighbors of a node using the `immediate_nexts` closure.\n\n//\n\n// During the DFS,\n\n// - 1) if the `target_module_id` is found, the exploration will be short-circuited and returns a\n\n// PartialVMError bearing the StatusCode specified in `error_on_cycle`.\n\n// - 2) if the `target_module_id` is not found, the modules visited in the DFS will be returned at\n\n// the end of function execution.\n\nfn collect_all_with_cycle_detection<F: Fn(&ModuleId) -> PartialVMResult<Vec<ModuleId>>>(\n\n target_module_id: &ModuleId,\n\n items_to_explore: &[ModuleId],\n\n immediate_nexts: &F,\n\n error_on_cycle: StatusCode,\n\n) -> PartialVMResult<BTreeSet<ModuleId>> {\n\n fn collect_all_with_cycle_detection_recursive<\n\n F: Fn(&ModuleId) -> PartialVMResult<Vec<ModuleId>>,\n\n >(\n\n target_module_id: &ModuleId,\n\n cursor_module_id: &ModuleId,\n\n immediate_nexts: &F,\n\n visited_modules: &mut BTreeSet<ModuleId>,\n\n ) -> 
PartialVMResult<bool> {\n\n if cursor_module_id == target_module_id {\n\n return Ok(true);\n\n }\n\n if visited_modules.insert(cursor_module_id.clone()) {\n\n for next in immediate_nexts(cursor_module_id)? {\n\n if collect_all_with_cycle_detection_recursive(\n", "file_path": "language/bytecode-verifier/src/cyclic_dependencies.rs", "rank": 14, "score": 349844.9415226271 }, { "content": "pub fn verify_script_impl<'a>(\n\n script: &CompiledScript,\n\n dependencies: impl IntoIterator<Item = &'a CompiledModule>,\n\n) -> PartialVMResult<()> {\n\n let context = &Context::script(script, dependencies);\n\n\n\n verify_imported_modules(context)?;\n\n verify_imported_structs(context)?;\n\n verify_imported_functions(context)?;\n\n verify_all_script_visibility_usage(context)\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/dependencies.rs", "rank": 15, "score": 349135.87108277384 }, { "content": "/// A fallible wrapper around [`std::vec::Vec::copy_from_slice`]\n\npub fn copy_slice_to_vec<T>(slice: &[T], vec: &mut [T]) -> Result<(), CopySliceError>\n\nwhere\n\n T: Copy,\n\n{\n\n if slice.len() != vec.len() {\n\n return Err(CopySliceError);\n\n }\n\n\n\n vec.copy_from_slice(slice);\n\n\n\n Ok(())\n\n}\n\n\n\n#[derive(Error, Debug)]\n\n#[error(\"can't copy source slice into destination slice: sizes don't match\")]\n\npub struct CopySliceError;\n", "file_path": "common/fallible/src/copy_from_slice.rs", "rank": 16, "score": 344710.4254249577 }, { "content": "struct Context<'a, 'b> {\n\n resolver: BinaryIndexedView<'a>,\n\n // (Module -> CompiledModule) for (at least) all immediate dependencies\n\n dependency_map: BTreeMap<ModuleId, &'b CompiledModule>,\n\n // (Module::StructName -> handle) for all types of all dependencies\n\n struct_id_to_handle_map: HashMap<(ModuleId, Identifier), StructHandleIndex>,\n\n // (Module::FunctionName -> handle) for all functions that can ever be called by this\n\n // module/script in all dependencies\n\n func_id_to_handle_map: 
HashMap<(ModuleId, Identifier), FunctionHandleIndex>,\n\n // (handle -> visibility) for all function handles found in the module being checked\n\n function_visibilities: HashMap<FunctionHandleIndex, Visibility>,\n\n}\n\n\n\nimpl<'a, 'b> Context<'a, 'b> {\n\n fn module(\n\n module: &'a CompiledModule,\n\n dependencies: impl IntoIterator<Item = &'b CompiledModule>,\n\n ) -> Self {\n\n Self::new(BinaryIndexedView::Module(module), dependencies)\n\n }\n", "file_path": "language/bytecode-verifier/src/dependencies.rs", "rank": 17, "score": 343473.1958515747 }, { "content": "pub fn impl_enum_publickey(\n\n name: &Ident,\n\n private_key_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = private_key_type.parse().unwrap();\n\n let mut from_match_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n from_match_arms.extend(quote! {\n\n #pkt::#variant_ident(key) => #name::#variant_ident(key.into()),\n\n });\n\n }\n\n let mut res = quote! {\n\n impl From<&#pkt> for #name {\n\n fn from(public_key: &#pkt) -> Self {\n\n match public_key {\n\n #from_match_arms\n\n }\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 18, "score": 342159.3557790187 }, { "content": "pub fn impl_enum_verifyingkey(\n\n name: &Ident,\n\n private_key_type: syn::LitStr,\n\n signature_type: syn::LitStr,\n\n _variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = private_key_type.parse().unwrap();\n\n let st: syn::Type = signature_type.parse().unwrap();\n\n let res = quote! 
{\n\n impl dijets_crypto::VerifyingKey for #name {\n\n type SigningKeyMaterial = #pkt;\n\n type SignatureMaterial = #st;\n\n }\n\n impl dijets_crypto::private::Sealed for #name {}\n\n };\n\n res.into()\n\n}\n\n\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 19, "score": 342159.3557790187 }, { "content": "pub fn impl_enum_privatekey(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n _variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = public_key_type.parse().unwrap();\n\n let res = quote! {\n\n impl dijets_crypto::PrivateKey for #name {\n\n type PublicKeyMaterial = #pkt;\n\n }\n\n };\n\n res.into()\n\n}\n\n\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 20, "score": 342159.3557790188 }, { "content": "pub fn impl_enum_signingkey(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n signature_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = public_key_type.parse().unwrap();\n\n let st: syn::Type = signature_type.parse().unwrap();\n\n\n\n let mut match_arms_arbitrary = quote! {};\n\n let mut match_struct_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n match_struct_arms.extend(quote! {\n\n #name::#variant_ident(key) => Self::SignatureMaterial::#variant_ident(key.sign(message)),\n\n });\n\n match_arms_arbitrary.extend(quote! 
{\n\n #name::#variant_ident(key) => Self::SignatureMaterial::#variant_ident(key.sign_arbitrary_message(message)),\n\n });\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 21, "score": 342159.3557790188 }, { "content": "pub fn impl_enum_signature(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n private_key_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let priv_kt: syn::Type = private_key_type.parse().unwrap();\n\n let pub_kt: syn::Type = public_key_type.parse().unwrap();\n\n let mut res = impl_enum_tryfrom(name, variants);\n\n let to_bytes_arms = match_enum_to_bytes(name, variants);\n\n\n\n let mut match_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n match_arms.extend(quote! {\n\n (#name::#variant_ident(sig), #pub_kt::#variant_ident(pk)) => {\n\n sig.verify_arbitrary_msg(message, pk)\n\n }\n\n })\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 22, "score": 342159.3557790187 }, { "content": "fn verify_module_impl<D, F>(\n\n module: &CompiledModule,\n\n imm_deps: D,\n\n imm_friends: F,\n\n) -> PartialVMResult<()>\n\nwhere\n\n D: Fn(&ModuleId) -> PartialVMResult<Vec<ModuleId>>,\n\n F: Fn(&ModuleId) -> PartialVMResult<Vec<ModuleId>>,\n\n{\n\n let self_id = module.self_id();\n\n\n\n // collect and check that there is no cyclic dependency relation\n\n let all_deps = collect_all_with_cycle_detection(\n\n &self_id,\n\n &module.immediate_dependencies(),\n\n &imm_deps,\n\n StatusCode::CYCLIC_MODULE_DEPENDENCY,\n\n )?;\n\n\n\n // collect and check that there is no cyclic friend relation\n", "file_path": "language/bytecode-verifier/src/cyclic_dependencies.rs", "rank": 23, "score": 339033.4164457122 }, { "content": "fn verify_imported_modules(context: &Context) -> PartialVMResult<()> {\n\n let self_module = context.resolver.self_handle_idx();\n\n for (idx, module_handle) in context.resolver.module_handles().iter().enumerate() {\n\n let module_id = 
context.resolver.module_id_for_handle(module_handle);\n\n if Some(ModuleHandleIndex(idx as u16)) != self_module\n\n && !context.dependency_map.contains_key(&module_id)\n\n {\n\n return Err(verification_error(\n\n StatusCode::MISSING_DEPENDENCY,\n\n IndexKind::ModuleHandle,\n\n idx as TableIndex,\n\n ));\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/dependencies.rs", "rank": 24, "score": 336834.6838936838 }, { "content": "fn verify_imported_functions(context: &Context) -> PartialVMResult<()> {\n\n let self_module = context.resolver.self_handle_idx();\n\n for (idx, function_handle) in context.resolver.function_handles().iter().enumerate() {\n\n if Some(function_handle.module) == self_module {\n\n continue;\n\n }\n\n let owner_module_id = context\n\n .resolver\n\n .module_id_for_handle(context.resolver.module_handle_at(function_handle.module));\n\n let function_name = context.resolver.identifier_at(function_handle.name);\n\n // TODO: remove unwrap\n\n let owner_module = context.dependency_map.get(&owner_module_id).unwrap();\n\n match context\n\n .func_id_to_handle_map\n\n .get(&(owner_module_id.clone(), function_name.to_owned()))\n\n {\n\n Some(def_idx) => {\n\n let def_handle = owner_module.function_handle_at(*def_idx);\n\n // compatible type parameter constraints\n\n if !compatible_fun_type_parameters(\n", "file_path": "language/bytecode-verifier/src/dependencies.rs", "rank": 25, "score": 336834.6838936838 }, { "content": "fn verify_all_script_visibility_usage(context: &Context) -> PartialVMResult<()> {\n\n match &context.resolver {\n\n BinaryIndexedView::Module(m) => {\n\n for (idx, fdef) in m.function_defs().iter().enumerate() {\n\n let code = match &fdef.code {\n\n None => continue,\n\n Some(code) => &code.code,\n\n };\n\n verify_script_visibility_usage(\n\n context,\n\n fdef.visibility,\n\n FunctionDefinitionIndex(idx as TableIndex),\n\n code,\n\n )?\n\n }\n\n Ok(())\n\n }\n\n BinaryIndexedView::Script(s) => 
verify_script_visibility_usage(\n\n context,\n\n Visibility::Script,\n\n FunctionDefinitionIndex(0),\n\n &s.code().code,\n\n ),\n\n }\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/dependencies.rs", "rank": 26, "score": 333024.8619653054 }, { "content": "fn error(type_: SignatureToken, data: Vec<u8>, code: StatusCode) {\n\n let mut module = empty_module();\n\n module.constant_pool = vec![Constant { type_, data }];\n\n assert!(\n\n constants::verify_module(&module)\n\n .unwrap_err()\n\n .major_status()\n\n == code\n\n )\n\n}\n", "file_path": "language/bytecode-verifier/bytecode-verifier-tests/src/unit_tests/constants_tests.rs", "rank": 27, "score": 329808.8562749373 }, { "content": "#[inline]\n\npub fn pick_slice_idxs(max: usize, indexes: &[impl AsRef<PropIndex>]) -> Vec<usize> {\n\n pick_idxs(max, indexes, indexes.len())\n\n}\n\n\n\n/// Wrapper for `proptest`'s [`Index`][proptest::sample::Index] that allows `AsRef` to work.\n\n///\n\n/// There is no blanket `impl<T> AsRef<T> for T`, so `&[PropIndex]` doesn't work with\n\n/// `&[impl AsRef<PropIndex>]` (unless an impl gets added upstream). `Index` does.\n\n#[derive(Arbitrary, Clone, Copy, Debug)]\n\npub struct Index(PropIndex);\n\n\n\nimpl AsRef<PropIndex> for Index {\n\n fn as_ref(&self) -> &PropIndex {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Deref for Index {\n\n type Target = PropIndex;\n\n\n\n fn deref(&self) -> &PropIndex {\n\n &self.0\n\n }\n\n}\n", "file_path": "common/proptest-helpers/src/lib.rs", "rank": 28, "score": 324814.7727968745 }, { "content": "/// Data attached to each edge. 
Indicating the type of the edge.\n\nenum Edge<'a> {\n\n /// This type of edge from type formal T1 to T2 means the type bound to T1 is used to\n\n /// instantiate T2 unmodified, thus the name `Identity`.\n\n ///\n\n /// Example:\n\n /// ```\n\n /// // foo<T>() { bar<T>(); return; }\n\n /// //\n\n /// // edge: foo_T --Id--> bar_T\n\n /// ```\n\n Identity,\n\n /// This type of edge from type formal T1 to T2 means T2 is instantiated with a type resulted\n\n /// by applying one or more type constructors to T1 (potentially with other types).\n\n ///\n\n /// This is interesting to us as it creates a new (and bigger) type.\n\n ///\n\n /// Example:\n\n /// ```\n\n /// // struct Baz<T> {}\n\n /// // foo<T>() { bar<Baz<T>>(); return; }\n", "file_path": "language/bytecode-verifier/src/instantiation_loops.rs", "rank": 29, "score": 321905.65918700525 }, { "content": "#[test]\n\nfn invalid_struct_in_fn_return_() {\n\n use SignatureToken::*;\n\n\n\n let mut m = basic_test_module();\n\n m.function_handles[0].return_ = SignatureIndex(1);\n\n m.signatures\n\n .push(Signature(vec![Struct(StructHandleIndex::new(1))]));\n\n assert_eq!(\n\n BoundsChecker::verify_module(&m).unwrap_err().major_status(),\n\n StatusCode::INDEX_OUT_OF_BOUNDS\n\n );\n\n}\n\n\n", "file_path": "language/bytecode-verifier/bytecode-verifier-tests/src/unit_tests/bounds_tests.rs", "rank": 30, "score": 317979.0010108326 }, { "content": "pub fn verify_module<D, F>(module: &CompiledModule, imm_deps: D, imm_friends: F) -> VMResult<()>\n\nwhere\n\n D: Fn(&ModuleId) -> PartialVMResult<Vec<ModuleId>>,\n\n F: Fn(&ModuleId) -> PartialVMResult<Vec<ModuleId>>,\n\n{\n\n verify_module_impl(module, imm_deps, imm_friends)\n\n .map_err(|e| e.finish(Location::Module(module.self_id())))\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/cyclic_dependencies.rs", "rank": 32, "score": 310096.3906508374 }, { "content": "fn pack(verifier: &mut ReferenceSafetyAnalysis, struct_def: &StructDefinition) {\n\n for _ in 
0..num_fields(struct_def) {\n\n checked_verify!(verifier.stack.pop().unwrap().is_value())\n\n }\n\n // TODO maybe call state.value_for\n\n verifier.stack.push(AbstractValue::NonReference)\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/reference_safety/mod.rs", "rank": 33, "score": 309883.76039278234 }, { "content": "fn unpack(verifier: &mut ReferenceSafetyAnalysis, struct_def: &StructDefinition) {\n\n checked_verify!(verifier.stack.pop().unwrap().is_value());\n\n // TODO maybe call state.value_for\n\n for _ in 0..num_fields(struct_def) {\n\n verifier.stack.push(AbstractValue::NonReference)\n\n }\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/reference_safety/mod.rs", "rank": 34, "score": 309883.76039278234 }, { "content": "fn serialize_field_offset(binary: &mut BinaryData, offset: u16) -> Result<()> {\n\n write_as_uleb128(binary, offset, FIELD_OFFSET_MAX)\n\n}\n\n\n", "file_path": "language/move-binary-format/src/serializer.rs", "rank": 36, "score": 306502.92382031353 }, { "content": "pub fn timed_block_on<F>(runtime: &mut runtime::Runtime, f: F) -> <F as Future>::Output\n\nwhere\n\n F: Future,\n\n{\n\n runtime\n\n .block_on(async { timeout(TEST_TIMEOUT, f).await })\n\n .expect(\"test timed out\")\n\n}\n", "file_path": "consensus/src/test_utils/mod.rs", "rank": 37, "score": 304762.4941125586 }, { "content": "pub fn impl_enum_valid_crypto_material(name: &Ident, variants: &DataEnum) -> TokenStream {\n\n let mut try_from = impl_enum_tryfrom(name, variants);\n\n\n\n let to_bytes_arms = match_enum_to_bytes(name, variants);\n\n\n\n try_from.extend(quote! 
{\n\n\n\n impl dijets_crypto::ValidCryptoMaterial for #name {\n\n fn to_bytes(&self) -> Vec<u8> {\n\n match self {\n\n #to_bytes_arms\n\n }\n\n }\n\n }\n\n });\n\n try_from.into()\n\n}\n\n\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 38, "score": 302069.45471551723 }, { "content": "pub fn stdlib_bytecode_files() -> Vec<String> {\n\n let path = path_in_crate(COMPILED_OUTPUT_PATH);\n\n let names = dijets_stdlib_files();\n\n let res: Vec<String> =\n\n find_filenames(&[path], |p| extension_equals(p, MOVE_COMPILED_EXTENSION))\n\n .unwrap()\n\n .into_iter()\n\n .filter(|s| {\n\n let path = Path::new(s);\n\n for name in &names {\n\n let suffix = \"_\".to_owned()\n\n + Path::new(name)\n\n .with_extension(MOVE_COMPILED_EXTENSION)\n\n .file_name()\n\n .unwrap()\n\n .to_str()\n\n .unwrap();\n\n if path\n\n .file_name()\n\n .map(|f| f.to_str())\n", "file_path": "language/dijets-framework/src/lib.rs", "rank": 39, "score": 301360.7484586504 }, { "content": "/// Generate a sequence of `NetworkMessage`, bcs serialize them, and write them\n\n/// out to a buffer using our length-prefixed message codec.\n\npub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> {\n\n let network_msgs = gen.generate(vec(any::<NetworkMessage>(), 1..20));\n\n\n\n let (write_socket, mut read_socket) = MemorySocket::new_pair();\n\n let mut writer = NetworkMessageSink::new(write_socket, constants::MAX_FRAME_SIZE, None);\n\n\n\n // Write the `NetworkMessage`s to a fake socket\n\n let f_send = async move {\n\n for network_msg in &network_msgs {\n\n writer.send(network_msg).await.unwrap();\n\n }\n\n };\n\n // Read the serialized `NetworkMessage`s from the fake socket\n\n let f_recv = async move {\n\n let mut buf = Vec::new();\n\n read_socket.read_to_end(&mut buf).await.unwrap();\n\n buf\n\n };\n\n\n\n let (_, buf) = block_on(future::join(f_send, f_recv));\n\n buf\n\n}\n\n\n", "file_path": "network/src/peer/fuzzing.rs", "rank": 40, "score": 301270.713105063 }, { "content": "/// 
generate_corpus produces an arbitrary transaction to submit to JSON RPC service\n\npub fn generate_corpus(gen: &mut ValueGenerator) -> Vec<u8> {\n\n // use proptest to generate a SignedTransaction\n\n let txn = gen.generate(proptest::arbitrary::any::<\n\n dijets_types::transaction::SignedTransaction,\n\n >());\n\n let payload = hex::encode(bcs::to_bytes(&txn).unwrap());\n\n let request =\n\n serde_json::json!({\"jsonrpc\": \"2.0\", \"method\": \"submit\", \"params\": [payload], \"id\": 1});\n\n serde_json::to_vec(&request).expect(\"failed to convert JSON to byte array\")\n\n}\n\n\n", "file_path": "json-rpc/src/fuzzing.rs", "rank": 41, "score": 301264.59374821215 }, { "content": "pub fn verify_module<'a>(\n\n module: &CompiledModule,\n\n dependencies: impl IntoIterator<Item = &'a CompiledModule>,\n\n) -> VMResult<()> {\n\n verify_module_impl(module, dependencies)\n\n .map_err(|e| e.finish(Location::Module(module.self_id())))\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/dependencies.rs", "rank": 42, "score": 300593.1668447557 }, { "content": "pub fn verify_script<'a>(\n\n script: &CompiledScript,\n\n dependencies: impl IntoIterator<Item = &'a CompiledModule>,\n\n) -> VMResult<()> {\n\n verify_script_impl(script, dependencies).map_err(|e| e.finish(Location::Script))\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/dependencies.rs", "rank": 43, "score": 300593.1668447557 }, { "content": "pub fn impl_enum_tryfrom(name: &Ident, variants: &DataEnum) -> proc_macro2::TokenStream {\n\n // the TryFrom dispatch\n\n let mut try_iter = variants.variants.iter();\n\n let first_variant = try_iter\n\n .next()\n\n .expect(\"#[derive(ValidCryptoMaterial] requires a non-empty enum.\");\n\n let first_variant_ident = &first_variant.ident;\n\n let first_variant_arg = &first_variant\n\n .fields\n\n .iter()\n\n .next()\n\n .expect(\"Unrecognized enum for key types\")\n\n .ty;\n\n\n\n let mut try_chain = quote! 
{\n\n #first_variant_arg::try_from(bytes).and_then(|key| Ok(#name::#first_variant_ident(key)))\n\n };\n\n for variant in try_iter {\n\n let variant_ident = &variant.ident;\n\n let variant_arg = &variant\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 44, "score": 298898.0489206507 }, { "content": "pub fn factor<Lbl: Eq>(lhs: &PathSlice<Lbl>, mut rhs: Path<Lbl>) -> (Path<Lbl>, Path<Lbl>) {\n\n assert!(leq(lhs, &rhs));\n\n let suffix = rhs.split_off(lhs.len());\n\n (rhs, suffix)\n\n}\n\n\n", "file_path": "language/borrow-graph/src/paths.rs", "rank": 45, "score": 298173.7724602088 }, { "content": "pub fn fixed_retry_strategy(delay_ms: u64, tries: usize) -> impl Iterator<Item = Duration> {\n\n FixedDelay::new(delay_ms).take(tries)\n\n}\n\n\n", "file_path": "common/retrier/src/lib.rs", "rank": 46, "score": 289793.84994247236 }, { "content": "/// Verify correctness of tables.\n\n///\n\n/// Tables cannot have duplicates, must cover the entire blob and must be disjoint.\n\nfn check_tables(tables: &mut Vec<Table>, binary_len: usize) -> BinaryLoaderResult<u32> {\n\n // there is no real reason to pass a mutable reference but we are sorting next line\n\n tables.sort_by(|t1, t2| t1.offset.cmp(&t2.offset));\n\n\n\n let mut current_offset: u32 = 0;\n\n let mut table_types = HashSet::new();\n\n for table in tables {\n\n if table.offset != current_offset {\n\n return Err(PartialVMError::new(StatusCode::BAD_HEADER_TABLE));\n\n }\n\n if table.count == 0 {\n\n return Err(PartialVMError::new(StatusCode::BAD_HEADER_TABLE));\n\n }\n\n match current_offset.checked_add(table.count) {\n\n Some(checked_offset) => current_offset = checked_offset,\n\n None => return Err(PartialVMError::new(StatusCode::BAD_HEADER_TABLE)),\n\n }\n\n if !table_types.insert(table.kind) {\n\n return Err(PartialVMError::new(StatusCode::DUPLICATE_TABLE));\n\n }\n", "file_path": "language/move-binary-format/src/deserializer.rs", "rank": 47, "score": 285659.5649395083 }, { "content": "#[test]\n\nfn 
generic_mut_borrow_field_on_non_generic_struct() {\n\n let mut module = make_module();\n\n // bogus `MutBorrowFieldGeneric S.t`\n\n module.function_defs[2].code = Some(CodeUnit {\n\n locals: SignatureIndex(0),\n\n code: vec![\n\n Bytecode::LdU64(10),\n\n Bytecode::Pack(StructDefinitionIndex(0)),\n\n Bytecode::MutBorrowFieldGeneric(FieldInstantiationIndex(0)),\n\n Bytecode::Pop,\n\n Bytecode::Ret,\n\n ],\n\n });\n\n module.field_instantiations.push(FieldInstantiation {\n\n handle: FieldHandleIndex(0),\n\n type_parameters: SignatureIndex(2),\n\n });\n\n module.field_handles.push(FieldHandle {\n\n owner: StructDefinitionIndex(0),\n\n field: 0,\n\n });\n\n module.signatures.push(Signature(vec![SignatureToken::U64]));\n\n let err = InstructionConsistency::verify_module(&module)\n\n .expect_err(\"MutBorrowFieldGeneric to non generic struct must fail\");\n\n assert_eq!(\n\n err.major_status(),\n\n StatusCode::GENERIC_MEMBER_OPCODE_MISMATCH\n\n );\n\n}\n\n\n", "file_path": "language/bytecode-verifier/bytecode-verifier-tests/src/unit_tests/generic_ops_tests.rs", "rank": 48, "score": 285397.59251709265 }, { "content": "#[test]\n\nfn non_generic_mut_borrow_field_on_generic_struct() {\n\n let mut module = make_module();\n\n // bogus `MutBorrowField GS<T>.f`\n\n module.function_defs[2].code = Some(CodeUnit {\n\n locals: SignatureIndex(0),\n\n code: vec![\n\n Bytecode::LdU64(10),\n\n Bytecode::PackGeneric(StructDefInstantiationIndex(0)),\n\n Bytecode::MutBorrowField(FieldHandleIndex(0)),\n\n Bytecode::Pop,\n\n Bytecode::Ret,\n\n ],\n\n });\n\n module\n\n .struct_def_instantiations\n\n .push(StructDefInstantiation {\n\n def: StructDefinitionIndex(1),\n\n type_parameters: SignatureIndex(2),\n\n });\n\n module.field_handles.push(FieldHandle {\n", "file_path": "language/bytecode-verifier/bytecode-verifier-tests/src/unit_tests/generic_ops_tests.rs", "rank": 49, "score": 285397.59251709265 }, { "content": "#[test]\n\nfn non_generic_mut_borrow_global_to_generic_struct() {\n\n let mut 
module = make_module();\n\n // bogus `MutBorrowGlobal GR<T>`\n\n module.function_defs[2]\n\n .acquires_global_resources\n\n .push(StructDefinitionIndex(3));\n\n module.function_defs[2].code = Some(CodeUnit {\n\n locals: SignatureIndex(0),\n\n code: vec![\n\n Bytecode::LdConst(ConstantPoolIndex(0)),\n\n Bytecode::MutBorrowGlobal(StructDefinitionIndex(3)),\n\n Bytecode::Pop,\n\n Bytecode::Ret,\n\n ],\n\n });\n\n let err = InstructionConsistency::verify_module(&module)\n\n .expect_err(\"MutBorrowGlobal to generic function must fail\");\n\n assert_eq!(\n\n err.major_status(),\n\n StatusCode::GENERIC_MEMBER_OPCODE_MISMATCH\n\n );\n\n}\n\n\n", "file_path": "language/bytecode-verifier/bytecode-verifier-tests/src/unit_tests/generic_ops_tests.rs", "rank": 50, "score": 285397.59251709265 }, { "content": "#[test]\n\nfn generic_mut_borrow_global_to_non_generic_struct() {\n\n let mut module = make_module();\n\n // bogus `MutBorrowGlobalGeneric R`\n\n module.function_defs[2]\n\n .acquires_global_resources\n\n .push(StructDefinitionIndex(2));\n\n module.function_defs[2].code = Some(CodeUnit {\n\n locals: SignatureIndex(0),\n\n code: vec![\n\n Bytecode::LdConst(ConstantPoolIndex(0)),\n\n Bytecode::MutBorrowGlobalGeneric(StructDefInstantiationIndex(0)),\n\n Bytecode::Pop,\n\n Bytecode::Ret,\n\n ],\n\n });\n\n module\n\n .struct_def_instantiations\n\n .push(StructDefInstantiation {\n\n def: StructDefinitionIndex(2),\n\n type_parameters: SignatureIndex(2),\n\n });\n\n module.signatures.push(Signature(vec![SignatureToken::U64]));\n\n let err = InstructionConsistency::verify_module(&module)\n\n .expect_err(\"MutBorrowGlobalGeneric to non generic function must fail\");\n\n assert_eq!(\n\n err.major_status(),\n\n StatusCode::GENERIC_MEMBER_OPCODE_MISMATCH\n\n );\n\n}\n\n\n", "file_path": "language/bytecode-verifier/bytecode-verifier-tests/src/unit_tests/generic_ops_tests.rs", "rank": 51, "score": 285397.59251709265 }, { "content": "pub fn generate_corpus(gen: &mut 
dijets_proptest_helpers::ValueGenerator) -> Vec<u8> {\n\n let (init_msg, resp_msg) = generate_first_two_messages();\n\n // choose a random one\n\n let strategy = proptest::arbitrary::any::<bool>();\n\n if gen.generate(strategy) {\n\n init_msg\n\n } else {\n\n resp_msg\n\n }\n\n}\n\n\n\n//\n\n// Fuzzing\n\n// =======\n\n//\n\n// - fuzz_initiator: fuzzes the second message of the handshake, received by the initiator.\n\n// - fuzz_responder: fuzzes the first message of the handshake, received by the responder.\n\n//\n\n\n", "file_path": "network/src/noise/fuzzing.rs", "rank": 52, "score": 282804.180751869 }, { "content": "fn num_fields(struct_def: &StructDefinition) -> usize {\n\n match &struct_def.field_information {\n\n StructFieldInformation::Native => 0,\n\n StructFieldInformation::Declared(fields) => fields.len(),\n\n }\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/reference_safety/mod.rs", "rank": 53, "score": 282205.9542018597 }, { "content": "/// Serializes a `Bytecode` stream. 
Serialization of the function body.\n\nfn serialize_code(binary: &mut BinaryData, code: &[Bytecode]) -> Result<()> {\n\n serialize_bytecode_count(binary, code.len())?;\n\n for opcode in code {\n\n serialize_instruction_inner(binary, opcode)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/move-binary-format/src/serializer.rs", "rank": 54, "score": 281445.34153615846 }, { "content": "fn next_number(initial: char, mut it: impl Iterator<Item = char>) -> Result<(Token, usize)> {\n\n let mut num = String::new();\n\n num.push(initial);\n\n loop {\n\n match it.next() {\n\n Some(c) if c.is_ascii_digit() => num.push(c),\n\n Some(c) if c.is_alphanumeric() => {\n\n let mut suffix = String::new();\n\n suffix.push(c);\n\n loop {\n\n match it.next() {\n\n Some(c) if c.is_ascii_alphanumeric() => suffix.push(c),\n\n _ => {\n\n let len = num.len() + suffix.len();\n\n let tok = match suffix.as_str() {\n\n \"u8\" => Token::U8(num),\n\n \"u64\" => Token::U64(num),\n\n \"u128\" => Token::U128(num),\n\n _ => bail!(\"invalid suffix\"),\n\n };\n", "file_path": "language/move-core/types/src/parser.rs", "rank": 55, "score": 279423.7242079023 }, { "content": "pub fn verify_transactions(\n\n txn_list_with_proof: &TransactionListWithProof,\n\n expected_txns: &[Transaction],\n\n) -> Result<()> {\n\n let txns = &txn_list_with_proof.transactions;\n\n ensure!(\n\n *txns == expected_txns,\n\n \"expected txns {:?} doesn't equal to returned txns {:?}\",\n\n expected_txns,\n\n txns\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "execution/executor-test-helpers/src/integration_test_impl.rs", "rank": 56, "score": 278872.0549923391 }, { "content": "pub fn mempool_service_transactions(label: &'static str, num: usize) {\n\n MEMPOOL_SERVICE_TXNS\n\n .with_label_values(&[label])\n\n .observe(num as f64)\n\n}\n\n\n\n/// Counter for tracking latency of mempool processing requests from consensus/state sync\n\n/// A 'fail' result means the mempool's callback response to consensus/state sync failed.\n\nstatic 
MEMPOOL_SERVICE_LATENCY: Lazy<HistogramVec> = Lazy::new(|| {\n\n register_histogram_vec!(\n\n \"dijets_mempool_service_latency_ms\",\n\n \"Latency of mempool processing request from consensus/state sync\",\n\n &[\"type\", \"result\"]\n\n )\n\n .unwrap()\n\n});\n\n\n", "file_path": "mempool/src/counters.rs", "rank": 57, "score": 277738.46016733296 }, { "content": "fn unpublish_checked<K, V, F>(map: &mut BTreeMap<K, Option<V>>, k: K, make_err: F) -> Result<()>\n\nwhere\n\n K: Ord,\n\n F: FnOnce() -> Error,\n\n{\n\n match map.entry(k) {\n\n btree_map::Entry::Occupied(entry) => {\n\n let r = entry.into_mut();\n\n match r {\n\n Some(_) => *r = None,\n\n None => return Err(make_err()),\n\n }\n\n }\n\n btree_map::Entry::Vacant(entry) => {\n\n entry.insert(None);\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/move-core/types/src/effects.rs", "rank": 58, "score": 276246.05472991103 }, { "content": "/// Helper for a \"canonical\" verification of a module.\n\n///\n\n/// Clients that rely on verification should call the proper passes\n\n/// internally rather than using this function.\n\n///\n\n/// This function is intended to provide a verification path for clients\n\n/// that do not require full control over verification. 
It is advised to\n\n/// call this umbrella function instead of each individual checkers to\n\n/// minimize the code locations that need to be updated should a new checker\n\n/// is introduced.\n\npub fn verify_module(module: &CompiledModule) -> VMResult<()> {\n\n BoundsChecker::verify_module(module).map_err(|e| {\n\n // We can't point the error at the module, because if bounds-checking\n\n // failed, we cannot safely index into module's handle to itself.\n\n e.finish(Location::Undefined)\n\n })?;\n\n DuplicationChecker::verify_module(module)?;\n\n SignatureChecker::verify_module(module)?;\n\n InstructionConsistency::verify_module(module)?;\n\n constants::verify_module(module)?;\n\n friends::verify_module(module)?;\n\n ability_field_requirements::verify_module(module)?;\n\n RecursiveStructDefChecker::verify_module(module)?;\n\n InstantiationLoopChecker::verify_module(module)?;\n\n CodeUnitVerifier::verify_module(module)\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/verifier.rs", "rank": 59, "score": 275096.5269981212 }, { "content": "/// Helper for a \"canonical\" verification of a script.\n\n///\n\n/// Clients that rely on verification should call the proper passes\n\n/// internally rather than using this function.\n\n///\n\n/// This function is intended to provide a verification path for clients\n\n/// that do not require full control over verification. 
It is advised to\n\n/// call this umbrella function instead of each individual checkers to\n\n/// minimize the code locations that need to be updated should a new checker\n\n/// is introduced.\n\npub fn verify_script(script: &CompiledScript) -> VMResult<()> {\n\n BoundsChecker::verify_script(script).map_err(|e| e.finish(Location::Script))?;\n\n DuplicationChecker::verify_script(script)?;\n\n SignatureChecker::verify_script(script)?;\n\n InstructionConsistency::verify_script(script)?;\n\n constants::verify_script(script)?;\n\n CodeUnitVerifier::verify_script(script)?;\n\n script_signature::verify_script(script)\n\n}\n", "file_path": "language/bytecode-verifier/src/verifier.rs", "rank": 60, "score": 275096.5269981212 }, { "content": "pub fn core_mempool_index_size(label: &'static str, size: usize) {\n\n CORE_MEMPOOL_INDEX_SIZE\n\n .with_label_values(&[label])\n\n .set(size as i64)\n\n}\n\n\n\n/// Counter tracking number of txns removed from core mempool\n\npub static CORE_MEMPOOL_REMOVED_TXNS: Lazy<IntCounter> = Lazy::new(|| {\n\n register_int_counter!(\n\n \"dijets_core_mempool_removed_txns_count\",\n\n \"Number of txns removed from core mempool\"\n\n )\n\n .unwrap()\n\n});\n\n\n\n/// Counter tracking latency of txns reaching various stages in committing\n\n/// (e.g. 
time from txn entering core mempool to being pulled in consensus block)\n\npub static CORE_MEMPOOL_TXN_COMMIT_LATENCY: Lazy<HistogramVec> = Lazy::new(|| {\n\n register_histogram_vec!(\n\n \"dijets_core_mempool_txn_commit_latency\",\n", "file_path": "mempool/src/counters.rs", "rank": 61, "score": 273459.6263988186 }, { "content": "pub fn run(mut args: Args, xctx: XContext) -> Result<()> {\n\n args.args.extend(args.benchname.clone());\n\n\n\n let mut direct_args = Vec::new();\n\n if args.no_run {\n\n direct_args.push(OsString::from(\"--no-run\"));\n\n };\n\n\n\n let cmd = CargoCommand::Bench {\n\n cargo_config: xctx.config().cargo_config(),\n\n direct_args: direct_args.as_slice(),\n\n args: &args.args,\n\n env: &[],\n\n };\n\n\n\n let packages = args.package_args.to_selected_packages(&xctx)?;\n\n cmd.run_on_packages(&packages)\n\n}\n", "file_path": "devtools/x/src/bench.rs", "rank": 62, "score": 271605.09488430683 }, { "content": "pub fn run(mut args: Args, xctx: XContext) -> Result<()> {\n\n let config = xctx.config();\n\n\n\n let mut packages = args.package_args.to_selected_packages(&xctx)?;\n\n if args.unit {\n\n packages.add_excludes(config.system_tests().iter().map(|(p, _)| p.as_str()));\n\n }\n\n\n\n args.args.extend(args.testname.clone());\n\n\n\n let generate_coverage = args.html_cov_dir.is_some() || args.html_lcov_dir.is_some();\n\n\n\n let llvm_profile_key = \"LLVM_PROFILE_FILE\";\n\n let llvm_profile_path: &str = \"target/debug/xtest-%p-%m.profraw\";\n\n let llvm_profile_path_ignored = \"target/debug/ignored-%p-%m.profraw\";\n\n\n\n let env_vars = if generate_coverage {\n\n if !xctx\n\n .installer()\n\n .install_via_rustup_if_needed(\"llvm-tools-preview\")\n", "file_path": "devtools/x/src/test.rs", "rank": 63, "score": 271605.09488430683 }, { "content": "pub fn run(mut args: Args, xctx: XContext) -> Result<()> {\n\n let mut pass_through_args = vec![];\n\n pass_through_args.extend(args.args);\n\n\n\n // Always run fix on all targets.\n\n 
args.build_args.all_targets = true;\n\n\n\n let mut direct_args = vec![];\n\n args.build_args.add_args(&mut direct_args);\n\n\n\n let cmd = CargoCommand::Fix {\n\n cargo_config: xctx.config().cargo_config(),\n\n direct_args: &direct_args,\n\n args: &pass_through_args,\n\n };\n\n let packages = args.package_args.to_selected_packages(&xctx)?;\n\n cmd.run_on_packages(&packages)\n\n}\n", "file_path": "devtools/x/src/fix.rs", "rank": 64, "score": 271605.09488430683 }, { "content": "#[derive(Eq, PartialEq, Hash, Copy, Clone)]\n\nstruct Node(FunctionDefinitionIndex, TypeParameterIndex);\n\n\n", "file_path": "language/bytecode-verifier/src/instantiation_loops.rs", "rank": 65, "score": 271340.9962791954 }, { "content": "#[cfg(any(test, feature = \"fuzzing\"))]\n\npub fn random_serializable_struct() -> impl Strategy<Value = TestDijetsCrypto> {\n\n (String::arbitrary()).prop_map(TestDijetsCrypto).no_shrink()\n\n}\n", "file_path": "crypto/crypto/src/test_utils.rs", "rank": 66, "score": 270931.61342017364 }, { "content": "/// Return an ephemeral, available port. 
On unix systems, the port returned will be in the\n\n/// TIME_WAIT state ensuring that the OS won't hand out this port for some grace period.\n\n/// Callers should be able to bind to this port given they use SO_REUSEADDR.\n\npub fn get_available_port() -> u16 {\n\n const MAX_PORT_RETRIES: u32 = 1000;\n\n\n\n for _ in 0..MAX_PORT_RETRIES {\n\n if let Ok(port) = get_ephemeral_port() {\n\n return port;\n\n }\n\n }\n\n\n\n panic!(\"Error: could not find an available port\");\n\n}\n\n\n", "file_path": "config/src/utils.rs", "rank": 67, "score": 270882.77218882047 }, { "content": "fn verify_instr(\n\n verifier: &mut TypeSafetyChecker,\n\n bytecode: &Bytecode,\n\n offset: CodeOffset,\n\n) -> PartialVMResult<()> {\n\n match bytecode {\n\n Bytecode::Pop => {\n\n let operand = verifier.stack.pop().unwrap();\n\n let abilities = verifier\n\n .resolver\n\n .abilities(&operand, verifier.function_view.type_parameters());\n\n if !abilities?.has_drop() {\n\n return Err(verifier.error(StatusCode::POP_WITHOUT_DROP_ABILITY, offset));\n\n }\n\n }\n\n\n\n Bytecode::BrTrue(_) | Bytecode::BrFalse(_) => {\n\n let operand = verifier.stack.pop().unwrap();\n\n if operand != ST::Bool {\n\n return Err(verifier.error(StatusCode::BR_TYPE_MISMATCH_ERROR, offset));\n", "file_path": "language/bytecode-verifier/src/type_safety.rs", "rank": 68, "score": 270566.30778282793 }, { "content": "pub fn verify_committed_txn_status(\n\n txn_with_proof: Option<&TransactionWithProof>,\n\n expected_txn: &Transaction,\n\n) -> Result<()> {\n\n let txn = &txn_with_proof\n\n .ok_or_else(|| anyhow!(\"Transaction is not committed.\"))?\n\n .transaction;\n\n\n\n ensure!(\n\n expected_txn == txn,\n\n \"The two transactions do not match. 
Expected txn: {:?}, returned txn: {:?}\",\n\n expected_txn,\n\n txn,\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "execution/executor-test-helpers/src/integration_test_impl.rs", "rank": 69, "score": 270135.55061891844 }, { "content": "fn parse_return_type(tokens: &mut Lexer) -> Result<Vec<Type>, ParseError<Loc, anyhow::Error>> {\n\n consume_token(tokens, Tok::Colon)?;\n\n let t = parse_type(tokens)?;\n\n let mut v = vec![t];\n\n while tokens.peek() == Tok::Star {\n\n tokens.advance()?;\n\n v.push(parse_type(tokens)?);\n\n }\n\n Ok(v)\n\n}\n\n\n\n// AcquireList: Vec<StructName> = {\n\n// \"acquires\" <s: StructName> <al: (\",\" <StructName>)*> => { ... }\n\n// }\n\n\n", "file_path": "language/compiler/ir-to-bytecode/syntax/src/syntax.rs", "rank": 70, "score": 270074.60727989 }, { "content": "fn verify_module_impl(module: &CompiledModule) -> PartialVMResult<()> {\n\n for (idx, constant) in module.constant_pool().iter().enumerate() {\n\n verify_constant(idx, constant)?\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/constants.rs", "rank": 71, "score": 269789.6782927769 }, { "content": "fn verify_module_impl(module: &CompiledModule) -> PartialVMResult<()> {\n\n // cannot make friends with the module itself\n\n let self_handle = module.self_handle();\n\n if module.friend_decls().contains(self_handle) {\n\n return Err(PartialVMError::new(\n\n StatusCode::INVALID_FRIEND_DECL_WITH_SELF,\n\n ));\n\n }\n\n\n\n // cannot make friends with modules outside of the account address\n\n //\n\n // NOTE: this constraint is a policy decision rather than a technical requirement. 
The VM and\n\n // other bytecode verifier passes do not rely on the assumption that friend modules must be\n\n // declared within the same account address.\n\n //\n\n // However, lacking a definite use case of friending modules across account boundaries, and also\n\n // to minimize the associated changes on the module publishing flow, we temporarily enforce this\n\n // constraint and we may consider lifting this limitation in the future.\n\n let self_address =\n\n module.address_identifier_at(module.module_handle_at(module.self_handle_idx()).address);\n", "file_path": "language/bytecode-verifier/src/friends.rs", "rank": 72, "score": 269789.6782927769 }, { "content": "fn verify_script_impl(script: &CompiledScript) -> PartialVMResult<()> {\n\n for (idx, constant) in script.constant_pool.iter().enumerate() {\n\n verify_constant(idx, constant)?\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/constants.rs", "rank": 73, "score": 269789.6782927769 }, { "content": "enum BoundsCheckingContext {\n\n Module,\n\n ModuleFunction(FunctionDefinitionIndex),\n\n Script,\n\n}\n\npub struct BoundsChecker<'a> {\n\n view: BinaryIndexedView<'a>,\n\n context: BoundsCheckingContext,\n\n}\n\n\n\nimpl<'a> BoundsChecker<'a> {\n\n pub fn verify_script(script: &'a CompiledScript) -> PartialVMResult<()> {\n\n let mut bounds_check = Self {\n\n view: BinaryIndexedView::Script(script),\n\n context: BoundsCheckingContext::Script,\n\n };\n\n bounds_check.verify_impl()?;\n\n\n\n let signatures = &script.signatures;\n\n let parameters = &script.parameters;\n", "file_path": "language/move-binary-format/src/check_bounds.rs", "rank": 74, "score": 269702.1711371343 }, { "content": "pub fn remap_set<T: Copy + Ord>(set: &mut BTreeSet<T>, id_map: &BTreeMap<T, T>) {\n\n for (old, new) in id_map {\n\n if set.remove(old) {\n\n set.insert(*new);\n\n }\n\n }\n\n}\n", "file_path": "language/borrow-graph/src/shared.rs", "rank": 75, "score": 268971.9364985776 }, { "content": "fn 
publish_checked<K, V, F>(map: &mut BTreeMap<K, Option<V>>, k: K, v: V, make_err: F) -> Result<()>\n\nwhere\n\n K: Ord,\n\n F: FnOnce() -> Error,\n\n{\n\n match map.entry(k) {\n\n btree_map::Entry::Occupied(entry) => {\n\n let r = entry.into_mut();\n\n match r {\n\n Some(_) => return Err(make_err()),\n\n None => *r = Some(v),\n\n }\n\n }\n\n btree_map::Entry::Vacant(entry) => {\n\n entry.insert(Some(v));\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/move-core/types/src/effects.rs", "rank": 76, "score": 268233.9967028187 }, { "content": "#[test]\n\nfn invalid_type_param_in_fn_return_() {\n\n use SignatureToken::*;\n\n\n\n let mut m = basic_test_module();\n\n m.function_handles[0].return_ = SignatureIndex(1);\n\n m.signatures.push(Signature(vec![TypeParameter(0)]));\n\n assert_eq!(m.signatures.len(), 2);\n\n assert_eq!(\n\n BoundsChecker::verify_module(&m).unwrap_err().major_status(),\n\n StatusCode::INDEX_OUT_OF_BOUNDS\n\n );\n\n}\n\n\n", "file_path": "language/bytecode-verifier/bytecode-verifier-tests/src/unit_tests/bounds_tests.rs", "rank": 77, "score": 267797.2112916246 }, { "content": "pub fn verify_script(module: &CompiledScript) -> VMResult<()> {\n\n verify_script_impl(module).map_err(|e| e.finish(Location::Script))\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/constants.rs", "rank": 78, "score": 267586.82166181377 }, { "content": "pub fn verify_module(module: &CompiledModule) -> VMResult<()> {\n\n verify_module_impl(module).map_err(|e| e.finish(Location::Module(module.self_id())))\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/constants.rs", "rank": 79, "score": 267586.82166181377 }, { "content": "pub fn verify_module(module: &CompiledModule) -> VMResult<()> {\n\n verify_module_impl(module).map_err(|e| e.finish(Location::Module(module.self_id())))\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/friends.rs", "rank": 80, "score": 267586.82166181377 }, { "content": "fn verify_module_impl<'a>(\n\n module: 
&CompiledModule,\n\n dependencies: impl IntoIterator<Item = &'a CompiledModule>,\n\n) -> PartialVMResult<()> {\n\n let context = &Context::module(module, dependencies);\n\n\n\n verify_imported_modules(context)?;\n\n verify_imported_structs(context)?;\n\n verify_imported_functions(context)?;\n\n verify_all_script_visibility_usage(context)\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/dependencies.rs", "rank": 81, "score": 266643.62884431 }, { "content": "/// Serializes a `CodeUnit`.\n\n///\n\n/// A `CodeUnit` is serialized as the code field of a `FunctionDefinition`.\n\n/// A `CodeUnit` gets serialized as follows:\n\n/// - `CodeUnit.max_stack_size` as a ULEB128\n\n/// - `CodeUnit.locals` as a ULEB128 (index into the `LocalSignaturePool`)\n\n/// - `CodeUnit.code` as variable size byte stream for the bytecode\n\nfn serialize_code_unit(binary: &mut BinaryData, code: &CodeUnit) -> Result<()> {\n\n serialize_signature_index(binary, &code.locals)?;\n\n serialize_code(binary, &code.code)\n\n}\n\n\n", "file_path": "language/move-binary-format/src/serializer.rs", "rank": 82, "score": 266063.7757268619 }, { "content": "fn load_field_offset(cursor: &mut VersionedCursor) -> BinaryLoaderResult<u16> {\n\n read_uleb_internal(cursor, FIELD_OFFSET_MAX)\n\n}\n\n\n", "file_path": "language/move-binary-format/src/deserializer.rs", "rank": 83, "score": 264469.4727053625 }, { "content": "fn load_bytecode_index(cursor: &mut VersionedCursor) -> BinaryLoaderResult<u16> {\n\n read_uleb_internal(cursor, BYTECODE_INDEX_MAX)\n\n}\n\n\n", "file_path": "language/move-binary-format/src/deserializer.rs", "rank": 84, "score": 264114.7144617449 }, { "content": "/// This function checks the extra requirements on the signature of the main function of a script.\n\npub fn verify_script(script: &CompiledScript) -> VMResult<()> {\n\n let resolver = &BinaryIndexedView::Script(script);\n\n let parameters = script.parameters;\n\n let return_type_opt = None;\n\n verify_main_signature_impl(resolver, 
parameters, return_type_opt)\n\n .map_err(|e| e.finish(Location::Script))\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/script_signature.rs", "rank": 85, "score": 263640.74467401125 }, { "content": "fn verify_module_impl(module: &CompiledModule) -> PartialVMResult<()> {\n\n let view = BinaryIndexedView::Module(module);\n\n for (idx, struct_def) in module.struct_defs().iter().enumerate() {\n\n let sh = module.struct_handle_at(struct_def.struct_handle);\n\n let fields = match &struct_def.field_information {\n\n StructFieldInformation::Native => continue,\n\n StructFieldInformation::Declared(fields) => fields,\n\n };\n\n let required_abilities = sh\n\n .abilities\n\n .into_iter()\n\n .map(|a| a.requires())\n\n .fold(AbilitySet::EMPTY, |acc, required| acc | required);\n\n // Assume type parameters have all abilities, as the struct's abilities will be dependent on\n\n // them\n\n let type_parameter_abilities = sh\n\n .type_parameters\n\n .iter()\n\n .map(|_| AbilitySet::ALL)\n\n .collect::<Vec<_>>();\n", "file_path": "language/bytecode-verifier/src/ability_field_requirements.rs", "rank": 86, "score": 262320.6612866536 }, { "content": "fn malformed(type_: SignatureToken, data: Vec<u8>) {\n\n error(type_, data, StatusCode::MALFORMED_CONSTANT_DATA)\n\n}\n\n\n", "file_path": "language/bytecode-verifier/bytecode-verifier-tests/src/unit_tests/constants_tests.rs", "rank": 87, "score": 261846.4811938764 }, { "content": "/// Fuzzing the handshake protocol, which negotiates protocols supported by both\n\n/// the client and the server.\n\n/// At the moment, fuzzing the client or the server leads to the same logic.\n\npub fn fuzz_network_handshake_protocol_exchange(self_handshake: &HandshakeMsg, data: Vec<u8>) {\n\n // fake socket to read the other peer's serialized HandshakeMsg from\n\n let mut fake_socket = ReadOnlyTestSocketVec::new(data);\n\n fake_socket.set_trailing();\n\n\n\n // fuzz the network exchange of HandshakeMsg first\n\n let _ = block_on(async move {\n\n 
if let Ok(remote_handshake_msg) = exchange_handshake(self_handshake, &mut fake_socket).await\n\n {\n\n // then perform the negotiation\n\n let _ = self_handshake.perform_handshake(&remote_handshake_msg);\n\n }\n\n });\n\n}\n\n\n", "file_path": "network/src/fuzzing.rs", "rank": 88, "score": 261541.1271727428 }, { "content": "fn compare_structs(\n\n context: &Context,\n\n idx1: StructHandleIndex,\n\n idx2: StructHandleIndex,\n\n def_module: &CompiledModule,\n\n) -> PartialVMResult<()> {\n\n // grab ModuleId and struct name for the module being verified\n\n let struct_handle = context.resolver.struct_handle_at(idx1);\n\n let module_handle = context.resolver.module_handle_at(struct_handle.module);\n\n let module_id = context.resolver.module_id_for_handle(module_handle);\n\n let struct_name = context.resolver.identifier_at(struct_handle.name);\n\n\n\n // grab ModuleId and struct name for the definition\n\n let def_struct_handle = def_module.struct_handle_at(idx2);\n\n let def_module_handle = def_module.module_handle_at(def_struct_handle.module);\n\n let def_module_id = def_module.module_id_for_handle(def_module_handle);\n\n let def_struct_name = def_module.identifier_at(def_struct_handle.name);\n\n\n\n if module_id != def_module_id || struct_name != def_struct_name {\n\n Err(PartialVMError::new(StatusCode::TYPE_MISMATCH))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/dependencies.rs", "rank": 89, "score": 261497.7455175373 }, { "content": "fn verify_main_signature_impl(\n\n resolver: &BinaryIndexedView,\n\n parameters: SignatureIndex,\n\n return_type_opt: Option<SignatureIndex>,\n\n) -> PartialVMResult<()> {\n\n use SignatureToken as S;\n\n let arguments = &resolver.signature_at(parameters).0;\n\n // Check that all `signer` arguments occur before non-`signer` arguments\n\n // signer is a type that can only be populated by the Move VM. 
And its value is filled\n\n // based on the sender of the transaction\n\n let all_args_have_valid_type = if resolver.version() <= VERSION_1 {\n\n arguments\n\n .iter()\n\n .skip_while(|typ| matches!(typ, S::Reference(inner) if matches!(&**inner, S::Signer)))\n\n .all(|typ| typ.is_valid_for_constant())\n\n } else {\n\n arguments\n\n .iter()\n\n .skip_while(|typ| matches!(typ, S::Signer))\n\n .all(|typ| typ.is_valid_for_constant())\n", "file_path": "language/bytecode-verifier/src/script_signature.rs", "rank": 90, "score": 260985.0552993516 }, { "content": "fn decode_and_validate_checksum(mut buf: Vec<u8>) -> io::Result<(Vec<u8>, HashValue)> {\n\n let offset = buf\n\n .len()\n\n .checked_sub(HashValue::LENGTH)\n\n .ok_or_else(|| invalid_data(\"state file: empty or too small\"))?;\n\n let file_hash = HashValue::from_slice(&buf[offset..]).expect(\"cannot fail\");\n\n\n\n buf.truncate(offset);\n\n let computed_hash = TrustedStateHasher::hash_all(&buf);\n\n\n\n if file_hash != computed_hash {\n\n Err(invalid_data(format!(\n\n \"state file: corrupt: file checksum ({:x}) != computed checksum ({:x})\",\n\n file_hash, computed_hash\n\n )))\n\n } else {\n\n Ok((buf, file_hash))\n\n }\n\n}\n\n\n", "file_path": "sdk/client/src/verifying_client/file_state_store.rs", "rank": 91, "score": 260642.6683028022 }, { "content": "fn verify_constant(idx: usize, constant: &Constant) -> PartialVMResult<()> {\n\n verify_constant_type(idx, &constant.type_)?;\n\n verify_constant_data(idx, constant)\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/constants.rs", "rank": 92, "score": 260597.77635291923 }, { "content": "pub fn verify_module(module: &CompiledModule) -> VMResult<()> {\n\n verify_module_impl(module).map_err(|e| e.finish(Location::Module(module.self_id())))\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/ability_field_requirements.rs", "rank": 93, "score": 259832.74854174434 }, { "content": "#[test]\n\nfn invalid_struct_in_field() {\n\n use 
SignatureToken::*;\n\n\n\n let mut m = basic_test_module();\n\n match &mut m.struct_defs[0].field_information {\n\n StructFieldInformation::Declared(ref mut fields) => {\n\n fields[0].signature.0 = Struct(StructHandleIndex::new(3));\n\n assert_eq!(\n\n BoundsChecker::verify_module(&m).unwrap_err().major_status(),\n\n StatusCode::INDEX_OUT_OF_BOUNDS\n\n );\n\n }\n\n _ => panic!(\"attempt to change a field that does not exist\"),\n\n }\n\n}\n\n\n", "file_path": "language/bytecode-verifier/bytecode-verifier-tests/src/unit_tests/bounds_tests.rs", "rank": 94, "score": 259488.51329185622 }, { "content": "fn invalid_type(type_: SignatureToken, data: Vec<u8>) {\n\n error(type_, data, StatusCode::INVALID_CONSTANT_TYPE)\n\n}\n\n\n", "file_path": "language/bytecode-verifier/bytecode-verifier-tests/src/unit_tests/constants_tests.rs", "rank": 95, "score": 258622.715296156 }, { "content": "fn verify_constant_data(idx: usize, constant: &Constant) -> PartialVMResult<()> {\n\n match constant.deserialize_constant() {\n\n Some(_) => Ok(()),\n\n None => Err(verification_error(\n\n StatusCode::MALFORMED_CONSTANT_DATA,\n\n IndexKind::ConstantPool,\n\n idx as TableIndex,\n\n )),\n\n }\n\n}\n", "file_path": "language/bytecode-verifier/src/constants.rs", "rank": 96, "score": 256791.84270123724 }, { "content": "fn compile_constant(_context: &mut Context, ty: Type, value: MoveValue) -> Result<Constant> {\n\n fn type_layout(ty: Type) -> Result<MoveTypeLayout> {\n\n Ok(match ty {\n\n Type::Address => MoveTypeLayout::Address,\n\n Type::Signer => MoveTypeLayout::Signer,\n\n Type::U8 => MoveTypeLayout::U8,\n\n Type::U64 => MoveTypeLayout::U64,\n\n Type::U128 => MoveTypeLayout::U128,\n\n Type::Bool => MoveTypeLayout::Bool,\n\n Type::Vector(inner_type) => MoveTypeLayout::Vector(Box::new(type_layout(*inner_type)?)),\n\n Type::Reference(_, _) => bail!(\"References are not supported in constant type layouts\"),\n\n Type::TypeParameter(_) => {\n\n bail!(\"Type parameters are not supported in constant 
type layouts\")\n\n }\n\n Type::Struct(_ident, _tys) => {\n\n bail!(\"TODO Structs are not *yet* supported in constant type layouts\")\n\n }\n\n })\n\n }\n\n\n\n Constant::serialize_constant(&type_layout(ty)?, &value)\n\n .ok_or_else(|| format_err!(\"Could not serialize constant\"))\n\n}\n\n\n\n//**************************************************************************************************\n\n// Bytecode\n\n//**************************************************************************************************\n\n\n", "file_path": "language/compiler/ir-to-bytecode/src/compiler.rs", "rank": 97, "score": 256575.21875357028 }, { "content": "/// Creates a new thread with a larger stack size.\n\n///\n\n/// Generating some proptest values can overflow the stack. This allows test authors to work around\n\n/// this limitation.\n\n///\n\n/// This is expected to be used with closure-style proptest invocations:\n\n///\n\n/// ```\n\n/// use proptest::prelude::*;\n\n/// use dijets_proptest_helpers::with_stack_size;\n\n///\n\n/// with_stack_size(4 * 1024 * 1024, || proptest!(|(x in 0usize..128)| {\n\n/// // assertions go here\n\n/// prop_assert!(x >= 0 && x < 128);\n\n/// }));\n\n/// ```\n\npub fn with_stack_size<'a, F, T>(size: usize, f: F) -> Result<T, Box<dyn Any + 'static + Send>>\n\nwhere\n\n F: FnOnce() -> T + Send + 'a,\n\n T: Send + 'a,\n\n{\n\n thread::scope(|s| {\n\n let handle = s.builder().stack_size(size).spawn(|_| f()).map_err(|err| {\n\n let any: Box<dyn Any + 'static + Send> = Box::new(err);\n\n any\n\n })?;\n\n handle.join()\n\n })?\n\n}\n\n\n", "file_path": "common/proptest-helpers/src/lib.rs", "rank": 98, "score": 256441.7917010455 }, { "content": "// The local view must be a subset of (or equal to) the defined set of abilities. Conceptually, the\n\n// local view can be more constrained than the defined one. Removing abilities locally does nothing\n\n// but limit the local usage.\n\n// (Note this works because there are no negative constraints, i.e. 
you cannot constrain a type\n\n// parameter with the absence of an ability)\n\nfn compatible_struct_abilities(\n\n local_struct_abilities_declaration: AbilitySet,\n\n defined_struct_abilities: AbilitySet,\n\n) -> bool {\n\n local_struct_abilities_declaration.is_subset(defined_struct_abilities)\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/dependencies.rs", "rank": 99, "score": 256122.58634699642 } ]
Rust
src/systems/collision.rs
Jazarro/evoli
fe817899c4381bc98b294ae124f12301a711c664
use amethyst::renderer::{debug_drawing::DebugLinesComponent, palette::Srgba}; use amethyst::shrev::{EventChannel, ReaderId}; use amethyst::{core::math::Point3, core::Transform, ecs::prelude::*}; use log::info; use std::f32; #[cfg(feature = "profiler")] use thread_profiler::profile_scope; use crate::components::collider; use crate::components::creatures; use crate::resources::world_bounds::*; pub struct EnforceBoundsSystem; impl<'s> System<'s> for EnforceBoundsSystem { type SystemData = ( WriteStorage<'s, Transform>, ReadStorage<'s, creatures::CreatureTag>, ReadExpect<'s, WorldBounds>, ); fn run(&mut self, (mut locals, tags, bounds): Self::SystemData) { for (local, _) in (&mut locals, &tags).join() { let pos = local.translation().clone(); if pos.x > bounds.right { local.translation_mut().x = bounds.right; } else if pos.x < bounds.left { local.translation_mut().x = bounds.left; } if pos.y > bounds.top { local.translation_mut().y = bounds.top; } else if pos.y < bounds.bottom { local.translation_mut().y = bounds.bottom; } } } } #[derive(Debug, Clone)] pub struct CollisionEvent { pub entity_a: Entity, pub entity_b: Entity, } impl CollisionEvent { pub fn new(entity_a: Entity, entity_b: Entity) -> CollisionEvent { CollisionEvent { entity_a, entity_b } } } pub struct CollisionSystem; impl<'s> System<'s> for CollisionSystem { type SystemData = ( ReadStorage<'s, collider::Circle>, WriteStorage<'s, creatures::Movement>, WriteStorage<'s, Transform>, Entities<'s>, Write<'s, EventChannel<CollisionEvent>>, ); fn run( &mut self, (circles, mut movements, locals, entities, mut collision_events): Self::SystemData, ) { #[cfg(feature = "profiler")] profile_scope!("collision_system"); for (circle_a, movement, local_a, entity_a) in (&circles, &mut movements, &locals, &entities).join() { for (circle_b, local_b, entity_b) in (&circles, &locals, &entities).join() { if entity_a == entity_b { continue; } let allowed_distance = circle_a.radius + circle_b.radius; let direction = 
local_a.translation() - local_b.translation(); if direction.magnitude_squared() < allowed_distance * allowed_distance { collision_events.single_write(CollisionEvent::new(entity_a, entity_b)); if direction.magnitude() < f32::EPSILON { movement.velocity = -movement.velocity; } else { let norm_direction = direction.normalize(); movement.velocity = norm_direction * movement.velocity.magnitude(); } } } } } } pub struct DebugColliderSystem; impl<'s> System<'s> for DebugColliderSystem { type SystemData = ( ReadStorage<'s, collider::Circle>, ReadStorage<'s, Transform>, WriteStorage<'s, DebugLinesComponent>, ); fn run(&mut self, (circles, locals, mut debug_lines_comps): Self::SystemData) { for (circle, local, db_comp) in (&circles, &locals, &mut debug_lines_comps).join() { let mut position = local.global_matrix().column(3).xyz(); position[2] += 1.0; db_comp.add_circle_2d( Point3::from(position), circle.radius, 16, Srgba::new(1.0, 0.5, 0.5, 1.0), ); } } } #[derive(Default)] pub struct DebugCollisionEventSystem { event_reader: Option<ReaderId<CollisionEvent>>, } impl<'s> System<'s> for DebugCollisionEventSystem { type SystemData = (Write<'s, EventChannel<CollisionEvent>>,); fn run(&mut self, (collision_events,): Self::SystemData) { let event_reader = self .event_reader .as_mut() .expect("`DebugCollisionEventSystem::setup` was not called before `DebugCollisionEventSystem::run`"); for event in collision_events.read(event_reader) { info!("Received collision event {:?}", event) } } fn setup(&mut self, world: &mut World) { <Self as System<'_>>::SystemData::setup(world); self.event_reader = Some( world .fetch_mut::<EventChannel<CollisionEvent>>() .register_reader(), ); } }
use amethyst::renderer::{debug_drawing::DebugLinesComponent, palette::Srgba}; use amethyst::shrev::{EventChannel, ReaderId}; use amethyst::{core::math::Point3, core::Transform, ecs::prelude::*}; use log::info; use std::f32; #[cfg(feature = "profiler")] use thread_profiler::profile_scope; use crate::components::collider; use crate::components::creatures; use crate::resources::world_bounds::*; pub struct EnforceBoundsSystem; impl<'s> System<'s> for EnforceBoundsSystem { type SystemData = ( WriteStorage<'s, Transform>, ReadStorage<'s, creatures::CreatureTag>, ReadExpect<'s, WorldBounds>, ); fn run(&mut self, (mut locals, tags, bounds): Self::SystemData) { for (local, _) in (&mut locals, &tags).join() { let pos = loc
if pos.y > bounds.top { local.translation_mut().y = bounds.top; } else if pos.y < bounds.bottom { local.translation_mut().y = bounds.bottom; } } } } #[derive(Debug, Clone)] pub struct CollisionEvent { pub entity_a: Entity, pub entity_b: Entity, } impl CollisionEvent { pub fn new(entity_a: Entity, entity_b: Entity) -> CollisionEvent { CollisionEvent { entity_a, entity_b } } } pub struct CollisionSystem; impl<'s> System<'s> for CollisionSystem { type SystemData = ( ReadStorage<'s, collider::Circle>, WriteStorage<'s, creatures::Movement>, WriteStorage<'s, Transform>, Entities<'s>, Write<'s, EventChannel<CollisionEvent>>, ); fn run( &mut self, (circles, mut movements, locals, entities, mut collision_events): Self::SystemData, ) { #[cfg(feature = "profiler")] profile_scope!("collision_system"); for (circle_a, movement, local_a, entity_a) in (&circles, &mut movements, &locals, &entities).join() { for (circle_b, local_b, entity_b) in (&circles, &locals, &entities).join() { if entity_a == entity_b { continue; } let allowed_distance = circle_a.radius + circle_b.radius; let direction = local_a.translation() - local_b.translation(); if direction.magnitude_squared() < allowed_distance * allowed_distance { collision_events.single_write(CollisionEvent::new(entity_a, entity_b)); if direction.magnitude() < f32::EPSILON { movement.velocity = -movement.velocity; } else { let norm_direction = direction.normalize(); movement.velocity = norm_direction * movement.velocity.magnitude(); } } } } } } pub struct DebugColliderSystem; impl<'s> System<'s> for DebugColliderSystem { type SystemData = ( ReadStorage<'s, collider::Circle>, ReadStorage<'s, Transform>, WriteStorage<'s, DebugLinesComponent>, ); fn run(&mut self, (circles, locals, mut debug_lines_comps): Self::SystemData) { for (circle, local, db_comp) in (&circles, &locals, &mut debug_lines_comps).join() { let mut position = local.global_matrix().column(3).xyz(); position[2] += 1.0; db_comp.add_circle_2d( Point3::from(position), 
circle.radius, 16, Srgba::new(1.0, 0.5, 0.5, 1.0), ); } } } #[derive(Default)] pub struct DebugCollisionEventSystem { event_reader: Option<ReaderId<CollisionEvent>>, } impl<'s> System<'s> for DebugCollisionEventSystem { type SystemData = (Write<'s, EventChannel<CollisionEvent>>,); fn run(&mut self, (collision_events,): Self::SystemData) { let event_reader = self .event_reader .as_mut() .expect("`DebugCollisionEventSystem::setup` was not called before `DebugCollisionEventSystem::run`"); for event in collision_events.read(event_reader) { info!("Received collision event {:?}", event) } } fn setup(&mut self, world: &mut World) { <Self as System<'_>>::SystemData::setup(world); self.event_reader = Some( world .fetch_mut::<EventChannel<CollisionEvent>>() .register_reader(), ); } }
al.translation().clone(); if pos.x > bounds.right { local.translation_mut().x = bounds.right; } else if pos.x < bounds.left { local.translation_mut().x = bounds.left; }
function_block-random_span
[ { "content": "// Once the prefabs are loaded, this function is called to update the ekeys in the CreaturePrefabs struct.\n\n// We use the Named component of the entity to determine which key to use.\n\npub fn update_prefabs(world: &mut World) {\n\n let updated_prefabs = {\n\n let creature_prefabs = world.read_resource::<CreaturePrefabs>();\n\n let prefabs = creature_prefabs.get_prefabs();\n\n let mut prefab_resource =\n\n world.write_resource::<AssetStorage<Prefab<CreaturePrefabData>>>();\n\n let mut new_prefabs = HashMap::new();\n\n for (_key, handle) in prefabs.iter() {\n\n if let Some(prefab) = prefab_resource.get_mut(handle) {\n\n if let Some(prefab_data) = prefab.entity(0) {\n\n let name = prefab_data\n\n .data()\n\n .unwrap()\n\n .name\n\n .as_ref()\n\n .unwrap()\n\n .name\n\n .to_string();\n\n new_prefabs.insert(name, handle.clone());\n\n }\n\n }\n\n }\n\n new_prefabs\n\n };\n\n world\n\n .write_resource::<CreaturePrefabs>()\n\n .set_prefabs(updated_prefabs);\n\n}\n", "file_path": "src/resources/prefabs.rs", "rank": 0, "score": 96507.07259942414 }, { "content": "// Initialise audio in the world. This sets up the background music\n\npub fn initialise_audio(world: &mut World) {\n\n init_output(world);\n\n let music = {\n\n let loader = world.read_resource::<Loader>();\n\n let mut sink = world.write_resource::<AudioSink>();\n\n sink.set_volume(0.25);\n\n\n\n let music = BACKGROUND_MUSIC\n\n .iter()\n\n .map(|file| load_audio_track(&loader, &world, &file))\n\n .collect::<Vec<_>>()\n\n .into_iter()\n\n .cycle();\n\n\n\n Music { music }\n\n };\n\n\n\n // Add sounds to the world\n\n world.insert(music);\n\n}\n", "file_path": "src/resources/audio.rs", "rank": 1, "score": 96500.09225986633 }, { "content": "// The factions are stored inside the Ron file in a sorted way. They can only define\n\n// factions as prey that are on top of their definition. 
For example, 'Plants' cannot define 'Herbivores' as their prey\n\n// because 'Herbivores' is defined after 'Plants'.\n\npub fn load_factions(world: &mut World) {\n\n let prefab_handle = world.exec(|loader: PrefabLoader<'_, FactionPrefabData>| {\n\n loader.load(\"prefabs/factions.ron\", RonFormat, ())\n\n });\n\n\n\n world.create_entity().with(prefab_handle.clone()).build();\n\n}\n\n\n\n#[derive(Default, Debug, Clone, Deserialize, Serialize, PrefabData)]\n\n#[serde(default)]\n\n#[serde(deny_unknown_fields)]\n\npub struct CombatPrefabData {\n\n health: Option<Health>,\n\n speed: Option<Speed>,\n\n damage: Option<Damage>,\n\n has_faction: Option<HasFaction<String>>,\n\n}\n", "file_path": "src/components/combat.rs", "rank": 2, "score": 96500.09225986633 }, { "content": "struct CreatureTypeDistribution {\n\n creature_type: CreatureType,\n\n}\n\n\n\nimpl Distribution<CreatureTypeDistribution> for Standard {\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> CreatureTypeDistribution {\n\n match rng.gen_range(0, 3) {\n\n 0 => CreatureTypeDistribution {\n\n creature_type: \"Herbivore\".to_string(),\n\n },\n\n 1 => CreatureTypeDistribution {\n\n creature_type: \"Carnivore\".to_string(),\n\n },\n\n _ => CreatureTypeDistribution {\n\n creature_type: \"Plant\".to_string(),\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/systems/spawner.rs", "rank": 3, "score": 91608.77982398227 }, { "content": "// Here we load all prefabs for the different creatures in the game.\n\n// These prefabs are then stored in a resource of type CreaturePrefabs that is used by the spawner system.\n\n// At initialization time, we put temporary keys for the prefabs since they're not loaded yet.\n\n// When their loading is finished, we read the name of the entity inside to change the keys. 
This is done in the update_prefabs function.\n\npub fn initialize_prefabs(world: &mut World) -> ProgressCounter {\n\n let mut progress_counter = ProgressCounter::new();\n\n // load ui prefabs\n\n {\n\n let mut ui_prefab_registry = UiPrefabRegistry::default();\n\n let prefab_dir_path = application_root_dir()\n\n .unwrap()\n\n .into_os_string()\n\n .into_string()\n\n .unwrap()\n\n + \"/resources/prefabs/ui\";\n\n let prefab_iter = read_dir(prefab_dir_path).unwrap();\n\n ui_prefab_registry.prefabs = prefab_iter\n\n .map(|prefab_dir_entry| {\n\n world.exec(|loader: UiLoader<'_>| {\n\n loader.load(\n\n make_name(\"prefabs/ui/\", &prefab_dir_entry.unwrap()),\n\n &mut progress_counter,\n\n )\n\n })\n", "file_path": "src/resources/prefabs.rs", "rank": 4, "score": 89393.43248892286 }, { "content": "// delete the specified root entity and all of its descendents as specified\n\n// by the Parent component and maintained by the ParentHierarchy resource\n\npub fn delete_hierarchy(root: Entity, world: &mut World) -> Result<(), WrongGeneration> {\n\n let entities = {\n\n iter::once(root)\n\n .chain(\n\n world\n\n .read_resource::<ParentHierarchy>()\n\n .all_children_iter(root),\n\n )\n\n .collect::<Vec<Entity>>()\n\n };\n\n world.delete_entities(&entities)\n\n}\n", "file_path": "src/utils/hierarchy_util.rs", "rank": 5, "score": 76585.62485540676 }, { "content": "/// Determine the closest bounding wall based on a location\n\nfn closest_wall(location: &Vector3<f32>, bounds: &WorldBounds) -> Vector3<f32> {\n\n let mut bounds_left = location.clone();\n\n bounds_left.x = bounds.left.into();\n\n let mut bounds_right = location.clone();\n\n bounds_right.x = bounds.right.into();\n\n let mut bounds_top = location.clone();\n\n bounds_top.y = bounds.top.into();\n\n let mut bounds_bottom = location.clone();\n\n bounds_bottom.y = bounds.bottom.into();\n\n\n\n // Iterates through each bound\n\n [bounds_left, bounds_right, bounds_top, bounds_bottom]\n\n .iter()\n\n // Calculates the distance 
between the wall & the location\n\n .map(|v| v - location)\n\n // Returns the minimum distance\n\n .min_by(|a, b| {\n\n if a.magnitude_squared() < b.magnitude_squared() {\n\n Ordering::Less\n\n } else {\n", "file_path": "src/systems/behaviors/obstacle.rs", "rank": 6, "score": 69221.59737268323 }, { "content": "#[derive(Default)]\n\nstruct Button {\n\n info: &'static ButtonInfo,\n\n entity: Option<Entity>,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct MainGameUiSystem {\n\n ui_reader_id: Option<ReaderId<UiEvent>>,\n\n input_reader_id: Option<ReaderId<InputEvent<StringBindings>>>,\n\n buttons: Vec<Button>,\n\n pause_button_text: Option<Entity>,\n\n}\n\n\n", "file_path": "src/systems/main_game_ui.rs", "rank": 7, "score": 65512.18407710262 }, { "content": "use crate::resources::world_bounds::WorldBounds;\n\nuse amethyst::{core::transform::components::Transform, ecs::*};\n\n\n\nuse crate::components::creatures::DespawnWhenOutOfBoundsTag;\n\n\n\n/// Deletes any entity tagged with DespawnWhenOutOfBoundsTag if they are detected to be outside\n\n/// the world bounds.\n\n#[derive(Default)]\n\npub struct OutOfBoundsDespawnSystem;\n\n\n\nimpl<'s> System<'s> for OutOfBoundsDespawnSystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n ReadStorage<'s, Transform>,\n\n ReadStorage<'s, DespawnWhenOutOfBoundsTag>,\n\n ReadExpect<'s, WorldBounds>,\n\n );\n\n\n\n fn run(&mut self, (entities, locals, tags, bounds): Self::SystemData) {\n\n for (entity, local, _) in (&*entities, &locals, &tags).join() {\n", "file_path": "src/systems/experimental/out_of_bounds.rs", "rank": 8, "score": 54009.00152448413 }, { "content": " let pos = local.translation();\n\n if pos.x > bounds.right\n\n || pos.x < bounds.left\n\n || pos.y > bounds.top\n\n || pos.y < bounds.bottom\n\n {\n\n let _ = entities.delete(entity);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/systems/experimental/out_of_bounds.rs", "rank": 9, "score": 53988.98549993084 }, { "content": "// implementation-specific, hidden way of producing 
the name of a button's child text widget, given the button name\n\nfn make_ui_text_name(button_name: &str) -> String {\n\n format!(\"{}_btn_txt\", button_name)\n\n}\n\n\n\nimpl<'s> MainGameUiSystem {\n\n fn find_ui_elements(&mut self, finder: &UiFinder) {\n\n if self.buttons.is_empty() {\n\n self.buttons = BUTTON_INFOS\n\n .iter()\n\n .map(|info| Button {\n\n info,\n\n entity: finder.find(info.name),\n\n })\n\n .collect::<Vec<Button>>();\n\n self.pause_button_text = finder.find(&make_ui_text_name(PAUSE_BUTTON.name));\n\n }\n\n }\n\n\n\n // translate ui button clicks into input actions for registered buttons\n\n fn translate_click(\n", "file_path": "src/systems/main_game_ui.rs", "rank": 10, "score": 44220.97872154714 }, { "content": "fn calc_wind_speed(input_signum: Option<f32>, wind: &Wind, time: &Time) -> f32 {\n\n let magnitude = wind.wind.magnitude();\n\n if let Some(signum) = input_signum {\n\n (magnitude + signum * WIND_ACCELERATION * time.delta_seconds())\n\n .max(MIN_WIND_SPEED)\n\n .min(MAX_WIND_SPEED)\n\n } else {\n\n magnitude\n\n }\n\n}\n", "file_path": "src/systems/experimental/wind_control.rs", "rank": 11, "score": 37554.44060026926 }, { "content": "fn calc_wind_angle(input_signum: Option<f32>, wind: &Wind, time: &Time) -> f32 {\n\n let old_wind_angle = wind.wind.y.atan2(wind.wind.x);\n\n if let Some(signum) = input_signum {\n\n old_wind_angle + signum * WIND_TURN_SPEED * time.delta_seconds()\n\n } else {\n\n old_wind_angle\n\n }\n\n}\n\n\n", "file_path": "src/systems/experimental/wind_control.rs", "rank": 12, "score": 37554.44060026926 }, { "content": "fn main() -> amethyst::Result<()> {\n\n amethyst::start_logger(Default::default());\n\n\n\n let resources = application_root_dir()\n\n .unwrap()\n\n .into_os_string()\n\n .into_string()\n\n .unwrap()\n\n + \"/resources\";\n\n let display_config_path = resources.clone() + \"/display_config.ron\";\n\n let key_bindings_path = resources.clone() + \"/input.ron\";\n\n\n\n let _display_config = 
DisplayConfig::load(display_config_path)?;\n\n\n\n let render_display_config = DisplayConfig {\n\n title: \"Evoli\".to_string(),\n\n dimensions: Some((1024, 768)),\n\n ..Default::default()\n\n };\n\n\n", "file_path": "src/main.rs", "rank": 13, "score": 35224.09081457221 }, { "content": "#[derive(Default)]\n\npub struct WorldBounds {\n\n pub left: f32,\n\n pub right: f32,\n\n pub bottom: f32,\n\n pub top: f32,\n\n}\n\n\n\nimpl WorldBounds {\n\n pub fn new(left: f32, right: f32, bottom: f32, top: f32) -> WorldBounds {\n\n WorldBounds {\n\n left,\n\n right,\n\n bottom,\n\n top,\n\n }\n\n }\n\n}\n", "file_path": "src/resources/world_bounds.rs", "rank": 14, "score": 28715.955198890242 }, { "content": "use amethyst::{core::transform::Transform, core::Time, ecs::*};\n\n\n\nuse crate::components::creatures::{CreatureTag, Movement};\n\n\n\npub struct MovementSystem;\n\nimpl<'s> System<'s> for MovementSystem {\n\n type SystemData = (\n\n WriteStorage<'s, Movement>,\n\n WriteStorage<'s, Transform>,\n\n ReadStorage<'s, CreatureTag>,\n\n Read<'s, Time>,\n\n );\n\n\n\n fn run(&mut self, (mut movements, mut transforms, creature_tags, time): Self::SystemData) {\n\n let delta_time = time.delta_seconds();\n\n for (movement, transform) in (&mut movements, &mut transforms).join() {\n\n let magnitude = movement.velocity.magnitude();\n\n if magnitude > movement.max_movement_speed {\n\n movement.velocity = movement.velocity * (movement.max_movement_speed / magnitude);\n\n }\n", "file_path": "src/systems/movement.rs", "rank": 16, "score": 26667.80235706616 }, { "content": " }\n\n }\n\n}\n\n\n\npub struct DebugFullnessSystem;\n\n\n\nimpl<'s> System<'s> for DebugFullnessSystem {\n\n type SystemData = (\n\n ReadStorage<'s, Fullness>,\n\n ReadStorage<'s, Transform>,\n\n Write<'s, DebugLines>,\n\n );\n\n\n\n fn run(&mut self, (fullnesses, locals, mut debug_lines): Self::SystemData) {\n\n for (fullness, local) in (&fullnesses, &locals).join() {\n\n let pos = local.global_matrix();\n\n 
debug_lines.draw_line(\n\n [pos[(3, 0)], pos[(3, 1)], 0.0].into(),\n\n [pos[(3, 0)] + fullness.value / 100.0, pos[(3, 1)], 0.0].into(),\n\n Srgba::new(0.0, 1.0, 0.0, 1.0),\n\n )\n\n }\n\n }\n\n}\n", "file_path": "src/systems/digestion.rs", "rank": 18, "score": 26665.385101102882 }, { "content": "use amethyst::{\n\n core::transform::Transform,\n\n ecs::*,\n\n renderer::{debug_drawing::DebugLinesComponent, palette::Srgba},\n\n};\n\n\n\nuse crate::components::combat::Health;\n\n\n\n#[derive(Default)]\n\npub struct DebugHealthSystem {}\n\n\n\nimpl<'s> System<'s> for DebugHealthSystem {\n\n type SystemData = (\n\n ReadStorage<'s, Health>,\n\n ReadStorage<'s, Transform>,\n\n WriteStorage<'s, DebugLinesComponent>,\n\n );\n\n\n\n fn run(&mut self, (healths, transforms, mut debug_lines_comps): Self::SystemData) {\n\n for (health, transform, db_comp) in (&healths, &transforms, &mut debug_lines_comps).join() {\n", "file_path": "src/systems/health.rs", "rank": 19, "score": 26665.363561184073 }, { "content": "use amethyst::{\n\n ecs::{Entities, Join, ReadStorage, System, WriteStorage},\n\n renderer::debug_drawing::DebugLinesComponent,\n\n};\n\n\n\nuse crate::components::creatures::CreatureTag;\n\n\n\npub struct DebugSystem;\n\nimpl<'s> System<'s> for DebugSystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n WriteStorage<'s, DebugLinesComponent>,\n\n ReadStorage<'s, CreatureTag>,\n\n );\n\n\n\n fn run(&mut self, (entities, mut debug_lines_comps, tags): Self::SystemData) {\n\n for (entity, _) in (&entities, &tags).join() {\n\n match debug_lines_comps.get(entity) {\n\n Some(_) => (),\n\n None => {\n\n debug_lines_comps\n\n .insert(entity, DebugLinesComponent::new())\n\n .expect(\"Unreachable\");\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/systems/debug.rs", "rank": 21, "score": 26663.9272929805 }, { "content": "use amethyst::renderer::{debug_drawing::DebugLines, palette::Srgba};\n\nuse amethyst::{core::Time, core::Transform, ecs::*};\n\n\n\nuse 
crate::components::digestion::{Digestion, Fullness};\n\n\n\npub struct DigestionSystem;\n\n\n\nimpl<'s> System<'s> for DigestionSystem {\n\n type SystemData = (\n\n ReadStorage<'s, Digestion>,\n\n WriteStorage<'s, Fullness>,\n\n Read<'s, Time>,\n\n );\n\n\n\n fn run(&mut self, (digestions, mut fullnesses, time): Self::SystemData) {\n\n let delta_time = time.delta_seconds();\n\n for (digestion, fullness) in (&digestions, &mut fullnesses).join() {\n\n let burned = digestion.nutrition_burn_rate * delta_time;\n\n let new_value = fullness.value - burned;\n\n fullness.value = new_value;\n", "file_path": "src/systems/digestion.rs", "rank": 22, "score": 26662.83772520258 }, { "content": "use amethyst::{core::transform::Transform, ecs::*, shrev::EventChannel};\n\nuse std::f32;\n\n\n\nuse crate::components::{combat::Health, creatures::Carcass, digestion::Fullness};\n\nuse crate::systems::spawner::CreatureSpawnEvent;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct CreatureDeathEvent {\n\n pub deceased: Entity,\n\n}\n\n\n\npub struct StarvationSystem;\n\n\n\n// Entities die if their fullness reaches zero (or less).\n\nimpl<'s> System<'s> for StarvationSystem {\n\n type SystemData = (\n\n ReadStorage<'s, Fullness>,\n\n Entities<'s>,\n\n Write<'s, EventChannel<CreatureDeathEvent>>,\n\n );\n", "file_path": "src/systems/death.rs", "rank": 23, "score": 26661.678332719224 }, { "content": " }\n\n\n\n fn run(&mut self, (_entities, spawn_events, prefabs, lazy_update): Self::SystemData) {\n\n for event in spawn_events.read(self.spawn_reader_id.as_mut().unwrap()) {\n\n if let Some(creature_prefab) = prefabs.get_prefab(&event.creature_type) {\n\n lazy_update.insert(event.entity, creature_prefab.clone());\n\n }\n\n }\n\n }\n\n}\n\n\n\n//\n\n//\n\n// For debugging purposes this system sends spawn events regularly\n\n#[derive(Default)]\n\npub struct DebugSpawnTriggerSystem {\n\n timer_to_next_spawn: f32,\n\n}\n\n\n\nimpl<'s> System<'s> for DebugSpawnTriggerSystem {\n", "file_path": 
"src/systems/spawner.rs", "rank": 24, "score": 26660.702470494638 }, { "content": " type SystemData = (\n\n Entities<'s>,\n\n Read<'s, LazyUpdate>,\n\n Write<'s, EventChannel<CreatureSpawnEvent>>,\n\n Read<'s, Time>,\n\n );\n\n\n\n fn run(&mut self, (entities, lazy_update, mut spawn_events, time): Self::SystemData) {\n\n let delta_seconds = time.delta_seconds();\n\n self.timer_to_next_spawn -= delta_seconds;\n\n if self.timer_to_next_spawn <= 0.0 {\n\n let mut creature_entity_builder = lazy_update.create_entity(&entities);\n\n self.timer_to_next_spawn = 1.5;\n\n let mut rng = thread_rng();\n\n let x = rng.gen_range(-5.0f32, 5.0f32);\n\n let y = rng.gen_range(-5.0f32, 5.0f32);\n\n let mut transform = Transform::default();\n\n transform.set_translation_xyz(x, y, 0.02);\n\n let CreatureTypeDistribution { creature_type }: CreatureTypeDistribution =\n\n rand::random();\n", "file_path": "src/systems/spawner.rs", "rank": 26, "score": 26660.12435820264 }, { "content": "use amethyst::{\n\n core::{math::Vector3, timing::Time, transform::Transform},\n\n ecs::*,\n\n shrev::{EventChannel, ReaderId},\n\n};\n\n\n\nuse rand::{\n\n distributions::{Distribution, Standard},\n\n thread_rng, Rng,\n\n};\n\n\n\nuse std::f32::consts::PI;\n\n\n\nuse crate::{components::creatures::CreatureType, resources::prefabs::CreaturePrefabs};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct CreatureSpawnEvent {\n\n pub creature_type: String,\n\n pub entity: Entity,\n\n}\n\n\n", "file_path": "src/systems/spawner.rs", "rank": 27, "score": 26659.94011514134 }, { "content": "#[derive(Default)]\n\npub struct CreatureSpawnerSystem {\n\n spawn_reader_id: Option<ReaderId<CreatureSpawnEvent>>,\n\n}\n\n\n\nimpl<'s> System<'s> for CreatureSpawnerSystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n Read<'s, EventChannel<CreatureSpawnEvent>>,\n\n Read<'s, CreaturePrefabs>,\n\n Write<'s, LazyUpdate>,\n\n );\n\n\n\n fn setup(&mut self, world: &mut World) {\n\n <Self as System<'_>>::SystemData::setup(world);\n\n 
self.spawn_reader_id = Some(\n\n world\n\n .fetch_mut::<EventChannel<CreatureSpawnEvent>>()\n\n .register_reader(),\n\n );\n", "file_path": "src/systems/spawner.rs", "rank": 29, "score": 26659.458208286138 }, { "content": " Write<'s, EventChannel<CreatureSpawnEvent>>,\n\n Write<'s, LazyUpdate>,\n\n ReadStorage<'s, Transform>,\n\n ReadStorage<'s, Carcass>,\n\n );\n\n\n\n fn setup(&mut self, world: &mut World) {\n\n <Self as System<'_>>::SystemData::setup(world);\n\n self.death_reader_id = Some(\n\n world\n\n .fetch_mut::<EventChannel<CreatureDeathEvent>>()\n\n .register_reader(),\n\n );\n\n }\n\n\n\n fn run(\n\n &mut self,\n\n (entities, death_events, mut spawn_events, lazy_update, transforms, carcasses): Self::SystemData,\n\n ) {\n\n for event in death_events.read(self.death_reader_id.as_mut().unwrap()) {\n", "file_path": "src/systems/death.rs", "rank": 30, "score": 26659.14802884833 }, { "content": "\n\n fn run(&mut self, (healths, entities, mut death_events): Self::SystemData) {\n\n for (health, entity) in (&healths, &*entities).join() {\n\n if health.value < f32::EPSILON {\n\n death_events.single_write(CreatureDeathEvent { deceased: entity });\n\n let _ = entities.delete(entity);\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct CarcassSystem {\n\n death_reader_id: Option<ReaderId<CreatureDeathEvent>>,\n\n}\n\n\n\nimpl<'s> System<'s> for CarcassSystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n Read<'s, EventChannel<CreatureDeathEvent>>,\n", "file_path": "src/systems/death.rs", "rank": 31, "score": 26658.911719186926 }, { "content": "\n\n fn run(&mut self, (fullnesses, entities, mut death_events): Self::SystemData) {\n\n for (fullness, entity) in (&fullnesses, &*entities).join() {\n\n if fullness.value < f32::EPSILON {\n\n death_events.single_write(CreatureDeathEvent { deceased: entity });\n\n let _ = entities.delete(entity);\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct DeathByHealthSystem;\n\n\n\n// Entities die if their health reaches zero (or 
less).\n\nimpl<'s> System<'s> for DeathByHealthSystem {\n\n type SystemData = (\n\n ReadStorage<'s, Health>,\n\n Entities<'s>,\n\n Write<'s, EventChannel<CreatureDeathEvent>>,\n\n );\n", "file_path": "src/systems/death.rs", "rank": 32, "score": 26658.911719186926 }, { "content": "use amethyst::{\n\n core::Time,\n\n ecs::*,\n\n shrev::{EventChannel, ReaderId},\n\n};\n\n\n\nuse crate::components::combat;\n\nuse crate::components::combat::{Cooldown, Damage, Health, Speed};\n\nuse crate::components::digestion::{Fullness, Nutrition};\n\nuse crate::systems::collision::CollisionEvent;\n\n//#[cfg(test)]\n\n//use amethyst::Error;\n\n//#[cfg(test)]\n\n//use amethyst_test::AmethystApplication;\n\nuse std::time::Duration;\n\n\n\npub struct CooldownSystem;\n\n\n\nimpl<'s> System<'s> for CooldownSystem {\n\n type SystemData = (WriteStorage<'s, Cooldown>, Entities<'s>, Read<'s, Time>);\n", "file_path": "src/systems/combat.rs", "rank": 34, "score": 26658.242174815634 }, { "content": " transform.prepend_translation_x(movement.velocity.x * delta_time);\n\n transform.prepend_translation_y(movement.velocity.y * delta_time);\n\n transform.prepend_translation_z(movement.velocity.z * delta_time);\n\n }\n\n for (movement, transform, _) in (&mut movements, &mut transforms, &creature_tags).join() {\n\n let angle = movement.velocity.y.atan2(movement.velocity.x);\n\n transform.set_rotation_2d(angle);\n\n }\n\n }\n\n}\n", "file_path": "src/systems/movement.rs", "rank": 36, "score": 26656.603829752457 }, { "content": "\n\n///\n\n///\n\n///\n\npub struct AttackEvent {\n\n pub attacker: Entity,\n\n pub defender: Entity,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct PerformDefaultAttackSystem {\n\n event_reader: Option<ReaderId<AttackEvent>>,\n\n}\n\n\n\nimpl<'s> System<'s> for PerformDefaultAttackSystem {\n\n type SystemData = (\n\n Read<'s, EventChannel<AttackEvent>>,\n\n ReadStorage<'s, Damage>,\n\n WriteStorage<'s, Cooldown>,\n\n ReadStorage<'s, Speed>,\n", "file_path": "src/systems/combat.rs", 
"rank": 37, "score": 26656.463593622233 }, { "content": " WriteStorage<'s, Health>,\n\n WriteStorage<'s, Fullness>,\n\n WriteStorage<'s, Nutrition>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (\n\n attack_events,\n\n damages,\n\n mut cooldowns,\n\n speeds,\n\n mut healths,\n\n mut fullnesses,\n\n mut nutritions,\n\n ): Self::SystemData,\n\n ) {\n\n let event_reader = self\n\n .event_reader\n\n .as_mut()\n", "file_path": "src/systems/combat.rs", "rank": 38, "score": 26656.043990565326 }, { "content": " let mut deceased = BitSet::new();\n\n deceased.add(event.deceased.id());\n\n\n\n for (_, carcass, transform) in (&deceased, &carcasses, &transforms).join() {\n\n let creature_entity_builder =\n\n lazy_update.create_entity(&entities).with(transform.clone());\n\n spawn_events.single_write(CreatureSpawnEvent {\n\n creature_type: carcass.creature_type.clone(),\n\n entity: creature_entity_builder.build(),\n\n });\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/systems/death.rs", "rank": 39, "score": 26655.83895727479 }, { "content": " });\n\n }\n\n }\n\n\n\n let preys_b = faction_preys.get(faction_b.faction);\n\n if let Some(preys) = preys_b {\n\n if preys.is_prey(&faction_a.faction) {\n\n attack_events.single_write(AttackEvent {\n\n attacker: event.entity_b,\n\n defender: event.entity_a,\n\n });\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn setup(&mut self, world: &mut World) {\n\n <Self as System<'_>>::SystemData::setup(world);\n\n self.event_reader = Some(\n", "file_path": "src/systems/combat.rs", "rank": 40, "score": 26655.487693974785 }, { "content": " for (nutrition, _) in (&mut nutritions, &defender_set).join() {\n\n let delta = nutrition.value.min(damage.damage);\n\n nutrition.value = nutrition.value - delta;\n\n fullness.value = fullness.value + delta;\n\n }\n\n }\n\n\n\n if let Some(value) = cooldown {\n\n cooldowns\n\n .insert(event.attacker, value)\n\n .expect(\"Unreachable: we are inserting now.\");\n\n }\n\n }\n\n }\n\n\n\n fn setup(&mut self, world: &mut World) {\n\n 
<Self as System<'_>>::SystemData::setup(world);\n\n self.event_reader = Some(\n\n world\n\n .fetch_mut::<EventChannel<AttackEvent>>()\n", "file_path": "src/systems/combat.rs", "rank": 41, "score": 26655.25579891619 }, { "content": "\n\n fn run(&mut self, (mut cooldowns, entities, time): Self::SystemData) {\n\n let mut to_remove = Vec::new();\n\n\n\n for (mut cooldown, entity) in (&mut cooldowns, &*entities).join() {\n\n match cooldown.time_left.checked_sub(time.delta_time()) {\n\n Some(time_left) => {\n\n cooldown.time_left = time_left;\n\n }\n\n None => {\n\n to_remove.push(entity);\n\n }\n\n }\n\n }\n\n\n\n for entity in &to_remove {\n\n cooldowns.remove(*entity);\n\n }\n\n }\n\n}\n", "file_path": "src/systems/combat.rs", "rank": 42, "score": 26655.04767440868 }, { "content": " &mut self,\n\n (collision_events, mut attack_events, has_faction, faction_preys): Self::SystemData,\n\n ) {\n\n let event_reader = self\n\n .event_reader\n\n .as_mut()\n\n .expect(\"`FindAttackSystem::setup` was not called before `FindAttackSystem::run`\");\n\n\n\n for event in collision_events.read(event_reader) {\n\n let opt_factions = has_faction\n\n .get(event.entity_a)\n\n .and_then(|a| has_faction.get(event.entity_b).map(|b| (a, b)));\n\n\n\n if let Some((faction_a, faction_b)) = opt_factions {\n\n let preys_a = faction_preys.get(faction_a.faction);\n\n if let Some(preys) = preys_a {\n\n if preys.is_prey(&faction_b.faction) {\n\n attack_events.single_write(AttackEvent {\n\n attacker: event.entity_a,\n\n defender: event.entity_b,\n", "file_path": "src/systems/combat.rs", "rank": 43, "score": 26654.567054174036 }, { "content": " if creature_type == \"Carnivore\" || creature_type == \"Herbivore\" {\n\n transform.set_scale(Vector3::new(0.4, 0.4, 0.4));\n\n }\n\n if creature_type == \"Plant\" {\n\n let scale = rng.gen_range(0.8f32, 1.2f32);\n\n let rotation = rng.gen_range(0.0f32, PI);\n\n transform.set_translation_z(0.01);\n\n transform.set_scale(Vector3::new(scale, scale, scale));\n\n 
transform.set_rotation_euler(0.0, 0.0, rotation);\n\n }\n\n creature_entity_builder = creature_entity_builder.with(transform);\n\n spawn_events.single_write(CreatureSpawnEvent {\n\n creature_type,\n\n entity: creature_entity_builder.build(),\n\n });\n\n }\n\n }\n\n}\n", "file_path": "src/systems/spawner.rs", "rank": 44, "score": 26654.511463564246 }, { "content": " .register_reader(),\n\n )\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct FindAttackSystem {\n\n event_reader: Option<ReaderId<CollisionEvent>>,\n\n}\n\n\n\n// Determine if a collision will trigger an attack. If that is the case, generate an `AttackEvent`\n\nimpl<'s> System<'s> for FindAttackSystem {\n\n type SystemData = (\n\n Read<'s, EventChannel<CollisionEvent>>,\n\n Write<'s, EventChannel<AttackEvent>>,\n\n ReadStorage<'s, combat::HasFaction<Entity>>,\n\n ReadStorage<'s, combat::FactionPrey<Entity>>,\n\n );\n\n\n\n fn run(\n", "file_path": "src/systems/combat.rs", "rank": 45, "score": 26654.468982136645 }, { "content": "pub mod behaviors;\n\npub mod camera_movement;\n\npub mod collision;\n\npub mod combat;\n\npub mod death;\n\npub mod debug;\n\npub mod digestion;\n\npub mod health;\n\npub mod main_game_ui;\n\npub mod movement;\n\npub mod spawner;\n\npub mod swarm_behavior;\n\n\n\nmod experimental;\n\npub use experimental::*;\n", "file_path": "src/systems/mod.rs", "rank": 46, "score": 26653.668828658123 }, { "content": " .expect(\"`PerformDefaultAttackSystem::setup` was not called before `PerformDefaultAttackSystem::run`\");\n\n\n\n for event in attack_events.read(event_reader) {\n\n let mut attack_set = BitSet::new();\n\n attack_set.add(event.attacker.id());\n\n let mut defender_set = BitSet::new();\n\n defender_set.add(event.defender.id());\n\n\n\n let mut cooldown = None;\n\n\n\n for (damage, _, speed, _) in (&damages, !&cooldowns, &speeds, &attack_set).join() {\n\n for (mut health, _) in (&mut healths, &defender_set).join() {\n\n health.value = health.value - damage.damage;\n\n cooldown = 
Some(Cooldown::new(Duration::from_millis(\n\n (1000.0 / speed.attacks_per_second) as u64,\n\n )));\n\n }\n\n }\n\n\n\n for (mut fullness, _, damage) in (&mut fullnesses, &attack_set, &damages).join() {\n", "file_path": "src/systems/combat.rs", "rank": 47, "score": 26652.9739285873 }, { "content": " world\n\n .fetch_mut::<EventChannel<CollisionEvent>>()\n\n .register_reader(),\n\n )\n\n }\n\n}\n\n\n\n//#[test]\n\n//fn test_cooldown_is_reduced() -> Result<(), Error> {\n\n//AmethystApplication::blank()\n\n//.with_system(CooldownSystem, \"cooldown_system\", &[])\n\n//.with_setup(|world| {\n\n//world\n\n//.create_entity()\n\n//.with(Cooldown::new(Duration::from_millis(5000)))\n\n//.build();\n\n//})\n\n//.with_assertion(|world| {\n\n//let entity = world.entities().entity(0);\n\n//let cooldowns = world.read_storage::<Cooldown>();\n", "file_path": "src/systems/combat.rs", "rank": 48, "score": 26652.52149063596 }, { "content": "//let cooldown = cooldowns.get(entity).unwrap();\n\n//assert!(cooldown.time_left.as_millis() < 5000);\n\n//})\n\n//.run()\n\n//}\n\n\n\n//#[test]\n\n//fn test_cooldown_is_removed() -> Result<(), Error> {\n\n//AmethystApplication::blank()\n\n//.with_system(CooldownSystem, \"cooldown_system\", &[])\n\n//.with_setup(|world| {\n\n//world\n\n//.create_entity()\n\n//.with(Cooldown::new(Duration::from_millis(0)))\n\n//.build();\n\n//})\n\n//.with_assertion(|world| {\n\n//let entity = world.entities().entity(0);\n\n//let cooldowns = world.read_storage::<Cooldown>();\n\n//let cooldown = cooldowns.get(entity);\n\n//assert!(cooldown.is_none());\n\n//})\n\n//.run()\n\n//}\n", "file_path": "src/systems/combat.rs", "rank": 49, "score": 26650.30734559249 }, { "content": " let pos = transform.global_matrix().column(3).xyz();\n\n db_comp.add_line(\n\n [pos[0], pos[1] + 0.5, pos[2] + 0.5].into(),\n\n [pos[0] + health.value / 100.0, pos[1] + 0.5, pos[2] + 0.5].into(),\n\n Srgba::new(0.0, 1.0, 0.0, 1.0),\n\n )\n\n }\n\n }\n\n}\n", "file_path": "src/systems/health.rs", 
"rank": 50, "score": 26649.99037167729 }, { "content": "use amethyst::{core::Transform, ecs::*};\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\nuse crate::components::creatures;\n\nuse crate::resources::world_bounds::*;\n\n\n\npub struct RicochetSystem;\n\n\n\nimpl<'s> System<'s> for RicochetSystem {\n\n type SystemData = (\n\n ReadStorage<'s, Transform>,\n\n ReadStorage<'s, creatures::RicochetTag>,\n\n WriteStorage<'s, creatures::Movement>,\n\n Read<'s, WorldBounds>,\n\n );\n\n\n\n fn run(&mut self, (transforms, ricochets, mut movements, bounds): Self::SystemData) {\n\n for (local, _ricochet, movement) in (&transforms, &ricochets, &mut movements).join() {\n", "file_path": "src/systems/behaviors/ricochet.rs", "rank": 51, "score": 25304.980401361463 }, { "content": " Ordering::Greater\n\n }\n\n })\n\n .unwrap()\n\n}\n\n\n\npub struct ClosestObstacleSystem;\n\nimpl<'s> System<'s> for ClosestObstacleSystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n ReadStorage<'s, Transform>,\n\n ReadStorage<'s, Movement>,\n\n ReadExpect<'s, WorldBounds>,\n\n ReadStorage<'s, AvoidObstaclesTag>,\n\n WriteStorage<'s, Closest<Obstacle>>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (entities, transforms, movements, world_bounds, avoid_obstacles, mut closest_obstacle): Self::SystemData,\n", "file_path": "src/systems/behaviors/obstacle.rs", "rank": 52, "score": 25296.475290972427 }, { "content": "use amethyst::core::{math::Point3, transform::Transform, Time};\n\nuse amethyst::ecs::*;\n\nuse amethyst::renderer::{debug_drawing::DebugLinesComponent, palette::Srgba};\n\n\n\nuse crate::components::creatures;\n\nuse rand::{thread_rng, Rng};\n\n\n\npub struct WanderSystem;\n\nimpl<'s> System<'s> for WanderSystem {\n\n type SystemData = (\n\n WriteStorage<'s, creatures::Wander>,\n\n WriteStorage<'s, creatures::Movement>,\n\n ReadStorage<'s, Transform>,\n\n Read<'s, Time>,\n\n );\n\n\n\n fn run(&mut self, (mut wanders, mut movements, locals, time): 
Self::SystemData) {\n\n let delta_time = time.delta_seconds();\n\n let mut rng = thread_rng();\n\n\n", "file_path": "src/systems/behaviors/wander.rs", "rank": 53, "score": 25295.827494185967 }, { "content": " type SystemData = (\n\n Entities<'s>,\n\n ReadStorage<'s, Transform>,\n\n ReadStorage<'s, CreatureTag>,\n\n WriteExpect<'s, SpatialGrid>,\n\n );\n\n\n\n fn run(&mut self, (entities, transforms, tags, mut spatial_grid): Self::SystemData) {\n\n spatial_grid.reset();\n\n for (entity, transform, _) in (&entities, &transforms, &tags).join() {\n\n spatial_grid.insert(entity, transform);\n\n }\n\n }\n\n}\n\n\n\npub struct DebugEntityDetectionSystem;\n\n\n\nimpl<'s> System<'s> for DebugEntityDetectionSystem {\n\n type SystemData = (\n\n ReadStorage<'s, DetectedEntities>,\n", "file_path": "src/systems/experimental/perception.rs", "rank": 54, "score": 25295.14676591938 }, { "content": "use amethyst::{\n\n core::{Named, Time, Transform},\n\n ecs::*,\n\n input::{InputHandler, StringBindings},\n\n renderer::camera::Camera,\n\n};\n\n\n\n#[derive(Default)]\n\npub struct CameraMovementSystem {}\n\n\n\nimpl<'s> System<'s> for CameraMovementSystem {\n\n type SystemData = (\n\n ReadStorage<'s, Camera>,\n\n ReadStorage<'s, Named>,\n\n WriteStorage<'s, Transform>,\n\n Read<'s, InputHandler<StringBindings>>,\n\n Read<'s, Time>,\n\n );\n\n\n\n fn run(&mut self, (cameras, names, mut transforms, input_handler, time): Self::SystemData) {\n", "file_path": "src/systems/camera_movement.rs", "rank": 55, "score": 25292.978153436623 }, { "content": "use amethyst::{core::timing::Time, ecs::*};\n\n\n\nuse crate::{components::creatures::FallingTag, components::creatures::Movement};\n\n\n\n/// Acceleration due to gravity.\n\nconst GRAVITY: f32 = 4.0;\n\n\n\n/// Applies the force of gravity on all entities with the FallingTag.\n\n/// Will remove the tag if an entity has reached the ground again.\n\n#[derive(Default)]\n\npub struct GravitySystem;\n\n\n\nimpl<'s> System<'s> for GravitySystem {\n\n 
type SystemData = (\n\n WriteStorage<'s, Movement>,\n\n ReadStorage<'s, FallingTag>,\n\n Read<'s, Time>,\n\n );\n\n\n\n fn run(&mut self, (mut movements, falling_tags, time): Self::SystemData) {\n\n for (movement, _) in (&mut movements, &falling_tags).join() {\n\n //TODO: Add terminal velocity cap on falling speed.\n\n movement.velocity.z -= GRAVITY * time.delta_seconds();\n\n }\n\n }\n\n}\n", "file_path": "src/systems/experimental/gravity.rs", "rank": 56, "score": 25292.778602885854 }, { "content": "use amethyst::core::math::Vector3;\n\nuse amethyst::{\n\n core::Transform,\n\n ecs::{join::Join, Entities, ReadExpect, ReadStorage, System, WriteStorage},\n\n};\n\n\n\nuse std::cmp::Ordering;\n\n\n\nuse crate::components::creatures::{AvoidObstaclesTag, Movement};\n\nuse crate::resources::world_bounds::WorldBounds;\n\nuse crate::systems::behaviors::decision::Closest;\n\n\n\n#[derive(Default)]\n\npub struct Obstacle;\n\n\n\n/// Determine the closest bounding wall based on a location\n", "file_path": "src/systems/behaviors/obstacle.rs", "rank": 57, "score": 25291.777722735627 }, { "content": " }\n\n }\n\n}\n\n\n\npub struct DebugWanderSystem;\n\nimpl<'s> System<'s> for DebugWanderSystem {\n\n type SystemData = (\n\n ReadStorage<'s, creatures::Wander>,\n\n ReadStorage<'s, Transform>,\n\n ReadStorage<'s, creatures::Movement>,\n\n WriteStorage<'s, DebugLinesComponent>,\n\n );\n\n\n\n fn run(&mut self, (wanders, transforms, movements, mut debug_lines_comps): Self::SystemData) {\n\n for (wander, transform, movement, db_comp) in\n\n (&wanders, &transforms, &movements, &mut debug_lines_comps).join()\n\n {\n\n let mut position = transform.global_matrix().column(3).xyz();\n\n position[2] += 0.5;\n\n let mut future_position = position + movement.velocity * 0.5;\n", "file_path": "src/systems/behaviors/wander.rs", "rank": 58, "score": 25291.708173886687 }, { "content": "/// Periodically schedules a Topplegrass entity to be spawned in through a 
CreatureSpawnEvent.\n\n#[derive(Default)]\n\npub struct TopplegrassSpawnSystem {\n\n secs_to_next_spawn: f32,\n\n}\n\n\n\nimpl<'s> System<'s> for TopplegrassSpawnSystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n Read<'s, LazyUpdate>,\n\n Write<'s, EventChannel<CreatureSpawnEvent>>,\n\n Read<'s, Time>,\n\n Read<'s, WorldBounds>,\n\n Read<'s, Wind>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (entities, lazy_update, mut spawn_events, time, world_bounds, wind): Self::SystemData,\n\n ) {\n", "file_path": "src/systems/experimental/topplegrass.rs", "rank": 59, "score": 25290.578892484613 }, { "content": "use crate::resources::world_bounds::WorldBounds;\n\nuse amethyst::{\n\n core::{\n\n math::{Vector2, Vector3},\n\n timing::Time,\n\n transform::components::Transform,\n\n },\n\n ecs::*,\n\n shrev::EventChannel,\n\n};\n\n\n\nuse rand::{thread_rng, Rng};\n\nuse std::f32;\n\n\n\nuse crate::{\n\n components::creatures::FallingTag, components::creatures::Movement,\n\n components::creatures::TopplegrassTag, resources::wind::Wind,\n\n systems::spawner::CreatureSpawnEvent,\n\n};\n\n\n", "file_path": "src/systems/experimental/topplegrass.rs", "rank": 60, "score": 25290.532155433637 }, { "content": "use amethyst::{\n\n core::{math::Point3, transform::Transform},\n\n ecs::{BitSet, Entities, Join, ReadExpect, ReadStorage, System, WriteExpect, WriteStorage},\n\n renderer::{debug_drawing::DebugLinesComponent, palette::Srgba},\n\n};\n\n\n\nuse crate::components::{\n\n creatures::CreatureTag,\n\n perception::{DetectedEntities, Perception},\n\n};\n\nuse crate::resources::spatial_grid::SpatialGrid;\n\n\n\npub struct EntityDetectionSystem;\n\n\n\nimpl<'s> System<'s> for EntityDetectionSystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n ReadStorage<'s, Perception>,\n\n WriteStorage<'s, DetectedEntities>,\n\n ReadExpect<'s, SpatialGrid>,\n", "file_path": "src/systems/experimental/perception.rs", "rank": 61, "score": 25290.116907286993 }, { "content": " if 
self.ready_to_spawn(time.delta_seconds()) {\n\n let mut transform = Transform::default();\n\n transform.set_scale(Vector3::new(\n\n TOPPLEGRASS_BASE_SCALE,\n\n TOPPLEGRASS_BASE_SCALE,\n\n TOPPLEGRASS_BASE_SCALE,\n\n ));\n\n transform.append_translation(Self::gen_spawn_location(&wind, &world_bounds));\n\n let entity = lazy_update.create_entity(&entities).with(transform).build();\n\n spawn_events.single_write(CreatureSpawnEvent {\n\n creature_type: \"Topplegrass\".to_string(),\n\n entity,\n\n });\n\n }\n\n }\n\n}\n\n\n\nimpl TopplegrassSpawnSystem {\n\n /// Checks the time elapsed since the last spawn. If the system is ready to spawn another\n\n /// entity, the timer will be reset and this function will return true.\n", "file_path": "src/systems/experimental/topplegrass.rs", "rank": 62, "score": 25290.111080145914 }, { "content": " Read<'s, Time>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (entities, mut movements, mut transforms, topple_tags, mut falling_tags, wind, time): Self::SystemData,\n\n ) {\n\n let mut rng = thread_rng();\n\n // Set topplegrass velocity to equal wind velocity.\n\n // Rotate topplegrass.\n\n for (movement, transform, _) in (&mut movements, &mut transforms, &topple_tags).join() {\n\n transform.prepend_rotation_x_axis(\n\n -ANGULAR_V_MAGIC * movement.velocity.y * time.delta_seconds(),\n\n );\n\n transform.prepend_rotation_y_axis(\n\n ANGULAR_V_MAGIC * movement.velocity.x * time.delta_seconds(),\n\n );\n\n movement.velocity.x = wind.wind.x;\n\n movement.velocity.y = wind.wind.y;\n\n }\n", "file_path": "src/systems/experimental/topplegrass.rs", "rank": 63, "score": 25289.585061124486 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct SwarmBehaviorSystem {}\n\n\n\nimpl<'s> System<'s> for SwarmBehaviorSystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n Read<'s, Time>,\n\n ReadStorage<'s, Transform>,\n\n ReadStorage<'s, SwarmCenter>,\n\n ReadStorage<'s, SwarmBehavior>,\n\n WriteStorage<'s, Movement>,\n\n );\n\n\n\n fn 
run(\n\n &mut self,\n", "file_path": "src/systems/swarm_behavior.rs", "rank": 64, "score": 25289.219850150526 }, { "content": "use amethyst::{\n\n core::{math::*, transform::Transform, Time},\n\n ecs::*,\n\n};\n\n\n\nuse std::f32;\n\nuse std::marker::PhantomData;\n\n\n\nuse crate::components::combat::{FactionPrey, HasFaction};\n\nuse crate::components::creatures::*;\n\n\n\n/// A query is a component that contains the queried bit set that can be used to join with other components\n\npub struct Query<T>(BitSet, PhantomData<T>);\n\nimpl<T: shred::Resource> Component for Query<T> {\n\n type Storage = HashMapStorage<Self>;\n\n}\n\n\n\nimpl<T> Query<T> {\n\n pub fn new() -> Query<T> {\n\n Query(BitSet::new(), PhantomData {})\n", "file_path": "src/systems/behaviors/decision.rs", "rank": 65, "score": 25288.793891708647 }, { "content": " T: shred::Resource + Default,\n\n{\n\n type Storage = DenseVecStorage<Self>;\n\n}\n\n\n\n/// A system that returns the closest entity of a query on the faction.\n\n/// To make use of this system, you should attach a `Query<T>` to a faction. The system will\n\n/// attach `Closest<T>` to all entities that have a faction where `Query<T>` is attached. The distance\n\n/// between the entity and the queried entity needs to be at least 5.0f32. 
If the distance is higher,\n\n/// `Closest<T>` will not be attached.\n\n#[derive(Default)]\n\npub struct ClosestSystem<T: Default>(PhantomData<T>);\n\n\n\nimpl<'s, T> System<'s> for ClosestSystem<T>\n\nwhere\n\n T: shred::Resource + Default,\n\n{\n\n type SystemData = (\n\n Entities<'s>,\n\n ReadStorage<'s, Transform>,\n", "file_path": "src/systems/behaviors/decision.rs", "rank": 66, "score": 25288.134767235653 }, { "content": "\n\n#[derive(Default)]\n\npub struct SwarmSpawnSystem {\n\n swarm_timer: f32,\n\n}\n\n\n\nimpl<'s> System<'s> for SwarmSpawnSystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n Read<'s, LazyUpdate>,\n\n Write<'s, EventChannel<CreatureSpawnEvent>>,\n\n Read<'s, Time>,\n\n );\n\n\n\n fn run(&mut self, (entities, lazy_update, mut spawn_events, time): Self::SystemData) {\n\n let delta_seconds = time.delta_seconds();\n\n self.swarm_timer -= delta_seconds;\n\n if self.swarm_timer <= 0.0 {\n\n let mut rng = thread_rng();\n\n self.swarm_timer = 10.0f32;\n", "file_path": "src/systems/swarm_behavior.rs", "rank": 67, "score": 25288.077520305484 }, { "content": "impl<'s> System<'s> for SwarmCenterSystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n Read<'s, Time>,\n\n WriteStorage<'s, SwarmCenter>,\n\n ReadStorage<'s, SwarmBehavior>,\n\n );\n\n\n\n fn run(&mut self, (entities, _time, mut swarm_centers, swarm_behaviors): Self::SystemData) {\n\n for (entity, mut swarm_center) in (&entities, &mut swarm_centers).join() {\n\n swarm_center.entities = swarm_center\n\n .entities\n\n .iter()\n\n .filter(|swarmling_entity| !(swarm_behaviors.get(**swarmling_entity).is_none()))\n\n .cloned()\n\n .collect();\n\n if swarm_center.entities.len() == 0 {\n\n entities\n\n .delete(entity)\n\n .expect(\"unreachable, the entity has been used just before\");\n", "file_path": "src/systems/swarm_behavior.rs", "rank": 68, "score": 25287.442918000535 }, { "content": "use amethyst::{\n\n core::{\n\n math::Vector3,\n\n timing::Time,\n\n transform::components::{Parent, 
Transform},\n\n },\n\n ecs::*,\n\n shrev::EventChannel,\n\n};\n\n\n\nuse rand::{thread_rng, Rng};\n\nuse std::f32;\n\n\n\nuse crate::{\n\n components::{\n\n creatures::{AvoidObstaclesTag, Movement, Wander},\n\n swarm::{SwarmBehavior, SwarmCenter},\n\n },\n\n systems::spawner::CreatureSpawnEvent,\n\n};\n", "file_path": "src/systems/swarm_behavior.rs", "rank": 69, "score": 25287.124140900753 }, { "content": " ReadStorage<'s, Transform>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (entities, perceptions, mut detected_entities, grid, transforms): Self::SystemData,\n\n ) {\n\n for (entity, _) in (&entities, &perceptions).join() {\n\n match detected_entities.get(entity) {\n\n Some(_) => (),\n\n None => {\n\n detected_entities\n\n .insert(entity, DetectedEntities::default())\n\n .expect(\"Unreachable, we just tested the entity exists\");\n\n }\n\n }\n\n }\n\n\n\n for (perception, mut detected, transform) in\n\n (&perceptions, &mut detected_entities, &transforms).join()\n", "file_path": "src/systems/experimental/perception.rs", "rank": 70, "score": 25287.111038841562 }, { "content": " ReadStorage<'s, Transform>,\n\n WriteStorage<'s, DebugLinesComponent>,\n\n );\n\n\n\n fn run(&mut self, (detected_entities, transforms, mut debug_lines_comps): Self::SystemData) {\n\n for (detected, transform, debug_comp) in\n\n (&detected_entities, &transforms, &mut debug_lines_comps).join()\n\n {\n\n let mut pos = transform.global_matrix().column(3).xyz();\n\n pos[2] += 0.3;\n\n for (other_transform, _) in (&transforms, &detected.entities).join() {\n\n let mut other_pos = other_transform.global_matrix().column(3).xyz();\n\n other_pos[2] += 0.3;\n\n debug_comp.add_line(\n\n Point3::from(pos),\n\n Point3::from(other_pos),\n\n Srgba::new(1.0, 1.0, 0.0, 1.0),\n\n );\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/systems/experimental/perception.rs", "rank": 71, "score": 25287.010498429874 }, { "content": " }\n\n}\n\n\n\n/// A component that stores the distance to the closest entity. 
The type T is used to tag the entity.\n\npub struct Closest<T> {\n\n pub distance: Vector3<f32>,\n\n _phantom: PhantomData<T>,\n\n}\n\n\n\nimpl<T> Closest<T> {\n\n pub fn new(distance: Vector3<f32>) -> Closest<T> {\n\n Closest {\n\n distance,\n\n _phantom: PhantomData {},\n\n }\n\n }\n\n}\n\n\n\nimpl<T> Component for Closest<T>\n\nwhere\n", "file_path": "src/systems/behaviors/decision.rs", "rank": 72, "score": 25286.827639732266 }, { "content": " /// cardinal_direction vector, within a margin of a 1/4 PI RAD.\n\n fn wind_towards_direction(wind: Vector2<f32>, cardinal_direction: Vector2<f32>) -> bool {\n\n wind.angle(&cardinal_direction).abs() < f32::consts::FRAC_PI_4\n\n }\n\n}\n\n\n\n/// Controls the rolling animation of the Topplegrass.\n\n/// Also makes the entity skip up into the air every so often, to simulate it bumping into small\n\n/// rocks or the wind catching it or something.\n\n#[derive(Default)]\n\npub struct TopplingSystem;\n\n\n\nimpl<'s> System<'s> for TopplingSystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n WriteStorage<'s, Movement>,\n\n WriteStorage<'s, Transform>,\n\n ReadStorage<'s, TopplegrassTag>,\n\n WriteStorage<'s, FallingTag>,\n\n Read<'s, Wind>,\n", "file_path": "src/systems/experimental/topplegrass.rs", "rank": 73, "score": 25286.71250537322 }, { "content": " {\n\n detected.entities = BitSet::new();\n\n let nearby_entities = grid.query(transform, perception.range);\n\n let pos = transform.global_matrix().column(3).xyz();\n\n let sq_range = perception.range * perception.range;\n\n for (other_entity, other_transform, _) in\n\n (&entities, &transforms, &nearby_entities).join()\n\n {\n\n let other_pos = other_transform.global_matrix().column(3).xyz();\n\n if (pos - other_pos).norm_squared() < sq_range {\n\n detected.entities.add(other_entity.id());\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct SpatialGridSystem;\n\n\n\nimpl<'s> System<'s> for SpatialGridSystem {\n", "file_path": "src/systems/experimental/perception.rs", "rank": 74, 
"score": 25286.47333952726 }, { "content": " }\n\n}\n\n\n\n/// Tags\n\n#[derive(Default)]\n\npub struct Prey;\n\n#[derive(Default)]\n\npub struct Predator;\n\n\n\n/// Write prey/predator queries to the faction entities. For each faction\n\n/// we calculate the set of entities that they consider prey and the set of entities they\n\n/// consider as predators.\n\npub struct QueryPredatorsAndPreySystem;\n\nimpl<'s> System<'s> for QueryPredatorsAndPreySystem {\n\n type SystemData = (\n\n Entities<'s>,\n\n ReadStorage<'s, HasFaction<Entity>>,\n\n ReadStorage<'s, FactionPrey<Entity>>,\n\n WriteStorage<'s, Query<Prey>>,\n\n WriteStorage<'s, Query<Predator>>,\n", "file_path": "src/systems/behaviors/decision.rs", "rank": 75, "score": 25286.322852620076 }, { "content": " }\n\n // Check which entities are no longer falling (because they reached the ground); remove\n\n // their falling tag, set their vertical speed to zero (we don't bounce) and correct their position.\n\n let no_longer_falling = (\n\n &entities,\n\n &mut transforms,\n\n &mut movements,\n\n &falling_tags,\n\n &topple_tags,\n\n )\n\n .join()\n\n .filter_map(|(entity, transform, movement, _, _)| {\n\n if transform.translation().z <= HEIGHT && movement.velocity.z.is_sign_negative() {\n\n transform.translation_mut().z = HEIGHT;\n\n movement.velocity.z = 0.0;\n\n Some(entity)\n\n } else {\n\n None\n\n }\n\n })\n\n .collect::<Vec<Entity>>();\n\n for entity in no_longer_falling {\n\n falling_tags.remove(entity);\n\n }\n\n }\n\n}\n", "file_path": "src/systems/experimental/topplegrass.rs", "rank": 76, "score": 25286.173179163776 }, { "content": " (_entities, time, transforms, _swarm_centers, swarm_behaviors, mut movements): Self::SystemData,\n\n ) {\n\n let delta_seconds = time.delta_seconds();\n\n\n\n // avoid divide-by-zero when delta_seconds is zero\n\n if delta_seconds <= f32::EPSILON {\n\n return;\n\n }\n\n let time_step = 0.01;\n\n let iterations = (delta_seconds / time_step) as u32 + 1;\n\n for (transform, 
swarm_behavior, mut movement) in\n\n (&transforms, &swarm_behaviors, &mut movements).join()\n\n {\n\n let original_position = transform.translation();\n\n let mut current_position = original_position.clone();\n\n let mut current_velocity = movement.velocity.clone();\n\n let pull_factor = 10.0;\n\n let side_factor = 5.0;\n\n for t in 0..iterations {\n\n let iter_step = time_step.min(delta_seconds - time_step * t as f32);\n", "file_path": "src/systems/swarm_behavior.rs", "rank": 77, "score": 25285.649149228266 }, { "content": " ReadStorage<'s, HasFaction<Entity>>,\n\n ReadStorage<'s, Query<T>>,\n\n WriteStorage<'s, Closest<T>>,\n\n );\n\n\n\n fn run(\n\n &mut self,\n\n (entities, transforms, factions, faction_query, mut closest): Self::SystemData,\n\n ) {\n\n for (entity, transform, faction) in (&entities, &transforms, &factions).join() {\n\n // Remove the old value. The referenced might have moved or has been deleted.\n\n closest.remove(entity);\n\n\n\n // If the query is not attached to the faction, we don't calculate the closest entity.\n\n let query_entities = faction_query.get(faction.faction);\n\n if query_entities.is_none() {\n\n continue;\n\n }\n\n\n\n let mut closest_opt = None;\n", "file_path": "src/systems/behaviors/decision.rs", "rank": 78, "score": 25285.584301872 }, { "content": " }\n\n}\n\n\n\n/// Seek out the entity referenced by `Closest<T>` and apply a steering force\n\n/// towards that entity. 
The steering force can be modified using the `attraction_modifier` factor.\n\n/// By setting `attraction_modifier` to `-1` this system will behave like `Evade`.\n\npub struct SeekSystem<T> {\n\n attraction_modifier: Rotation3<f32>,\n\n attraction_magnitude: f32,\n\n _phantom: PhantomData<T>,\n\n}\n\n\n\nimpl<T> SeekSystem<T> {\n\n pub fn new(attraction_modifier: Rotation3<f32>, attraction_magnitude: f32) -> SeekSystem<T> {\n\n SeekSystem {\n\n attraction_modifier,\n\n attraction_magnitude,\n\n _phantom: PhantomData {},\n\n }\n\n }\n", "file_path": "src/systems/behaviors/decision.rs", "rank": 79, "score": 25285.327133045877 }, { "content": "}\n\n\n\nimpl<'s, T> System<'s> for SeekSystem<T>\n\nwhere\n\n T: shred::Resource + Default,\n\n{\n\n type SystemData = (\n\n Entities<'s>,\n\n ReadStorage<'s, Closest<T>>,\n\n Read<'s, Time>,\n\n WriteStorage<'s, Movement>,\n\n );\n\n\n\n fn run(&mut self, (_entities, closest_things, time, mut movements): Self::SystemData) {\n\n let delta_time = time.delta_seconds();\n\n for (movement, closest) in (&mut movements, &closest_things).join() {\n\n if closest.distance.norm() < f32::EPSILON {\n\n continue;\n\n }\n\n let target_velocity = closest.distance.normalize() * self.attraction_magnitude;\n\n let steering_force = target_velocity - movement.velocity;\n\n movement.velocity += self.attraction_modifier * steering_force * delta_time;\n\n }\n\n }\n\n}\n", "file_path": "src/systems/behaviors/decision.rs", "rank": 80, "score": 25285.244275819405 }, { "content": " .with_size(100.0, 36.0)\n\n .with_position(-55.0, 20.0)\n\n .with_font(font_handle.clone())\n\n .with_font_size(24.0f32)\n\n .with_text_color([0.0f32, 0.0, 0.0, 1.0])\n\n .with_hover_text_color([0.2f32, 0.2f32, 0.2f32, 1.0f32])\n\n .with_press_text_color([0.5, 0.5, 0.5, 1.0])\n\n .build(button_resources)\n\n .1,\n\n );\n\n }\n\n\n\n fn run(&mut self, (ui_events, mut ui_texts, mut input_events): Self::SystemData) {\n\n for event in 
ui_events.read(self.ui_reader_id.as_mut().unwrap()) {\n\n match event.event_type {\n\n UiEventType::Click => {\n\n let action_to_send = {\n\n if event.target == self.pause_button.as_ref().unwrap().image_entity {\n\n \"TogglePause\"\n\n } else if event.target\n", "file_path": "src/systems/time_control.rs", "rank": 81, "score": 25285.10494423918 }, { "content": " let mut swarm_entity_builder = lazy_update.create_entity(&entities);\n\n let x = rng.gen_range(-10.0, 10.0);\n\n let y = rng.gen_range(-10.0, 10.0);\n\n let mut transform = Transform::default();\n\n transform.set_translation_xyz(x, y, 2.0);\n\n swarm_entity_builder = swarm_entity_builder.with(transform);\n\n let movement = Movement {\n\n velocity: Vector3::new(0.0, 0.0, 0.0),\n\n max_movement_speed: 0.8,\n\n };\n\n swarm_entity_builder = swarm_entity_builder.with(movement);\n\n let wander = Wander {\n\n radius: 1.0,\n\n angle: 0.0,\n\n };\n\n swarm_entity_builder = swarm_entity_builder.with(wander);\n\n let avoid_obstacles_tag: AvoidObstaclesTag = AvoidObstaclesTag;\n\n swarm_entity_builder = swarm_entity_builder.with(avoid_obstacles_tag);\n\n let swarm_entity = swarm_entity_builder.build();\n\n let mut swarm_center = SwarmCenter::default();\n", "file_path": "src/systems/swarm_behavior.rs", "rank": 82, "score": 25285.035910852912 }, { "content": "use amethyst::{\n\n assets::{AssetStorage, Loader},\n\n ecs::*,\n\n input::InputEvent,\n\n shrev::{EventChannel, ReaderId},\n\n ui::*,\n\n};\n\n\n\n#[derive(Default)]\n\npub struct TimeControlSystem {\n\n ui_reader_id: Option<ReaderId<UiEvent>>,\n\n input_reader_id: Option<ReaderId<InputEvent<String>>>,\n\n\n\n pause_button: Option<UiButton>,\n\n speed_up_button: Option<UiButton>,\n\n slow_down_button: Option<UiButton>,\n\n}\n\n\n\nimpl<'s> System<'s> for TimeControlSystem {\n\n type SystemData = (\n", "file_path": "src/systems/time_control.rs", "rank": 83, "score": 25284.633804032772 }, { "content": " if local.translation().x >= bounds.right || 
local.translation().x <= bounds.left {\n\n movement.velocity.x = -movement.velocity.x;\n\n }\n\n\n\n if local.translation().y >= bounds.top || local.translation().y <= bounds.bottom {\n\n movement.velocity.y = -movement.velocity.y;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/systems/behaviors/ricochet.rs", "rank": 84, "score": 25284.543234061523 }, { "content": " };\n\n swarmling_entity_builder = swarmling_entity_builder\n\n .with(transform)\n\n .with(parent)\n\n .with(movement);\n\n let swarmling_entity = swarmling_entity_builder.build();\n\n swarm_center.entities.push(swarmling_entity);\n\n spawn_events.single_write(CreatureSpawnEvent {\n\n creature_type: \"Ixie\".to_string(),\n\n entity: swarmling_entity,\n\n });\n\n }\n\n lazy_update.insert(swarm_entity, swarm_center);\n\n }\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct SwarmCenterSystem {}\n\n\n", "file_path": "src/systems/swarm_behavior.rs", "rank": 85, "score": 25284.072628742862 }, { "content": "pub mod gravity;\n\npub mod out_of_bounds;\n\npub mod perception;\n\npub mod topplegrass;\n\npub mod wind_control;\n", "file_path": "src/systems/experimental/mod.rs", "rank": 86, "score": 25283.249169702907 }, { "content": " fn ready_to_spawn(&mut self, delta_seconds: f32) -> bool {\n\n self.secs_to_next_spawn -= delta_seconds;\n\n if self.secs_to_next_spawn.is_sign_negative() {\n\n self.secs_to_next_spawn = SPAWN_INTERVAL;\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n /// Returns a Vector3<f32> representing the position in which to spawn the next entity.\n\n /// Entities will be spawned at a random point on one of the four world borders; specifically,\n\n /// the one that the wind direction is facing away from. 
In other words: upwind from the\n\n /// center of the world.\n\n fn gen_spawn_location(wind: &Wind, bounds: &WorldBounds) -> Vector3<f32> {\n\n let mut rng = thread_rng();\n\n if Self::wind_towards_direction(wind.wind, Vector2::new(1.0, 0.0)) {\n\n Vector3::new(\n\n bounds.left,\n\n rng.gen_range(bounds.bottom, bounds.top),\n", "file_path": "src/systems/experimental/topplegrass.rs", "rank": 87, "score": 25283.202397128 }, { "content": " // Select some of the topplegrass that are on ground to jump up into the air slightly.\n\n let airborne = (&entities, &mut movements, &topple_tags, !&falling_tags)\n\n .join()\n\n .filter_map(|(entity, movement, _, _)| {\n\n if movement.velocity.magnitude() > JUMP_THRESHOLD\n\n && rng.gen::<f32>() < JUMP_PROBABILITY * time.delta_seconds()\n\n {\n\n movement.velocity.z = rng.gen_range(0.4, 0.7);\n\n Some(entity)\n\n } else {\n\n None\n\n }\n\n })\n\n .collect::<Vec<Entity>>();\n\n // Attach the falling tag to the selected topplegrass entities, which lets the GravitySystem\n\n // know to start affecting it.\n\n for entity in airborne {\n\n falling_tags\n\n .insert(entity, FallingTag)\n\n .expect(\"Unable to add falling tag to entity\");\n", "file_path": "src/systems/experimental/topplegrass.rs", "rank": 88, "score": 25283.167877454114 }, { "content": " HEIGHT,\n\n )\n\n } else if Self::wind_towards_direction(wind.wind, Vector2::new(0.0, 1.0)) {\n\n Vector3::new(\n\n rng.gen_range(bounds.left, bounds.right),\n\n bounds.bottom,\n\n HEIGHT,\n\n )\n\n } else if Self::wind_towards_direction(wind.wind, Vector2::new(-1.0, 0.0)) {\n\n Vector3::new(\n\n bounds.right,\n\n rng.gen_range(bounds.bottom, bounds.top),\n\n HEIGHT,\n\n )\n\n } else {\n\n Vector3::new(rng.gen_range(bounds.left, bounds.right), bounds.top, HEIGHT)\n\n }\n\n }\n\n\n\n /// Returns true if and only if the given wind vector is roughly in line with the given\n", "file_path": "src/systems/experimental/topplegrass.rs", "rank": 89, "score": 25282.914798311922 }, { "content": " 
);\n\n\n\n fn run(\n\n &mut self,\n\n (entities, has_faction, faction_preys_set, mut preys_query, mut predators_query): Self::SystemData,\n\n ) {\n\n for (faction, _) in (&entities, &faction_preys_set).join() {\n\n if !preys_query.contains(faction) {\n\n preys_query\n\n .insert(faction, Query::<Prey>::new())\n\n .expect(\"unreachable: we just queried\");\n\n }\n\n if !predators_query.contains(faction) {\n\n predators_query\n\n .insert(faction, Query::<Predator>::new())\n\n .expect(\"unreachable: we just queried\");\n\n }\n\n\n\n predators_query.get_mut(faction).unwrap().0.clear();\n\n }\n", "file_path": "src/systems/behaviors/decision.rs", "rank": 90, "score": 25282.842456706694 }, { "content": " Read<'s, EventChannel<UiEvent>>,\n\n WriteStorage<'s, UiText>,\n\n Write<'s, EventChannel<InputEvent<String>>>,\n\n );\n\n\n\n fn setup(&mut self, res: &mut Resources) {\n\n Self::SystemData::setup(res);\n\n self.ui_reader_id = Some(res.fetch_mut::<EventChannel<UiEvent>>().register_reader());\n\n self.input_reader_id = Some(\n\n res.fetch_mut::<EventChannel<InputEvent<String>>>()\n\n .register_reader(),\n\n );\n\n\n\n let font_handle = {\n\n let loader = res.fetch::<Loader>();\n\n let font_storage = res.fetch::<AssetStorage<FontAsset>>();\n\n loader.load(\n\n \"assets/fonts/OpenSans-Regular.ttf\",\n\n TtfFormat,\n\n (),\n", "file_path": "src/systems/time_control.rs", "rank": 91, "score": 25282.222816429356 }, { "content": " ) {\n\n // Right now the only obstacles are the world bound walls, so it's\n\n // safe to clear this out.\n\n closest_obstacle.clear();\n\n\n\n let threshold = 3.0f32.powi(2);\n\n for (entity, transform, _, _) in\n\n (&entities, &transforms, &avoid_obstacles, &movements).join()\n\n {\n\n // Find the closest wall to this entity\n\n let wall_dir = closest_wall(&transform.translation(), &world_bounds);\n\n if wall_dir.magnitude_squared() < threshold {\n\n let dir = Vector3::new(wall_dir[0], wall_dir[1], wall_dir[2]);\n\n closest_obstacle\n\n 
.insert(entity, Closest::<Obstacle>::new(dir))\n\n .expect(\"Unable to add obstacle to entity\");\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/systems/behaviors/obstacle.rs", "rank": 92, "score": 25281.85266673012 }, { "content": " for (wander, movement, local) in (&mut wanders, &mut movements, &locals).join() {\n\n let position = local.translation();\n\n let future_position = position + movement.velocity * 0.5;\n\n\n\n let direction = wander.get_direction();\n\n let target = future_position + direction;\n\n\n\n let desired_velocity = target - position;\n\n\n\n movement.velocity += desired_velocity * delta_time;\n\n // Quick and dirty fix to keep entities from wandering into the ground if they target\n\n // an entity not on the same z-level as themselves.\n\n movement.velocity.z = 0.0;\n\n\n\n let change = 10.0;\n\n if rng.gen::<bool>() {\n\n wander.angle += change * delta_time; // Radians per second\n\n } else {\n\n wander.angle -= change * delta_time;\n\n }\n", "file_path": "src/systems/behaviors/wander.rs", "rank": 93, "score": 25281.70323012218 }, { "content": " let nb_swarm_individuals = rng.gen_range(3, 10);\n\n for _ in 0..nb_swarm_individuals {\n\n let mut swarmling_entity_builder = lazy_update.create_entity(&entities);\n\n let swarm_behavior = SwarmBehavior {\n\n swarm_center: Some(swarm_entity),\n\n attraction: 0.5f32,\n\n deviation: 0.5f32,\n\n };\n\n swarmling_entity_builder = swarmling_entity_builder.with(swarm_behavior);\n\n let mut transform = Transform::default();\n\n let x = rng.gen_range(-1.0, 1.0);\n\n let y = rng.gen_range(-1.0, 1.0);\n\n transform.set_translation_xyz(x, y, 0.0);\n\n transform.set_scale(Vector3::new(0.1, 0.1, 0.1));\n\n let parent = Parent {\n\n entity: swarm_entity,\n\n };\n\n let movement = Movement {\n\n velocity: Vector3::new(rng.gen_range(-1.0, 1.0), rng.gen_range(-1.0, 1.0), 0.0),\n\n max_movement_speed: 5.0,\n", "file_path": "src/systems/swarm_behavior.rs", "rank": 94, "score": 25281.490003473602 }, { "content": " let mut 
min_sq_distance = 5.0f32.powi(2);\n\n\n\n for (_, query_transform) in (&query_entities.unwrap().0, &transforms).join() {\n\n let position = transform.translation();\n\n let query_position = query_transform.translation();\n\n let difference = query_position - position;\n\n let sq_distance = difference.magnitude_squared();\n\n if sq_distance < min_sq_distance {\n\n min_sq_distance = sq_distance;\n\n closest_opt = Some(difference);\n\n }\n\n }\n\n\n\n if let Some(c) = closest_opt {\n\n let closest_component = Closest::new(c);\n\n closest\n\n .insert(entity, closest_component)\n\n .expect(\"unreachable: we just queried\");\n\n }\n\n }\n", "file_path": "src/systems/behaviors/decision.rs", "rank": 95, "score": 25281.1947200089 }, { "content": " let delta_time = time.delta_real_seconds();\n\n let move_factor = 12.0 * delta_time;\n\n for (_, name, transform) in (&cameras, &names, &mut transforms).join() {\n\n if name.name == \"Main camera\" {\n\n if input_handler.action_is_down(\"CameraMoveUp\").unwrap() {\n\n transform.move_up(move_factor);\n\n }\n\n if input_handler.action_is_down(\"CameraMoveDown\").unwrap() {\n\n transform.move_down(move_factor);\n\n }\n\n if input_handler.action_is_down(\"CameraMoveLeft\").unwrap() {\n\n transform.move_left(move_factor);\n\n }\n\n if input_handler.action_is_down(\"CameraMoveRight\").unwrap() {\n\n transform.move_right(move_factor);\n\n }\n\n if input_handler.action_is_down(\"CameraMoveForward\").unwrap() {\n\n transform.move_forward(move_factor);\n\n }\n\n if input_handler.action_is_down(\"CameraMoveBackward\").unwrap() {\n\n transform.move_backward(move_factor);\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/systems/camera_movement.rs", "rank": 96, "score": 25281.130949073824 }, { "content": "pub mod decision;\n\npub mod obstacle;\n\npub mod ricochet;\n\npub mod wander;\n", "file_path": "src/systems/behaviors/mod.rs", "rank": 97, "score": 25280.075313177516 }, { "content": " for event in 
input_events.read(self.input_reader_id.as_mut().unwrap()) {\n\n match event {\n\n InputEvent::ActionPressed(action_name) => match action_name.as_ref() {\n\n \"TogglePause\" => {\n\n let pause_button_text_entity =\n\n self.pause_button.as_ref().unwrap().text_entity;\n\n if let Some(text) = ui_texts.get_mut(pause_button_text_entity) {\n\n if text.text == \"Pause\" {\n\n text.text = \"Play\".to_string();\n\n } else if text.text == \"Play\" {\n\n text.text = \"Pause\".to_string();\n\n }\n\n }\n\n }\n\n _ => (),\n\n },\n\n _ => (),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/systems/time_control.rs", "rank": 98, "score": 25278.476715941848 }, { "content": "/// A new topplegrass entity is spawned periodically, SPAWN_INTERVAL is the period in seconds.\n\n/// Spawn interval is currently set quite fast, for testing purposes. In the final game,\n\n/// a spawn internal of at least a few minutes might be better.\n\nconst SPAWN_INTERVAL: f32 = 10.0;\n\n/// The standard scaling to apply to the entity.\n\nconst TOPPLEGRASS_BASE_SCALE: f32 = 0.002;\n\n/// At which height the topplegrass entity should spawn.\n\nconst HEIGHT: f32 = 0.5;\n\n/// If we knew the radius of the toppleweed, we could calculate the perfect angular velocity,\n\n/// but instead we'll use this magic value we got through trial and error.\n\n/// It should be close enough to the actual value that the entity doesn't appear to slip.\n\nconst ANGULAR_V_MAGIC: f32 = 2.0;\n\n/// The minimum velocity that a topplegrass entity must have in order to start jumping up into the air.\n\n/// This is to prevent topplegrass from jumping in a weird way when there is (almost) no wind.\n\nconst JUMP_THRESHOLD: f32 = 1.0;\n\n/// The chance per elapsed second since last frame that any given non-falling\n\n/// topplegrass will jump up into the air slightly.\n\n/// Not a great way of doing it, but probably good enough until we get a physics system?\n\nconst JUMP_PROBABILITY: f32 = 4.0;\n\n\n", "file_path": 
"src/systems/experimental/topplegrass.rs", "rank": 99, "score": 25278.302941864094 } ]
Rust
src/operation/aggregate/change_stream.rs
moy2010/mongo-rust-driver
dc085119dac3943773115bf4a0f923d17156dd06
use crate::{ bson::{doc, Document}, change_stream::{event::ResumeToken, ChangeStreamData, WatchArgs}, cmap::{Command, RawCommandResponse, StreamDescription}, cursor::CursorSpecification, error::Result, operation::{append_options, Operation, Retryability}, options::{ChangeStreamOptions, SelectionCriteria, WriteConcern}, }; use super::Aggregate; pub(crate) struct ChangeStreamAggregate { inner: Aggregate, args: WatchArgs, resume_data: Option<ChangeStreamData>, } impl ChangeStreamAggregate { pub(crate) fn new(args: &WatchArgs, resume_data: Option<ChangeStreamData>) -> Result<Self> { Ok(Self { inner: Self::build_inner(args)?, args: args.clone(), resume_data, }) } fn build_inner(args: &WatchArgs) -> Result<Aggregate> { let mut bson_options = Document::new(); append_options(&mut bson_options, args.options.as_ref())?; let mut agg_pipeline = vec![doc! { "$changeStream": bson_options }]; agg_pipeline.extend(args.pipeline.iter().cloned()); Ok(Aggregate::new( args.target.clone(), agg_pipeline, args.options.as_ref().map(|o| o.aggregate_options()), )) } } impl Operation for ChangeStreamAggregate { type O = (CursorSpecification, ChangeStreamData); type Command = Document; const NAME: &'static str = "aggregate"; fn build(&mut self, description: &StreamDescription) -> Result<Command> { if let Some(data) = &mut self.resume_data { let mut new_opts = self.args.options.clone().unwrap_or_default(); if let Some(token) = data.resume_token.take() { if new_opts.start_after.is_some() && !data.document_returned { new_opts.start_after = Some(token); new_opts.start_at_operation_time = None; } else { new_opts.resume_after = Some(token); new_opts.start_after = None; new_opts.start_at_operation_time = None; } } else { let saved_time = new_opts .start_at_operation_time .as_ref() .or_else(|| data.initial_operation_time.as_ref()); if saved_time.is_some() && description.max_wire_version.map_or(false, |v| v >= 7) { new_opts.start_at_operation_time = saved_time.cloned(); } } self.inner = 
Self::build_inner(&WatchArgs { options: Some(new_opts), ..self.args.clone() })?; } self.inner.build(description) } fn extract_at_cluster_time( &self, response: &bson::RawDocument, ) -> Result<Option<bson::Timestamp>> { self.inner.extract_at_cluster_time(response) } fn handle_response( &self, response: RawCommandResponse, description: &StreamDescription, ) -> Result<Self::O> { let op_time = response .raw_body() .get("operationTime")? .and_then(bson::RawBsonRef::as_timestamp); let spec = self.inner.handle_response(response, description)?; let mut data = ChangeStreamData { resume_token: ResumeToken::initial(self.args.options.as_ref(), &spec), ..ChangeStreamData::default() }; let has_no_time = |o: &ChangeStreamOptions| { o.start_at_operation_time.is_none() && o.resume_after.is_none() && o.start_after.is_none() }; if self.args.options.as_ref().map_or(true, has_no_time) && description.max_wire_version.map_or(false, |v| v >= 7) && spec.initial_buffer.is_empty() && spec.post_batch_resume_token.is_none() { data.initial_operation_time = op_time; } Ok((spec, data)) } fn selection_criteria(&self) -> Option<&SelectionCriteria> { self.inner.selection_criteria() } fn supports_read_concern(&self, description: &StreamDescription) -> bool { self.inner.supports_read_concern(description) } fn write_concern(&self) -> Option<&WriteConcern> { self.inner.write_concern() } fn retryability(&self) -> Retryability { self.inner.retryability() } }
use crate::{ bson::{doc, Document}, change_stream::{event::ResumeToken, ChangeStreamData, WatchArgs}, cmap::{Command, RawCommandResponse, StreamDescription}, cursor::CursorSpecification, error::Result, operation::{append_options, Operation, Retryability}, options::{ChangeStreamOptions, SelectionCriteria, WriteConcern}, }; use super::Aggregate; pub(crate) struct ChangeStreamAggregate { inner: Aggregate, args: WatchArgs, resume_data: Option<ChangeStreamData>, } impl ChangeStreamAggregate { pub(crate) fn new(args: &WatchArgs, resume_data: Option<ChangeStreamData>) -> Result<Self> { Ok(Self { inner: Self::build_inner(args)?, args: args.clone(), resume_data, }) } fn build_inner(args: &WatchArgs) -> Result<Aggregate> { let mut bson_options = Document::new(); append_options(&mut bson_options, args.options.as_ref())?; let mut agg_pipeline = vec![doc! { "$changeStream": bson_options }]; agg_pipeline.extend(args.pipeline.iter().cloned()); Ok(Aggregate::new( args.target.clone(), agg_pipeline, args.options.as_ref().map(|o| o.aggregate_options()), )) } } impl Operation for ChangeStreamAggregate { type O = (CursorSpecification, ChangeStreamData); type Command = Document; const NAME: &'static str = "aggregate"; fn build(&mut self, description: &StreamDescription) -> Result<Command> { if let Some(data) = &mut self.resume_data { let mut new_opts = self.args.options.clone().unwrap_or_default(); if let Some(token) = data.resume_token.take() { if new_opts.start_after.is_some() && !data.document_returned { new_opts.start_after = Some(token); new_opts.start_at_operation_time = None; } else { new_opts.resume_after = Some(token); new_opts.start_after = None; new_opts.start_at_operation_time = None; } } else { let saved_time = new_opts .start_at_operation_time .as_ref() .or_else(|| data.initial_operation_time.as_ref()); if saved_time.is_some() && description.max_wire_version.map_or(false, |v| v >= 7) { new_opts.start_at_operation_time = saved_time.cloned(); } } self.inner = 
Self::build_inner(&WatchArgs { options: Some(new_opts), ..self.args.clone() })?; } self.inner.build(description) }
fn handle_response( &self, response: RawCommandResponse, description: &StreamDescription, ) -> Result<Self::O> { let op_time = response .raw_body() .get("operationTime")? .and_then(bson::RawBsonRef::as_timestamp); let spec = self.inner.handle_response(response, description)?; let mut data = ChangeStreamData { resume_token: ResumeToken::initial(self.args.options.as_ref(), &spec), ..ChangeStreamData::default() }; let has_no_time = |o: &ChangeStreamOptions| { o.start_at_operation_time.is_none() && o.resume_after.is_none() && o.start_after.is_none() }; if self.args.options.as_ref().map_or(true, has_no_time) && description.max_wire_version.map_or(false, |v| v >= 7) && spec.initial_buffer.is_empty() && spec.post_batch_resume_token.is_none() { data.initial_operation_time = op_time; } Ok((spec, data)) } fn selection_criteria(&self) -> Option<&SelectionCriteria> { self.inner.selection_criteria() } fn supports_read_concern(&self, description: &StreamDescription) -> bool { self.inner.supports_read_concern(description) } fn write_concern(&self) -> Option<&WriteConcern> { self.inner.write_concern() } fn retryability(&self) -> Retryability { self.inner.retryability() } }
fn extract_at_cluster_time( &self, response: &bson::RawDocument, ) -> Result<Option<bson::Timestamp>> { self.inner.extract_at_cluster_time(response) }
function_block-full_function
[ { "content": "fn build_test(db_name: &str, mut list_collections: ListCollections, mut expected_body: Document) {\n\n let mut cmd = list_collections\n\n .build(&StreamDescription::new_testing())\n\n .expect(\"build should succeed\");\n\n assert_eq!(cmd.name, \"listCollections\");\n\n assert_eq!(cmd.target_db, db_name);\n\n\n\n bson_util::sort_document(&mut cmd.body);\n\n bson_util::sort_document(&mut expected_body);\n\n\n\n assert_eq!(cmd.body, expected_body);\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn build() {\n\n let list_collections = ListCollections::new(\"test_db\".to_string(), None, false, None);\n\n let expected_body = doc! {\n\n \"listCollections\": 1,\n\n \"nameOnly\": false,\n", "file_path": "src/operation/list_collections/test.rs", "rank": 0, "score": 291178.1652512546 }, { "content": "fn server_type_from_str(s: &str) -> Option<ServerType> {\n\n let t = match s {\n\n \"Standalone\" => ServerType::Standalone,\n\n \"Mongos\" => ServerType::Mongos,\n\n \"RSPrimary\" => ServerType::RsPrimary,\n\n \"RSSecondary\" => ServerType::RsSecondary,\n\n \"RSArbiter\" => ServerType::RsArbiter,\n\n \"RSOther\" => ServerType::RsOther,\n\n \"RSGhost\" => ServerType::RsGhost,\n\n \"LoadBalancer\" => ServerType::LoadBalancer,\n\n \"Unknown\" | \"PossiblePrimary\" => ServerType::Unknown,\n\n _ => return None,\n\n };\n\n\n\n Some(t)\n\n}\n\n\n\nasync fn run_test(test_file: TestFile) {\n\n let test_description = &test_file.description;\n\n\n", "file_path": "src/sdam/description/topology/test/sdam.rs", "rank": 1, "score": 286140.01805356954 }, { "content": "fn return_document_to_bool(return_document: Option<ReturnDocument>) -> Option<bool> {\n\n if let Some(return_document) = return_document {\n\n return match return_document {\n\n ReturnDocument::After => Some(true),\n\n ReturnDocument::Before => Some(false),\n\n };\n\n }\n\n None\n\n}\n", "file_path": 
"src/operation/find_and_modify/options.rs", "rank": 2, "score": 237961.8715849701 }, { "content": "fn is_master_response_from_server_type(server_type: ServerType) -> Option<IsMasterCommandResponse> {\n\n let mut response = IsMasterCommandResponse::default();\n\n\n\n match server_type {\n\n ServerType::Unknown => {\n\n return None;\n\n }\n\n ServerType::Mongos => {\n\n response.msg = Some(\"isdbgrid\".into());\n\n }\n\n ServerType::RsPrimary => {\n\n response.set_name = Some(\"foo\".into());\n\n response.is_writable_primary = Some(true);\n\n }\n\n ServerType::RsOther => {\n\n response.set_name = Some(\"foo\".into());\n\n response.hidden = Some(true);\n\n }\n\n ServerType::RsSecondary => {\n\n response.set_name = Some(\"foo\".into());\n", "file_path": "src/sdam/description/topology/server_selection/test/mod.rs", "rank": 3, "score": 231604.7910197403 }, { "content": "/// Splits a string into a section before a given index and a section exclusively after the index.\n\n/// Empty portions are returned as `None`.\n\nfn exclusive_split_at(s: &str, i: usize) -> (Option<&str>, Option<&str>) {\n\n let (l, r) = s.split_at(i);\n\n\n\n let lout = if !l.is_empty() { Some(l) } else { None };\n\n let rout = if r.len() > 1 { Some(&r[1..]) } else { None };\n\n\n\n (lout, rout)\n\n}\n\n\n", "file_path": "src/client/options/mod.rs", "rank": 4, "score": 229950.41617017024 }, { "content": "fn init_db_and_coll(client: &Client, db_name: &str, coll_name: &str) -> Collection<Document> {\n\n let coll = client.database(db_name).collection(coll_name);\n\n coll.drop(None).unwrap();\n\n coll\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 5, "score": 229579.2964976398 }, { "content": "fn validate_userinfo(s: &str, userinfo_type: &str) -> Result<()> {\n\n if s.chars().any(|c| USERINFO_RESERVED_CHARACTERS.contains(&c)) {\n\n return Err(ErrorKind::InvalidArgument {\n\n message: format!(\"{} must be URL encoded\", userinfo_type),\n\n }\n\n .into());\n\n }\n\n\n\n // All instances of '%' in the 
username must be part of an percent-encoded substring. This means\n\n // that there must be two hexidecimal digits following any '%' in the username.\n\n if s.split('%')\n\n .skip(1)\n\n .any(|part| part.len() < 2 || part[0..2].chars().any(|c| !c.is_ascii_hexdigit()))\n\n {\n\n return Err(ErrorKind::InvalidArgument {\n\n message: \"username/password cannot contain unescaped %\".to_string(),\n\n }\n\n .into());\n\n }\n\n\n", "file_path": "src/client/options/mod.rs", "rank": 6, "score": 226505.83241018103 }, { "content": "pub fn merge_uri_options(given_uri: &str, uri_options: Option<&Document>) -> String {\n\n let uri_options = match uri_options {\n\n Some(opts) => opts,\n\n None => return given_uri.to_string(),\n\n };\n\n let mut given_uri_parts = given_uri.split('?');\n\n\n\n let mut uri = String::from(given_uri_parts.next().unwrap());\n\n // A connection string has two slashes before the host list and one slash before the auth db\n\n // name. If an auth db name is not provided the latter slash might not be present, so it needs\n\n // to be added manually.\n\n if uri.chars().filter(|c| *c == '/').count() < 3 {\n\n uri.push('/');\n\n }\n\n uri.push('?');\n\n\n\n if let Some(options) = given_uri_parts.next() {\n\n let options = options.split('&');\n\n for option in options {\n\n let key = option.split('=').next().unwrap();\n", "file_path": "src/test/spec/unified_runner/test_file.rs", "rank": 7, "score": 224049.67095841537 }, { "content": "/// Validates that a `saslStart` or `saslContinue` command response is successful.\n\nfn validate_command_success(auth_mechanism: &str, response: &Document) -> Result<()> {\n\n let ok = match response.get(\"ok\") {\n\n Some(ok) => ok,\n\n None => return Ok(()),\n\n };\n\n\n\n match bson_util::get_int(ok) {\n\n Some(1) => Ok(()),\n\n Some(_) => Err(Error::authentication_error(\n\n auth_mechanism,\n\n response\n\n .get_str(\"errmsg\")\n\n .unwrap_or(\"Authentication failure\"),\n\n )),\n\n _ => 
Err(Error::invalid_authentication_response(auth_mechanism)),\n\n }\n\n}\n\n\n\n/// Encapsulates the parsing of the response to a `saslStart` or `saslContinue` command.\n\npub(super) struct SaslResponse {\n", "file_path": "src/client/auth/sasl.rs", "rank": 8, "score": 214928.8017787552 }, { "content": "fn verify_max_await_time(max_await_time: Option<Duration>, cursor_type: Option<CursorType>) {\n\n let ns = Namespace::empty();\n\n let find = Find::new(\n\n ns,\n\n None,\n\n Some(FindOptions {\n\n cursor_type,\n\n max_await_time,\n\n ..Default::default()\n\n }),\n\n );\n\n\n\n let spec = handle_response_test(\n\n &find,\n\n doc! {\n\n \"cursor\": {\n\n \"id\": 123,\n\n \"ns\": \"a.b\",\n\n \"firstBatch\": [],\n\n },\n", "file_path": "src/operation/find/test.rs", "rank": 9, "score": 212069.30054777997 }, { "content": "fn command_write_concerns(client: &EventClient, key: &str) -> Vec<Document> {\n\n client\n\n .get_command_started_events(&[key])\n\n .into_iter()\n\n .map(|d| d.command.get_document(\"writeConcern\").unwrap().clone())\n\n .collect()\n\n}\n", "file_path": "src/concern/test.rs", "rank": 10, "score": 210642.02414143016 }, { "content": "pub fn get_default_name(description: &str) -> String {\n\n let mut db_name = description\n\n .replace('$', \"%\")\n\n .replace(' ', \"_\")\n\n .replace('.', \"_\");\n\n // database names must have fewer than 38 characters\n\n db_name.truncate(37);\n\n db_name\n\n}\n\n\n", "file_path": "src/test/util/mod.rs", "rank": 11, "score": 207151.27445163284 }, { "content": "fn init_db_and_typed_coll<T>(client: &Client, db_name: &str, coll_name: &str) -> Collection<T> {\n\n let coll = client.database(db_name).collection(coll_name);\n\n coll.drop(None).unwrap();\n\n coll\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 12, "score": 206312.4181636831 }, { "content": "fn verify_max_staleness(max_staleness: Option<Duration>) -> crate::error::Result<()> {\n\n verify_max_staleness_inner(max_staleness)\n\n .map_err(|s| 
crate::error::ErrorKind::InvalidArgument { message: s }.into())\n\n}\n\n\n", "file_path": "src/sdam/description/topology/mod.rs", "rank": 13, "score": 203136.11733728414 }, { "content": "fn normalize_write_concern_doc(mut write_concern_doc: Document) -> Document {\n\n if let Some(w_timeout) = write_concern_doc.remove(\"wtimeout\") {\n\n write_concern_doc.insert(\"wtimeoutMS\", w_timeout);\n\n }\n\n\n\n if let Some(j) = write_concern_doc.remove(\"j\") {\n\n write_concern_doc.insert(\"journal\", j);\n\n }\n\n\n\n write_concern_doc\n\n}\n\n\n\nasync fn run_connection_string_test(test_file: TestFile) {\n\n for test_case in test_file.tests {\n\n match ClientOptions::parse(&test_case.uri).await {\n\n Ok(options) => {\n\n assert!(test_case.valid);\n\n\n\n if let Some(ref expected_read_concern) = test_case.read_concern {\n\n let mut actual_read_concern = Document::new();\n", "file_path": "src/test/spec/read_write_concern/connection_string.rs", "rank": 14, "score": 191322.15748411536 }, { "content": "fn parse_i64_ext_json(doc: &Document) -> Option<i64> {\n\n let number_string = doc.get(\"$numberLong\").and_then(Bson::as_str)?;\n\n number_string.parse::<i64>().ok()\n\n}\n\n\n", "file_path": "src/test/util/matchable.rs", "rank": 15, "score": 182632.92981990453 }, { "content": "fn percent_decode(s: &str, err_message: &str) -> Result<String> {\n\n match percent_encoding::percent_decode_str(s).decode_utf8() {\n\n Ok(result) => Ok(result.to_string()),\n\n Err(_) => Err(ErrorKind::InvalidArgument {\n\n message: err_message.to_string(),\n\n }\n\n .into()),\n\n }\n\n}\n\n\n", "file_path": "src/client/options/mod.rs", "rank": 16, "score": 178720.14394710245 }, { "content": "fn verify_max_staleness_inner(max_staleness: Option<Duration>) -> std::result::Result<(), String> {\n\n if max_staleness\n\n .map(|staleness| staleness > Duration::from_secs(0) && staleness < Duration::from_secs(90))\n\n .unwrap_or(false)\n\n {\n\n return Err(\"max staleness cannot be both positive and below 90 
seconds\".into());\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/sdam/description/topology/mod.rs", "rank": 17, "score": 176533.97704521948 }, { "content": "/// Get an Insert operation and the documents/options used to construct it.\n\nfn fixtures(opts: Option<InsertManyOptions>) -> TestFixtures {\n\n lazy_static! {\n\n static ref DOCUMENTS: Vec<Document> = vec![\n\n Document::new(),\n\n doc! {\"_id\": 1234, \"a\": 1},\n\n doc! {\"a\": 123, \"b\": \"hello world\" },\n\n ];\n\n }\n\n\n\n let options = opts.unwrap_or(InsertManyOptions {\n\n ordered: Some(true),\n\n write_concern: Some(WriteConcern::builder().journal(true).build()),\n\n ..Default::default()\n\n });\n\n\n\n let op = Insert::new(\n\n Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n },\n", "file_path": "src/operation/insert/test.rs", "rank": 18, "score": 174348.4604835777 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Body {\n\n n: u64,\n\n}\n", "file_path": "src/operation/count_documents/mod.rs", "rank": 19, "score": 172799.78304347402 }, { "content": "fn build_test(\n\n target: impl Into<AggregateTarget>,\n\n pipeline: Vec<Document>,\n\n options: Option<AggregateOptions>,\n\n mut expected_body: Document,\n\n) {\n\n let target = target.into();\n\n\n\n let mut aggregate = Aggregate::new(target.clone(), pipeline, options);\n\n\n\n let cmd = aggregate.build(&StreamDescription::new_testing()).unwrap();\n\n\n\n assert_eq!(cmd.name.as_str(), \"aggregate\");\n\n assert_eq!(cmd.target_db.as_str(), target.db_name());\n\n\n\n let cmd_bytes = aggregate.serialize_command(cmd).unwrap();\n\n let mut cmd_doc = bson::from_slice(&cmd_bytes).unwrap();\n\n\n\n bson_util::sort_document(&mut expected_body);\n\n bson_util::sort_document(&mut cmd_doc);\n", "file_path": "src/operation/aggregate/test.rs", "rank": 20, "score": 172595.96844057195 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct Documents<D> {\n\n documents: Vec<D>,\n\n}\n\n\n\n#[cfg_attr(feature 
= \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn generate_ids() {\n\n let docs = vec![doc! { \"x\": 1 }, doc! { \"_id\": 1_i32, \"x\": 2 }];\n\n\n\n let mut insert = Insert::new(Namespace::empty(), docs.iter().collect(), None);\n\n let cmd = insert.build(&StreamDescription::new_testing()).unwrap();\n\n let serialized = insert.serialize_command(cmd).unwrap();\n\n\n\n #[derive(Debug, Serialize, Deserialize)]\n\n struct D {\n\n x: i32,\n\n\n\n #[serde(rename = \"_id\")]\n\n id: Bson,\n\n }\n", "file_path": "src/operation/insert/test.rs", "rank": 21, "score": 168955.78932474795 }, { "content": "/// A Closure that executes an operation and returns the resultant future.\n\ntype OperationFn =\n\n Box<dyn FnOnce(Collection<Document>, &mut ClientSession) -> BoxFuture<Result<()>>>;\n\n\n\nimpl Operation {\n\n /// Execute the operation using the provided collection and session.\n\n async fn execute(self, coll: Collection<Document>, session: &mut ClientSession) -> Result<()> {\n\n (self.f)(coll, session).await\n\n }\n\n}\n\n\n\n/// Shorthand macro for defining an Operation.\n\nmacro_rules! op {\n\n ($name:expr, $is_read: expr, |$coll:ident, $s:ident| $body:expr) => {\n\n Operation {\n\n f: Box::new({ move |$coll, $s| async move { $body.await.map(|_| ()) }.boxed() }),\n\n name: $name,\n\n is_read: $is_read,\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/client/session/test/causal_consistency.rs", "rank": 22, "score": 166896.42854855198 }, { "content": "fn all_session_ops() -> impl Iterator<Item = Operation> {\n\n let mut ops = vec![];\n\n\n\n ops.push(op!(\"insert\", false, |coll, session| {\n\n coll.insert_one_with_session(doc! { \"x\": 1 }, None, session)\n\n }));\n\n\n\n ops.push(op!(\"insert\", false, |coll, session| {\n\n coll.insert_many_with_session(vec![doc! { \"x\": 1 }], None, session)\n\n }));\n\n\n\n ops.push(op!(\"find\", true, |coll, session| coll\n\n .find_one_with_session(doc! 
{ \"x\": 1 }, None, session)));\n\n\n\n ops.push(op!(\"find\", true, |coll, session| coll.find_with_session(\n\n doc! { \"x\": 1 },\n\n None,\n\n session\n\n )));\n\n\n", "file_path": "src/client/session/test/causal_consistency.rs", "rank": 23, "score": 161787.05910413197 }, { "content": "fn filter_servers_by_tag_sets(servers: &mut Vec<&ServerDescription>, tag_sets: &[TagSet]) {\n\n if tag_sets.is_empty() {\n\n return;\n\n }\n\n\n\n for tag_set in tag_sets {\n\n let matches_tag_set = |server: &&ServerDescription| server.matches_tag_set(tag_set);\n\n\n\n if servers.iter().any(matches_tag_set) {\n\n servers.retain(matches_tag_set);\n\n\n\n return;\n\n }\n\n }\n\n\n\n servers.clear();\n\n}\n", "file_path": "src/sdam/description/topology/server_selection/mod.rs", "rank": 24, "score": 150095.66961882127 }, { "content": "/// Returns a vector of documents that cannot be sent in one batch (35000 documents).\n\n/// Includes duplicate _id's across different batches.\n\nfn multibatch_documents_with_duplicate_keys() -> Vec<Document> {\n\n let large_doc = LARGE_DOC.clone();\n\n\n\n let mut docs: Vec<Document> = Vec::new();\n\n docs.extend(vec![large_doc.clone(); 7498]);\n\n\n\n docs.push(doc! { \"_id\": 1 });\n\n docs.push(doc! { \"_id\": 1 }); // error in first batch, index 7499\n\n\n\n docs.extend(vec![large_doc.clone(); 14999]);\n\n docs.push(doc! { \"_id\": 1 }); // error in second batch, index 22499\n\n\n\n docs.extend(vec![large_doc.clone(); 9999]);\n\n docs.push(doc! { \"_id\": 1 }); // error in third batch, index 32499\n\n\n\n docs.extend(vec![large_doc; 2500]);\n\n\n\n assert_eq!(docs.len(), 35000);\n\n docs\n\n}\n", "file_path": "src/test/coll.rs", "rank": 25, "score": 149631.13349896192 }, { "content": "fn write_concern_to_document(write_concern: &WriteConcern) -> Result<Document> {\n\n match bson::to_bson(&write_concern)? 
{\n\n Bson::Document(doc) => Ok(doc),\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "src/test/spec/read_write_concern/mod.rs", "rank": 26, "score": 148800.38791284838 }, { "content": "fn entity_matches(id: &str, actual: Option<&Bson>, entities: &EntityMap) -> Result<(), String> {\n\n let bson = entities.get(id).unwrap().as_bson();\n\n results_match_inner(actual, bson, false, false, Some(entities))\n\n}\n\n\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 27, "score": 148373.04007720118 }, { "content": "fn lists_eq(actual: &Option<Vec<String>>, expected: &[String]) -> bool {\n\n if let Some(actual) = actual {\n\n actual.as_slice() == expected\n\n } else {\n\n expected.is_empty()\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\", deny_unknown_fields)]\n\npub struct TestTopologyDescription {\n\n topology_type: String,\n\n set_name: Option<String>,\n\n servers: Vec<TestServerDescription>,\n\n}\n\n\n\nimpl PartialEq<TestTopologyDescription> for TopologyDescription {\n\n fn eq(&self, other: &TestTopologyDescription) -> bool {\n\n if self.topology_type.as_str() != other.topology_type.as_str()\n\n || self.set_name != other.set_name\n", "file_path": "src/sdam/description/topology/test/event.rs", "rank": 28, "score": 145002.61855528445 }, { "content": "fn convert_read_preference(test_read_pref: TestReadPreference) -> Option<ReadPreference> {\n\n let max_staleness = test_read_pref\n\n .max_staleness_seconds\n\n .map(Duration::from_secs);\n\n let options = ReadPreferenceOptions::builder()\n\n .tag_sets(test_read_pref.tag_sets)\n\n .max_staleness(max_staleness)\n\n .build();\n\n\n\n let read_pref = match &test_read_pref.mode.as_ref()?[..] 
{\n\n \"Primary\" => ReadPreference::Primary,\n\n \"Secondary\" => ReadPreference::Secondary { options },\n\n \"PrimaryPreferred\" => ReadPreference::PrimaryPreferred { options },\n\n \"SecondaryPreferred\" => ReadPreference::SecondaryPreferred { options },\n\n \"Nearest\" => ReadPreference::Nearest { options },\n\n m => panic!(\"invalid read preference mode: {}\", m),\n\n };\n\n\n\n Some(read_pref)\n\n}\n", "file_path": "src/sdam/description/topology/server_selection/test/logic.rs", "rank": 29, "score": 142094.23088743963 }, { "content": "fn payload_bytes(username: &str, password: &str) -> Vec<u8> {\n\n let mut bytes = vec![0];\n\n bytes.extend(username.as_bytes());\n\n\n\n bytes.push(0);\n\n bytes.extend(password.as_bytes());\n\n\n\n bytes\n\n}\n", "file_path": "src/client/auth/plain.rs", "rank": 30, "score": 138345.97780481217 }, { "content": "/// Choose a server from several suitable choices within the latency window according to\n\n/// the algorithm laid out in the server selection specification.\n\nfn select_server_in_latency_window(in_window: Vec<&Arc<Server>>) -> Option<Arc<Server>> {\n\n if in_window.is_empty() {\n\n return None;\n\n } else if in_window.len() == 1 {\n\n return Some(in_window[0].clone());\n\n }\n\n\n\n let mut rng = SmallRng::from_entropy();\n\n in_window\n\n .choose_multiple(&mut rng, 2)\n\n .min_by_key(|s| s.operation_count())\n\n .map(|server| (*server).clone())\n\n}\n\n\n\nimpl TopologyDescription {\n\n pub(crate) fn server_selection_timeout_error_message(\n\n &self,\n\n criteria: &SelectionCriteria,\n\n ) -> String {\n\n if self.has_available_servers() {\n", "file_path": "src/sdam/description/topology/server_selection/mod.rs", "rank": 31, "score": 137816.85437489516 }, { "content": "/// Parses a string slice of the form \"<expected_key>=<body>\" into \"<body>\", if possible.\n\nfn parse_kvp(str: &str, expected_key: char) -> Result<String> {\n\n if !str.starts_with(expected_key) || str.chars().nth(1) != Some('=') {\n\n 
Err(Error::invalid_authentication_response(\"SCRAM\"))\n\n } else {\n\n Ok(str.chars().skip(2).collect())\n\n }\n\n}\n\n\n\n/// Model of the first message sent by the client.\n\n#[derive(Debug)]\n\npub(crate) struct ClientFirst {\n\n source: String,\n\n\n\n message: String,\n\n\n\n gs2_header: Range<usize>,\n\n\n\n bare: Range<usize>,\n\n\n\n nonce: String,\n", "file_path": "src/client/auth/scram.rs", "rank": 32, "score": 136530.64692248014 }, { "content": "#[derive(Debug, Default, PartialEq)]\n\nstruct ClientOptionsParser {\n\n pub hosts: Vec<ServerAddress>,\n\n pub srv: bool,\n\n pub app_name: Option<String>,\n\n pub tls: Option<Tls>,\n\n pub heartbeat_freq: Option<Duration>,\n\n pub local_threshold: Option<Duration>,\n\n pub read_concern: Option<ReadConcern>,\n\n pub selection_criteria: Option<SelectionCriteria>,\n\n pub repl_set_name: Option<String>,\n\n pub write_concern: Option<WriteConcern>,\n\n pub server_selection_timeout: Option<Duration>,\n\n pub max_pool_size: Option<u32>,\n\n pub min_pool_size: Option<u32>,\n\n pub max_idle_time: Option<Duration>,\n\n pub wait_queue_timeout: Option<Duration>,\n\n pub compressors: Option<Vec<Compressor>>,\n\n pub connect_timeout: Option<Duration>,\n\n pub retry_reads: Option<bool>,\n\n pub retry_writes: Option<bool>,\n", "file_path": "src/client/options/mod.rs", "rank": 33, "score": 135510.1649446861 }, { "content": "fn get_compressors() -> Option<Vec<Compressor>> {\n\n #[allow(unused_mut)]\n\n let mut compressors = vec![];\n\n\n\n if *SNAPPY_COMPRESSION_ENABLED {\n\n #[cfg(feature = \"snappy-compression\")]\n\n compressors.push(Compressor::Snappy);\n\n #[cfg(not(feature = \"snappy-compression\"))]\n\n panic!(\"To use snappy compression, the \\\"snappy-compression\\\" feature flag must be set.\");\n\n }\n\n if *ZLIB_COMPRESSION_ENABLED {\n\n #[cfg(feature = \"zlib-compression\")]\n\n compressors.push(Compressor::Zlib { level: None });\n\n #[cfg(not(feature = \"zlib-compression\"))]\n\n panic!(\"To use zlib compression, 
the \\\"zlib-compression\\\" feature flag must be set.\");\n\n }\n\n if *ZSTD_COMPRESSION_ENABLED {\n\n #[cfg(feature = \"zstd-compression\")]\n\n compressors.push(Compressor::Zstd { level: None });\n\n #[cfg(not(feature = \"zstd-compression\"))]\n\n panic!(\"To use zstd compression, the \\\"zstd-compression\\\" feature flag must be set.\");\n\n }\n\n if compressors.is_empty() {\n\n None\n\n } else {\n\n Some(compressors)\n\n }\n\n}\n\n\n", "file_path": "src/test/mod.rs", "rank": 34, "score": 133967.66076677723 }, { "content": "fn verify_description_outcome(\n\n outcome: DescriptionOutcome,\n\n topology_description: TopologyDescription,\n\n test_description: &str,\n\n phase_description: String,\n\n) {\n\n assert_eq!(\n\n topology_description.topology_type, outcome.topology_type,\n\n \"{}: {}\",\n\n test_description, phase_description\n\n );\n\n\n\n assert_eq!(\n\n topology_description.set_name, outcome.set_name,\n\n \"{}: {}\",\n\n test_description, phase_description,\n\n );\n\n\n\n let expected_timeout = outcome\n\n .logical_session_timeout_minutes\n", "file_path": "src/sdam/description/topology/test/sdam.rs", "rank": 35, "score": 130965.84845596435 }, { "content": "#[derive(Derivative)]\n\n#[derivative(Debug)]\n\nstruct ClientInner {\n\n topology: Topology,\n\n options: ClientOptions,\n\n session_pool: ServerSessionPool,\n\n}\n\n\n\nimpl Drop for ClientInner {\n\n fn drop(&mut self) {\n\n self.topology.close()\n\n }\n\n}\n\n\n\nimpl Client {\n\n /// Creates a new `Client` connected to the cluster specified by `uri`. 
`uri` must be a valid\n\n /// MongoDB connection string.\n\n ///\n\n /// See the documentation on\n\n /// [`ClientOptions::parse`](options/struct.ClientOptions.html#method.parse) for more details.\n\n pub async fn with_uri_str(uri: impl AsRef<str>) -> Result<Self> {\n\n let options = ClientOptions::parse_uri(uri.as_ref(), None).await?;\n", "file_path": "src/client/mod.rs", "rank": 36, "score": 129176.36889712478 }, { "content": "#[derive(Debug)]\n\nstruct DatabaseInner {\n\n client: Client,\n\n name: String,\n\n selection_criteria: Option<SelectionCriteria>,\n\n read_concern: Option<ReadConcern>,\n\n write_concern: Option<WriteConcern>,\n\n}\n\n\n\nimpl Database {\n\n pub(crate) fn new(client: Client, name: &str, options: Option<DatabaseOptions>) -> Self {\n\n let options = options.unwrap_or_default();\n\n let selection_criteria = options\n\n .selection_criteria\n\n .or_else(|| client.selection_criteria().cloned());\n\n\n\n let read_concern = options\n\n .read_concern\n\n .or_else(|| client.read_concern().cloned());\n\n\n\n let write_concern = options\n", "file_path": "src/db/mod.rs", "rank": 37, "score": 129176.36889712478 }, { "content": "#[derive(Debug)]\n\nstruct CollectionInner {\n\n client: Client,\n\n db: Database,\n\n name: String,\n\n selection_criteria: Option<SelectionCriteria>,\n\n read_concern: Option<ReadConcern>,\n\n write_concern: Option<WriteConcern>,\n\n}\n\n\n\nimpl<T> Collection<T> {\n\n pub(crate) fn new(db: Database, name: &str, options: Option<CollectionOptions>) -> Self {\n\n let options = options.unwrap_or_default();\n\n let selection_criteria = options\n\n .selection_criteria\n\n .or_else(|| db.selection_criteria().cloned());\n\n\n\n let read_concern = options.read_concern.or_else(|| db.read_concern().cloned());\n\n\n\n let write_concern = options\n\n .write_concern\n", "file_path": "src/coll/mod.rs", "rank": 38, "score": 129176.36889712478 }, { "content": "#[derive(Clone, Serialize, Deserialize, PartialEq, Debug)]\n\nstruct UserType {\n\n 
x: i32,\n\n str: String,\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\n#[function_name::named]\n\nasync fn typed_insert_one() {\n\n let _guard: RwLockReadGuard<()> = LOCK.run_concurrently().await;\n\n let client = TestClient::new().await;\n\n\n\n let coll = client\n\n .init_db_and_typed_coll(function_name!(), function_name!())\n\n .await;\n\n let insert_data = UserType {\n\n x: 1,\n\n str: \"a\".into(),\n\n };\n\n insert_one_and_find(&coll, insert_data).await;\n", "file_path": "src/test/coll.rs", "rank": 39, "score": 129156.26799459363 }, { "content": "#[test]\n\n#[function_name::named]\n\nfn typed_collection() {\n\n let _guard: RwLockReadGuard<()> = RUNTIME.block_on(async { LOCK.run_concurrently().await });\n\n\n\n let options = CLIENT_OPTIONS.clone();\n\n let client = Client::with_options(options).expect(\"client creation should succeed\");\n\n let coll = init_db_and_typed_coll(&client, function_name!(), function_name!());\n\n\n\n #[derive(Serialize, Deserialize, Debug)]\n\n struct MyType {\n\n x: i32,\n\n str: String,\n\n }\n\n let my_type = MyType {\n\n x: 1,\n\n str: \"hello\".into(),\n\n };\n\n\n\n assert!(coll.insert_one(my_type, None).is_ok());\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 40, "score": 128776.3768311349 }, { "content": "#[test]\n\nfn client_options() {\n\n let _guard: RwLockReadGuard<()> = RUNTIME.block_on(async { LOCK.run_concurrently().await });\n\n\n\n let mut options = ClientOptions::parse(\"mongodb://localhost:27017/\").unwrap();\n\n\n\n options.original_uri.take();\n\n\n\n assert_eq!(\n\n options,\n\n ClientOptions::builder()\n\n .hosts(vec![ServerAddress::Tcp {\n\n host: \"localhost\".into(),\n\n port: Some(27017)\n\n }])\n\n .build()\n\n );\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 41, "score": 128236.75718128303 }, { "content": "fn match_eq<V: PartialEq + std::fmt::Debug>(actual: &V, expected: &V) -> Result<(), String> {\n\n 
if actual == expected {\n\n Ok(())\n\n } else {\n\n expected_err(actual, expected)\n\n }\n\n}\n\n\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 42, "score": 128212.41955512295 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\", deny_unknown_fields)]\n\nstruct TestServerDescription {\n\n address: String,\n\n #[serde(rename = \"avg_rtt_ms\")]\n\n avg_rtt_ms: Option<f64>,\n\n #[serde(rename = \"type\")]\n\n server_type: TestServerType,\n\n tags: Option<TagSet>,\n\n last_update_time: Option<i32>,\n\n last_write: Option<LastWriteDate>,\n\n // We don't need to use this field, but it needs to be included during deserialization so that\n\n // we can use the deny_unknown_fields tag.\n\n _max_wire_version: Option<i32>,\n\n}\n\n\n\nimpl TestServerDescription {\n\n fn into_server_description(self) -> Option<ServerDescription> {\n\n let server_type = match self.server_type.into_server_type() {\n\n Some(server_type) => server_type,\n\n None => return None,\n\n };\n", "file_path": "src/sdam/description/topology/server_selection/test/mod.rs", "rank": 43, "score": 127330.97314322913 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestTopologyDescription {\n\n #[serde(rename = \"type\")]\n\n topology_type: TopologyType,\n\n servers: Vec<TestServerDescription>,\n\n}\n\n\n\nimpl TestTopologyDescription {\n\n fn into_topology_description(\n\n self,\n\n heartbeat_frequency: Option<Duration>,\n\n ) -> TopologyDescription {\n\n let servers: HashMap<ServerAddress, ServerDescription> = self\n\n .servers\n\n .into_iter()\n\n .filter_map(|sd| {\n\n sd.into_server_description()\n\n .map(|sd| (sd.address.clone(), sd))\n\n })\n\n .collect();\n\n\n", "file_path": "src/sdam/description/topology/server_selection/test/mod.rs", "rank": 44, "score": 127330.97314322913 }, { "content": "/// The number of digits in `n` in base 10.\n\n/// Useful for calculating the size of an array entry in BSON.\n\nfn num_decimal_digits(mut n: usize) -> u64 
{\n\n let mut digits = 0;\n\n\n\n loop {\n\n n /= 10;\n\n digits += 1;\n\n\n\n if n == 0 {\n\n return digits;\n\n }\n\n }\n\n}\n\n\n\n/// Read a document's raw BSON bytes from the provided reader.\n\npub(crate) fn read_document_bytes<R: Read>(mut reader: R) -> Result<Vec<u8>> {\n\n let length = reader.read_i32()?;\n\n\n\n let mut bytes = Vec::with_capacity(length as usize);\n\n bytes.write_i32(length)?;\n\n\n", "file_path": "src/bson_util/mod.rs", "rank": 45, "score": 126025.42014662999 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestFile {\n\n pub tests: Vec<TestCase>,\n\n}\n\n\n", "file_path": "src/client/options/test.rs", "rank": 46, "score": 125859.59755232159 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct TestCase {\n\n pub description: String,\n\n pub uri: String,\n\n pub valid: bool,\n\n pub warning: Option<bool>,\n\n pub hosts: Option<Vec<Document>>,\n\n pub auth: Option<Document>,\n\n pub options: Option<Document>,\n\n}\n\n\n\nasync fn run_test(test_file: TestFile) {\n\n for mut test_case in test_file.tests {\n\n if\n\n // TODO: RUST-229: Implement IPv6 Support\n\n test_case.description.contains(\"ipv6\")\n\n || test_case.description.contains(\"IP literal\")\n\n // TODO: RUST-226: Investigate whether tlsCertificateKeyFilePassword is supported in rustls\n\n || test_case\n\n .description\n\n .contains(\"tlsCertificateKeyFilePassword\")\n", "file_path": "src/client/options/test.rs", "rank": 47, "score": 125859.59755232159 }, { "content": "struct NoCertVerifier {}\n\n\n\nimpl ServerCertVerifier for NoCertVerifier {\n\n fn verify_server_cert(\n\n &self,\n\n _: &RootCertStore,\n\n _: &[Certificate],\n\n _: webpki::DNSNameRef,\n\n _: &[u8],\n\n ) -> std::result::Result<ServerCertVerified, TLSError> {\n\n Ok(ServerCertVerified::assertion())\n\n }\n\n}\n\n\n\nimpl TlsOptions {\n\n /// Converts `TlsOptions` into a rustls::ClientConfig.\n\n pub(crate) fn into_rustls_config(self) -> 
Result<rustls::ClientConfig> {\n\n let mut config = rustls::ClientConfig::new();\n\n\n\n if let Some(true) = self.allow_invalid_certificates {\n", "file_path": "src/client/options/mod.rs", "rank": 48, "score": 125859.59755232159 }, { "content": "fn get_int(value: &Bson) -> Option<i64> {\n\n bson_util::get_int(value).or_else(|| value.as_document().and_then(parse_i64_ext_json))\n\n}\n", "file_path": "src/test/util/matchable.rs", "rank": 49, "score": 125406.7120043675 }, { "content": "struct TestFixtures {\n\n op: Insert<'static, Document>,\n\n documents: Vec<Document>,\n\n options: InsertManyOptions,\n\n}\n\n\n", "file_path": "src/operation/insert/test.rs", "rank": 50, "score": 125292.31336917813 }, { "content": "fn build_test(\n\n ns: Namespace,\n\n filter: Option<Document>,\n\n options: Option<FindOptions>,\n\n mut expected_body: Document,\n\n) {\n\n let mut find = Find::new(ns.clone(), filter, options);\n\n\n\n let cmd = find.build(&StreamDescription::new_testing()).unwrap();\n\n\n\n assert_eq!(cmd.name.as_str(), \"find\");\n\n assert_eq!(cmd.target_db.as_str(), ns.db.as_str());\n\n\n\n let cmd_bytes = find.serialize_command(cmd).unwrap();\n\n let mut cmd_doc = bson::from_slice(&cmd_bytes).unwrap();\n\n\n\n bson_util::sort_document(&mut expected_body);\n\n bson_util::sort_document(&mut cmd_doc);\n\n\n\n assert_eq!(cmd_doc, expected_body);\n", "file_path": "src/operation/find/test.rs", "rank": 51, "score": 124906.8761681585 }, { "content": "fn build_test(\n\n ns: Namespace,\n\n cursor_id: i64,\n\n address: ServerAddress,\n\n batch_size: Option<u32>,\n\n max_time: Option<Duration>,\n\n mut expected_body: Document,\n\n) {\n\n let info = CursorInformation {\n\n ns: ns.clone(),\n\n id: cursor_id,\n\n address,\n\n batch_size,\n\n max_time,\n\n };\n\n let mut get_more = GetMore::new(info, None);\n\n\n\n let build_result = get_more.build(&StreamDescription::new_testing());\n\n assert!(build_result.is_ok());\n\n\n", "file_path": "src/operation/get_more/test.rs", "rank": 
52, "score": 124906.8761681585 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct Arguments {\n\n pub pipeline: Vec<Document>,\n\n pub batch_size: Option<u32>,\n\n pub collation: Option<Collation>,\n\n}\n\n\n\n#[function_name::named]\n\nasync fn run_aggregate_test(test_file: TestFile) {\n\n let _guard: RwLockReadGuard<()> = LOCK.run_concurrently().await;\n\n let client = TestClient::new().await;\n\n\n\n let data = test_file.data;\n\n\n\n for test_case in test_file.tests {\n\n if test_case.operation.name != \"aggregate\" {\n\n continue;\n\n }\n\n\n\n let coll = client\n\n .init_db_and_coll(\n", "file_path": "src/test/spec/crud_v1/aggregate.rs", "rank": 53, "score": 123690.9386407038 }, { "content": "#[test]\n\nfn metadata_no_options() {\n\n let handshaker = Handshaker::new(None);\n\n\n\n let metadata = handshaker.command.body.get_document(\"client\").unwrap();\n\n assert!(!metadata.contains_key(\"application\"));\n\n\n\n let driver = metadata.get_document(\"driver\").unwrap();\n\n assert_eq!(driver.keys().collect::<Vec<_>>(), vec![\"name\", \"version\"]);\n\n assert_eq!(driver.get_str(\"name\"), Ok(\"mongo-rust-driver\"));\n\n assert_eq!(driver.get_str(\"version\"), Ok(env!(\"CARGO_PKG_VERSION\")));\n\n\n\n let os = metadata.get_document(\"os\").unwrap();\n\n assert_eq!(os.get_str(\"type\"), Ok(std::env::consts::OS));\n\n assert_eq!(os.get_str(\"architecture\"), Ok(std::env::consts::ARCH));\n\n}\n\n\n", "file_path": "src/cmap/establish/handshake/test.rs", "rank": 54, "score": 122863.71104774933 }, { "content": "#[test]\n\nfn metadata_with_options() {\n\n let app_name = \"myspace 2.0\";\n\n let name = \"even better Rust driver\";\n\n let version = \"the best version, of course\";\n\n\n\n let options = ConnectionPoolOptions::from_client_options(\n\n &ClientOptions::builder()\n\n .app_name(app_name.to_string())\n\n .driver_info(\n\n DriverInfo::builder()\n\n .name(name.to_string())\n\n .version(version.to_string())\n\n 
.build(),\n\n )\n\n .build(),\n\n );\n\n\n\n let handshaker = Handshaker::new(Some(options.into()));\n\n\n\n let metadata = handshaker.command.body.get_document(\"client\").unwrap();\n", "file_path": "src/cmap/establish/handshake/test.rs", "rank": 55, "score": 122863.71104774933 }, { "content": "/// Strunct encapsulating an operation that takes a session in, as well as some associated\n\n/// information.\n\nstruct Operation {\n\n name: &'static str,\n\n f: OperationFn,\n\n is_read: bool,\n\n}\n\n\n", "file_path": "src/client/session/test/causal_consistency.rs", "rank": 56, "score": 122702.14925151432 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct NextBatchBody {\n\n id: i64,\n\n next_batch: VecDeque<RawDocumentBuf>,\n\n post_batch_resume_token: Option<RawDocumentBuf>,\n\n}\n", "file_path": "src/operation/get_more/mod.rs", "rank": 57, "score": 122696.49339143638 }, { "content": "#[derive(Deserialize)]\n\nstruct TestFile {\n\n data: Vec<Document>,\n\n collection_name: String,\n\n database_name: String,\n\n tests: Vec<TestCase>,\n\n}\n\n\n", "file_path": "src/test/spec/command_monitoring/mod.rs", "rank": 58, "score": 121170.26636629163 }, { "content": "#[derive(Deserialize)]\n\nstruct TestCase {\n\n description: String,\n\n #[serde(rename = \"ignore_if_server_version_greater_than\", default)]\n\n max_version: Option<String>,\n\n #[serde(rename = \"ignore_if_server_version_less_than\", default)]\n\n min_version: Option<String>,\n\n operation: Document,\n\n expectations: Vec<TestEvent>,\n\n}\n\n\n\nasync fn run_command_monitoring_test(test_file: TestFile) {\n\n let _guard: RwLockWriteGuard<()> = LOCK.run_exclusively().await;\n\n\n\n let client = TestClient::new().await;\n\n\n\n let skipped_tests = vec![\n\n // uses old count\n\n \"A successful command\",\n\n \"A failed command event\",\n\n \"A successful command with a non-primary read preference\",\n", "file_path": "src/test/spec/command_monitoring/mod.rs", "rank": 59, 
"score": 121170.26636629163 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Options {\n\n pub ordered: bool,\n\n}\n\n\n", "file_path": "src/test/spec/crud_v1/insert_many.rs", "rank": 60, "score": 120776.68338705816 }, { "content": "fn serialize_true<S: Serializer>(s: S) -> std::result::Result<S::Ok, S::Error> {\n\n s.serialize_bool(true)\n\n}\n\n\n\n#[serde_with::skip_serializing_none]\n\n#[derive(Clone, Debug, TypedBuilder, Serialize)]\n\n#[builder(field_defaults(setter(into)))]\n\n#[serde(rename_all = \"camelCase\")]\n\npub(super) struct FindAndModifyOptions {\n\n #[serde(flatten)]\n\n pub(crate) modification: Modification,\n\n\n\n #[builder(default)]\n\n pub(crate) sort: Option<Document>,\n\n\n\n #[builder(default)]\n\n pub(crate) new: Option<bool>,\n\n\n\n #[builder(default)]\n\n pub(crate) upsert: Option<bool>,\n", "file_path": "src/operation/find_and_modify/options.rs", "rank": 61, "score": 119869.32145007895 }, { "content": "/// Custom serializer used to serialize limit as its absolute value.\n\nfn serialize_absolute_value<S>(\n\n val: &Option<i64>,\n\n serializer: S,\n\n) -> std::result::Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n match val {\n\n Some(v) => serializer.serialize_i64(v.abs()),\n\n None => serializer.serialize_none(),\n\n }\n\n}\n\n\n\n/// Specifies the options to a [`Collection::find_one`](../struct.Collection.html#method.find_one)\n\n/// operation.\n\n#[derive(Clone, Debug, Default, Deserialize, TypedBuilder)]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[builder(field_defaults(default, setter(into)))]\n\n#[non_exhaustive]\n\npub struct FindOneOptions {\n", "file_path": "src/coll/options.rs", "rank": 62, "score": 119068.66913569966 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\", deny_unknown_fields)]\n\nstruct TestCase {\n\n pub description: String,\n\n pub valid: bool,\n\n pub write_concern: Option<Document>,\n\n pub write_concern_document: Option<Document>,\n\n pub 
read_concern: Option<Document>,\n\n pub read_concern_document: Option<Document>,\n\n pub is_server_default: Option<bool>,\n\n pub is_acknowledged: Option<bool>,\n\n}\n\n\n\nasync fn run_document_test(test_file: TestFile) {\n\n for test_case in test_file.tests {\n\n let description = test_case.description.as_str();\n\n\n\n if let Some(specified_write_concern_document) = test_case.write_concern {\n\n let specified_write_concern =\n\n match bson::from_document::<WriteConcern>(specified_write_concern_document)\n\n .map_err(Error::from)\n\n .and_then(|wc| wc.validate().map(|_| wc))\n", "file_path": "src/test/spec/read_write_concern/document.rs", "rank": 63, "score": 118686.4873423207 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestFile {\n\n pub tests: Vec<TestCase>,\n\n}\n\n\n", "file_path": "src/test/spec/read_write_concern/document.rs", "rank": 64, "score": 118686.4873423207 }, { "content": "fn emit_event<F>(topology: Option<&Topology>, handler: &Option<Arc<dyn SdamEventHandler>>, emit: F)\n\nwhere\n\n F: FnOnce(&Arc<dyn SdamEventHandler>),\n\n{\n\n if let Some(handler) = handler {\n\n if topology.is_some() {\n\n emit(handler);\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) struct IsMasterReply {\n\n pub server_address: ServerAddress,\n\n pub command_response: IsMasterCommandResponse,\n\n pub round_trip_time: Duration,\n\n pub cluster_time: Option<ClusterTime>,\n\n}\n\n\n\n/// The response to a `hello` command.\n", "file_path": "src/is_master.rs", "rank": 65, "score": 118643.18474213638 }, { "content": "fn results_match_inner(\n\n actual: Option<&Bson>,\n\n expected: &Bson,\n\n returns_root_documents: bool,\n\n root: bool,\n\n entities: Option<&EntityMap>,\n\n) -> Result<(), String> {\n\n match expected {\n\n Bson::Document(expected_doc) => {\n\n if let Some((key, value)) = expected_doc.iter().next() {\n\n if key.starts_with(\"$$\") && expected_doc.len() == 1 {\n\n return special_operator_matches((key, value), actual, entities)\n\n 
.map_err(|e| format!(\"{}: {}\", key, e));\n\n }\n\n }\n\n\n\n let actual_doc = match actual {\n\n Some(Bson::Document(actual)) => actual,\n\n // The only case in which None is an acceptable value is if the expected document\n\n // is a special operator; otherwise, the two documents do not match.\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 66, "score": 118546.98236215439 }, { "content": "fn command_events_match(\n\n actual: &CommandEvent,\n\n expected: &ExpectedCommandEvent,\n\n entities: Option<&EntityMap>,\n\n) -> Result<(), String> {\n\n match (actual, expected) {\n\n (\n\n CommandEvent::Started(actual),\n\n ExpectedCommandEvent::Started {\n\n command_name: expected_command_name,\n\n database_name: expected_database_name,\n\n command: expected_command,\n\n has_service_id: expected_has_service_id,\n\n },\n\n ) => {\n\n match_opt(&actual.command_name, expected_command_name)?;\n\n match_opt(&actual.db, expected_database_name)?;\n\n match_opt(&actual.service_id.is_some(), expected_has_service_id)?;\n\n match_results_opt(&actual.command, expected_command, entities)?;\n\n Ok(())\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 67, "score": 118434.91368865316 }, { "content": "#[derive(Debug, Deserialize, PartialEq)]\n\n#[serde(rename_all = \"camelCase\", deny_unknown_fields)]\n\nstruct ResolvedOptions {\n\n replica_set: Option<String>,\n\n auth_source: Option<String>,\n\n ssl: bool,\n\n load_balanced: Option<bool>,\n\n direct_connection: Option<bool>,\n\n}\n\n\n\nimpl ResolvedOptions {\n\n fn assert_eq(&self, options: &ClientOptions) {\n\n // When an `authSource` is provided without any other authentication information, we do\n\n // not keep track of it within a Credential. 
The options present in the spec tests\n\n // expect the `authSource` be present regardless of whether a Credential should be\n\n // created, so the value of the `authSource` is not asserted on to avoid this\n\n // discrepancy.\n\n assert_eq!(self.replica_set, options.repl_set_name);\n\n assert_eq!(self.ssl, options.tls_options().is_some());\n\n assert_eq!(self.load_balanced, options.load_balanced);\n\n assert_eq!(self.direct_connection, options.direct_connection);\n\n }\n\n}\n\n\n", "file_path": "src/test/spec/initial_dns_seedlist_discovery.rs", "rank": 68, "score": 118434.21960951484 }, { "content": "#[derive(Debug, Deserialize, Default, PartialEq)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct ParsedOptions {\n\n user: Option<String>,\n\n password: Option<String>,\n\n db: Option<String>,\n\n}\n\n\n\nasync fn run_test(mut test_file: TestFile) {\n\n // TODO DRIVERS-796: unskip this test\n\n if test_file.uri == \"mongodb+srv://test5.test.build.10gen.cc/?authSource=otherDB\" {\n\n log_uncaptured(\n\n \"skipping initial_dns_seedlist_discovery due to authSource being specified without \\\n\n credentials\",\n\n );\n\n return;\n\n }\n\n\n\n // TODO RUST-980 unskip these tests\n\n if test_file\n\n .options\n\n .as_ref()\n", "file_path": "src/test/spec/initial_dns_seedlist_discovery.rs", "rank": 69, "score": 118434.21960951484 }, { "content": "fn special_operator_matches(\n\n (key, value): (&String, &Bson),\n\n actual: Option<&Bson>,\n\n entities: Option<&EntityMap>,\n\n) -> Result<(), String> {\n\n match key.as_ref() {\n\n \"$$exists\" => match_eq(&value.as_bool().unwrap(), &actual.is_some()),\n\n \"$$type\" => type_matches(value, actual.unwrap()),\n\n \"$$unsetOrMatches\" => {\n\n if actual.is_some() {\n\n results_match_inner(actual, value, false, false, entities)\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n \"$$matchesEntity\" => {\n\n let id = value.as_str().unwrap();\n\n entity_matches(id, actual, entities.unwrap())\n\n }\n\n \"$$matchesHexBytes\" => panic!(\"GridFS 
not implemented\"),\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 70, "score": 117528.73662830448 }, { "content": "#[allow(dead_code)]\n\nstruct TestFile {\n\n #[serde(rename = \"heartbeatFrequencyMS\")]\n\n heartbeat_frequency_ms: Option<u64>,\n\n topology_description: TestTopologyDescription,\n\n read_preference: TestReadPreference,\n\n suitable_servers: Option<Vec<TestServerDescription>>,\n\n in_latency_window: Option<Vec<TestServerDescription>>,\n\n error: Option<bool>,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct TestReadPreference {\n\n pub mode: Option<String>,\n\n pub tag_sets: Option<Vec<TagSet>>,\n\n #[serde(rename = \"maxStalenessSeconds\")]\n\n pub max_staleness_seconds: Option<u64>,\n\n}\n\n\n", "file_path": "src/sdam/description/topology/server_selection/test/logic.rs", "rank": 71, "score": 116374.22257660786 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestOutcome {\n\n tolerance: f64,\n\n expected_frequencies: HashMap<ServerAddress, f64>,\n\n}\n\n\n", "file_path": "src/sdam/description/topology/server_selection/test/in_window.rs", "rank": 72, "score": 116374.22257660786 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestServer {\n\n address: ServerAddress,\n\n operation_count: u32,\n\n}\n\n\n\nasync fn run_test(test_file: TestFile) {\n\n println!(\"Running {}\", test_file.description);\n\n\n\n let mut tallies: HashMap<ServerAddress, u32> = HashMap::new();\n\n\n\n let servers: HashMap<ServerAddress, Arc<Server>> = test_file\n\n .mocked_topology_state\n\n .into_iter()\n\n .map(|desc| {\n\n (\n\n desc.address.clone(),\n\n Arc::new(Server::new_mocked(desc.address, desc.operation_count)),\n\n )\n\n })\n\n .collect();\n", "file_path": "src/sdam/description/topology/server_selection/test/in_window.rs", "rank": 73, "score": 116374.22257660786 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct TestFile {\n\n description: String,\n\n topology_description: TestTopologyDescription,\n\n 
mocked_topology_state: Vec<TestServer>,\n\n iterations: u32,\n\n outcome: TestOutcome,\n\n}\n\n\n", "file_path": "src/sdam/description/topology/server_selection/test/in_window.rs", "rank": 74, "score": 116374.22257660786 }, { "content": "struct ExecutionOutput<T: Operation> {\n\n operation_output: T::O,\n\n connection: Connection,\n\n}\n", "file_path": "src/client/executor.rs", "rank": 75, "score": 115856.01675124299 }, { "content": "struct ExecutionDetails<T: Operation> {\n\n output: ExecutionOutput<T>,\n\n implicit_session: Option<ClientSession>,\n\n}\n\n\n", "file_path": "src/client/executor.rs", "rank": 76, "score": 115856.01675124299 }, { "content": "/// Log a message on stderr that won't be captured by `cargo test`. Panics if the write fails.\n\npub fn log_uncaptured<S: AsRef<str>>(text: S) {\n\n use std::io::Write;\n\n\n\n let mut stderr = std::io::stderr();\n\n stderr.write_all(text.as_ref().as_bytes()).unwrap();\n\n stderr.write_all(b\"\\n\").unwrap();\n\n}\n", "file_path": "src/test/util/mod.rs", "rank": 77, "score": 115585.04971199781 }, { "content": "#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct LastWriteDate {\n\n last_write_date: i64,\n\n}\n\n\n", "file_path": "src/sdam/description/topology/server_selection/test/mod.rs", "rank": 78, "score": 114257.17001575079 }, { "content": "#[test]\n\nfn predicate_omits_unavailable() {\n\n let criteria = SelectionCriteria::Predicate(Arc::new(|si| {\n\n !matches!(si.server_type(), ServerType::RsPrimary)\n\n }));\n\n\n\n let desc = TestTopologyDescription {\n\n topology_type: TopologyType::ReplicaSetWithPrimary,\n\n servers: vec![\n\n TestServerDescription {\n\n address: \"localhost:27017\".to_string(),\n\n avg_rtt_ms: Some(12.0),\n\n server_type: TestServerType::RsPrimary,\n\n tags: None,\n\n last_update_time: None,\n\n last_write: None,\n\n _max_wire_version: None,\n\n },\n\n TestServerDescription {\n\n address: \"localhost:27018\".to_string(),\n\n avg_rtt_ms: Some(12.0),\n", 
"file_path": "src/sdam/description/topology/server_selection/test/mod.rs", "rank": 79, "score": 113876.20071516337 }, { "content": "fn empty_update() -> FindAndModify {\n\n let ns = Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n };\n\n let filter = doc! {};\n\n let update = UpdateModifications::Document(doc! { \"$x\": { \"$inc\": 1 } });\n\n FindAndModify::with_update(ns, filter, update, None).unwrap()\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn build_with_update_hint() {\n\n let ns = Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n };\n\n let filter = doc! { \"x\": { \"$gt\": 1 } };\n\n let update = UpdateModifications::Document(doc! { \"$x\": { \"$inc\": 1 } });\n\n let options = FindOneAndUpdateOptions {\n", "file_path": "src/operation/find_and_modify/test.rs", "rank": 80, "score": 113728.03391350427 }, { "content": "fn empty_delete() -> FindAndModify {\n\n let ns = Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n };\n\n let filter = doc! {};\n\n FindAndModify::with_delete(ns, filter, None)\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn build_with_delete_hint() {\n\n let ns = Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n };\n\n let filter = doc! {\n\n \"x\": 2,\n\n \"y\": { \"$gt\": 1 },\n\n };\n", "file_path": "src/operation/find_and_modify/test.rs", "rank": 81, "score": 113728.03391350427 }, { "content": "fn empty_replace() -> FindAndModify {\n\n let ns = Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n };\n\n let filter = doc! {};\n\n let replacement = doc! 
{ \"x\": { \"inc\": 1 } };\n\n FindAndModify::with_replace(ns, filter, replacement, None).unwrap()\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn build_with_replace_hint() {\n\n let ns = Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n };\n\n let filter = doc! { \"x\": { \"$gt\": 1 } };\n\n let replacement = doc! { \"x\": { \"inc\": 1 } };\n\n let options = FindOneAndReplaceOptions {\n", "file_path": "src/operation/find_and_modify/test.rs", "rank": 82, "score": 113728.03391350427 }, { "content": "fn deserialize_uri_options_to_uri_string_option<'de, D>(\n\n deserializer: D,\n\n) -> std::result::Result<Option<String>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let uri_options = Document::deserialize(deserializer)?;\n\n Ok(Some(merge_uri_options(&DEFAULT_URI, Some(&uri_options))))\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Outcome {\n\n pub collection: CollectionOutcome,\n\n}\n\n\n\nimpl Outcome {\n\n pub async fn matches_actual(\n\n self,\n\n db_name: String,\n\n coll_name: String,\n", "file_path": "src/test/spec/v2_runner/test_file.rs", "rank": 83, "score": 112885.42795670927 }, { "content": "fn default_hosts() -> Vec<ServerAddress> {\n\n vec![ServerAddress::default()]\n\n}\n\n\n\nimpl Default for ClientOptions {\n\n fn default() -> Self {\n\n Self::builder().build()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nimpl Serialize for ClientOptions {\n\n fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n {\n\n #[derive(Serialize)]\n\n struct ClientOptionsHelper<'a> {\n\n appname: &'a Option<String>,\n\n\n", "file_path": "src/client/options/mod.rs", "rank": 84, "score": 111288.1637942566 }, { "content": "fn type_matches(types: &Bson, actual: &Bson) -> Result<(), String> {\n\n match types {\n\n Bson::Array(types) => {\n\n if types.iter().any(|t| type_matches(t, 
actual).is_ok()) {\n\n Ok(())\n\n } else {\n\n Err(format!(\"expected any of {:?}, got {:?}\", types, actual))\n\n }\n\n }\n\n Bson::String(str) => {\n\n let expected = match str.as_ref() {\n\n \"double\" => ElementType::Double,\n\n \"string\" => ElementType::String,\n\n \"object\" => ElementType::EmbeddedDocument,\n\n \"array\" => ElementType::Array,\n\n \"binData\" => ElementType::Binary,\n\n \"undefined\" => ElementType::Undefined,\n\n \"objectId\" => ElementType::ObjectId,\n\n \"bool\" => ElementType::Boolean,\n\n \"date\" => ElementType::DateTime,\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 85, "score": 107805.35720133953 }, { "content": "fn scram_sasl_first_options(mechanism: AuthMechanism) {\n\n let sasl_first = SaslStart::new(String::new(), mechanism, Vec::new(), None);\n\n let command = sasl_first.into_command();\n\n let options = match command.body.get_document(\"options\") {\n\n Ok(options) => options,\n\n Err(_) => panic!(\"SaslStart should contain options document\"),\n\n };\n\n match options.get_bool(\"skipEmptyExchange\") {\n\n Ok(skip_empty_exchange) => assert!(\n\n skip_empty_exchange,\n\n \"skipEmptyExchange should be true for SCRAM authentication\"\n\n ),\n\n Err(_) => panic!(\"SaslStart options should contain skipEmptyExchange\"),\n\n }\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn sasl_first_options_specified() {\n\n scram_sasl_first_options(AuthMechanism::ScramSha1);\n", "file_path": "src/client/auth/test.rs", "rank": 86, "score": 106953.51010540317 }, { "content": "fn spawn<T>(future: T) -> impl Future<Output = <T as Future>::Output>\n\nwhere\n\n T: Future + Send + 'static,\n\n T::Output: Send + 'static,\n\n{\n\n #[cfg(feature = \"tokio-runtime\")]\n\n {\n\n tokio::task::spawn(future).map(|result| result.unwrap())\n\n }\n\n\n\n #[cfg(feature = \"async-std-runtime\")]\n\n {\n\n async_std::task::spawn(future)\n\n 
}\n\n}\n\n\n\nmod bench;\n\nmod fs;\n\nmod models;\n\nmod score;\n", "file_path": "benchmarks/src/main.rs", "rank": 87, "score": 105771.61511149729 }, { "content": "fn deserialize_op<'de, 'a, T: 'a + Deserialize<'de> + TestOperation>(\n\n value: Bson,\n\n) -> std::result::Result<Box<dyn TestOperation + 'a>, bson::de::Error> {\n\n T::deserialize(BsonDeserializer::new(value)).map(|op| Box::new(op) as Box<dyn TestOperation>)\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for Operation {\n\n fn deserialize<D: Deserializer<'de>>(deserializer: D) -> std::result::Result<Self, D::Error> {\n\n #[derive(Debug, Deserialize)]\n\n #[serde(rename_all = \"camelCase\", deny_unknown_fields)]\n\n struct OperationDefinition {\n\n pub name: String,\n\n pub object: OperationObject,\n\n #[serde(default = \"default_arguments\")]\n\n pub arguments: Bson,\n\n pub expect_error: Option<ExpectError>,\n\n pub expect_result: Option<Bson>,\n\n pub save_result_as_entity: Option<String>,\n\n pub ignore_result_and_error: Option<bool>,\n\n }\n", "file_path": "src/test/spec/unified_runner/operation.rs", "rank": 88, "score": 105434.42752725736 }, { "content": "fn deserialize_command_started_events<'de, D>(\n\n deserializer: D,\n\n) -> std::result::Result<Option<Vec<CommandStartedEvent>>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let docs = Vec::<Document>::deserialize(deserializer)?;\n\n Ok(Some(\n\n docs.iter()\n\n .map(|doc| {\n\n let event = doc.get_document(\"command_started_event\").unwrap();\n\n from_document(event.clone()).unwrap()\n\n })\n\n .collect(),\n\n ))\n\n}\n", "file_path": "src/test/spec/v2_runner/test_file.rs", "rank": 89, "score": 103375.68657385853 }, { "content": "fn assert_same_lsid_on_last_two_commands(client: &EventClient) {\n\n let events = client.get_all_command_started_events();\n\n let lsid1 = events[events.len() - 1].command.get(\"lsid\").unwrap();\n\n let lsid2 = events[events.len() - 2].command.get(\"lsid\").unwrap();\n\n assert_eq!(lsid1, lsid2);\n\n}\n", 
"file_path": "src/test/spec/v2_runner/mod.rs", "rank": 90, "score": 101541.1440644807 }, { "content": "fn parse_ids(matches: ArgMatches) -> HashSet<BenchmarkId> {\n\n let mut ids: HashSet<BenchmarkId> = match matches.value_of(\"ids\") {\n\n Some(\"all\") => (1..=MAX_ID)\n\n .map(|id| BenchmarkId::try_from(id as u8).unwrap())\n\n .collect(),\n\n Some(id_list) => id_list\n\n .split(',')\n\n .map(|str| {\n\n let n = str\n\n .parse::<u8>()\n\n .expect(\"invalid test IDs provided, see README\");\n\n BenchmarkId::try_from(n).expect(\"invalid test IDs provided, see README\")\n\n })\n\n .collect(),\n\n None => HashSet::new(),\n\n };\n\n\n\n if matches.is_present(\"single\") {\n\n ids.insert(BenchmarkId::RunCommand);\n\n ids.insert(BenchmarkId::FindOneById);\n", "file_path": "benchmarks/src/main.rs", "rank": 91, "score": 101090.15732974229 }, { "content": " pipeline: pipeline.into_iter().collect(),\n\n options,\n\n }\n\n }\n\n}\n\n\n\n// IMPORTANT: If new method implementations are added here, make sure `ChangeStreamAggregate` has\n\n// the equivalent delegations.\n\nimpl Operation for Aggregate {\n\n type O = CursorSpecification;\n\n type Command = Document;\n\n\n\n const NAME: &'static str = \"aggregate\";\n\n\n\n fn build(&mut self, _description: &StreamDescription) -> Result<Command> {\n\n let mut body = doc! 
{\n\n Self::NAME: self.target.to_bson(),\n\n \"pipeline\": bson_util::to_bson_array(&self.pipeline),\n\n \"cursor\": {}\n\n };\n", "file_path": "src/operation/aggregate/mod.rs", "rank": 92, "score": 100079.65930402232 }, { "content": "mod change_stream;\n\n\n\n#[cfg(test)]\n\nmod test;\n\n\n\nuse crate::{\n\n bson::{doc, Bson, Document},\n\n bson_util,\n\n cmap::{Command, RawCommandResponse, StreamDescription},\n\n cursor::CursorSpecification,\n\n error::Result,\n\n operation::{append_options, remove_empty_write_concern, Operation, Retryability},\n\n options::{AggregateOptions, SelectionCriteria, WriteConcern},\n\n Namespace,\n\n};\n\n\n\nuse super::{CursorBody, WriteConcernOnlyBody, SERVER_4_2_0_WIRE_VERSION};\n\n\n\npub(crate) use change_stream::ChangeStreamAggregate;\n\n\n", "file_path": "src/operation/aggregate/mod.rs", "rank": 93, "score": 100065.80565106582 }, { "content": "#[derive(Debug)]\n\npub(crate) struct Aggregate {\n\n target: AggregateTarget,\n\n pipeline: Vec<Document>,\n\n options: Option<AggregateOptions>,\n\n}\n\n\n\nimpl Aggregate {\n\n #[cfg(test)]\n\n fn empty() -> Self {\n\n Self::new(Namespace::empty(), Vec::new(), None)\n\n }\n\n\n\n pub(crate) fn new(\n\n target: impl Into<AggregateTarget>,\n\n pipeline: impl IntoIterator<Item = Document>,\n\n options: Option<AggregateOptions>,\n\n ) -> Self {\n\n Self {\n\n target: target.into(),\n", "file_path": "src/operation/aggregate/mod.rs", "rank": 94, "score": 100061.92324673614 }, { "content": "use std::time::Duration;\n\n\n\nuse super::AggregateTarget;\n\nuse crate::{\n\n bson::{doc, Document},\n\n bson_util,\n\n cmap::StreamDescription,\n\n concern::{ReadConcern, ReadConcernLevel},\n\n error::{ErrorKind, WriteFailure},\n\n operation::{\n\n test::{self, handle_response_test},\n\n Aggregate,\n\n Operation,\n\n },\n\n options::{AggregateOptions, Hint},\n\n Namespace,\n\n};\n\n\n", "file_path": "src/operation/aggregate/test.rs", "rank": 95, "score": 100057.91109643756 }, { "content": "\n\n 
remove_empty_write_concern!(self.options);\n\n append_options(&mut body, self.options.as_ref())?;\n\n\n\n if self.is_out_or_merge() {\n\n if let Ok(cursor_doc) = body.get_document_mut(\"cursor\") {\n\n cursor_doc.remove(\"batchSize\");\n\n }\n\n }\n\n\n\n Ok(Command::new_read(\n\n Self::NAME.to_string(),\n\n self.target.db_name().to_string(),\n\n self.options.as_ref().and_then(|o| o.read_concern.clone()),\n\n body,\n\n ))\n\n }\n\n\n\n fn extract_at_cluster_time(\n\n &self,\n", "file_path": "src/operation/aggregate/mod.rs", "rank": 96, "score": 100050.95135521173 }, { "content": " match self {\n\n AggregateTarget::Database(ref s) => s.as_str(),\n\n AggregateTarget::Collection(ref ns) => ns.db.as_str(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<Namespace> for AggregateTarget {\n\n fn from(ns: Namespace) -> Self {\n\n AggregateTarget::Collection(ns)\n\n }\n\n}\n\n\n\nimpl From<String> for AggregateTarget {\n\n fn from(db_name: String) -> Self {\n\n AggregateTarget::Database(db_name)\n\n }\n\n}\n", "file_path": "src/operation/aggregate/mod.rs", "rank": 97, "score": 100050.66488105133 }, { "content": " .and_then(|opts| opts.write_concern.as_ref())\n\n }\n\n\n\n fn retryability(&self) -> Retryability {\n\n if self.is_out_or_merge() {\n\n Retryability::None\n\n } else {\n\n Retryability::Read\n\n }\n\n }\n\n}\n\n\n\nimpl Aggregate {\n\n /// Returns whether this is a $out or $merge aggregation operation.\n\n fn is_out_or_merge(&self) -> bool {\n\n self.pipeline\n\n .last()\n\n .map(|stage| {\n\n let stage = bson_util::first_key(stage);\n\n stage == Some(\"$out\") || stage == Some(\"$merge\")\n", "file_path": "src/operation/aggregate/mod.rs", "rank": 98, "score": 100047.27012091277 }, { "content": " })\n\n .unwrap_or(false)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub(crate) enum AggregateTarget {\n\n Database(String),\n\n Collection(Namespace),\n\n}\n\n\n\nimpl AggregateTarget {\n\n fn to_bson(&self) -> Bson {\n\n match self {\n\n AggregateTarget::Database(_) => 
Bson::Int32(1),\n\n AggregateTarget::Collection(ref ns) => Bson::String(ns.coll.to_string()),\n\n }\n\n }\n\n\n\n fn db_name(&self) -> &str {\n", "file_path": "src/operation/aggregate/mod.rs", "rank": 99, "score": 100045.69424069842 } ]
Rust
sync/src/synchronizer/headers_process.rs
Taem42/ckb
9d43dee13a350fe75f62b75915c3c932f129a3f5
use crate::block_status::BlockStatus; use crate::synchronizer::Synchronizer; use crate::types::{ActiveChain, SyncShared}; use crate::{Status, StatusCode, MAX_HEADERS_LEN}; use ckb_error::{Error, ErrorKind}; use ckb_logger::{debug, log_enabled, warn, Level}; use ckb_network::{CKBProtocolContext, PeerIndex}; use ckb_traits::BlockMedianTimeContext; use ckb_types::{ core::{self, BlockNumber}, packed::{self, Byte32}, prelude::*, }; use ckb_verification::{HeaderError, HeaderErrorKind, HeaderResolver, HeaderVerifier, Verifier}; pub struct HeadersProcess<'a> { message: packed::SendHeadersReader<'a>, synchronizer: &'a Synchronizer, peer: PeerIndex, nc: &'a dyn CKBProtocolContext, active_chain: ActiveChain, } pub struct VerifierResolver<'a> { shared: &'a SyncShared, header: &'a core::HeaderView, parent: Option<&'a core::HeaderView>, } impl<'a> VerifierResolver<'a> { pub fn new( parent: Option<&'a core::HeaderView>, header: &'a core::HeaderView, shared: &'a SyncShared, ) -> Self { VerifierResolver { parent, header, shared, } } } impl<'a> ::std::clone::Clone for VerifierResolver<'a> { fn clone(&self) -> Self { VerifierResolver { parent: self.parent, header: self.header, shared: self.shared, } } } impl<'a> BlockMedianTimeContext for VerifierResolver<'a> { fn median_block_count(&self) -> u64 { self.shared.consensus().median_time_block_count() as u64 } fn timestamp_and_parent(&self, block_hash: &Byte32) -> (u64, BlockNumber, Byte32) { let header = self .shared .get_header(&block_hash) .expect("[VerifierResolver] blocks used for median time exist"); ( header.timestamp(), header.number(), header.data().raw().parent_hash(), ) } } impl<'a> HeaderResolver for VerifierResolver<'a> { fn header(&self) -> &core::HeaderView { self.header } fn parent(&self) -> Option<&core::HeaderView> { self.parent } } impl<'a> HeadersProcess<'a> { pub fn new( message: packed::SendHeadersReader<'a>, synchronizer: &'a Synchronizer, peer: PeerIndex, nc: &'a dyn CKBProtocolContext, ) -> Self { let 
active_chain = synchronizer.shared.active_chain(); HeadersProcess { message, nc, synchronizer, peer, active_chain, } } fn is_continuous(&self, headers: &[core::HeaderView]) -> bool { for window in headers.windows(2) { if let [parent, header] = &window { if header.data().raw().parent_hash() != parent.hash() { debug!( "header.parent_hash {} parent.hash {}", header.parent_hash(), parent.hash() ); return false; } } } true } pub fn accept_first(&self, first: &core::HeaderView) -> ValidationResult { let shared = self.synchronizer.shared(); let parent = shared.get_header(&first.data().raw().parent_hash()); let resolver = VerifierResolver::new(parent.as_ref(), &first, &shared); let verifier = HeaderVerifier::new(&resolver, &shared.consensus()); let acceptor = HeaderAcceptor::new( first, self.peer, resolver.clone(), verifier, self.active_chain.clone(), ); acceptor.accept() } pub fn execute(self) -> Status { debug!("HeadersProcess begin"); let shared = self.synchronizer.shared(); let headers = self .message .headers() .to_entity() .into_iter() .map(packed::Header::into_view) .collect::<Vec<_>>(); if headers.len() > MAX_HEADERS_LEN { shared.state().misbehavior(self.peer, 20); warn!("HeadersProcess is_oversize"); return Status::ok(); } if headers.is_empty() { self.synchronizer .peers() .state .write() .get_mut(&self.peer) .expect("Peer must exists") .headers_sync_timeout = None; debug!("HeadersProcess is_empty (synchronized)"); return Status::ok(); } if !self.is_continuous(&headers) { shared.state().misbehavior(self.peer, 20); debug!("HeadersProcess is not continuous"); return Status::ok(); } let result = self.accept_first(&headers[0]); if !result.is_valid() { if result.misbehavior > 0 { shared.state().misbehavior(self.peer, result.misbehavior); } debug!( "HeadersProcess accept_first is_valid {:?} headers = {:?}", result, headers[0] ); return Status::ok(); } for window in headers.windows(2) { if let [parent, header] = &window { let resolver = 
VerifierResolver::new(Some(&parent), &header, &shared); let verifier = HeaderVerifier::new(&resolver, &shared.consensus()); let acceptor = HeaderAcceptor::new( &header, self.peer, resolver.clone(), verifier, self.active_chain.clone(), ); let result = acceptor.accept(); if !result.is_valid() { if result.misbehavior > 0 { shared.state().misbehavior(self.peer, result.misbehavior); } debug!("HeadersProcess accept is invalid {:?}", result); return Status::ok(); } } } if log_enabled!(Level::Debug) { let shared_best_known = self.synchronizer.shared.state().shared_best_header(); let peer_best_known = self.synchronizer.peers().get_best_known_header(self.peer); debug!( "chain: num={}, diff={:#x};", self.active_chain.tip_number(), self.active_chain.total_difficulty() ); debug!( "shared best_known_header: num={}, diff={:#x}, hash={};", shared_best_known.number(), shared_best_known.total_difficulty(), shared_best_known.hash(), ); if let Some(header) = peer_best_known { debug!( "peer's best_known_header: peer: {}, num={}; diff={:#x}, hash={};", self.peer, header.number(), header.total_difficulty(), header.hash() ); } else { debug!("state: null;"); } debug!("peer: {}", self.peer); } if headers.len() == MAX_HEADERS_LEN { let start = headers.last().expect("empty checked"); self.active_chain .send_getheaders_to_peer(self.nc, self.peer, start); } let peer_flags = self .synchronizer .peers() .state .read() .get(&self.peer) .map(|state| state.peer_flags) .unwrap_or_default(); if self.active_chain.is_initial_block_download() && headers.len() != MAX_HEADERS_LEN && (!peer_flags.is_protect && !peer_flags.is_whitelist && peer_flags.is_outbound) { debug!("Disconnect peer({}) is unprotected outbound", self.peer); if let Err(err) = self .nc .disconnect(self.peer, "useless outbound peer in IBD") { return StatusCode::Network.with_context(format!("Disconnect error: {:?}", err)); } } Status::ok() } } #[derive(Clone)] pub struct HeaderAcceptor<'a, V: Verifier> { header: &'a core::HeaderView, 
active_chain: ActiveChain, peer: PeerIndex, resolver: V::Target, verifier: V, } impl<'a, V> HeaderAcceptor<'a, V> where V: Verifier<Target = VerifierResolver<'a>>, { pub fn new( header: &'a core::HeaderView, peer: PeerIndex, resolver: VerifierResolver<'a>, verifier: V, active_chain: ActiveChain, ) -> Self { HeaderAcceptor { header, peer, resolver, verifier, active_chain, } } pub fn prev_block_check(&self, state: &mut ValidationResult) -> Result<(), ()> { if self.active_chain.contains_block_status( &self.header.data().raw().parent_hash(), BlockStatus::BLOCK_INVALID, ) { state.dos(Some(ValidationError::InvalidParent), 100); return Err(()); } Ok(()) } pub fn non_contextual_check(&self, state: &mut ValidationResult) -> Result<(), ()> { self.verifier.verify(&self.resolver).map_err(|error| { debug!( "HeadersProcess accept {:?} error {:?}", self.header.number(), error ); if error.kind() == &ErrorKind::Header { let header_error = error .downcast_ref::<HeaderError>() .expect("error kind checked"); match header_error.kind() { HeaderErrorKind::Pow => state.dos(Some(ValidationError::Verify(error)), 100), HeaderErrorKind::Epoch => state.dos(Some(ValidationError::Verify(error)), 50), _ => state.invalid(Some(ValidationError::Verify(error))), } } else { state.invalid(Some(ValidationError::Verify(error))); } }) } pub fn version_check(&self, state: &mut ValidationResult) -> Result<(), ()> { if self.header.version() != 0 { state.invalid(Some(ValidationError::Version)); Err(()) } else { Ok(()) } } pub fn accept(&self) -> ValidationResult { let mut result = ValidationResult::default(); let shared = self.active_chain.shared(); let state = shared.state(); if self .active_chain .contains_block_status(&self.header.hash(), BlockStatus::HEADER_VALID) { let header_view = shared .get_header_view(&self.header.hash()) .expect("header with HEADER_VALID should exist"); state.peers().new_header_received(self.peer, &header_view); return result; } if self.prev_block_check(&mut result).is_err() { 
debug!( "HeadersProcess reject invalid-parent header: {} {}", self.header.number(), self.header.hash(), ); state.insert_block_status(self.header.hash(), BlockStatus::BLOCK_INVALID); return result; } if self.non_contextual_check(&mut result).is_err() { debug!( "HeadersProcess reject non-contextual header: {} {}", self.header.number(), self.header.hash(), ); state.insert_block_status(self.header.hash(), BlockStatus::BLOCK_INVALID); return result; } if self.version_check(&mut result).is_err() { debug!( "HeadersProcess reject invalid-version header {} {}", self.header.number(), self.header.hash(), ); state.insert_block_status(self.header.hash(), BlockStatus::BLOCK_INVALID); return result; } shared.insert_valid_header(self.peer, &self.header); result } } #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum ValidationState { VALID, INVALID, } impl Default for ValidationState { fn default() -> Self { ValidationState::VALID } } #[derive(Debug)] pub enum ValidationError { Verify(Error), Version, InvalidParent, } #[derive(Debug, Default)] pub struct ValidationResult { pub error: Option<ValidationError>, pub misbehavior: u32, pub state: ValidationState, } impl ValidationResult { pub fn invalid(&mut self, error: Option<ValidationError>) { self.dos(error, 0); } pub fn dos(&mut self, error: Option<ValidationError>, misbehavior: u32) { self.error = error; self.misbehavior += misbehavior; self.state = ValidationState::INVALID; } pub fn is_valid(&self) -> bool { self.state == ValidationState::VALID } }
use crate::block_status::BlockStatus; use crate::synchronizer::Synchronizer; use crate::types::{ActiveChain, SyncShared}; use crate::{Status, StatusCode, MAX_HEADERS_LEN}; use ckb_error::{Error, ErrorKind}; use ckb_logger::{debug, log_enabled, warn, Level}; use ckb_network::{CKBProtocolContext, PeerIndex}; use ckb_traits::BlockMedianTimeContext; use ckb_types::{ core::{self, BlockNumber}, packed::{self, Byte32}, prelude::*, }; use ckb_verification::{HeaderError, HeaderErrorKind, HeaderResolver, HeaderVerifier, Verifier}; pub struct HeadersProcess<'a> { message: packed::SendHeadersReader<'a>, synchronizer: &'a Synchronizer, peer: PeerIndex, nc: &'a dyn CKBProtocolContext, active_chain: ActiveChain, } pub struct VerifierResolver<'a> { shared: &'a SyncShared, header: &'a core::HeaderView, parent: Option<&'a core::HeaderView>, } impl<'a> VerifierResolver<'a> { pub fn new( parent: Option<&'a core::HeaderView>, header: &'a core::HeaderView, shared: &'a SyncShared, ) -> Self { VerifierResolver { parent, header, shared, } } } impl<'a> ::std::clone::Clone for VerifierResolver<'a> { fn clone(&self) -> Self { VerifierResolver { parent: self.parent, header: self.header, shared: self.shared, } } } impl<'a> BlockMedianTimeContext for VerifierResolver<'a> { fn median_block_count(&self) -> u64 { self.shared.consensus().median_time_block_count() as u64 } fn timestamp_and_parent(&self, block_hash: &Byte32) -> (u64, BlockNumber, Byte32) { let header = self .shared .get_header(&block_hash) .expect("[VerifierResolver] blocks used for median time exist"); ( header.timestamp(), header.number(), header.data().raw().parent_hash(), ) } } impl<'a> HeaderResolver for VerifierResolver<'a> { fn header(&self) -> &core::HeaderView { self.header } fn parent(&self) -> Option<&core::HeaderView> { self.parent } } impl<'a> HeadersProcess<'a> { pub fn new( message: packed::SendHeadersReader<'a>, synchronizer: &'a Synchronizer, peer: PeerIndex, nc: &'a dyn CKBProtocolContext, ) -> Self { let 
active_chain = synchronizer.shared.active_chain(); HeadersProcess { message, nc, synchronizer, peer, active_chain, } } fn is_continuous(&self, headers: &[core::HeaderView]) -> bool { for window in headers.windows(2) { if let [parent, header] = &window { if header.data().raw().parent_hash() != parent.hash() { debug!( "header.parent_hash {} parent.hash {}", header.parent_hash(), parent.hash() ); return false; } } } true } pub fn accept_first(&self, first: &core::HeaderView) -> ValidationResult { let shared = self.synchronizer.shared(); let parent = shared.get_header(&first.data().raw().parent_hash()); let resolver = VerifierResolver::new(parent.as_ref(), &first, &shared); let verifier = HeaderVerifier::new(&resolver, &shared.consensus()); let acceptor = HeaderAcceptor::new( first, self.peer, resolver.clone(), verifier, self.active_chain.clone(), ); acceptor.accept() } pub fn execute(self) -> Status { debug!("HeadersProcess begin"); let shared = self.synchronizer.shared(); let headers = self .message .headers() .to_entity() .into_iter() .map(packed::Header::into_view) .collect::<Vec<_>>(); if headers.len() > MAX_HEADERS_LE
.nc .disconnect(self.peer, "useless outbound peer in IBD") { return StatusCode::Network.with_context(format!("Disconnect error: {:?}", err)); } } Status::ok() } } #[derive(Clone)] pub struct HeaderAcceptor<'a, V: Verifier> { header: &'a core::HeaderView, active_chain: ActiveChain, peer: PeerIndex, resolver: V::Target, verifier: V, } impl<'a, V> HeaderAcceptor<'a, V> where V: Verifier<Target = VerifierResolver<'a>>, { pub fn new( header: &'a core::HeaderView, peer: PeerIndex, resolver: VerifierResolver<'a>, verifier: V, active_chain: ActiveChain, ) -> Self { HeaderAcceptor { header, peer, resolver, verifier, active_chain, } } pub fn prev_block_check(&self, state: &mut ValidationResult) -> Result<(), ()> { if self.active_chain.contains_block_status( &self.header.data().raw().parent_hash(), BlockStatus::BLOCK_INVALID, ) { state.dos(Some(ValidationError::InvalidParent), 100); return Err(()); } Ok(()) } pub fn non_contextual_check(&self, state: &mut ValidationResult) -> Result<(), ()> { self.verifier.verify(&self.resolver).map_err(|error| { debug!( "HeadersProcess accept {:?} error {:?}", self.header.number(), error ); if error.kind() == &ErrorKind::Header { let header_error = error .downcast_ref::<HeaderError>() .expect("error kind checked"); match header_error.kind() { HeaderErrorKind::Pow => state.dos(Some(ValidationError::Verify(error)), 100), HeaderErrorKind::Epoch => state.dos(Some(ValidationError::Verify(error)), 50), _ => state.invalid(Some(ValidationError::Verify(error))), } } else { state.invalid(Some(ValidationError::Verify(error))); } }) } pub fn version_check(&self, state: &mut ValidationResult) -> Result<(), ()> { if self.header.version() != 0 { state.invalid(Some(ValidationError::Version)); Err(()) } else { Ok(()) } } pub fn accept(&self) -> ValidationResult { let mut result = ValidationResult::default(); let shared = self.active_chain.shared(); let state = shared.state(); if self .active_chain .contains_block_status(&self.header.hash(), 
BlockStatus::HEADER_VALID) { let header_view = shared .get_header_view(&self.header.hash()) .expect("header with HEADER_VALID should exist"); state.peers().new_header_received(self.peer, &header_view); return result; } if self.prev_block_check(&mut result).is_err() { debug!( "HeadersProcess reject invalid-parent header: {} {}", self.header.number(), self.header.hash(), ); state.insert_block_status(self.header.hash(), BlockStatus::BLOCK_INVALID); return result; } if self.non_contextual_check(&mut result).is_err() { debug!( "HeadersProcess reject non-contextual header: {} {}", self.header.number(), self.header.hash(), ); state.insert_block_status(self.header.hash(), BlockStatus::BLOCK_INVALID); return result; } if self.version_check(&mut result).is_err() { debug!( "HeadersProcess reject invalid-version header {} {}", self.header.number(), self.header.hash(), ); state.insert_block_status(self.header.hash(), BlockStatus::BLOCK_INVALID); return result; } shared.insert_valid_header(self.peer, &self.header); result } } #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum ValidationState { VALID, INVALID, } impl Default for ValidationState { fn default() -> Self { ValidationState::VALID } } #[derive(Debug)] pub enum ValidationError { Verify(Error), Version, InvalidParent, } #[derive(Debug, Default)] pub struct ValidationResult { pub error: Option<ValidationError>, pub misbehavior: u32, pub state: ValidationState, } impl ValidationResult { pub fn invalid(&mut self, error: Option<ValidationError>) { self.dos(error, 0); } pub fn dos(&mut self, error: Option<ValidationError>, misbehavior: u32) { self.error = error; self.misbehavior += misbehavior; self.state = ValidationState::INVALID; } pub fn is_valid(&self) -> bool { self.state == ValidationState::VALID } }
N { shared.state().misbehavior(self.peer, 20); warn!("HeadersProcess is_oversize"); return Status::ok(); } if headers.is_empty() { self.synchronizer .peers() .state .write() .get_mut(&self.peer) .expect("Peer must exists") .headers_sync_timeout = None; debug!("HeadersProcess is_empty (synchronized)"); return Status::ok(); } if !self.is_continuous(&headers) { shared.state().misbehavior(self.peer, 20); debug!("HeadersProcess is not continuous"); return Status::ok(); } let result = self.accept_first(&headers[0]); if !result.is_valid() { if result.misbehavior > 0 { shared.state().misbehavior(self.peer, result.misbehavior); } debug!( "HeadersProcess accept_first is_valid {:?} headers = {:?}", result, headers[0] ); return Status::ok(); } for window in headers.windows(2) { if let [parent, header] = &window { let resolver = VerifierResolver::new(Some(&parent), &header, &shared); let verifier = HeaderVerifier::new(&resolver, &shared.consensus()); let acceptor = HeaderAcceptor::new( &header, self.peer, resolver.clone(), verifier, self.active_chain.clone(), ); let result = acceptor.accept(); if !result.is_valid() { if result.misbehavior > 0 { shared.state().misbehavior(self.peer, result.misbehavior); } debug!("HeadersProcess accept is invalid {:?}", result); return Status::ok(); } } } if log_enabled!(Level::Debug) { let shared_best_known = self.synchronizer.shared.state().shared_best_header(); let peer_best_known = self.synchronizer.peers().get_best_known_header(self.peer); debug!( "chain: num={}, diff={:#x};", self.active_chain.tip_number(), self.active_chain.total_difficulty() ); debug!( "shared best_known_header: num={}, diff={:#x}, hash={};", shared_best_known.number(), shared_best_known.total_difficulty(), shared_best_known.hash(), ); if let Some(header) = peer_best_known { debug!( "peer's best_known_header: peer: {}, num={}; diff={:#x}, hash={};", self.peer, header.number(), header.total_difficulty(), header.hash() ); } else { debug!("state: null;"); } debug!("peer: 
{}", self.peer); } if headers.len() == MAX_HEADERS_LEN { let start = headers.last().expect("empty checked"); self.active_chain .send_getheaders_to_peer(self.nc, self.peer, start); } let peer_flags = self .synchronizer .peers() .state .read() .get(&self.peer) .map(|state| state.peer_flags) .unwrap_or_default(); if self.active_chain.is_initial_block_download() && headers.len() != MAX_HEADERS_LEN && (!peer_flags.is_protect && !peer_flags.is_whitelist && peer_flags.is_outbound) { debug!("Disconnect peer({}) is unprotected outbound", self.peer); if let Err(err) = self
random
[ { "content": "pub fn inherit_block(shared: &Shared, parent_hash: &Byte32) -> BlockBuilder {\n\n let snapshot = shared.snapshot();\n\n let parent = snapshot.get_block(parent_hash).unwrap();\n\n let parent_epoch = snapshot.get_block_epoch(parent_hash).unwrap();\n\n let parent_number = parent.header().number();\n\n let epoch = snapshot\n\n .next_epoch_ext(snapshot.consensus(), &parent_epoch, &parent.header())\n\n .unwrap_or(parent_epoch);\n\n let cellbase = {\n\n let (_, reward) = snapshot.finalize_block_reward(&parent.header()).unwrap();\n\n always_success_cellbase(parent_number + 1, reward.total, snapshot.consensus())\n\n };\n\n let dao = {\n\n let resolved_cellbase = resolve_transaction(\n\n cellbase,\n\n &mut HashSet::new(),\n\n snapshot.as_ref(),\n\n snapshot.as_ref(),\n\n )\n\n .unwrap();\n", "file_path": "sync/src/tests/util.rs", "rank": 0, "score": 312634.65379055607 }, { "content": "pub fn dao_data(shared: &Shared, parent: &HeaderView, txs: &[TransactionView]) -> Byte32 {\n\n let mut seen_inputs = HashSet::new();\n\n // In case of resolving errors, we just output a dummp DAO field,\n\n // since those should be the cases where we are testing invalid\n\n // blocks\n\n let transactions_provider = TransactionsProvider::new(txs.iter());\n\n let snapshot: &Snapshot = &shared.snapshot();\n\n let overlay_cell_provider = OverlayCellProvider::new(&transactions_provider, snapshot);\n\n let rtxs = txs.iter().cloned().try_fold(vec![], |mut rtxs, tx| {\n\n let rtx = resolve_transaction(tx, &mut seen_inputs, &overlay_cell_provider, snapshot);\n\n match rtx {\n\n Ok(rtx) => {\n\n rtxs.push(rtx);\n\n Ok(rtxs)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n });\n\n let rtxs = rtxs.expect(\"dao_data resolve_transaction\");\n\n let calculator = DaoCalculator::new(shared.consensus(), snapshot);\n", "file_path": "benches/benches/benchmarks/util.rs", "rank": 1, "score": 296076.0144508093 }, { "content": "#[test]\n\npub fn test_block_with_one_cellbase_at_first() {\n\n let transaction = 
create_normal_transaction();\n\n\n\n let block = BlockBuilder::default()\n\n .header(HeaderBuilder::default().number(1u64.pack()).build())\n\n .transaction(create_cellbase_transaction_with_block_number(1))\n\n .transaction(transaction)\n\n .build();\n\n\n\n let verifier = CellbaseVerifier::new();\n\n assert!(verifier.verify(&block).is_ok());\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 2, "score": 281679.68514792365 }, { "content": "/// The invoker should only rely on `block_median_time` function\n\n/// the other functions only use to help the default `block_median_time`, and maybe unimplemented.\n\npub trait BlockMedianTimeContext {\n\n fn median_block_count(&self) -> u64;\n\n\n\n /// Return timestamp and block_number of the corresponding block_hash, and hash of parent block\n\n ///\n\n /// Fake implementation:\n\n /// ```ignore\n\n /// let current_header = get_block_header(block_hash);\n\n /// let parent_header = current_header.timestamp_and_parent().header();\n\n /// return (parent_header.timestamp(), current_header.number(), parent_header.hash());\n\n /// ```\n\n fn timestamp_and_parent(&self, block_hash: &Byte32) -> (u64, BlockNumber, Byte32);\n\n\n\n /// Return past block median time, **including the timestamp of the given one**\n\n fn block_median_time(&self, block_hash: &Byte32) -> u64 {\n\n let median_time_span = self.median_block_count();\n\n let mut timestamps: Vec<u64> = Vec::with_capacity(median_time_span as usize);\n\n let mut block_hash = block_hash.clone();\n\n for _ in 0..median_time_span {\n\n let (timestamp, block_number, parent_hash) = self.timestamp_and_parent(&block_hash);\n", "file_path": "traits/src/block_median_time_context.rs", "rank": 3, "score": 272576.6661694369 }, { "content": "// Construct the next block based the given `parent`\n\nfn next_block(shared: &Shared, parent: &HeaderView) -> BlockView {\n\n let snapshot: &Snapshot = &shared.snapshot();\n\n let epoch = {\n\n let last_epoch = snapshot\n\n 
.get_block_epoch(&parent.hash())\n\n .expect(\"current epoch exists\");\n\n snapshot\n\n .next_epoch_ext(shared.consensus(), &last_epoch, parent)\n\n .unwrap_or(last_epoch)\n\n };\n\n let (_, reward) = snapshot.finalize_block_reward(parent).unwrap();\n\n let cellbase = always_success_cellbase(parent.number() + 1, reward.total, shared.consensus());\n\n\n\n // We store a cellbase for constructing a new transaction later\n\n if parent.number() > shared.consensus().finalization_delay_length() {\n\n UNSPENT.with(|unspent| {\n\n *unspent.borrow_mut() = cellbase.hash().unpack();\n\n });\n\n }\n\n\n", "file_path": "rpc/src/test.rs", "rank": 4, "score": 269410.3643051241 }, { "content": "pub fn wait_get_blocks(secs: u64, net: &Net) -> bool {\n\n wait_until(secs, || {\n\n if let Ok((_, _, data)) = net.receive_timeout(Duration::from_secs(1)) {\n\n if let Ok(message) = SyncMessage::from_slice(&data) {\n\n return message.to_enum().item_name() == GetBlocks::NAME;\n\n }\n\n }\n\n false\n\n })\n\n}\n", "file_path": "test/src/specs/sync/utils.rs", "rank": 5, "score": 265498.716888042 }, { "content": "fn mock_median_time_context() -> MockMedianTime {\n\n let now = unix_time_as_millis();\n\n let timestamps = (0..100).map(|_| now).collect();\n\n MockMedianTime::new(timestamps)\n\n}\n\n\n", "file_path": "verification/src/tests/header_verifier.rs", "rank": 6, "score": 258815.14963289964 }, { "content": "pub fn create_secp_cellbase(shared: &Shared, parent: &HeaderView) -> TransactionView {\n\n let (_, _, secp_script) = secp_cell();\n\n let capacity = calculate_reward(shared, parent);\n\n\n\n let builder = TransactionBuilder::default()\n\n .input(CellInput::new_cellbase_input(parent.number() + 1))\n\n .witness(secp_script.clone().into_witness());\n\n\n\n if (parent.number() + 1) <= shared.consensus().finalization_delay_length() {\n\n builder.build()\n\n } else {\n\n builder\n\n .output(\n\n CellOutput::new_builder()\n\n .capacity(capacity.pack())\n\n .lock(secp_script.clone())\n\n 
.build(),\n\n )\n\n .output_data(Bytes::new().pack())\n\n .build()\n\n }\n\n}\n\n\n", "file_path": "benches/benches/benchmarks/util.rs", "rank": 7, "score": 255800.45587272575 }, { "content": "#[test]\n\npub fn test_max_block_bytes_verifier() {\n\n let block = BlockBuilder::default()\n\n .header(HeaderBuilder::default().number(2u64.pack()).build())\n\n .build();\n\n\n\n {\n\n let verifier =\n\n BlockBytesVerifier::new(block.data().serialized_size_without_uncle_proposals() as u64);\n\n assert!(verifier.verify(&block).is_ok());\n\n }\n\n\n\n {\n\n let verifier = BlockBytesVerifier::new(\n\n block.data().serialized_size_without_uncle_proposals() as u64 - 1,\n\n );\n\n assert_error_eq!(\n\n verifier.verify(&block).unwrap_err(),\n\n BlockErrorKind::ExceededMaximumBlockBytes,\n\n );\n\n }\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 8, "score": 255062.7635693574 }, { "content": "pub fn create_always_success_cellbase(shared: &Shared, parent: &HeaderView) -> TransactionView {\n\n let (_, _, always_success_script) = always_success_cell();\n\n let capacity = calculate_reward(shared, parent);\n\n\n\n let builder = TransactionBuilder::default()\n\n .input(CellInput::new_cellbase_input(parent.number() + 1))\n\n .witness(always_success_script.clone().into_witness());\n\n\n\n if (parent.number() + 1) <= shared.consensus().finalization_delay_length() {\n\n builder.build()\n\n } else {\n\n builder\n\n .output(\n\n CellOutput::new_builder()\n\n .capacity(capacity.pack())\n\n .lock(always_success_script.clone())\n\n .build(),\n\n )\n\n .output_data(Bytes::new().pack())\n\n .build()\n\n }\n\n}\n\n\n", "file_path": "benches/benches/benchmarks/util.rs", "rank": 9, "score": 251141.71843576102 }, { "content": "#[test]\n\npub fn test_max_block_bytes_verifier_skip_genesis() {\n\n let block = BlockBuilder::default().build();\n\n {\n\n let verifier =\n\n BlockBytesVerifier::new(block.data().serialized_size_without_uncle_proposals() as u64);\n\n 
assert!(verifier.verify(&block).is_ok());\n\n }\n\n\n\n {\n\n let verifier = BlockBytesVerifier::new(\n\n block.data().serialized_size_without_uncle_proposals() as u64 - 1,\n\n );\n\n assert!(verifier.verify(&block).is_ok());\n\n }\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 10, "score": 246370.84515905622 }, { "content": "#[test]\n\npub fn test_block_without_cellbase() {\n\n let block = BlockBuilder::default()\n\n .header(HeaderBuilder::default().number(1u64.pack()).build())\n\n .transaction(TransactionBuilder::default().build())\n\n .build();\n\n let verifier = CellbaseVerifier::new();\n\n assert_error_eq!(\n\n verifier.verify(&block).unwrap_err(),\n\n CellbaseError::InvalidQuantity,\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 11, "score": 245443.98038070498 }, { "content": "#[test]\n\npub fn test_block_with_duplicated_proposals() {\n\n let block = BlockBuilder::default()\n\n .header(HeaderBuilder::default().number(2u64.pack()).build())\n\n .proposal(ProposalShortId::zero())\n\n .proposal(ProposalShortId::zero())\n\n .build();\n\n\n\n let verifier = DuplicateVerifier::new();\n\n assert_error_eq!(\n\n verifier.verify(&block).unwrap_err(),\n\n BlockErrorKind::ProposalTransactionDuplicate,\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 12, "score": 245443.98038070498 }, { "content": "#[test]\n\npub fn test_block_with_duplicated_txs() {\n\n let tx = create_normal_transaction();\n\n let block = BlockBuilder::default()\n\n .header(HeaderBuilder::default().number(2u64.pack()).build())\n\n .transaction(tx.clone())\n\n .transaction(tx)\n\n .build();\n\n\n\n let verifier = DuplicateVerifier::new();\n\n assert_error_eq!(\n\n verifier.verify(&block).unwrap_err(),\n\n BlockErrorKind::CommitTransactionDuplicate,\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 13, "score": 245443.98038070498 }, { "content": "#[test]\n\npub fn 
test_block_with_two_cellbases() {\n\n let block = BlockBuilder::default()\n\n .header(HeaderBuilder::default().number(2u64.pack()).build())\n\n .transaction(create_cellbase_transaction())\n\n .transaction(create_cellbase_transaction())\n\n .build();\n\n\n\n let verifier = CellbaseVerifier::new();\n\n assert_error_eq!(\n\n verifier.verify(&block).unwrap_err(),\n\n CellbaseError::InvalidQuantity,\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 14, "score": 245443.98038070498 }, { "content": "pub fn since_from_relative_block_number(block_number: BlockNumber) -> u64 {\n\n FLAG_SINCE_RELATIVE | FLAG_SINCE_BLOCK_NUMBER | block_number\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 15, "score": 245265.53882056748 }, { "content": "pub fn since_from_absolute_block_number(block_number: BlockNumber) -> u64 {\n\n FLAG_SINCE_BLOCK_NUMBER | block_number\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 16, "score": 245265.53882056748 }, { "content": "#[test]\n\npub fn test_version() {\n\n let header = HeaderBuilder::default()\n\n .version((BLOCK_VERSION + 1).pack())\n\n .build();\n\n let verifier = VersionVerifier::new(&header, BLOCK_VERSION);\n\n\n\n assert_error_eq!(verifier.verify().unwrap_err(), BlockErrorKind::Version);\n\n}\n\n\n", "file_path": "verification/src/tests/header_verifier.rs", "rank": 17, "score": 242301.9629224729 }, { "content": "#[test]\n\npub fn test_max_proposals_limit_verifier() {\n\n let block = BlockBuilder::default()\n\n .proposal(ProposalShortId::zero())\n\n .build();\n\n\n\n {\n\n let verifier = BlockProposalsLimitVerifier::new(1);\n\n assert!(verifier.verify(&block).is_ok());\n\n }\n\n\n\n {\n\n let verifier = BlockProposalsLimitVerifier::new(0);\n\n assert_error_eq!(\n\n verifier.verify(&block).unwrap_err(),\n\n BlockErrorKind::ExceededMaximumProposalsLimit,\n\n );\n\n }\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 18, "score": 240676.70748031905 }, { "content": 
"#[test]\n\npub fn test_block_with_correct_cellbase_number() {\n\n let block = BlockBuilder::default()\n\n .header(HeaderBuilder::default().number(2u64.pack()).build())\n\n .transaction(create_cellbase_transaction_with_block_number(2))\n\n .build();\n\n\n\n let verifier = CellbaseVerifier::new();\n\n assert!(verifier.verify(&block).is_ok());\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 19, "score": 240655.21031737735 }, { "content": "#[test]\n\npub fn test_block_with_one_cellbase_at_last() {\n\n let block = BlockBuilder::default()\n\n .header(HeaderBuilder::default().number(2u64.pack()).build())\n\n .transaction(create_normal_transaction())\n\n .transaction(create_cellbase_transaction())\n\n .build();\n\n\n\n let verifier = CellbaseVerifier::new();\n\n assert_error_eq!(\n\n verifier.verify(&block).unwrap_err(),\n\n CellbaseError::InvalidPosition,\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 20, "score": 240655.21031737735 }, { "content": "#[test]\n\npub fn test_block_with_incorrect_cellbase_number() {\n\n let block = BlockBuilder::default()\n\n .header(HeaderBuilder::default().number(2u64.pack()).build())\n\n .transaction(create_cellbase_transaction_with_block_number(3))\n\n .build();\n\n\n\n let verifier = CellbaseVerifier::new();\n\n assert_error_eq!(\n\n verifier.verify(&block).unwrap_err(),\n\n CellbaseError::InvalidInput,\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 21, "score": 240655.21031737735 }, { "content": "pub fn build_get_blocks(hashes: &[Byte32]) -> Bytes {\n\n let get_blocks = GetBlocks::new_builder()\n\n .block_hashes(hashes.iter().map(ToOwned::to_owned).pack())\n\n .build();\n\n\n\n SyncMessage::new_builder()\n\n .set(get_blocks)\n\n .build()\n\n .as_bytes()\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 22, "score": 237068.1532690741 }, { "content": "#[test]\n\npub fn test_cellbase_with_fee() {\n\n let transaction = 
create_normal_transaction();\n\n\n\n let block = BlockBuilder::default()\n\n .transaction(create_cellbase_transaction_with_capacity(capacity_bytes!(\n\n 110\n\n )))\n\n .transaction(transaction)\n\n .build();\n\n\n\n let verifier = CellbaseVerifier::new();\n\n assert!(verifier.verify(&block).is_ok());\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 23, "score": 236375.77358804227 }, { "content": "#[test]\n\npub fn test_proposals_root() {\n\n let header = HeaderBuilder::default()\n\n .number(2u64.pack())\n\n .proposals_hash(h256!(\"0x1\").pack())\n\n .build();\n\n let block = BlockBuilder::default()\n\n .header(header)\n\n .transaction(create_normal_transaction())\n\n .build_unchecked();\n\n\n\n let verifier = MerkleRootVerifier::new();\n\n assert_error_eq!(\n\n verifier.verify(&block).unwrap_err(),\n\n BlockErrorKind::TransactionsRoot,\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 24, "score": 236375.77358804227 }, { "content": "#[test]\n\npub fn test_transaction_root() {\n\n let header = HeaderBuilder::default()\n\n .number(2u64.pack())\n\n .transactions_root(Byte32::zero())\n\n .build();\n\n let block = BlockBuilder::default()\n\n .header(header)\n\n .transaction(create_normal_transaction())\n\n .build_unchecked();\n\n\n\n let verifier = MerkleRootVerifier::new();\n\n assert_error_eq!(\n\n verifier.verify(&block).unwrap_err(),\n\n BlockErrorKind::TransactionsRoot,\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 25, "score": 236375.77358804227 }, { "content": "pub fn wait_until<F>(secs: u64, f: F) -> bool\n\nwhere\n\n F: Fn() -> bool,\n\n{\n\n let start = Instant::now();\n\n let timeout = Duration::new(secs, 0);\n\n while Instant::now().duration_since(start) <= timeout {\n\n if f() {\n\n return true;\n\n }\n\n thread::sleep(Duration::new(1, 0));\n\n }\n\n false\n\n}\n\n\n", "file_path": "network/src/protocols/test.rs", "rank": 26, "score": 235578.68202639275 
}, { "content": "fn sync_header(net: &Net, peer_id: PeerIndex, block: &BlockView) {\n\n net.send(\n\n NetworkProtocol::SYNC.into(),\n\n peer_id,\n\n build_header(&block.header()),\n\n );\n\n}\n\n\n", "file_path": "test/src/specs/sync/block_sync.rs", "rank": 27, "score": 234653.36344602675 }, { "content": "pub trait HeaderResolver {\n\n fn header(&self) -> &HeaderView;\n\n /// resolves parent header\n\n fn parent(&self) -> Option<&HeaderView>;\n\n}\n\n\n\npub struct HeaderVerifier<'a, T, M> {\n\n block_median_time_context: &'a M,\n\n consensus: &'a Consensus,\n\n _phantom: PhantomData<T>,\n\n}\n\n\n\nimpl<'a, T, M: BlockMedianTimeContext> HeaderVerifier<'a, T, M> {\n\n pub fn new(block_median_time_context: &'a M, consensus: &'a Consensus) -> Self {\n\n HeaderVerifier {\n\n consensus,\n\n block_median_time_context,\n\n _phantom: PhantomData,\n\n }\n\n }\n", "file_path": "verification/src/header_verifier.rs", "rank": 28, "score": 231433.46456786903 }, { "content": "#[test]\n\npub fn test_cellbase_with_less_reward() {\n\n let transaction = create_normal_transaction();\n\n\n\n let block = BlockBuilder::default()\n\n .transaction(create_cellbase_transaction_with_capacity(capacity_bytes!(\n\n 50\n\n )))\n\n .transaction(transaction)\n\n .build();\n\n\n\n let verifier = CellbaseVerifier::new();\n\n assert!(verifier.verify(&block).is_ok());\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 29, "score": 231170.25425326405 }, { "content": "#[test]\n\npub fn test_cellbase_without_output() {\n\n // without_output\n\n let cellbase_without_output = TransactionBuilder::default()\n\n .input(CellInput::new_cellbase_input(2u64))\n\n .witness(Script::default().into_witness())\n\n .build();\n\n let block = BlockBuilder::default()\n\n .header(HeaderBuilder::default().number(2u64.pack()).build())\n\n .transaction(cellbase_without_output)\n\n .build();\n\n let result = CellbaseVerifier::new().verify(&block);\n\n assert!(result.is_ok(), \"Unexpected error 
{:?}\", result);\n\n\n\n // only output_data\n\n let cellbase_without_output = TransactionBuilder::default()\n\n .input(CellInput::new_cellbase_input(2u64))\n\n .witness(Script::default().into_witness())\n\n .output_data(Bytes::new().pack())\n\n .build();\n\n let block = BlockBuilder::default()\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 30, "score": 231170.25425326405 }, { "content": "#[test]\n\npub fn test_cellbase_with_two_output() {\n\n let block = BlockBuilder::default()\n\n .header(HeaderBuilder::default().number(2u64.pack()).build())\n\n .transaction(create_cellbase_transaction_with_two_output())\n\n .build();\n\n let verifier = CellbaseVerifier::new();\n\n assert_error_eq!(\n\n verifier.verify(&block).unwrap_err(),\n\n CellbaseError::InvalidOutputQuantity,\n\n )\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 31, "score": 231170.25425326405 }, { "content": "pub fn wait_until<F>(secs: u64, mut f: F) -> bool\n\nwhere\n\n F: FnMut() -> bool,\n\n{\n\n let timeout = tweaked_duration(secs);\n\n let start = Instant::now();\n\n while Instant::now().duration_since(start) <= timeout {\n\n if f() {\n\n return true;\n\n }\n\n thread::sleep(Duration::new(1, 0));\n\n }\n\n false\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 32, "score": 230607.09853734088 }, { "content": "fn sync_get_blocks(net: &Net, peer_id: PeerIndex, hashes: &[Byte32]) {\n\n net.send(\n\n NetworkProtocol::SYNC.into(),\n\n peer_id,\n\n build_get_blocks(hashes),\n\n );\n\n}\n", "file_path": "test/src/specs/sync/block_sync.rs", "rank": 33, "score": 226627.94480409447 }, { "content": "#[test]\n\npub fn test_cellbase_with_two_output_data() {\n\n let block = BlockBuilder::default()\n\n .header(HeaderBuilder::default().number(2u64.pack()).build())\n\n .transaction(create_cellbase_transaction_with_two_output_data())\n\n .build();\n\n let verifier = CellbaseVerifier::new();\n\n assert_error_eq!(\n\n verifier.verify(&block).unwrap_err(),\n\n 
CellbaseError::InvalidOutputQuantity,\n\n )\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 34, "score": 226269.154228339 }, { "content": "#[test]\n\npub fn test_exceeded_maximum_block_bytes() {\n\n let data: Bytes = vec![1; 500].into();\n\n let transaction = TransactionBuilder::default()\n\n .version((Version::default() + 1).pack())\n\n .output(\n\n CellOutput::new_builder()\n\n .capacity(capacity_bytes!(50).pack())\n\n .build(),\n\n )\n\n .output_data(data.pack())\n\n .build();\n\n let verifier = SizeVerifier::new(&transaction, 100);\n\n\n\n assert_error_eq!(\n\n verifier.verify().unwrap_err(),\n\n TransactionError::ExceededMaximumBlockBytes,\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 35, "score": 226269.15422833897 }, { "content": "#[test]\n\npub fn test_absolute_block_number_lock() {\n\n // absolute lock until block number 0xa\n\n let tx = create_tx_with_lock(0x0000_0000_0000_000a);\n\n let rtx = create_resolve_tx_with_transaction_info(\n\n &tx,\n\n MockMedianTime::get_transaction_info(1, EpochNumberWithFraction::new(0, 0, 10), 1),\n\n );\n\n let median_time_context = MockMedianTime::new(vec![0; 11]);\n\n\n\n assert_error_eq!(\n\n verify_since(&rtx, &median_time_context, 5, 1).unwrap_err(),\n\n TransactionError::Immature,\n\n );\n\n // spent after 10 height\n\n assert!(verify_since(&rtx, &median_time_context, 10, 1).is_ok());\n\n}\n\n\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 36, "score": 226269.15422833897 }, { "content": "pub fn new_block_with_template(template: BlockTemplate) -> BlockView {\n\n Block::from(template)\n\n .as_advanced_builder()\n\n .nonce(rand::random::<u128>().pack())\n\n .build()\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 37, "score": 225822.79336131457 }, { "content": "#[test]\n\npub fn test_genesis_non_zero_parent_hash() {\n\n let genesis_block = Consensus::default().genesis_block().to_owned();\n\n let genesis_block = 
genesis_block\n\n .as_advanced_builder()\n\n .parent_hash([42u8; 32].pack())\n\n .build();\n\n let consensus = ConsensusBuilder::default()\n\n .genesis_block(genesis_block)\n\n .build();\n\n let verifier = GenesisVerifier::new();\n\n assert_error_eq!(\n\n verifier.verify(&consensus).unwrap_err(),\n\n UnknownParentError {\n\n parent_hash: [42u8; 32].pack()\n\n },\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/genesis_verifier.rs", "rank": 38, "score": 222040.07621438644 }, { "content": "#[test]\n\npub fn test_cellbase_with_non_empty_output_data() {\n\n let block = BlockBuilder::default()\n\n .header(HeaderBuilder::default().number(2u64.pack()).build())\n\n .transaction(create_cellbase_transaction_with_non_empty_output_data())\n\n .build();\n\n let verifier = CellbaseVerifier::new();\n\n assert_error_eq!(\n\n verifier.verify(&block).unwrap_err(),\n\n CellbaseError::InvalidOutputData,\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/block_verifier.rs", "rank": 39, "score": 221646.52843941504 }, { "content": "#[test]\n\npub fn test_should_have_no_output_in_cellbase_no_finalization_target() {\n\n let (_chain, shared) = start_chain(None);\n\n let context = dummy_context(&shared);\n\n\n\n let parent = shared.consensus().genesis_block().header();\n\n let number = parent.number() + 1;\n\n let cellbase = TransactionBuilder::default()\n\n .input(CellInput::new_cellbase_input(number))\n\n .output(Default::default())\n\n .output_data(Default::default())\n\n .build();\n\n\n\n let cellbase = ResolvedTransaction {\n\n transaction: cellbase,\n\n resolved_cell_deps: vec![],\n\n resolved_inputs: vec![],\n\n resolved_dep_groups: vec![],\n\n };\n\n\n\n let ret = RewardVerifier::new(&context, &[cellbase], &parent).verify();\n\n\n\n assert_error_eq!(ret.unwrap_err(), CellbaseError::InvalidRewardTarget,);\n\n}\n", "file_path": "verification/src/tests/contextual_block_verifier.rs", "rank": 40, "score": 221646.52843941504 }, { "content": "pub fn 
since_from_relative_timestamp(timestamp: u64) -> u64 {\n\n FLAG_SINCE_RELATIVE | FLAG_SINCE_TIMESTAMP | timestamp\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 41, "score": 221486.97638400522 }, { "content": "pub fn since_from_absolute_timestamp(timestamp: u64) -> u64 {\n\n FLAG_SINCE_TIMESTAMP | timestamp\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 42, "score": 221486.97638400522 }, { "content": "pub fn transferred_byte_cycles(bytes: u64) -> u64 {\n\n // Compiler will optimize the divisin here to shifts.\n\n (bytes + BYTES_PER_CYCLE - 1) / BYTES_PER_CYCLE\n\n}\n\n\n", "file_path": "script/src/cost_model.rs", "rank": 43, "score": 217912.6066515816 }, { "content": "pub fn build_header(header: &HeaderView) -> Bytes {\n\n build_headers(&[header.clone()])\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 44, "score": 216876.65345084516 }, { "content": "pub fn build_block(block: &BlockView) -> Bytes {\n\n SyncMessage::new_builder()\n\n .set(SendBlock::new_builder().block(block.data()).build())\n\n .build()\n\n .as_bytes()\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 45, "score": 216297.97439077427 }, { "content": "fn gen_block(parent_header: &HeaderView, nonce: u128, epoch: &EpochExt) -> BlockView {\n\n let now = parent_header.timestamp() + 1;\n\n let number = parent_header.number() + 1;\n\n let cellbase = create_cellbase(number);\n\n BlockBuilder::default()\n\n .transaction(cellbase)\n\n .proposal(ProposalShortId::from_slice(&[1; 10]).unwrap())\n\n .parent_hash(parent_header.hash())\n\n .timestamp(now.pack())\n\n .epoch(epoch.number_with_fraction(number).pack())\n\n .number(number.pack())\n\n .compact_target(epoch.compact_target().pack())\n\n .nonce(nonce.pack())\n\n .build()\n\n}\n\n\n", "file_path": "verification/src/tests/uncle_verifier.rs", "rank": 46, "score": 215826.86219301412 }, { "content": "fn wait_get_blocks_point(secs: u64, net: &Net) -> (Instant, bool) {\n\n let flag = wait_get_blocks(secs, net);\n\n (Instant::now(), 
flag)\n\n}\n", "file_path": "test/src/specs/sync/get_blocks.rs", "rank": 47, "score": 215440.01814168954 }, { "content": "fn gen_empty_block(parent: &HeaderView) -> BlockView {\n\n let mut rng = thread_rng();\n\n let nonce: u128 = rng.gen();\n\n let uncles_count: u32 = rng.gen_range(0, 2);\n\n let uncles: Vec<_> = (0..uncles_count)\n\n .map(|_| {\n\n BlockBuilder::default()\n\n .nonce(nonce.pack())\n\n .build()\n\n .as_uncle()\n\n })\n\n .collect();\n\n BlockBuilder::default()\n\n .parent_hash(parent.hash())\n\n .number((parent.number() + 1).pack())\n\n .uncles(uncles)\n\n .compact_target(parent.compact_target().pack())\n\n .timestamp((parent.timestamp() + MIN_BLOCK_INTERVAL * 1000).pack())\n\n .build()\n\n}\n\n\n", "file_path": "benches/benches/benchmarks/next_epoch_ext.rs", "rank": 48, "score": 215380.25440324785 }, { "content": "pub fn sleep(secs: u64) {\n\n thread::sleep(tweaked_duration(secs));\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 49, "score": 214960.55075958645 }, { "content": "#[cfg(not(disable_faketime))]\n\n#[test]\n\nfn test_timestamp_too_new() {\n\n let faketime_file = faketime::millis_tempfile(100_000).expect(\"create faketime file\");\n\n faketime::enable(&faketime_file);\n\n let fake_block_median_time_context = mock_median_time_context();\n\n\n\n let max = unix_time_as_millis() + ALLOWED_FUTURE_BLOCKTIME;\n\n let timestamp = max + 1;\n\n let header = HeaderBuilder::default()\n\n .number(10u64.pack())\n\n .timestamp(timestamp.pack())\n\n .build();\n\n let timestamp_verifier = TimestampVerifier::new(&fake_block_median_time_context, &header);\n\n assert_error_eq!(\n\n timestamp_verifier.verify().unwrap_err(),\n\n TimestampError::BlockTimeTooNew {\n\n max,\n\n actual: timestamp,\n\n },\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/header_verifier.rs", "rank": 50, "score": 214846.48682383454 }, { "content": "pub fn merkle_root(leaves: &[Byte32]) -> Byte32 {\n\n CBMT::build_merkle_root(leaves)\n\n}\n", "file_path": 
"util/types/src/utilities/merkle_tree.rs", "rank": 51, "score": 214614.86027072405 }, { "content": "// Build compact block based on core block\n\npub fn build_compact_block(block: &BlockView) -> Bytes {\n\n build_compact_block_with_prefilled(block, Vec::new())\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 52, "score": 212904.3696239277 }, { "content": "pub fn build_block_transactions(block: &BlockView) -> Bytes {\n\n // compact block has always prefilled cellbase\n\n let block_txs = BlockTransactions::new_builder()\n\n .block_hash(block.header().hash())\n\n .transactions(\n\n block\n\n .transactions()\n\n .into_iter()\n\n .map(|view| view.data())\n\n .skip(1)\n\n .pack(),\n\n )\n\n .build();\n\n\n\n RelayMessage::new_builder()\n\n .set(block_txs)\n\n .build()\n\n .as_bytes()\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 53, "score": 212897.59207377772 }, { "content": "pub fn pow_message(pow_hash: &Byte32, nonce: u128) -> [u8; 48] {\n\n let mut message = [0; 48];\n\n message[0..32].copy_from_slice(pow_hash.as_slice());\n\n LittleEndian::write_u128(&mut message[32..48], nonce);\n\n message\n\n}\n\n\n", "file_path": "pow/src/lib.rs", "rank": 54, "score": 212573.34573698734 }, { "content": "struct DaoHeaderVerifier<'a, 'b, 'c, CS> {\n\n context: &'a VerifyContext<'a, CS>,\n\n resolved: &'a [ResolvedTransaction],\n\n parent: &'b HeaderView,\n\n header: &'c HeaderView,\n\n}\n\n\n\nimpl<'a, 'b, 'c, CS: ChainStore<'a>> DaoHeaderVerifier<'a, 'b, 'c, CS> {\n\n pub fn new(\n\n context: &'a VerifyContext<'a, CS>,\n\n resolved: &'a [ResolvedTransaction],\n\n parent: &'b HeaderView,\n\n header: &'c HeaderView,\n\n ) -> Self {\n\n DaoHeaderVerifier {\n\n context,\n\n resolved,\n\n parent,\n\n header,\n\n }\n", "file_path": "verification/src/contextual_block_verifier.rs", "rank": 55, "score": 211152.83545852886 }, { "content": "struct CompactBlockMedianTimeView<'a> {\n\n fn_get_pending_header: Box<dyn Fn(packed::Byte32) -> Option<core::HeaderView> + 'a>,\n\n 
store: &'a ChainDB,\n\n consensus: &'a Consensus,\n\n}\n\n\n\nimpl<'a> CompactBlockMedianTimeView<'a> {\n\n fn get_header(&self, hash: &packed::Byte32) -> Option<core::HeaderView> {\n\n (self.fn_get_pending_header)(hash.to_owned()).or_else(|| self.store.get_block_header(hash))\n\n }\n\n}\n\n\n\nimpl<'a> BlockMedianTimeContext for CompactBlockMedianTimeView<'a> {\n\n fn median_block_count(&self) -> u64 {\n\n self.consensus.median_time_block_count() as u64\n\n }\n\n\n\n fn timestamp_and_parent(\n\n &self,\n\n block_hash: &packed::Byte32,\n", "file_path": "sync/src/relayer/compact_block_process.rs", "rank": 56, "score": 211083.90912309877 }, { "content": "fn setup_node(height: u64) -> (TestNode, Shared) {\n\n let (always_success_cell, always_success_cell_data, always_success_script) =\n\n always_success_cell();\n\n let always_success_tx = TransactionBuilder::default()\n\n .witness(always_success_script.clone().into_witness())\n\n .input(CellInput::new(OutPoint::null(), 0))\n\n .output(always_success_cell.clone())\n\n .output_data(always_success_cell_data.pack())\n\n .build();\n\n\n\n let dao = genesis_dao_data(vec![&always_success_tx]).unwrap();\n\n\n\n let mut block = BlockBuilder::default()\n\n .timestamp(unix_time_as_millis().pack())\n\n .compact_target(difficulty_to_compact(U256::from(1000u64)).pack())\n\n .dao(dao)\n\n .transaction(always_success_tx)\n\n .build();\n\n\n\n let consensus = ConsensusBuilder::default()\n", "file_path": "sync/src/tests/synchronizer.rs", "rank": 57, "score": 209367.25016974966 }, { "content": "#[test]\n\nfn test_insert_new_block() {\n\n let (shared, chain) = build_chain(2);\n\n let new_block = {\n\n let tip_hash = shared.active_chain().tip_header().hash();\n\n let next_block = inherit_block(shared.shared(), &tip_hash).build();\n\n Arc::new(next_block)\n\n };\n\n\n\n assert_eq!(\n\n shared\n\n .insert_new_block(&chain, PeerIndex::new(1), Arc::clone(&new_block))\n\n .expect(\"insert valid block\"),\n\n true,\n\n );\n\n assert_eq!(\n\n 
shared\n\n .insert_new_block(&chain, PeerIndex::new(1), Arc::clone(&new_block))\n\n .expect(\"insert duplicated valid block\"),\n\n false,\n\n );\n\n}\n\n\n", "file_path": "sync/src/tests/sync_shared.rs", "rank": 58, "score": 209247.57552086512 }, { "content": "pub fn pack_dao_data(ar: u64, c: Capacity, s: Capacity, u: Capacity) -> Byte32 {\n\n let mut buf = [0u8; 32];\n\n LittleEndian::write_u64(&mut buf[0..8], c.as_u64());\n\n LittleEndian::write_u64(&mut buf[8..16], ar);\n\n LittleEndian::write_u64(&mut buf[16..24], s.as_u64());\n\n LittleEndian::write_u64(&mut buf[24..32], u.as_u64());\n\n Byte32::from_slice(&buf).expect(\"impossible: fail to read array\")\n\n}\n\n\n\nmod tests {\n\n pub use super::{extract_dao_data, pack_dao_data};\n\n pub use ckb_types::core::Capacity;\n\n pub use ckb_types::packed::Byte32;\n\n pub use ckb_types::prelude::Pack;\n\n pub use ckb_types::{h256, H256};\n\n\n\n #[test]\n\n #[allow(clippy::unreadable_literal)]\n\n fn test_dao_data() {\n\n let cases = vec![\n", "file_path": "util/dao/utils/src/lib.rs", "rank": 59, "score": 205364.84480850876 }, { "content": "fn sync_block(net: &Net, peer_id: PeerIndex, block: &BlockView) {\n\n net.send(NetworkProtocol::SYNC.into(), peer_id, build_block(block));\n\n}\n\n\n", "file_path": "test/src/specs/sync/block_sync.rs", "rank": 60, "score": 204917.8761831447 }, { "content": "#[test]\n\nfn test_insert_parent_unknown_block() {\n\n let (shared1, _) = build_chain(2);\n\n let (shared, chain) = {\n\n let (shared, table) = SharedBuilder::default()\n\n .consensus(shared1.consensus().clone())\n\n .build()\n\n .unwrap();\n\n let chain_controller = {\n\n let chain_service = ChainService::new(shared.clone(), table);\n\n chain_service.start::<&str>(None)\n\n };\n\n (SyncShared::new(shared), chain_controller)\n\n };\n\n\n\n let block = shared1\n\n .store()\n\n .get_block(&shared1.active_chain().tip_header().hash())\n\n .unwrap();\n\n let parent = {\n\n let parent = shared1\n", "file_path": 
"sync/src/tests/sync_shared.rs", "rank": 61, "score": 204026.6004117318 }, { "content": "pub fn extract_dao_data(dao: Byte32) -> Result<(u64, Capacity, Capacity, Capacity), Error> {\n\n let data = dao.raw_data();\n\n let c = Capacity::shannons(LittleEndian::read_u64(&data[0..8]));\n\n let ar = LittleEndian::read_u64(&data[8..16]);\n\n let s = Capacity::shannons(LittleEndian::read_u64(&data[16..24]));\n\n let u = Capacity::shannons(LittleEndian::read_u64(&data[24..32]));\n\n Ok((ar, c, s, u))\n\n}\n\n\n", "file_path": "util/dao/utils/src/lib.rs", "rank": 62, "score": 201890.88254011833 }, { "content": "pub fn generate_blocks(\n\n shared: &Shared,\n\n chain_controller: &ChainController,\n\n target_tip: BlockNumber,\n\n) {\n\n let snapshot = shared.snapshot();\n\n let parent_number = snapshot.tip_number();\n\n let mut parent_hash = snapshot.tip_header().hash();\n\n for _ in parent_number..target_tip {\n\n let block = inherit_block(shared, &parent_hash).build();\n\n parent_hash = block.header().hash();\n\n chain_controller\n\n .internal_process_block(Arc::new(block), Switch::DISABLE_ALL)\n\n .expect(\"processing block should be ok\");\n\n }\n\n}\n\n\n", "file_path": "sync/src/tests/util.rs", "rank": 63, "score": 201763.79224579188 }, { "content": "fn dummy_context(shared: &Shared) -> VerifyContext<'_, ChainDB> {\n\n VerifyContext::new(shared.store(), shared.consensus())\n\n}\n\n\n", "file_path": "verification/src/tests/contextual_block_verifier.rs", "rank": 64, "score": 199342.8221498115 }, { "content": "pub fn instruction_cycles(i: Instruction) -> u64 {\n\n match extract_opcode(i) {\n\n insts::OP_JALR => 3,\n\n insts::OP_LD => 2,\n\n insts::OP_LW => 3,\n\n insts::OP_LH => 3,\n\n insts::OP_LB => 3,\n\n insts::OP_LWU => 3,\n\n insts::OP_LHU => 3,\n\n insts::OP_LBU => 3,\n\n insts::OP_SB => 3,\n\n insts::OP_SH => 3,\n\n insts::OP_SW => 3,\n\n insts::OP_SD => 2,\n\n insts::OP_BEQ => 3,\n\n insts::OP_BGE => 3,\n\n insts::OP_BGEU => 3,\n\n insts::OP_BLT => 3,\n\n 
insts::OP_BLTU => 3,\n\n insts::OP_BNE => 3,\n", "file_path": "script/src/cost_model.rs", "rank": 65, "score": 199087.45154976827 }, { "content": "pub fn track_current_process(_: u64) {\n\n info!(\"track current process: unsupported\");\n\n}\n", "file_path": "util/memory-tracker/src/process-mock.rs", "rank": 66, "score": 198744.0982452386 }, { "content": "pub fn track_current_process(interval: u64) {\n\n if interval == 0 {\n\n info!(\"track current process: disable\");\n\n } else {\n\n info!(\"track current process: enable\");\n\n let wait_secs = time::Duration::from_secs(interval);\n\n\n\n let je_epoch = je_mib!(epoch);\n\n // Bytes allocated by the application.\n\n let allocated = je_mib!(stats::allocated);\n\n // Bytes in physically resident data pages mapped by the allocator.\n\n let resident = je_mib!(stats::resident);\n\n // Bytes in active pages allocated by the application.\n\n let active = je_mib!(stats::active);\n\n // Bytes in active extents mapped by the allocator.\n\n let mapped = je_mib!(stats::mapped);\n\n // Bytes in virtual memory mappings that were retained\n\n // rather than being returned to the operating system\n\n let retained = je_mib!(stats::retained);\n\n // Bytes dedicated to jemalloc metadata.\n", "file_path": "util/memory-tracker/src/process.rs", "rank": 67, "score": 198744.0982452386 }, { "content": "pub fn gen_txs_from_block(block: &BlockView) -> Vec<TransactionView> {\n\n let tx = create_secp_tx();\n\n let secp_cell_deps = vec![\n\n CellDep::new_builder()\n\n .out_point(OutPoint::new(tx.hash(), 0))\n\n .build(),\n\n CellDep::new_builder()\n\n .out_point(OutPoint::new(tx.hash(), 1))\n\n .build(),\n\n ];\n\n let (_, _, secp_script) = secp_cell();\n\n // spent n-2 block's tx and proposal n-1 block's tx\n\n if block.transactions().len() > 1 {\n\n block\n\n .transactions()\n\n .iter()\n\n .skip(1)\n\n .map(|tx| {\n\n create_2out_transaction(\n\n tx.output_pts(),\n\n secp_script.clone(),\n\n secp_cell_deps.clone(),\n\n )\n\n })\n\n 
.collect()\n\n } else {\n\n vec![]\n\n }\n\n}\n\n\n", "file_path": "benches/benches/benchmarks/overall.rs", "rank": 68, "score": 197646.4706071936 }, { "content": "#[test]\n\npub fn test_since_both() {\n\n // both\n\n let tx = TransactionBuilder::default()\n\n .inputs(vec![\n\n // absolute lock until epoch number 0xa\n\n CellInput::new(OutPoint::new(h256!(\"0x1\").pack(), 0), 0x0000_0000_0000_000a),\n\n // relative lock until after 2 blocks\n\n CellInput::new(OutPoint::new(h256!(\"0x1\").pack(), 0), 0xc000_0000_0000_0002),\n\n ])\n\n .build();\n\n\n\n let rtx = create_resolve_tx_with_transaction_info(\n\n &tx,\n\n MockMedianTime::get_transaction_info(1, EpochNumberWithFraction::new(0, 0, 10), 1),\n\n );\n\n // spent after 1024 seconds and 4 blocks (less than 10 blocks)\n\n // fake median time: 1124\n\n let median_time_context =\n\n MockMedianTime::new(vec![0, 100_000, 1_124_000, 2_000_000, 3_000_000]);\n\n\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 69, "score": 197400.4707405464 }, { "content": "#[test]\n\npub fn test_version() {\n\n let transaction = TransactionBuilder::default()\n\n .version((TX_VERSION + 1).pack())\n\n .build();\n\n let verifier = VersionVerifier::new(&transaction, TX_VERSION);\n\n\n\n assert_error_eq!(\n\n verifier.verify().unwrap_err(),\n\n TransactionError::MismatchedVersion,\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 70, "score": 197400.4707405464 }, { "content": "#[test]\n\npub fn test_empty() {\n\n let transaction = TransactionBuilder::default().build();\n\n let verifier = EmptyVerifier::new(&transaction);\n\n\n\n assert_error_eq!(verifier.verify().unwrap_err(), TransactionError::Empty);\n\n}\n\n\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 71, "score": 197400.4707405464 }, { "content": "pub fn gen_secp_block(\n\n blocks: &mut Vec<BlockView>,\n\n p_block: &BlockView,\n\n shared: &Shared,\n\n) -> BlockView {\n\n let tx = 
create_secp_tx();\n\n let secp_cell_deps = vec![\n\n CellDep::new_builder()\n\n .out_point(OutPoint::new(tx.hash(), 0))\n\n .build(),\n\n CellDep::new_builder()\n\n .out_point(OutPoint::new(tx.hash(), 1))\n\n .build(),\n\n ];\n\n let (_, _, secp_script) = secp_cell();\n\n let (number, timestamp) = (\n\n p_block.header().number() + 1,\n\n p_block.header().timestamp() + 10000,\n\n );\n\n let cellbase = create_secp_cellbase(shared, &p_block.header());\n", "file_path": "benches/benches/benchmarks/util.rs", "rank": 72, "score": 197333.9524584676 }, { "content": "// grep \"panicked at\" $node_log_path\n\npub fn nodes_panicked(node_dirs: &[String]) -> bool {\n\n node_dirs.iter().any(|node_dir| {\n\n read_to_string(&node_log(&node_dir))\n\n .expect(\"failed to read node's log\")\n\n .contains(\"panicked at\")\n\n })\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 73, "score": 195222.4220633107 }, { "content": "pub fn is_committed(tx_status: &TransactionWithStatus) -> bool {\n\n let committed_status = TxStatus::committed(h256!(\"0x0\"));\n\n tx_status.tx_status.status == committed_status.status\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 74, "score": 195222.4220633107 }, { "content": "/// verify m of n signatures\n\n/// Example 2 of 3 sigs: [s1, s3], pks: [pk1, pk2, pk3]\n\npub fn verify_m_of_n<S>(\n\n message: &Message,\n\n m_threshold: usize,\n\n sigs: &[Signature],\n\n pks: &HashSet<Pubkey, S>,\n\n) -> Result<(), Error>\n\nwhere\n\n S: BuildHasher,\n\n{\n\n if sigs.len() > pks.len() {\n\n return Err(ErrorKind::SigCountOverflow.into());\n\n }\n\n if m_threshold > sigs.len() {\n\n return Err(ErrorKind::SigNotEnough.into());\n\n }\n\n\n\n let mut used_pks: HashSet<Pubkey> = HashSet::with_capacity(m_threshold);\n\n let verified_sig_count = sigs\n\n .iter()\n\n .filter_map(|sig| {\n", "file_path": "util/multisig/src/secp256k1.rs", "rank": 75, "score": 194369.60910647476 }, { "content": "#[test]\n\npub fn test_capacity_invalid() {\n\n // The outputs capacity 
is 50 + 100 = 150\n\n let transaction = TransactionBuilder::default()\n\n .outputs(vec![\n\n CellOutput::new_builder()\n\n .capacity(capacity_bytes!(50).pack())\n\n .build(),\n\n CellOutput::new_builder()\n\n .capacity(capacity_bytes!(100).pack())\n\n .build(),\n\n ])\n\n .outputs_data(vec![Bytes::new().pack(); 2])\n\n .build();\n\n\n\n // The inputs capacity is 49 + 100 = 149,\n\n // is less than outputs capacity\n\n let rtx = ResolvedTransaction {\n\n transaction,\n\n resolved_cell_deps: Vec::new(),\n\n resolved_inputs: vec![\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 76, "score": 193243.7133304944 }, { "content": "#[test]\n\npub fn test_chain_specs() {\n\n use ckb_chain_spec::ChainSpec;\n\n use ckb_resource::{Resource, AVAILABLE_SPECS};\n\n fn load_spec_by_name(name: &str) -> ChainSpec {\n\n let res = Resource::bundled(format!(\"specs/{}.toml\", name));\n\n ChainSpec::load_from(&res).expect(\"load spec by name\")\n\n }\n\n for name in AVAILABLE_SPECS {\n\n let spec = load_spec_by_name(name);\n\n let consensus = spec.build_consensus().expect(\"build consensus\");\n\n let verifier = GenesisVerifier::new();\n\n verifier.verify(&consensus).expect(\"pass verification\");\n\n }\n\n}\n", "file_path": "verification/src/tests/genesis_verifier.rs", "rank": 77, "score": 193243.7133304944 }, { "content": "#[test]\n\npub fn test_default_genesis() {\n\n let consensus = ConsensusBuilder::default().build();\n\n let verifier = GenesisVerifier::new();\n\n verifier.verify(&consensus).expect(\"pass verification\");\n\n}\n\n\n", "file_path": "verification/src/tests/genesis_verifier.rs", "rank": 78, "score": 193243.7133304944 }, { "content": "#[test]\n\npub fn test_capacity_outofbound() {\n\n let data = Bytes::from(vec![1; 51]);\n\n let transaction = TransactionBuilder::default()\n\n .output(\n\n CellOutput::new_builder()\n\n .capacity(capacity_bytes!(50).pack())\n\n .build(),\n\n )\n\n .output_data(data.pack())\n\n .build();\n\n\n\n let rtx = 
ResolvedTransaction {\n\n transaction,\n\n resolved_cell_deps: Vec::new(),\n\n resolved_inputs: vec![CellMetaBuilder::from_cell_output(\n\n CellOutput::new_builder()\n\n .capacity(capacity_bytes!(50).pack())\n\n .build(),\n\n Bytes::new(),\n\n )\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 79, "score": 193243.7133304944 }, { "content": "#[test]\n\npub fn test_relative_epoch() {\n\n // next epoch\n\n let tx = create_tx_with_lock(0xa000_1000_0000_0002);\n\n let rtx = create_resolve_tx_with_transaction_info(\n\n &tx,\n\n MockMedianTime::get_transaction_info(1, EpochNumberWithFraction::new(0, 0, 10), 1),\n\n );\n\n\n\n let median_time_context = MockMedianTime::new(vec![0; 11]);\n\n\n\n assert_error_eq!(\n\n verify_since(&rtx, &median_time_context, 4, 1).unwrap_err(),\n\n TransactionError::Immature,\n\n );\n\n\n\n assert!(verify_since(&rtx, &median_time_context, 4, 2).is_ok());\n\n}\n\n\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 80, "score": 193243.7133304944 }, { "content": "#[test]\n\npub fn test_duplicate_deps() {\n\n let out_point = OutPoint::new(h256!(\"0x1\").pack(), 0);\n\n let cell_dep = CellDep::new_builder().out_point(out_point).build();\n\n let transaction = TransactionBuilder::default()\n\n .cell_deps(vec![cell_dep.clone(), cell_dep])\n\n .build();\n\n\n\n let verifier = DuplicateDepsVerifier::new(&transaction);\n\n\n\n assert_error_eq!(\n\n verifier.verify().unwrap_err(),\n\n TransactionError::DuplicateDeps,\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 81, "score": 193243.7133304944 }, { "content": "pub fn gen_always_success_block(\n\n blocks: &mut Vec<BlockView>,\n\n p_block: &BlockView,\n\n shared: &Shared,\n\n) -> BlockView {\n\n let tx = create_always_success_tx();\n\n let always_success_out_point = OutPoint::new(tx.hash(), 0);\n\n let (_, _, always_success_script) = always_success_cell();\n\n let (number, timestamp) = (\n\n 
p_block.header().number() + 1,\n\n p_block.header().timestamp() + 10000,\n\n );\n\n let cellbase = create_always_success_cellbase(shared, &p_block.header());\n\n\n\n let snapshot = shared.snapshot();\n\n\n\n // spent n-2 block's tx and proposal n-1 block's tx\n\n let transactions: Vec<TransactionView> = if blocks.len() > 1 {\n\n let pp_block = snapshot\n\n .get_block(&p_block.data().header().raw().parent_hash())\n", "file_path": "benches/benches/benchmarks/util.rs", "rank": 82, "score": 193179.2608550121 }, { "content": "pub fn build_headers(headers: &[HeaderView]) -> Bytes {\n\n let send_headers = SendHeaders::new_builder()\n\n .headers(\n\n headers\n\n .iter()\n\n .map(|view| view.data())\n\n .collect::<Vec<_>>()\n\n .pack(),\n\n )\n\n .build();\n\n\n\n SyncMessage::new_builder()\n\n .set(send_headers)\n\n .build()\n\n .as_bytes()\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 83, "score": 193157.04689571896 }, { "content": "pub fn new_executor<F, R>(block: F) -> (Executor, thread::JoinHandle<()>)\n\nwhere\n\n F: FnOnce(Executor) -> R + Send + 'static,\n\n R: Future<Item = (), Error = ()> + Send + 'static,\n\n{\n\n let (tx, rx) = crossbeam_channel::bounded(1);\n\n let handler = thread::Builder::new()\n\n .spawn(move || {\n\n let mut entered = enter().expect(\"nested tokio::run\");\n\n let mut runtime = Builder::new()\n\n .core_threads(num_cpus::get())\n\n .name_prefix(\"GlobalRuntime-\")\n\n .build()\n\n .expect(\"Global tokio runtime init\");\n\n\n\n let executor = runtime.executor();\n\n let future = block(executor.clone());\n\n tx.send(executor).expect(\"Send global tokio runtime\");\n\n\n\n runtime.spawn(future);\n\n entered\n\n .block_on(runtime.shutdown_on_idle())\n\n .expect(\"shutdown cannot error\")\n\n })\n\n .expect(\"Start Global tokio runtime\");\n\n let executor = rx.recv().expect(\"Recv global tokio runtime\");\n\n (executor, handler)\n\n}\n", "file_path": "util/future-executor/src/lib.rs", "rank": 84, "score": 192313.4089289848 }, { 
"content": "pub fn build_relay_tx_hashes(hashes: &[Byte32]) -> Bytes {\n\n let content = RelayTransactionHashes::new_builder()\n\n .tx_hashes(hashes.iter().map(ToOwned::to_owned).pack())\n\n .build();\n\n\n\n RelayMessage::new_builder().set(content).build().as_bytes()\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 85, "score": 191547.01405015984 }, { "content": "pub fn build_chain(tip: BlockNumber) -> (SyncShared, ChainController) {\n\n let (shared, table) = SharedBuilder::default()\n\n .consensus(always_success_consensus())\n\n .build()\n\n .unwrap();\n\n let chain_controller = {\n\n let chain_service = ChainService::new(shared.clone(), table);\n\n chain_service.start::<&str>(None)\n\n };\n\n generate_blocks(&shared, &chain_controller, tip);\n\n let sync_shared = SyncShared::new(shared);\n\n (sync_shared, chain_controller)\n\n}\n\n\n", "file_path": "sync/src/tests/util.rs", "rank": 86, "score": 191347.3324693049 }, { "content": "pub fn new_blake2b() -> Blake2b {\n\n Blake2bBuilder::new(32)\n\n .personal(CKB_HASH_PERSONALIZATION)\n\n .build()\n\n}\n\n\n", "file_path": "util/hash/src/lib.rs", "rank": 87, "score": 190309.2231100551 }, { "content": "fn gen_block(parent_header: &HeaderView, nonce: u128, epoch: &EpochExt) -> BlockView {\n\n let number = parent_header.number() + 1;\n\n let cellbase = create_cellbase(number, epoch);\n\n // This just make sure we can generate a valid block template,\n\n // the actual DAO validation logic will be ensured in other\n\n // tests\n\n let dao = genesis_dao_data(vec![&cellbase]).unwrap();\n\n let header = HeaderBuilder::default()\n\n .parent_hash(parent_header.hash())\n\n .timestamp((parent_header.timestamp() + 10).pack())\n\n .number(number.pack())\n\n .epoch(epoch.number().pack())\n\n .compact_target(epoch.compact_target().pack())\n\n .nonce(nonce.pack())\n\n .dao(dao)\n\n .build();\n\n\n\n BlockBuilder::default()\n\n .header(header)\n\n .transaction(cellbase)\n\n .proposal([1; 10].pack())\n\n 
.build_unchecked()\n\n}\n\n\n", "file_path": "chain/src/tests/block_assembler.rs", "rank": 88, "score": 190210.82457568412 }, { "content": "#[test]\n\npub fn test_inputs_cellbase_maturity() {\n\n let transaction = TransactionBuilder::default().build();\n\n let output = CellOutput::new_builder()\n\n .capacity(capacity_bytes!(50).pack())\n\n .build();\n\n let base_epoch = EpochNumberWithFraction::new(10, 0, 10);\n\n let cellbase_maturity = EpochNumberWithFraction::new(5, 0, 1);\n\n\n\n let rtx = ResolvedTransaction {\n\n transaction,\n\n resolved_cell_deps: Vec::new(),\n\n resolved_dep_groups: Vec::new(),\n\n resolved_inputs: vec![CellMetaBuilder::from_cell_output(output, Bytes::new())\n\n .transaction_info(MockMedianTime::get_transaction_info(30, base_epoch, 0))\n\n .build()],\n\n };\n\n\n\n let mut current_epoch = EpochNumberWithFraction::new(0, 0, 10);\n\n let threshold = cellbase_maturity.to_rational() + base_epoch.to_rational();\n\n while current_epoch.number() < cellbase_maturity.number() + base_epoch.number() + 5 {\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 89, "score": 189337.3655137908 }, { "content": "#[test]\n\npub fn test_deps_cellbase_maturity() {\n\n let transaction = TransactionBuilder::default().build();\n\n let output = CellOutput::new_builder()\n\n .capacity(capacity_bytes!(50).pack())\n\n .build();\n\n\n\n let base_epoch = EpochNumberWithFraction::new(0, 0, 10);\n\n let cellbase_maturity = EpochNumberWithFraction::new(5, 0, 1);\n\n\n\n // The 1st dep is cellbase, the 2nd one is not.\n\n let rtx = ResolvedTransaction {\n\n transaction,\n\n resolved_cell_deps: vec![\n\n CellMetaBuilder::from_cell_output(output.clone(), Bytes::new())\n\n .transaction_info(MockMedianTime::get_transaction_info(30, base_epoch, 0))\n\n .build(),\n\n CellMetaBuilder::from_cell_output(output, Bytes::new())\n\n .transaction_info(MockMedianTime::get_transaction_info(40, base_epoch, 1))\n\n .build(),\n\n ],\n", "file_path": 
"verification/src/tests/transaction_verifier.rs", "rank": 90, "score": 189337.3655137908 }, { "content": "#[test]\n\npub fn test_relative_timestamp_lock() {\n\n // relative lock timestamp lock\n\n let tx = create_tx_with_lock(0xc000_0000_0000_0002);\n\n let rtx = create_resolve_tx_with_transaction_info(\n\n &tx,\n\n MockMedianTime::get_transaction_info(1, EpochNumberWithFraction::new(0, 0, 10), 1),\n\n );\n\n\n\n let median_time_context = MockMedianTime::new(vec![0; 11]);\n\n assert_error_eq!(\n\n verify_since(&rtx, &median_time_context, 4, 1).unwrap_err(),\n\n TransactionError::Immature,\n\n );\n\n\n\n // spent after 1024 seconds\n\n // fake median time: 1124\n\n let median_time_context =\n\n MockMedianTime::new(vec![0, 100_000, 1_124_000, 2_000_000, 3_000_000]);\n\n assert!(verify_since(&rtx, &median_time_context, 4, 1).is_ok());\n\n}\n\n\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 91, "score": 189337.3655137908 }, { "content": "fn prepare() -> (Shared, Vec<BlockView>, Vec<BlockView>) {\n\n let faketime_file = faketime::millis_tempfile(0).expect(\"create faketime file\");\n\n faketime::enable(&faketime_file);\n\n\n\n let mut consensus = Consensus::default();\n\n consensus.max_block_proposals_limit = 3;\n\n consensus.genesis_epoch_ext.set_length(10);\n\n\n\n let (chain_controller, shared) = start_chain(Some(consensus));\n\n\n\n let number = 20;\n\n let mut chain1: Vec<BlockView> = Vec::new();\n\n let mut chain2: Vec<BlockView> = Vec::new();\n\n\n\n faketime::write_millis(&faketime_file, 10).expect(\"write millis\");\n\n\n\n let genesis = shared\n\n .store()\n\n .get_block_header(&shared.store().get_block_hash(0).unwrap())\n\n .unwrap();\n", "file_path": "verification/src/tests/uncle_verifier.rs", "rank": 92, "score": 188702.36760238936 }, { "content": "fn setup_env() -> (ChainController, Shared, Byte32, Script, OutPoint) {\n\n let (always_success_cell, always_success_cell_data, always_success_script) =\n\n always_success_cell();\n\n 
let tx = TransactionBuilder::default()\n\n .witness(always_success_script.clone().into_witness())\n\n .input(CellInput::new(OutPoint::null(), 0))\n\n .output(always_success_cell.clone())\n\n .outputs(vec![\n\n CellOutputBuilder::default()\n\n .capacity(capacity_bytes!(1_000_000).pack())\n\n .lock(always_success_script.clone())\n\n .type_(Some(always_success_script.clone()).pack())\n\n .build();\n\n 100\n\n ])\n\n .output_data(always_success_cell_data.to_owned().pack())\n\n .outputs_data(vec![Bytes::new().pack(); 100])\n\n .build();\n\n let tx_hash = tx.data().calc_tx_hash();\n\n let genesis_block = BlockBuilder::default().transaction(tx).build();\n", "file_path": "verification/src/tests/two_phase_commit_verifier.rs", "rank": 93, "score": 188102.16963724614 }, { "content": "struct BlockTxsVerifier<'a, CS> {\n\n context: &'a VerifyContext<'a, CS>,\n\n block_number: BlockNumber,\n\n epoch_number_with_fraction: EpochNumberWithFraction,\n\n parent_hash: Byte32,\n\n resolved: &'a [ResolvedTransaction],\n\n}\n\n\n\nimpl<'a, CS: ChainStore<'a>> BlockTxsVerifier<'a, CS> {\n\n #[allow(clippy::too_many_arguments)]\n\n pub fn new(\n\n context: &'a VerifyContext<'a, CS>,\n\n block_number: BlockNumber,\n\n epoch_number_with_fraction: EpochNumberWithFraction,\n\n parent_hash: Byte32,\n\n resolved: &'a [ResolvedTransaction],\n\n ) -> Self {\n\n BlockTxsVerifier {\n\n context,\n\n block_number,\n", "file_path": "verification/src/contextual_block_verifier.rs", "rank": 94, "score": 187329.74317413397 }, { "content": "#[test]\n\npub fn test_outputs_data_length_mismatch() {\n\n let transaction = TransactionBuilder::default()\n\n .output(Default::default())\n\n .build();\n\n let verifier = OutputsDataVerifier::new(&transaction);\n\n\n\n assert_error_eq!(\n\n verifier.verify().unwrap_err(),\n\n TransactionError::OutputsDataLengthMismatch,\n\n );\n\n\n\n let transaction = TransactionBuilder::default()\n\n .output(Default::default())\n\n .output_data(Default::default())\n\n .build();\n\n 
let verifier = OutputsDataVerifier::new(&transaction);\n\n\n\n assert!(verifier.verify().is_ok());\n\n}\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 95, "score": 185659.46132200328 }, { "content": "#[test]\n\npub fn test_skip_dao_capacity_check() {\n\n let dao_type_script = build_genesis_type_id_script(OUTPUT_INDEX_DAO);\n\n let transaction = TransactionBuilder::default()\n\n .output(\n\n CellOutput::new_builder()\n\n .capacity(capacity_bytes!(500).pack())\n\n .type_(Some(dao_type_script.clone()).pack())\n\n .build(),\n\n )\n\n .output_data(Bytes::new().pack())\n\n .build();\n\n\n\n let rtx = ResolvedTransaction {\n\n transaction,\n\n resolved_cell_deps: Vec::new(),\n\n resolved_inputs: vec![],\n\n resolved_dep_groups: vec![],\n\n };\n\n let verifier = CapacityVerifier::new(&rtx, Some(dao_type_script.calc_script_hash()));\n\n\n\n assert!(verifier.verify().is_ok());\n\n}\n\n\n\n// inputs immature verify\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 96, "score": 185659.46132200328 }, { "content": "#[test]\n\npub fn test_absolute_epoch_number_lock() {\n\n // absolute lock until epoch number 0xa\n\n let tx = create_tx_with_lock(0x2000_0100_0000_000a);\n\n let rtx = create_resolve_tx_with_transaction_info(\n\n &tx,\n\n MockMedianTime::get_transaction_info(1, EpochNumberWithFraction::new(0, 0, 10), 1),\n\n );\n\n\n\n let median_time_context = MockMedianTime::new(vec![0; 11]);\n\n assert_error_eq!(\n\n verify_since(&rtx, &median_time_context, 5, 1).unwrap_err(),\n\n TransactionError::Immature,\n\n );\n\n // spent after 10 epoch\n\n assert!(verify_since(&rtx, &median_time_context, 100, 10).is_ok());\n\n}\n\n\n", "file_path": "verification/src/tests/transaction_verifier.rs", "rank": 97, "score": 185659.46132200328 }, { "content": "#[test]\n\npub fn test_genesis_non_zero_number() {\n\n let genesis_block = Consensus::default().genesis_block().to_owned();\n\n let genesis_block = genesis_block\n\n 
.as_advanced_builder()\n\n .number(42.pack())\n\n .build();\n\n let consensus = ConsensusBuilder::default()\n\n .genesis_block(genesis_block)\n\n .build();\n\n let verifier = GenesisVerifier::new();\n\n assert_error_eq!(\n\n verifier.verify(&consensus).unwrap_err(),\n\n NumberError {\n\n expected: 0,\n\n actual: 42\n\n },\n\n );\n\n}\n\n\n", "file_path": "verification/src/tests/genesis_verifier.rs", "rank": 98, "score": 185659.46132200328 }, { "content": "// Build compact block based on core block, and specific prefilled indices\n\npub fn build_compact_block_with_prefilled(block: &BlockView, prefilled: Vec<usize>) -> Bytes {\n\n let prefilled = prefilled.into_iter().collect();\n\n let compact_block = CompactBlock::build_from_block(block, &prefilled);\n\n\n\n RelayMessage::new_builder()\n\n .set(compact_block)\n\n .build()\n\n .as_bytes()\n\n}\n\n\n", "file_path": "test/src/utils.rs", "rank": 99, "score": 185508.9126452069 } ]
Rust
src/linux/mod.rs
8176135/InputBot
e54b3a04cccbcdead0141a42688d2cf7e89fd6d4
use crate::{common::*, linux::inputs::*, public::*}; use input::{ event::{ keyboard::{ KeyState, {KeyboardEvent, KeyboardEventTrait}, }, pointer::{ButtonState, PointerEvent::*}, Event::{self, *}, }, Libinput, LibinputInterface, }; use nix::{ fcntl::{open, OFlag}, sys::stat::Mode, unistd::close, }; use std::{ os::unix::io::RawFd, path::Path, thread::sleep, time::Duration, ptr::null, mem::MaybeUninit, }; use uinput::event::{controller::{Controller, Mouse}, Event as UinputEvent, relative::Position}; use x11::xlib::*; use once_cell::sync::Lazy; mod inputs; type ButtonStatesMap = HashMap<MouseButton, bool>; type KeyStatesMap = HashMap<KeybdKey, bool>; static BUTTON_STATES: Lazy<Mutex<ButtonStatesMap>> = Lazy::new(|| Mutex::new(ButtonStatesMap::new())); static KEY_STATES: Lazy<Mutex<KeyStatesMap>> = Lazy::new(|| Mutex::new(KeyStatesMap::new())); static SEND_DISPLAY: Lazy<AtomicPtr<Display>> = Lazy::new(|| { unsafe { XInitThreads() }; AtomicPtr::new(unsafe { XOpenDisplay(null()) }) }); static FAKE_DEVICE: Lazy<Mutex<uinput::Device>> = Lazy::new(|| { Mutex::new( uinput::default() .unwrap() .name("inputbot") .unwrap() .event(uinput::event::Keyboard::All) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Left))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Right))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Middle))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Side))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Extra))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Forward))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Back))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Task))) .unwrap() .event(Position::X) .unwrap() .event(Position::Y) .unwrap() .create() .unwrap(), ) }); pub fn init_device() { FAKE_DEVICE.lock().unwrap(); } impl KeybdKey { pub fn is_pressed(self) -> bool { 
*KEY_STATES.lock().unwrap().entry(self).or_insert(false) } pub fn press(self) { let mut device = FAKE_DEVICE.lock().unwrap(); device .write(0x01, key_to_scan_code(self), 1) .unwrap(); device.synchronize().unwrap(); } pub fn release(self) { let mut device = FAKE_DEVICE.lock().unwrap(); device .write(0x01, key_to_scan_code(self), 0) .unwrap(); device.synchronize().unwrap(); } pub fn is_toggled(self) -> bool { if let Some(key) = match self { KeybdKey::ScrollLockKey => Some(4), KeybdKey::NumLockKey => Some(2), KeybdKey::CapsLockKey => Some(1), _ => None, } { let mut state: XKeyboardState = unsafe { MaybeUninit::zeroed().assume_init() }; SEND_DISPLAY.with(|display| unsafe { XGetKeyboardControl(display, &mut state); }); state.led_mask & key != 0 } else { false } } } impl MouseButton { pub fn is_pressed(self) -> bool { *BUTTON_STATES.lock().unwrap().entry(self).or_insert(false) } pub fn press(self) { let mut device = FAKE_DEVICE.lock().unwrap(); device.press(&Controller::Mouse(Mouse::from(self))).unwrap(); device.synchronize().unwrap(); } pub fn release(self) { let mut device = FAKE_DEVICE.lock().unwrap(); device.release(&Controller::Mouse(Mouse::from(self))).unwrap(); device.synchronize().unwrap(); } } impl MouseCursor { pub fn move_rel(x: i32, y: i32) { SEND_DISPLAY.with(|display| unsafe { XWarpPointer(display, 0, 0, 0, 0, 0, 0, x, y); }); } pub fn move_abs(x: i32, y: i32) { SEND_DISPLAY.with(|display| unsafe { XWarpPointer(display, 0, XRootWindow(display, XDefaultScreen(display)), 0, 0, 0, 0, x, y); }); } } impl MouseWheel { pub fn scroll_ver(y: i32) { if y < 0 { MouseButton::OtherButton(4).press(); MouseButton::OtherButton(4).release(); } else { MouseButton::OtherButton(5).press(); MouseButton::OtherButton(5).release(); } } pub fn scroll_hor(x: i32) { if x < 0 { MouseButton::OtherButton(6).press(); MouseButton::OtherButton(6).release(); } else { MouseButton::OtherButton(7).press(); MouseButton::OtherButton(7).release(); } } } struct LibinputInterfaceRaw; impl 
LibinputInterfaceRaw { fn seat(&self) -> String { String::from("seat0") } } impl LibinputInterface for LibinputInterfaceRaw { fn open_restricted(&mut self, path: &Path, flags: i32) -> std::result::Result<RawFd, i32> { if let Ok(fd) = open(path, OFlag::from_bits_truncate(flags), Mode::empty()) { Ok(fd) } else { Err(1) } } fn close_restricted(&mut self, fd: RawFd) { let _ = close(fd); } } pub fn handle_input_events() { let mut libinput_context = Libinput::new_with_udev(LibinputInterfaceRaw); libinput_context .udev_assign_seat(&LibinputInterfaceRaw.seat()) .unwrap(); while !MOUSE_BINDS.lock().unwrap().is_empty() || !KEYBD_BINDS.lock().unwrap().is_empty() { libinput_context.dispatch().unwrap(); while let Some(event) = libinput_context.next() { handle_input_event(event); } sleep(Duration::from_millis(10)); } } fn handle_input_event(event: Event) { match event { Keyboard(keyboard_event) => { let KeyboardEvent::Key(keyboard_key_event) = keyboard_event; let key = keyboard_key_event.key(); if let Some(keybd_key) = scan_code_to_key(key) { if keyboard_key_event.key_state() == KeyState::Pressed { KEY_STATES.lock().unwrap().insert(keybd_key, true); if let Some(Bind::NormalBind(cb)) = KEYBD_BINDS.lock().unwrap().get(&keybd_key) { let cb = Arc::clone(cb); spawn(move || cb()); }; } else { KEY_STATES.lock().unwrap().insert(keybd_key, false); } } } Pointer(pointer_event) => { if let Button(button_event) = pointer_event { let button = button_event.button(); if let Some(mouse_button) = match button { 272 => Some(MouseButton::LeftButton), 273 => Some(MouseButton::RightButton), 274 => Some(MouseButton::MiddleButton), 275 => Some(MouseButton::X1Button), 276 => Some(MouseButton::X2Button), _ => None, } { if button_event.button_state() == ButtonState::Pressed { BUTTON_STATES.lock().unwrap().insert(mouse_button, true); if let Some(Bind::NormalBind(cb)) = MOUSE_BINDS.lock().unwrap().get(&mouse_button) { let cb = Arc::clone(cb); spawn(move || cb()); }; } else { 
BUTTON_STATES.lock().unwrap().insert(mouse_button, false); } } } } _ => {} } } trait DisplayAcquirable { fn with<F, Z>(&self, cb: F) -> Z where F: FnOnce(*mut Display) -> Z; } impl DisplayAcquirable for AtomicPtr<Display> { fn with<F, Z>(&self, cb: F) -> Z where F: FnOnce(*mut Display) -> Z, { let display = self.load(Ordering::Relaxed); unsafe { XLockDisplay(display); }; let cb_result = cb(display); unsafe { XFlush(display); XUnlockDisplay(display); }; cb_result } }
use crate::{common::*, linux::inputs::*, public::*}; use input::{ event::{ keyboard::{ KeyState, {KeyboardEvent, KeyboardEventTrait}, }, pointer::{ButtonState, PointerEvent::*}, Event::{self, *}, }, Libinput, LibinputInterface, }; use nix::{ fcntl::{open, OFlag}, sys::stat::Mode, unistd::close, }; use std::{ os::unix::io::RawFd, path::Path, thread::sleep, time::Duration, ptr::null, mem::MaybeUninit, }; use uinput::event::{controller::{Controller, Mouse}, Event as UinputEvent, relative::Position}; use x11::xlib::*; use once_cell::sync::Lazy; mod inputs; type ButtonStatesMap = HashMap<MouseButton, bool>; type KeyStatesMap = HashMap<KeybdKey, bool>; static BUTTON_STATES: Lazy<Mutex<ButtonStatesMap>> = Lazy::new(|| Mutex::new(ButtonStatesMap::new())); static KEY_STATES: Lazy<Mutex<KeyStatesMap>> = Lazy::new(|| Mutex::new(KeyStatesMap::new())); static SEND_DISPLAY: Lazy<AtomicPtr<Display>> = Lazy::new(|| { unsafe { XInitThreads() }; AtomicPtr::new(unsafe { XOpenDisplay(null()) }) }); static FAKE_DEVICE: Lazy<Mutex<uinput::Device>> = Lazy::new(|| { Mutex::new( uinput::default() .unwrap() .name("inputbot") .unwrap() .event(uinput::event::Keyboard::All) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Left))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Right))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Middle))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Side))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Extra))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Forward))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Back))) .unwrap() .event(UinputEvent::Controller(Controller::Mouse(Mouse::Task))) .unwrap() .event(Position::X) .unwrap() .event(Position::Y) .unwrap() .create() .unwrap(), ) }); pub fn init_device() { FAKE_DEVICE.lock().unwrap(); } impl KeybdKey { pub fn is_pressed(self) -> bool { 
*KEY_STATES.lock().unwrap().entry(self).or_insert(false) } pub fn pre
&LibinputInterfaceRaw.seat()) .unwrap(); while !MOUSE_BINDS.lock().unwrap().is_empty() || !KEYBD_BINDS.lock().unwrap().is_empty() { libinput_context.dispatch().unwrap(); while let Some(event) = libinput_context.next() { handle_input_event(event); } sleep(Duration::from_millis(10)); } } fn handle_input_event(event: Event) { match event { Keyboard(keyboard_event) => { let KeyboardEvent::Key(keyboard_key_event) = keyboard_event; let key = keyboard_key_event.key(); if let Some(keybd_key) = scan_code_to_key(key) { if keyboard_key_event.key_state() == KeyState::Pressed { KEY_STATES.lock().unwrap().insert(keybd_key, true); if let Some(Bind::NormalBind(cb)) = KEYBD_BINDS.lock().unwrap().get(&keybd_key) { let cb = Arc::clone(cb); spawn(move || cb()); }; } else { KEY_STATES.lock().unwrap().insert(keybd_key, false); } } } Pointer(pointer_event) => { if let Button(button_event) = pointer_event { let button = button_event.button(); if let Some(mouse_button) = match button { 272 => Some(MouseButton::LeftButton), 273 => Some(MouseButton::RightButton), 274 => Some(MouseButton::MiddleButton), 275 => Some(MouseButton::X1Button), 276 => Some(MouseButton::X2Button), _ => None, } { if button_event.button_state() == ButtonState::Pressed { BUTTON_STATES.lock().unwrap().insert(mouse_button, true); if let Some(Bind::NormalBind(cb)) = MOUSE_BINDS.lock().unwrap().get(&mouse_button) { let cb = Arc::clone(cb); spawn(move || cb()); }; } else { BUTTON_STATES.lock().unwrap().insert(mouse_button, false); } } } } _ => {} } } trait DisplayAcquirable { fn with<F, Z>(&self, cb: F) -> Z where F: FnOnce(*mut Display) -> Z; } impl DisplayAcquirable for AtomicPtr<Display> { fn with<F, Z>(&self, cb: F) -> Z where F: FnOnce(*mut Display) -> Z, { let display = self.load(Ordering::Relaxed); unsafe { XLockDisplay(display); }; let cb_result = cb(display); unsafe { XFlush(display); XUnlockDisplay(display); }; cb_result } }
ss(self) { let mut device = FAKE_DEVICE.lock().unwrap(); device .write(0x01, key_to_scan_code(self), 1) .unwrap(); device.synchronize().unwrap(); } pub fn release(self) { let mut device = FAKE_DEVICE.lock().unwrap(); device .write(0x01, key_to_scan_code(self), 0) .unwrap(); device.synchronize().unwrap(); } pub fn is_toggled(self) -> bool { if let Some(key) = match self { KeybdKey::ScrollLockKey => Some(4), KeybdKey::NumLockKey => Some(2), KeybdKey::CapsLockKey => Some(1), _ => None, } { let mut state: XKeyboardState = unsafe { MaybeUninit::zeroed().assume_init() }; SEND_DISPLAY.with(|display| unsafe { XGetKeyboardControl(display, &mut state); }); state.led_mask & key != 0 } else { false } } } impl MouseButton { pub fn is_pressed(self) -> bool { *BUTTON_STATES.lock().unwrap().entry(self).or_insert(false) } pub fn press(self) { let mut device = FAKE_DEVICE.lock().unwrap(); device.press(&Controller::Mouse(Mouse::from(self))).unwrap(); device.synchronize().unwrap(); } pub fn release(self) { let mut device = FAKE_DEVICE.lock().unwrap(); device.release(&Controller::Mouse(Mouse::from(self))).unwrap(); device.synchronize().unwrap(); } } impl MouseCursor { pub fn move_rel(x: i32, y: i32) { SEND_DISPLAY.with(|display| unsafe { XWarpPointer(display, 0, 0, 0, 0, 0, 0, x, y); }); } pub fn move_abs(x: i32, y: i32) { SEND_DISPLAY.with(|display| unsafe { XWarpPointer(display, 0, XRootWindow(display, XDefaultScreen(display)), 0, 0, 0, 0, x, y); }); } } impl MouseWheel { pub fn scroll_ver(y: i32) { if y < 0 { MouseButton::OtherButton(4).press(); MouseButton::OtherButton(4).release(); } else { MouseButton::OtherButton(5).press(); MouseButton::OtherButton(5).release(); } } pub fn scroll_hor(x: i32) { if x < 0 { MouseButton::OtherButton(6).press(); MouseButton::OtherButton(6).release(); } else { MouseButton::OtherButton(7).press(); MouseButton::OtherButton(7).release(); } } } struct LibinputInterfaceRaw; impl LibinputInterfaceRaw { fn seat(&self) -> String { String::from("seat0") } } 
impl LibinputInterface for LibinputInterfaceRaw { fn open_restricted(&mut self, path: &Path, flags: i32) -> std::result::Result<RawFd, i32> { if let Ok(fd) = open(path, OFlag::from_bits_truncate(flags), Mode::empty()) { Ok(fd) } else { Err(1) } } fn close_restricted(&mut self, fd: RawFd) { let _ = close(fd); } } pub fn handle_input_events() { let mut libinput_context = Libinput::new_with_udev(LibinputInterfaceRaw); libinput_context .udev_assign_seat(
random
[ { "content": "pub fn handle_input_events() {\n\n if !MOUSE_BINDS.lock().unwrap().is_empty() {\n\n set_hook(WH_MOUSE_LL, &*MOUSE_HHOOK, mouse_proc);\n\n };\n\n if !KEYBD_BINDS.lock().unwrap().is_empty() {\n\n set_hook(WH_KEYBOARD_LL, &*KEYBD_HHOOK, keybd_proc);\n\n };\n\n let mut msg: MSG = unsafe { MaybeUninit::zeroed().assume_init() };\n\n unsafe { GetMessageW(&mut msg, 0 as HWND, 0, 0) };\n\n}\n\n\n\nunsafe extern \"system\" fn keybd_proc(code: c_int, w_param: WPARAM, l_param: LPARAM) -> LRESULT {\n\n if KEYBD_BINDS.lock().unwrap().is_empty() {\n\n unset_hook(&*KEYBD_HHOOK);\n\n } else if w_param as u32 == WM_KEYDOWN {\n\n if let Some(bind) = KEYBD_BINDS\n\n .lock()\n\n .unwrap()\n\n .get_mut(&KeybdKey::from(u64::from(\n\n (*(l_param as *const KBDLLHOOKSTRUCT)).vkCode,\n", "file_path": "src/windows/mod.rs", "rank": 0, "score": 116876.79329026108 }, { "content": "pub fn from_keybd_key(k: KeybdKey) -> Option<char> {\n\n match k {\n\n KeybdKey::AKey => Some('a'),\n\n KeybdKey::BKey => Some('b'),\n\n KeybdKey::CKey => Some('c'),\n\n KeybdKey::DKey => Some('d'),\n\n KeybdKey::EKey => Some('e'),\n\n KeybdKey::FKey => Some('f'),\n\n KeybdKey::GKey => Some('g'),\n\n KeybdKey::HKey => Some('h'),\n\n KeybdKey::IKey => Some('i'),\n\n KeybdKey::JKey => Some('j'),\n\n KeybdKey::KKey => Some('k'),\n\n KeybdKey::LKey => Some('l'),\n\n KeybdKey::MKey => Some('m'),\n\n KeybdKey::NKey => Some('n'),\n\n KeybdKey::OKey => Some('o'),\n\n KeybdKey::PKey => Some('p'),\n\n KeybdKey::QKey => Some('q'),\n\n KeybdKey::RKey => Some('r'),\n", "file_path": "src/public.rs", "rank": 6, "score": 79451.43740766394 }, { "content": "pub fn get_keybd_key(c: char) -> Option<KeybdKey> {\n\n match c {\n\n ' ' => Some(KeybdKey::SpaceKey),\n\n 'A' | 'a' => Some(KeybdKey::AKey),\n\n 'B' | 'b' => Some(KeybdKey::BKey),\n\n 'C' | 'c' => Some(KeybdKey::CKey),\n\n 'D' | 'd' => Some(KeybdKey::DKey),\n\n 'E' | 'e' => Some(KeybdKey::EKey),\n\n 'F' | 'f' => Some(KeybdKey::FKey),\n\n 'G' | 'g' => 
Some(KeybdKey::GKey),\n\n 'H' | 'h' => Some(KeybdKey::HKey),\n\n 'I' | 'i' => Some(KeybdKey::IKey),\n\n 'J' | 'j' => Some(KeybdKey::JKey),\n\n 'K' | 'k' => Some(KeybdKey::KKey),\n\n 'L' | 'l' => Some(KeybdKey::LKey),\n\n 'M' | 'm' => Some(KeybdKey::MKey),\n\n 'N' | 'n' => Some(KeybdKey::NKey),\n\n 'O' | 'o' => Some(KeybdKey::OKey),\n\n 'P' | 'p' => Some(KeybdKey::PKey),\n\n 'Q' | 'q' => Some(KeybdKey::QKey),\n", "file_path": "src/public.rs", "rank": 7, "score": 77350.48779173967 }, { "content": "// https://www.win.tue.nl/~aeb/linux/kbd/scancodes-1.html\n\npub fn key_to_scan_code(key: KeybdKey) -> i32 {\n\n match key {\n\n BackspaceKey => 0x0e,\n\n TabKey => 0x0f,\n\n EnterKey => 0x1c,\n\n EscapeKey => 0x01,\n\n SpaceKey => 0x39,\n\n HomeKey => 0x47,\n\n LeftKey => 0x4b,\n\n UpKey => 0x48,\n\n RightKey => 0x4d,\n\n DownKey => 0x50,\n\n InsertKey => 0x52,\n\n DeleteKey => 0x53,\n\n Numrow0Key => 0x0b,\n\n Numrow1Key => 0x02,\n\n Numrow2Key => 0x03,\n\n Numrow3Key => 0x04,\n\n Numrow4Key => 0x05,\n\n Numrow5Key => 0x06,\n", "file_path": "src/linux/inputs.rs", "rank": 8, "score": 76346.40936036693 }, { "content": "// https://www.win.tue.nl/~aeb/linux/kbd/scancodes-1.html\n\npub fn scan_code_to_key(scan_code: u32) -> Option<KeybdKey> {\n\n match scan_code {\n\n 0x0e => Some(BackspaceKey),\n\n 0x0f => Some(TabKey),\n\n 0x1c => Some(EnterKey),\n\n 0x01 => Some(EscapeKey),\n\n 0x39 => Some(SpaceKey),\n\n 0x47 => Some(HomeKey),\n\n 0x4b => Some(LeftKey),\n\n 0x48 => Some(UpKey),\n\n 0x4d => Some(RightKey),\n\n 0x50 => Some(DownKey),\n\n 0x52 => Some(InsertKey),\n\n 0x53 => Some(DeleteKey),\n\n 0x0b => Some(Numrow0Key),\n\n 0x02 => Some(Numrow1Key),\n\n 0x03 => Some(Numrow2Key),\n\n 0x04 => Some(Numrow3Key),\n\n 0x05 => Some(Numrow4Key),\n\n 0x06 => Some(Numrow5Key),\n", "file_path": "src/linux/inputs.rs", "rank": 9, "score": 70783.10110227179 }, { "content": "fn send_keybd_input(flags: u32, key_code: KeybdKey) {\n\n let mut input = INPUT {\n\n type_: INPUT_KEYBOARD,\n\n u: 
unsafe {\n\n transmute_copy(&KEYBDINPUT {\n\n wVk: 0,\n\n wScan: MapVirtualKeyW(u64::from(key_code) as u32, 0) as u16,\n\n dwFlags: flags,\n\n time: 0,\n\n dwExtraInfo: 0,\n\n })\n\n },\n\n };\n\n unsafe { SendInput(1, &mut input as LPINPUT, size_of::<INPUT>() as c_int) };\n\n}\n", "file_path": "src/windows/mod.rs", "rank": 10, "score": 62711.834889889506 }, { "content": "fn send_mouse_input(flags: u32, data: u32, dx: i32, dy: i32) {\n\n let mut input = INPUT {\n\n type_: INPUT_MOUSE,\n\n u: unsafe {\n\n transmute_copy(&MOUSEINPUT {\n\n dx,\n\n dy,\n\n mouseData: data,\n\n dwFlags: flags,\n\n time: 0,\n\n dwExtraInfo: 0,\n\n })\n\n },\n\n };\n\n unsafe { SendInput(1, &mut input as LPINPUT, size_of::<INPUT>() as c_int) };\n\n}\n\n\n", "file_path": "src/windows/mod.rs", "rank": 11, "score": 61338.109407748234 }, { "content": "fn set_hook(\n\n hook_id: i32,\n\n hook_ptr: &AtomicPtr<HHOOK__>,\n\n hook_proc: unsafe extern \"system\" fn(c_int, WPARAM, LPARAM) -> LRESULT,\n\n) {\n\n hook_ptr.store(\n\n unsafe { SetWindowsHookExW(hook_id, Some(hook_proc), 0 as HINSTANCE, 0) },\n\n Ordering::Relaxed,\n\n );\n\n}\n\n\n", "file_path": "src/windows/mod.rs", "rank": 12, "score": 52355.24647762228 }, { "content": "fn unset_hook(hook_ptr: &AtomicPtr<HHOOK__>) {\n\n if !hook_ptr.load(Ordering::Relaxed).is_null() {\n\n unsafe { UnhookWindowsHookEx(hook_ptr.load(Ordering::Relaxed)) };\n\n hook_ptr.store(null_mut(), Ordering::Relaxed);\n\n }\n\n}\n\n\n", "file_path": "src/windows/mod.rs", "rank": 13, "score": 41027.19158019852 }, { "content": "fn main() {\n\n\n\n // Autorun for videogames.\n\n NumLockKey.bind(|| {\n\n while NumLockKey.is_toggled() {\n\n LShiftKey.press();\n\n WKey.press();\n\n sleep(Duration::from_millis(50));\n\n WKey.release();\n\n LShiftKey.release();\n\n }\n\n });\n\n\n\n // Rapidfire for videogames.\n\n RightButton.bind(|| {\n\n while RightButton.is_pressed() {\n\n LeftButton.press();\n\n sleep(Duration::from_millis(50));\n\n LeftButton.release();\n\n }\n", 
"file_path": "examples/test.rs", "rank": 15, "score": 35287.56104248592 }, { "content": "\n\nimpl MouseButton {\n\n pub fn bind<F: Fn() + Send + Sync + 'static>(self, callback: F) {\n\n MOUSE_BINDS\n\n .lock()\n\n .unwrap()\n\n .insert(self, Bind::NormalBind(Arc::new(callback)));\n\n }\n\n\n\n pub fn block_bind<F: Fn() + Send + Sync + 'static>(self, callback: F) {\n\n MOUSE_BINDS\n\n .lock()\n\n .unwrap()\n\n .insert(self, Bind::BlockBind(Arc::new(callback)));\n\n }\n\n\n\n pub fn blockable_bind<F: Fn() -> BlockInput + Send + Sync + 'static>(self, callback: F) {\n\n MOUSE_BINDS\n\n .lock()\n\n .unwrap()\n\n .insert(self, Bind::BlockableBind(Arc::new(callback)));\n\n }\n\n\n\n pub fn unbind(self) {\n\n MOUSE_BINDS.lock().unwrap().remove(&self);\n\n }\n\n}\n\n\n", "file_path": "src/public.rs", "rank": 16, "score": 24185.585385732928 }, { "content": "\n\nimpl KeybdKey {\n\n pub fn bind<F: Fn() + Send + Sync + 'static>(self, callback: F) {\n\n KEYBD_BINDS\n\n .lock()\n\n .unwrap()\n\n .insert(self, Bind::NormalBind(Arc::new(callback)));\n\n }\n\n\n\n pub fn block_bind<F: Fn() + Send + Sync + 'static>(self, callback: F) {\n\n KEYBD_BINDS\n\n .lock()\n\n .unwrap()\n\n .insert(self, Bind::BlockBind(Arc::new(callback)));\n\n }\n\n\n\n pub fn blockable_bind<F: Fn() -> BlockInput + Send + Sync + 'static>(self, callback: F) {\n\n KEYBD_BINDS\n\n .lock()\n\n .unwrap()\n", "file_path": "src/public.rs", "rank": 17, "score": 24183.4453763205 }, { "content": "use crate::common::*;\n\nuse std::{thread::sleep, time::Duration};\n\n\n\nuse strum::IntoEnumIterator;\n\nuse strum_macros::EnumIter;\n\n\n\npub enum BlockInput {\n\n Block,\n\n DontBlock,\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq, Hash, Copy, Clone, EnumIter)]\n\npub enum KeybdKey {\n\n BackspaceKey,\n\n TabKey,\n\n EnterKey,\n\n EscapeKey,\n\n SpaceKey,\n\n HomeKey,\n\n LeftKey,\n", "file_path": "src/public.rs", "rank": 18, "score": 24178.655033358937 }, { "content": " .insert(self, 
Bind::BlockableBind(Arc::new(callback)));\n\n }\n\n\n\n pub fn bind_all<F: Fn(KeybdKey) + Send + Sync + Copy + 'static>(callback: F) {\n\n for key in KeybdKey::iter() {\n\n let fire = move || {\n\n callback(key);\n\n };\n\n\n\n KEYBD_BINDS\n\n .lock()\n\n .unwrap()\n\n .insert(key, Bind::NormalBind(Arc::new(fire)));\n\n }\n\n }\n\n\n\n pub fn unbind(self) {\n\n KEYBD_BINDS.lock().unwrap().remove(&self);\n\n }\n\n}\n", "file_path": "src/public.rs", "rank": 19, "score": 24178.44749075751 }, { "content": " LControlKey,\n\n RControlKey,\n\n\n\n #[strum(disabled)]\n\n OtherKey(u64),\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq, Hash, Copy, Clone)]\n\npub enum MouseButton {\n\n LeftButton,\n\n MiddleButton,\n\n RightButton,\n\n X1Button,\n\n X2Button,\n\n OtherButton(u32),\n\n}\n\n\n\npub struct MouseCursor;\n\n\n\npub struct MouseWheel;\n", "file_path": "src/public.rs", "rank": 20, "score": 24175.705233867968 }, { "content": " 'R' | 'r' => Some(KeybdKey::RKey),\n\n 'S' | 's' => Some(KeybdKey::SKey),\n\n 'T' | 't' => Some(KeybdKey::TKey),\n\n 'U' | 'u' => Some(KeybdKey::UKey),\n\n 'V' | 'v' => Some(KeybdKey::VKey),\n\n 'W' | 'w' => Some(KeybdKey::WKey),\n\n 'X' | 'x' => Some(KeybdKey::XKey),\n\n 'Y' | 'y' => Some(KeybdKey::YKey),\n\n 'Z' | 'z' => Some(KeybdKey::ZKey),\n\n _ => None,\n\n }\n\n}\n\n\n\npub struct KeySequence(pub &'static str);\n\n\n\nimpl KeySequence {\n\n pub fn send(&self) {\n\n for c in self.0.chars() {\n\n let mut uppercase = false;\n\n if let Some(keybd_key) = {\n", "file_path": "src/public.rs", "rank": 21, "score": 24175.662197754005 }, { "content": " UpKey,\n\n RightKey,\n\n DownKey,\n\n InsertKey,\n\n DeleteKey,\n\n Numrow0Key,\n\n Numrow1Key,\n\n Numrow2Key,\n\n Numrow3Key,\n\n Numrow4Key,\n\n Numrow5Key,\n\n Numrow6Key,\n\n Numrow7Key,\n\n Numrow8Key,\n\n Numrow9Key,\n\n AKey,\n\n BKey,\n\n CKey,\n\n DKey,\n\n EKey,\n", "file_path": "src/public.rs", "rank": 22, "score": 24170.368769820052 }, { "content": " F10Key,\n\n F11Key,\n\n F12Key,\n\n 
F13Key,\n\n F14Key,\n\n F15Key,\n\n F16Key,\n\n F17Key,\n\n F18Key,\n\n F19Key,\n\n F20Key,\n\n F21Key,\n\n F22Key,\n\n F23Key,\n\n F24Key,\n\n NumLockKey,\n\n ScrollLockKey,\n\n CapsLockKey,\n\n LShiftKey,\n\n RShiftKey,\n", "file_path": "src/public.rs", "rank": 23, "score": 24170.368769820052 }, { "content": " KeybdKey::SKey => Some('s'),\n\n KeybdKey::TKey => Some('t'),\n\n KeybdKey::UKey => Some('u'),\n\n KeybdKey::VKey => Some('v'),\n\n KeybdKey::WKey => Some('w'),\n\n KeybdKey::XKey => Some('x'),\n\n KeybdKey::YKey => Some('y'),\n\n KeybdKey::ZKey => Some('z'),\n\n KeybdKey::Numpad0Key => Some('0'),\n\n KeybdKey::Numpad1Key => Some('1'),\n\n KeybdKey::Numpad2Key => Some('2'),\n\n KeybdKey::Numpad3Key => Some('3'),\n\n KeybdKey::Numpad4Key => Some('4'),\n\n KeybdKey::Numpad5Key => Some('5'),\n\n KeybdKey::Numpad6Key => Some('6'),\n\n KeybdKey::Numpad7Key => Some('7'),\n\n KeybdKey::Numpad8Key => Some('8'),\n\n KeybdKey::Numpad9Key => Some('9'),\n\n KeybdKey::Numrow0Key => Some('0'),\n\n KeybdKey::Numrow1Key => Some('1'),\n", "file_path": "src/public.rs", "rank": 24, "score": 24170.368769820052 }, { "content": " if c.is_uppercase() {\n\n uppercase = true;\n\n }\n\n get_keybd_key(c)\n\n } {\n\n if uppercase {\n\n KeybdKey::LShiftKey.press();\n\n }\n\n keybd_key.press();\n\n sleep(Duration::from_millis(20));\n\n keybd_key.release();\n\n if uppercase {\n\n KeybdKey::LShiftKey.release();\n\n }\n\n };\n\n }\n\n }\n\n}", "file_path": "src/public.rs", "rank": 25, "score": 24170.368769820052 }, { "content": " FKey,\n\n GKey,\n\n HKey,\n\n IKey,\n\n JKey,\n\n KKey,\n\n LKey,\n\n MKey,\n\n NKey,\n\n OKey,\n\n PKey,\n\n QKey,\n\n RKey,\n\n SKey,\n\n TKey,\n\n UKey,\n\n VKey,\n\n WKey,\n\n XKey,\n\n YKey,\n", "file_path": "src/public.rs", "rank": 26, "score": 24170.368769820052 }, { "content": " KeybdKey::Numrow2Key => Some('2'),\n\n KeybdKey::Numrow3Key => Some('3'),\n\n KeybdKey::Numrow4Key => Some('4'),\n\n KeybdKey::Numrow5Key => Some('5'),\n\n KeybdKey::Numrow6Key => 
Some('6'),\n\n KeybdKey::Numrow7Key => Some('7'),\n\n KeybdKey::Numrow8Key => Some('8'),\n\n KeybdKey::Numrow9Key => Some('9'),\n\n _ => None\n\n }\n\n}\n\n\n", "file_path": "src/public.rs", "rank": 27, "score": 24170.368769820052 }, { "content": " ZKey,\n\n Numpad0Key,\n\n Numpad1Key,\n\n Numpad2Key,\n\n Numpad3Key,\n\n Numpad4Key,\n\n Numpad5Key,\n\n Numpad6Key,\n\n Numpad7Key,\n\n Numpad8Key,\n\n Numpad9Key,\n\n F1Key,\n\n F2Key,\n\n F3Key,\n\n F4Key,\n\n F5Key,\n\n F6Key,\n\n F7Key,\n\n F8Key,\n\n F9Key,\n", "file_path": "src/public.rs", "rank": 28, "score": 24170.368769820052 }, { "content": "use crate::{common::*, public::*};\n\nuse std::{\n\n mem::{size_of, transmute_copy, MaybeUninit},\n\n ptr::null_mut,\n\n sync::atomic::AtomicPtr,\n\n};\n\nuse winapi::{\n\n ctypes::*,\n\n shared::{minwindef::*, windef::*},\n\n um::winuser::*,\n\n};\n\nuse once_cell::sync::Lazy;\n\n\n\nmod inputs;\n\n\n\nstatic KEYBD_HHOOK: Lazy<AtomicPtr<HHOOK__>> = Lazy::new(AtomicPtr::default);\n\nstatic MOUSE_HHOOK: Lazy<AtomicPtr<HHOOK__>> = Lazy::new(AtomicPtr::default);\n\n\n\nimpl KeybdKey {\n\n pub fn is_pressed(self) -> bool {\n", "file_path": "src/windows/mod.rs", "rank": 30, "score": 21644.864579142635 }, { "content": " (unsafe { GetAsyncKeyState(u64::from(self) as i32) } >> 15) != 0\n\n }\n\n\n\n pub fn is_toggled(self) -> bool {\n\n unsafe { GetKeyState(u64::from(self) as i32) & 15 != 0 }\n\n }\n\n\n\n pub fn press(self) {\n\n send_keybd_input(KEYEVENTF_SCANCODE, self);\n\n }\n\n\n\n pub fn release(self) {\n\n send_keybd_input(KEYEVENTF_SCANCODE | KEYEVENTF_KEYUP, self);\n\n }\n\n}\n\n\n\nimpl MouseButton {\n\n pub fn is_pressed(self) -> bool {\n\n (unsafe { GetAsyncKeyState(u32::from(self) as i32) } >> 15) != 0\n\n }\n", "file_path": "src/windows/mod.rs", "rank": 31, "score": 21643.565019381283 }, { "content": "}\n\n\n\nimpl MouseWheel {\n\n pub fn scroll_ver(dwheel: i32) {\n\n send_mouse_input(MOUSEEVENTF_WHEEL, (dwheel * 120) as u32, 0, 0);\n\n }\n\n\n\n pub fn 
scroll_hor(dwheel: i32) {\n\n send_mouse_input(MOUSEEVENTF_HWHEEL, (dwheel * 120) as u32, 0, 0);\n\n }\n\n}\n\n\n", "file_path": "src/windows/mod.rs", "rank": 35, "score": 21639.976649597225 }, { "content": "\n\n pub fn press(self) {\n\n match self {\n\n MouseButton::LeftButton => send_mouse_input(MOUSEEVENTF_LEFTDOWN, 0, 0, 0),\n\n MouseButton::RightButton => send_mouse_input(MOUSEEVENTF_RIGHTDOWN, 0, 0, 0),\n\n MouseButton::MiddleButton => send_mouse_input(MOUSEEVENTF_MIDDLEDOWN, 0, 0, 0),\n\n _ => {}\n\n }\n\n }\n\n\n\n pub fn release(self) {\n\n match self {\n\n MouseButton::LeftButton => send_mouse_input(MOUSEEVENTF_LEFTUP, 0, 0, 0),\n\n MouseButton::RightButton => send_mouse_input(MOUSEEVENTF_RIGHTUP, 0, 0, 0),\n\n MouseButton::MiddleButton => send_mouse_input(MOUSEEVENTF_MIDDLEUP, 0, 0, 0),\n\n _ => {}\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/windows/mod.rs", "rank": 37, "score": 21638.19641146499 }, { "content": "impl MouseCursor {\n\n pub fn pos() -> (i32, i32) {\n\n unsafe {\n\n let mut point = MaybeUninit::uninit();\n\n GetCursorPos(point.as_mut_ptr());\n\n let point = point.assume_init();\n\n (point.x, point.y)\n\n }\n\n }\n\n\n\n pub fn move_rel(dx: i32, dy: i32) {\n\n let (x, y) = Self::pos();\n\n Self::move_abs(x + dx, y + dy);\n\n }\n\n\n\n pub fn move_abs(x: i32, y: i32) {\n\n unsafe {\n\n SetCursorPos(x, y);\n\n }\n\n }\n", "file_path": "src/windows/mod.rs", "rank": 38, "score": 21638.022130483598 }, { "content": " CallNextHookEx(null_mut(), code, w_param, l_param)\n\n}\n\n\n\nunsafe extern \"system\" fn mouse_proc(code: c_int, w_param: WPARAM, l_param: LPARAM) -> LRESULT {\n\n if MOUSE_BINDS.lock().unwrap().is_empty() {\n\n unset_hook(&*MOUSE_HHOOK);\n\n } else if let Some(event) = match w_param as u32 {\n\n WM_LBUTTONDOWN => Some(MouseButton::LeftButton),\n\n WM_RBUTTONDOWN => Some(MouseButton::RightButton),\n\n WM_MBUTTONDOWN => Some(MouseButton::MiddleButton),\n\n WM_XBUTTONDOWN => {\n\n let llhs = &*(l_param as *const 
MSLLHOOKSTRUCT);\n\n\n\n match HIWORD(llhs.mouseData) {\n\n XBUTTON1 => Some(MouseButton::X1Button),\n\n XBUTTON2 => Some(MouseButton::X2Button),\n\n _ => None,\n\n }\n\n },\n\n _ => None,\n", "file_path": "src/windows/mod.rs", "rank": 39, "score": 21636.86487051221 }, { "content": " } {\n\n if let Some(bind) = MOUSE_BINDS.lock().unwrap().get_mut(&event) {\n\n match bind {\n\n Bind::NormalBind(cb) => {\n\n let cb = Arc::clone(cb);\n\n spawn(move || cb());\n\n }\n\n Bind::BlockBind(cb) => {\n\n let cb = Arc::clone(cb);\n\n spawn(move || cb());\n\n return 1;\n\n }\n\n Bind::BlockableBind(cb) => {\n\n if let BlockInput::Block = cb() {\n\n return 1;\n\n }\n\n }\n\n }\n\n };\n\n }\n\n CallNextHookEx(null_mut(), code, w_param, l_param)\n\n}\n\n\n", "file_path": "src/windows/mod.rs", "rank": 41, "score": 21636.25082324301 }, { "content": " )))\n\n {\n\n match bind {\n\n Bind::NormalBind(cb) => {\n\n let cb = Arc::clone(cb);\n\n spawn(move || cb());\n\n }\n\n Bind::BlockBind(cb) => {\n\n let cb = Arc::clone(cb);\n\n spawn(move || cb());\n\n return 1;\n\n }\n\n Bind::BlockableBind(cb) => {\n\n if let BlockInput::Block = cb() {\n\n return 1;\n\n }\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/windows/mod.rs", "rank": 43, "score": 21631.803043052343 }, { "content": "\n\nimpl From<MouseButton> for uinput::event::controller::Mouse {\n\n fn from(button: MouseButton) -> Self {\n\n use uinput::event::controller::Mouse;\n\n match button {\n\n MouseButton::LeftButton => Mouse::Left,\n\n MouseButton::RightButton => Mouse::Right,\n\n MouseButton::MiddleButton => Mouse::Middle,\n\n MouseButton::X1Button => unimplemented!(),\n\n MouseButton::X2Button => unimplemented!(),\n\n MouseButton::OtherButton(_) => unimplemented!(),\n\n }\n\n }\n\n}\n", "file_path": "src/linux/inputs.rs", "rank": 44, "score": 20950.04384895682 }, { "content": "}\n\n\n\nimpl From<u32> for MouseButton {\n\n fn from(keycode: u32) -> MouseButton {\n\n match keycode {\n\n 1 => LeftButton,\n\n 2 => MiddleButton,\n\n 3 
=> RightButton,\n\n 4 => X1Button,\n\n 5 => X2Button,\n\n _ => OtherButton(keycode),\n\n }\n\n }\n\n}\n\n\n\nimpl From<MouseButton> for u32 {\n\n fn from(button: MouseButton) -> u32 {\n\n match button {\n\n LeftButton => 1,\n\n MiddleButton => 2,\n\n RightButton => 3,\n\n X1Button => 4,\n\n X2Button => 5,\n\n OtherButton(keycode) => keycode,\n\n }\n\n }\n\n}\n\n\n\n// https://www.win.tue.nl/~aeb/linux/kbd/scancodes-1.html\n", "file_path": "src/linux/inputs.rs", "rank": 45, "score": 20946.0637503521 }, { "content": "use crate::public::{\n\n KeybdKey::{self, *},\n\n MouseButton::{self, *},\n\n};\n\n\n\nimpl From<KeybdKey> for u64 {\n\n fn from(key: KeybdKey) -> u64 {\n\n match key {\n\n BackspaceKey => 0xFF08,\n\n TabKey => 0xFF09,\n\n EnterKey => 0xFF8D,\n\n EscapeKey => 0xFF1B,\n\n SpaceKey => 0x020,\n\n HomeKey => 0xFF50,\n\n LeftKey => 0xFF51,\n\n UpKey => 0xFF52,\n\n RightKey => 0xFF53,\n\n DownKey => 0xFF54,\n\n InsertKey => 0xFF63,\n\n DeleteKey => 0xFF9F,\n", "file_path": "src/linux/inputs.rs", "rank": 46, "score": 20945.947657118508 }, { "content": "use crate::public::{\n\n KeybdKey::{self, *},\n\n MouseButton::{self, *},\n\n};\n\n\n\nimpl From<KeybdKey> for u64 {\n\n fn from(key: KeybdKey) -> u64 {\n\n match key {\n\n BackspaceKey => 0x08,\n\n TabKey => 0x09,\n\n EnterKey => 0x0D,\n\n EscapeKey => 0x1B,\n\n SpaceKey => 0x20,\n\n HomeKey => 0x24,\n\n LeftKey => 0x25,\n\n UpKey => 0x26,\n\n RightKey => 0x27,\n\n DownKey => 0x28,\n\n InsertKey => 0x2D,\n\n DeleteKey => 0x2E,\n", "file_path": "src/windows/inputs.rs", "rank": 47, "score": 20945.777657730323 }, { "content": " 0x81 => F18Key,\n\n 0x82 => F19Key,\n\n 0x83 => F20Key,\n\n 0x84 => F21Key,\n\n 0x85 => F22Key,\n\n 0x86 => F23Key,\n\n 0x87 => F24Key,\n\n 0x90 => NumLockKey,\n\n 0x91 => ScrollLockKey,\n\n 0x14 => CapsLockKey,\n\n 0xA0 => LShiftKey,\n\n 0xA1 => RShiftKey,\n\n 0xA2 => LControlKey,\n\n 0xA3 => RControlKey,\n\n _ => OtherKey(code),\n\n }\n\n }\n\n}\n\n\n\nimpl From<MouseButton> for u32 {\n", 
"file_path": "src/windows/inputs.rs", "rank": 48, "score": 20944.19987261632 }, { "content": " fn from(button: MouseButton) -> u32 {\n\n match button {\n\n LeftButton => 0x01,\n\n RightButton => 0x02,\n\n MiddleButton => 0x04,\n\n X1Button => 0x05,\n\n X2Button => 0x06,\n\n OtherButton(code) => code,\n\n }\n\n }\n\n}\n", "file_path": "src/windows/inputs.rs", "rank": 49, "score": 20943.24077512293 }, { "content": "}\n\n\n\nimpl From<u64> for KeybdKey {\n\n fn from(code: u64) -> KeybdKey {\n\n match code {\n\n 0x08 => BackspaceKey,\n\n 0x09 => TabKey,\n\n 0x0D => EnterKey,\n\n 0x1B => EscapeKey,\n\n 0x20 => SpaceKey,\n\n 0x24 => HomeKey,\n\n 0x25 => LeftKey,\n\n 0x26 => UpKey,\n\n 0x27 => RightKey,\n\n 0x28 => DownKey,\n\n 0x2D => InsertKey,\n\n 0x2E => DeleteKey,\n\n 0x30 => Numrow0Key,\n\n 0x31 => Numrow1Key,\n\n 0x32 => Numrow2Key,\n", "file_path": "src/windows/inputs.rs", "rank": 50, "score": 20942.764846570335 }, { "content": " 0x07 => Some(Numrow6Key),\n\n 0x08 => Some(Numrow7Key),\n\n 0x09 => Some(Numrow8Key),\n\n 0x0a => Some(Numrow9Key),\n\n 0x1e => Some(AKey),\n\n 0x30 => Some(BKey),\n\n 0x2e => Some(CKey),\n\n 0x20 => Some(DKey),\n\n 0x12 => Some(EKey),\n\n 0x21 => Some(FKey),\n\n 0x22 => Some(GKey),\n\n 0x23 => Some(HKey),\n\n 0x17 => Some(IKey),\n\n 0x24 => Some(JKey),\n\n 0x25 => Some(KKey),\n\n 0x26 => Some(LKey),\n\n 0x32 => Some(MKey),\n\n 0x31 => Some(NKey),\n\n 0x18 => Some(OKey),\n\n 0x19 => Some(PKey),\n", "file_path": "src/linux/inputs.rs", "rank": 51, "score": 20941.28481675689 }, { "content": " 0x33 => Numrow3Key,\n\n 0x34 => Numrow4Key,\n\n 0x35 => Numrow5Key,\n\n 0x36 => Numrow6Key,\n\n 0x37 => Numrow7Key,\n\n 0x38 => Numrow8Key,\n\n 0x39 => Numrow9Key,\n\n 0x41 => AKey,\n\n 0x42 => BKey,\n\n 0x43 => CKey,\n\n 0x44 => DKey,\n\n 0x45 => EKey,\n\n 0x46 => FKey,\n\n 0x47 => GKey,\n\n 0x48 => HKey,\n\n 0x49 => IKey,\n\n 0x4A => JKey,\n\n 0x4B => KKey,\n\n 0x4C => LKey,\n\n 0x4D => MKey,\n", "file_path": "src/windows/inputs.rs", "rank": 52, 
"score": 20941.28481675689 }, { "content": " F15Key => 0xFFCC,\n\n F16Key => 0xFFCD,\n\n F17Key => 0xFFCE,\n\n F18Key => 0xFFCF,\n\n F19Key => 0xFFD0,\n\n F20Key => 0xFFD1,\n\n F21Key => 0xFFD2,\n\n F22Key => 0xFFD3,\n\n F23Key => 0xFFD4,\n\n F24Key => 0xFFD5,\n\n NumLockKey => 0xFF7F,\n\n ScrollLockKey => 0xFF14,\n\n CapsLockKey => 0xFFE5,\n\n LShiftKey => 0xFFE1,\n\n RShiftKey => 0xFFE2,\n\n LControlKey => 0xFFE3,\n\n RControlKey => 0xFFE4,\n\n OtherKey(keycode) => keycode,\n\n }\n\n }\n", "file_path": "src/linux/inputs.rs", "rank": 53, "score": 20941.28481675689 }, { "content": " QKey => 0x10,\n\n RKey => 0x13,\n\n SKey => 0x1f,\n\n TKey => 0x14,\n\n UKey => 0x16,\n\n VKey => 0x2f,\n\n WKey => 0x11,\n\n XKey => 0x2d,\n\n YKey => 0x15,\n\n ZKey => 0x2c,\n\n Numpad0Key => 0x52,\n\n Numpad1Key => 0x4f,\n\n Numpad2Key => 0x50,\n\n Numpad3Key => 0x51,\n\n Numpad4Key => 0x4b,\n\n Numpad5Key => 0x4c,\n\n Numpad6Key => 0x4d,\n\n Numpad7Key => 0x47,\n\n Numpad8Key => 0x48,\n\n Numpad9Key => 0x49,\n", "file_path": "src/linux/inputs.rs", "rank": 54, "score": 20941.28481675689 }, { "content": " KKey => 0x4B,\n\n LKey => 0x4C,\n\n MKey => 0x4D,\n\n NKey => 0x4E,\n\n OKey => 0x4F,\n\n PKey => 0x50,\n\n QKey => 0x51,\n\n RKey => 0x52,\n\n SKey => 0x53,\n\n TKey => 0x54,\n\n UKey => 0x55,\n\n VKey => 0x56,\n\n WKey => 0x57,\n\n XKey => 0x58,\n\n YKey => 0x59,\n\n ZKey => 0x5A,\n\n Numpad0Key => 0x60,\n\n Numpad1Key => 0x61,\n\n Numpad2Key => 0x62,\n\n Numpad3Key => 0x63,\n", "file_path": "src/windows/inputs.rs", "rank": 55, "score": 20941.28481675689 }, { "content": " Numpad4Key => 0xFFB4,\n\n Numpad5Key => 0xFFB5,\n\n Numpad6Key => 0xFFB6,\n\n Numpad7Key => 0xFFB7,\n\n Numpad8Key => 0xFFB8,\n\n Numpad9Key => 0xFFB9,\n\n F1Key => 0xFFBE,\n\n F2Key => 0xFFBF,\n\n F3Key => 0xFFC0,\n\n F4Key => 0xFFC1,\n\n F5Key => 0xFFC2,\n\n F6Key => 0xFFC3,\n\n F7Key => 0xFFC4,\n\n F8Key => 0xFFC5,\n\n F9Key => 0xFFC6,\n\n F10Key => 0xFFC7,\n\n F11Key => 0xFFC8,\n\n F12Key => 0xFFC9,\n\n F13Key 
=> 0xFFCA,\n\n F14Key => 0xFFCB,\n", "file_path": "src/linux/inputs.rs", "rank": 56, "score": 20941.28481675689 }, { "content": " 0x10 => Some(QKey),\n\n 0x13 => Some(RKey),\n\n 0x1f => Some(SKey),\n\n 0x14 => Some(TKey),\n\n 0x16 => Some(UKey),\n\n 0x2f => Some(VKey),\n\n 0x11 => Some(WKey),\n\n 0x2d => Some(XKey),\n\n 0x15 => Some(YKey),\n\n 0x2c => Some(ZKey),\n\n 0x52 => Some(Numpad0Key),\n\n 0x4f => Some(Numpad1Key),\n\n 0x50 => Some(Numpad2Key),\n\n 0x51 => Some(Numpad3Key),\n\n 0x4b => Some(Numpad4Key),\n\n 0x4c => Some(Numpad5Key),\n\n 0x4d => Some(Numpad6Key),\n\n 0x47 => Some(Numpad7Key),\n\n 0x48 => Some(Numpad8Key),\n\n 0x49 => Some(Numpad9Key),\n", "file_path": "src/linux/inputs.rs", "rank": 57, "score": 20941.28481675689 }, { "content": " F1Key => 0x3b,\n\n F2Key => 0x3c,\n\n F3Key => 0x3d,\n\n F4Key => 0x3e,\n\n F5Key => 0x3f,\n\n F6Key => 0x40,\n\n F7Key => 0x41,\n\n F8Key => 0x42,\n\n F9Key => 0x43,\n\n F10Key => 0x44,\n\n NumLockKey => 0x45,\n\n ScrollLockKey => 0x46,\n\n CapsLockKey => 0x3a,\n\n LShiftKey => 0x2a,\n\n RShiftKey => 0x36,\n\n LControlKey => 0x1d,\n\n OtherKey(code) => code as i32,\n\n _ => 0x0,\n\n }\n\n}\n", "file_path": "src/linux/inputs.rs", "rank": 58, "score": 20941.28481675689 }, { "content": " F15Key => 0x7E,\n\n F16Key => 0x7F,\n\n F17Key => 0x80,\n\n F18Key => 0x81,\n\n F19Key => 0x82,\n\n F20Key => 0x83,\n\n F21Key => 0x84,\n\n F22Key => 0x85,\n\n F23Key => 0x86,\n\n F24Key => 0x87,\n\n NumLockKey => 0x90,\n\n ScrollLockKey => 0x91,\n\n CapsLockKey => 0x14,\n\n LShiftKey => 0xA0,\n\n RShiftKey => 0xA1,\n\n LControlKey => 0xA2,\n\n RControlKey => 0xA3,\n\n OtherKey(code) => code,\n\n }\n\n }\n", "file_path": "src/windows/inputs.rs", "rank": 59, "score": 20941.28481675689 }, { "content": " 0x67 => Numpad7Key,\n\n 0x68 => Numpad8Key,\n\n 0x69 => Numpad9Key,\n\n 0x70 => F1Key,\n\n 0x71 => F2Key,\n\n 0x72 => F3Key,\n\n 0x73 => F4Key,\n\n 0x74 => F5Key,\n\n 0x75 => F6Key,\n\n 0x76 => F7Key,\n\n 0x77 => F8Key,\n\n 0x78 => 
F9Key,\n\n 0x79 => F10Key,\n\n 0x7A => F11Key,\n\n 0x7B => F12Key,\n\n 0x7C => F13Key,\n\n 0x7D => F14Key,\n\n 0x7E => F15Key,\n\n 0x7F => F16Key,\n\n 0x80 => F17Key,\n", "file_path": "src/windows/inputs.rs", "rank": 60, "score": 20941.28481675689 }, { "content": " Numrow0Key => 0x30,\n\n Numrow1Key => 0x31,\n\n Numrow2Key => 0x32,\n\n Numrow3Key => 0x33,\n\n Numrow4Key => 0x34,\n\n Numrow5Key => 0x35,\n\n Numrow6Key => 0x36,\n\n Numrow7Key => 0x37,\n\n Numrow8Key => 0x38,\n\n Numrow9Key => 0x39,\n\n AKey => 0x41,\n\n BKey => 0x42,\n\n CKey => 0x43,\n\n DKey => 0x44,\n\n EKey => 0x45,\n\n FKey => 0x46,\n\n GKey => 0x47,\n\n HKey => 0x48,\n\n IKey => 0x49,\n\n JKey => 0x4A,\n", "file_path": "src/windows/inputs.rs", "rank": 61, "score": 20941.28481675689 }, { "content": " Numrow0Key => 0x030,\n\n Numrow1Key => 0x031,\n\n Numrow2Key => 0x032,\n\n Numrow3Key => 0x033,\n\n Numrow4Key => 0x034,\n\n Numrow5Key => 0x035,\n\n Numrow6Key => 0x036,\n\n Numrow7Key => 0x037,\n\n Numrow8Key => 0x038,\n\n Numrow9Key => 0x039,\n\n AKey => 0x041,\n\n BKey => 0x042,\n\n CKey => 0x043,\n\n DKey => 0x044,\n\n EKey => 0x045,\n\n FKey => 0x046,\n\n GKey => 0x047,\n\n HKey => 0x048,\n\n IKey => 0x049,\n\n JKey => 0x04A,\n", "file_path": "src/linux/inputs.rs", "rank": 62, "score": 20941.28481675689 }, { "content": " Numpad4Key => 0x64,\n\n Numpad5Key => 0x65,\n\n Numpad6Key => 0x66,\n\n Numpad7Key => 0x67,\n\n Numpad8Key => 0x68,\n\n Numpad9Key => 0x69,\n\n F1Key => 0x70,\n\n F2Key => 0x71,\n\n F3Key => 0x72,\n\n F4Key => 0x73,\n\n F5Key => 0x74,\n\n F6Key => 0x75,\n\n F7Key => 0x76,\n\n F8Key => 0x77,\n\n F9Key => 0x78,\n\n F10Key => 0x79,\n\n F11Key => 0x7A,\n\n F12Key => 0x7B,\n\n F13Key => 0x7C,\n\n F14Key => 0x7D,\n", "file_path": "src/windows/inputs.rs", "rank": 63, "score": 20941.28481675689 }, { "content": " Numrow6Key => 0x07,\n\n Numrow7Key => 0x08,\n\n Numrow8Key => 0x09,\n\n Numrow9Key => 0x0a,\n\n AKey => 0x1e,\n\n BKey => 0x30,\n\n CKey => 0x2e,\n\n DKey => 0x20,\n\n EKey => 
0x12,\n\n FKey => 0x21,\n\n GKey => 0x22,\n\n HKey => 0x23,\n\n IKey => 0x17,\n\n JKey => 0x24,\n\n KKey => 0x25,\n\n LKey => 0x26,\n\n MKey => 0x32,\n\n NKey => 0x31,\n\n OKey => 0x18,\n\n PKey => 0x19,\n", "file_path": "src/linux/inputs.rs", "rank": 64, "score": 20941.28481675689 }, { "content": " 0x3b => Some(F1Key),\n\n 0x3c => Some(F2Key),\n\n 0x3d => Some(F3Key),\n\n 0x3e => Some(F4Key),\n\n 0x3f => Some(F5Key),\n\n 0x40 => Some(F6Key),\n\n 0x41 => Some(F7Key),\n\n 0x42 => Some(F8Key),\n\n 0x43 => Some(F9Key),\n\n 0x44 => Some(F10Key),\n\n 0x45 => Some(NumLockKey),\n\n 0x46 => Some(ScrollLockKey),\n\n 0x3a => Some(CapsLockKey),\n\n 0x2a => Some(LShiftKey),\n\n 0x36 => Some(RShiftKey),\n\n 0x1d => Some(LControlKey),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/linux/inputs.rs", "rank": 65, "score": 20941.28481675689 }, { "content": " KKey => 0x04B,\n\n LKey => 0x04C,\n\n MKey => 0x04D,\n\n NKey => 0x04E,\n\n OKey => 0x04F,\n\n PKey => 0x050,\n\n QKey => 0x051,\n\n RKey => 0x052,\n\n SKey => 0x053,\n\n TKey => 0x054,\n\n UKey => 0x055,\n\n VKey => 0x056,\n\n WKey => 0x057,\n\n XKey => 0x058,\n\n YKey => 0x059,\n\n ZKey => 0x05A,\n\n Numpad0Key => 0xFFB0,\n\n Numpad1Key => 0xFFB1,\n\n Numpad2Key => 0xFFB2,\n\n Numpad3Key => 0xFFB3,\n", "file_path": "src/linux/inputs.rs", "rank": 66, "score": 20941.28481675689 }, { "content": " 0x4E => NKey,\n\n 0x4F => OKey,\n\n 0x50 => PKey,\n\n 0x51 => QKey,\n\n 0x52 => RKey,\n\n 0x53 => SKey,\n\n 0x54 => TKey,\n\n 0x55 => UKey,\n\n 0x56 => VKey,\n\n 0x57 => WKey,\n\n 0x58 => XKey,\n\n 0x59 => YKey,\n\n 0x5A => ZKey,\n\n 0x60 => Numpad0Key,\n\n 0x61 => Numpad1Key,\n\n 0x62 => Numpad2Key,\n\n 0x63 => Numpad3Key,\n\n 0x64 => Numpad4Key,\n\n 0x65 => Numpad5Key,\n\n 0x66 => Numpad6Key,\n", "file_path": "src/windows/inputs.rs", "rank": 67, "score": 20941.28481675689 }, { "content": "# InputBot [![docs link](https://docs.rs/inputbot/badge.svg)](https://docs.rs/inputbot) [![crates.io 
version](https://img.shields.io/crates/v/inputbot.svg)](https://crates.io/crates/inputbot) \n\nCross-platform (Windows & Linux) library for simulating keyboard/mouse input events and registering global input device event handlers.\n\n\n\nAllows writing automation programs that collapse long action-sequences into single key-presses.\n\n\n\n## Usage sample\n\n\n\n```Rust\n\nuse inputbot::{KeybdKey::*, MouseButton::*, *};\n\nuse std::{thread::sleep, time::Duration};\n\n\n\nfn main() {\n\n // Autorun for videogames.\n\n NumLockKey.bind(|| {\n\n while NumLockKey.is_toggled() {\n\n LShiftKey.press();\n\n WKey.press();\n\n sleep(Duration::from_millis(50));\n\n WKey.release();\n\n LShiftKey.release();\n\n }\n\n });\n\n\n\n // Rapidfire for videogames.\n\n RightButton.bind(|| {\n\n while RightButton.is_pressed() {\n\n LeftButton.press();\n\n sleep(Duration::from_millis(50));\n\n LeftButton.release();\n\n }\n\n });\n\n\n\n // Create a handler to trigger on any and all keyboard events.\n\n inputbot::KeybdKey::bind_all(|evnt| {\n\n println!(\"{:?}\", evnt);\n\n });\n\n\n\n // Send a key sequence.\n\n RKey.bind(|| KeySequence(\"Sample text\").send());\n\n\n\n // Move mouse.\n\n QKey.bind(|| MouseCursor::move_rel(10, 10));\n\n\n\n // Call this to start listening for bound inputs.\n\n handle_input_events();\n\n}\n\n```\n\n\n\n## Build Dependencies\n\n### Debian or Ubuntu based distros\n\n* **libx11-dev**\n\n* **libxtst-dev**\n\n* **libudev-dev**\n\n* **libinput-dev**\n\n\n\n**Note:** libinput requires InputBot to be run with sudo on Linux - `sudo ./target/debug/<program name>`.\n", "file_path": "README.md", "rank": 69, "score": 13854.125938793582 }, { "content": "# Changelog\n\n\n\n## 0.5.0\n\n\n\n### Added\n\n\n\n- Blockable binds (optionally hide events from system)\n\n- Block binds (hide events from system)\n\n- Function to get current mouse cursor position\n\n- License file\n\n\n\n### Changed\n\n\n\n- [Mouse cursor move functions behave correctly 
now](https://github.com/obv-mikhail/InputBot/pull/22)\n\n- Mouse cursor methods changed to not take self\n\n- Rust 2018 edition\n\n\n\n### Removed\n\n\n\n- Lazystatic\n\n\n\n\n\n## 0.4.0\n\n\n\n### Added\n\n\n\n- Support for sending key sequences\n\n\n\n### Changed\n\n\n", "file_path": "CHANGELOG.md", "rank": 70, "score": 13848.633734854004 }, { "content": "use crate::public::*;\n\npub use std::{\n\n collections::hash_map::HashMap,\n\n sync::atomic::{AtomicPtr, Ordering},\n\n sync::{Arc, Mutex},\n\n thread::spawn,\n\n};\n\nuse once_cell::sync::Lazy;\n\n\n\npub enum Bind {\n\n NormalBind(BindHandler),\n\n BlockBind(BlockBindHandler),\n\n BlockableBind(BlockableBindHandler),\n\n}\n\n\n\npub type BindHandler = Arc<dyn Fn() + Send + Sync + 'static>;\n\npub type BlockBindHandler = Arc<dyn Fn() + Send + Sync + 'static>;\n\npub type BlockableBindHandler = Arc<dyn Fn() -> BlockInput + Send + Sync + 'static>;\n\npub type KeybdBindMap = HashMap<KeybdKey, Bind>;\n\npub type MouseBindMap = HashMap<MouseButton, Bind>;\n\n\n\npub static KEYBD_BINDS: Lazy<Mutex<KeybdBindMap>> = Lazy::new(|| Mutex::new(KeybdBindMap::new()));\n\npub static MOUSE_BINDS: Lazy<Mutex<MouseBindMap>> = Lazy::new(|| Mutex::new(MouseBindMap::new()));\n", "file_path": "src/common.rs", "rank": 71, "score": 13.376729300300493 }, { "content": "use inputbot::{BlockInput::*, KeybdKey::*, MouseButton::*, *};\n\nuse std::{thread::sleep, time::Duration};\n\n\n", "file_path": "examples/test.rs", "rank": 72, "score": 10.137428877356793 }, { "content": "mod common;\n\n\n\nmod public;\n\npub use crate::public::*;\n\n\n\n#[cfg(target_os = \"windows\")]\n\nmod windows;\n\n#[cfg(target_os = \"windows\")]\n\npub use crate::windows::*;\n\n\n\n#[cfg(target_os = \"linux\")]\n\nmod linux;\n\n#[cfg(target_os = \"linux\")]\n\npub use crate::linux::*;\n", "file_path": "src/lib.rs", "rank": 73, "score": 8.962910883878326 }, { "content": " });\n\n\n\n // Send a key sequence.\n\n RKey.bind(|| KeySequence(\"Sample 
text\").send());\n\n\n\n // Move mouse.\n\n QKey.bind(|| MouseCursor::move_rel(10, 10));\n\n\n\n // Bind all keys to a common callback event.\n\n KeybdKey::bind_all(|event| {\n\n match inputbot::from_keybd_key(event) {\n\n Some(c) => println!(\"{}\", c),\n\n None => println!(\"{}\", \"Unregistered Key\")\n\n };\n\n });\n\n\n\n inputbot::handle_input_events();\n\n // Block the A key when left shift is held.\n\n // Note: callbacks for blockable binds won't be executed in new threads, \n\n // so for long-running processes create new threads inside the callback if needed. \n", "file_path": "examples/test.rs", "rank": 74, "score": 7.485093132285939 }, { "content": " AKey.blockable_bind(|| {\n\n if LShiftKey.is_pressed() {\n\n Block\n\n } else {\n\n DontBlock\n\n }\n\n });\n\n\n\n // Block the A key when left shift is held.\n\n KKey.block_bind(|| ());\n\n\n\n // Call this to start listening for bound inputs.\n\n handle_input_events();\n\n}", "file_path": "examples/test.rs", "rank": 75, "score": 4.710176642208371 } ]
Rust
src/wayland/output/xdg.rs
rano-oss/smithay
a06e8b231e305cda37f68c63f872d468d673d598
use std::{ ops::Deref as _, sync::{Arc, Mutex}, }; use slog::{o, trace}; use wayland_protocols::unstable::xdg_output::v1::server::{ zxdg_output_manager_v1::{self, ZxdgOutputManagerV1}, zxdg_output_v1::ZxdgOutputV1, }; use wayland_server::{protocol::wl_output::WlOutput, Display, Filter, Global, Main}; use crate::utils::{Logical, Physical, Point, Size}; use super::{Mode, Output}; #[derive(Debug)] struct Inner { name: String, description: String, logical_position: Point<i32, Logical>, physical_size: Option<Size<i32, Physical>>, scale: i32, instances: Vec<ZxdgOutputV1>, _log: ::slog::Logger, } #[derive(Debug, Clone)] pub(super) struct XdgOutput { inner: Arc<Mutex<Inner>>, } impl XdgOutput { fn new(output: &super::Inner, log: ::slog::Logger) -> Self { trace!(log, "Creating new xdg_output"; "name" => &output.name); let description = format!( "{} - {} - {}", output.physical.make, output.physical.model, output.name ); let physical_size = output.current_mode.map(|mode| mode.size); Self { inner: Arc::new(Mutex::new(Inner { name: output.name.clone(), description, logical_position: output.location, physical_size, scale: output.scale, instances: Vec::new(), _log: log, })), } } fn add_instance(&self, xdg_output: Main<ZxdgOutputV1>, wl_output: &WlOutput) { let mut inner = self.inner.lock().unwrap(); xdg_output.logical_position(inner.logical_position.x, inner.logical_position.y); if let Some(size) = inner.physical_size { let logical_size = size.to_logical(inner.scale); xdg_output.logical_size(logical_size.w, logical_size.h); } if xdg_output.as_ref().version() >= 2 { xdg_output.name(inner.name.clone()); xdg_output.description(inner.description.clone()); } if xdg_output.as_ref().version() < 3 { xdg_output.done(); } wl_output.done(); xdg_output.quick_assign(|_, _, _| {}); xdg_output.assign_destructor(Filter::new(|xdg_output: ZxdgOutputV1, _, _| { let inner = &xdg_output.as_ref().user_data().get::<XdgOutput>().unwrap().inner; inner .lock() .unwrap() .instances .retain(|o| 
!o.as_ref().equals(xdg_output.as_ref())); })); xdg_output.as_ref().user_data().set_threadsafe({ let xdg_output = self.clone(); move || xdg_output }); inner.instances.push(xdg_output.deref().clone()); } pub(super) fn change_current_state( &self, new_mode: Option<Mode>, new_scale: Option<i32>, new_location: Option<Point<i32, Logical>>, ) { let mut output = self.inner.lock().unwrap(); if let Some(new_mode) = new_mode { output.physical_size = Some(new_mode.size); } if let Some(new_scale) = new_scale { output.scale = new_scale; } if let Some(new_location) = new_location { output.logical_position = new_location; } for instance in output.instances.iter() { if new_mode.is_some() | new_scale.is_some() { if let Some(size) = output.physical_size { let logical_size = size.to_logical(output.scale); instance.logical_size(logical_size.w, logical_size.h); } } if new_location.is_some() { instance.logical_position(output.logical_position.x, output.logical_position.y); } if instance.as_ref().version() < 3 { instance.done(); } } } } pub fn init_xdg_output_manager<L>(display: &mut Display, logger: L) -> Global<ZxdgOutputManagerV1> where L: Into<Option<::slog::Logger>>, { let log = crate::slog_or_fallback(logger).new(o!("smithay_module" => "xdg_output_handler")); display.create_global( 3, Filter::new(move |(manager, _version): (Main<ZxdgOutputManagerV1>, _), _, _| { let log = log.clone(); manager.quick_assign(move |_, req, _| match req { zxdg_output_manager_v1::Request::GetXdgOutput { id, output: wl_output, } => { let output = Output::from_resource(&wl_output).unwrap(); let mut inner = output.inner.0.lock().unwrap(); if inner.xdg_output.is_none() { inner.xdg_output = Some(XdgOutput::new(&inner, log.clone())); } inner.xdg_output.as_ref().unwrap().add_instance(id, &wl_output); } zxdg_output_manager_v1::Request::Destroy => { } _ => {} }); }), ) }
use std::{ ops::Deref as _, sync::{Arc, Mutex}, }; use slog::{o, trace}; use wayland_protocols::unstable::xdg_output::v1::server::{ zxdg_output_manager_v1::{self, ZxdgOutputManagerV1}, zxdg_output_v1::ZxdgOutputV1, }; use wayland_server::{protocol::wl_output::WlOutput, Display, Filter, Global, Main}; use crate::utils::{Logical, Physical, Point, Size}; use super::{Mode, Output}; #[derive(Debug)] struct Inner { name: String, description: String, logical_position: Point<i32, Logical>, physical_size: Option<Size<i32, Physical>>, scale: i32, instances: Vec<ZxdgOutputV1>, _log: ::slog::Logger, } #[derive(Debug, Clone)] pub(super) struct XdgOutput { inner: Arc<Mutex<Inner>>, } impl XdgOutput { fn new(output: &super::Inner, log: ::slog::Logger) -> Self { trace!(log, "Creating new xdg_output"; "name" => &output.name); let description = format!( "{} - {} - {}", output.physical.make, output.physical.model, output.name ); let physical_size = output.current_mode.map(|mode| mode.size); Self { inner: Arc::new(Mutex::new(Inner { name: output.name.clone(), description, logical_position: output.location, physical_size, scale: output.scale, instances: Vec::new(), _log: log, })), } } fn add_instance(&self, xdg_output: Main<ZxdgOutputV1>, wl_output: &WlOutput) { let mut inner = self.inner.lock().unwrap(); xdg_output.logical_position(inner.logical_position.x, inner.logical_position.y); if let Some(size) = inner.physical_size { let logical_size = size.to_logical(inner.scale); xdg_output.logical_size(logical_size.w, logical_size.h); } if xdg_output.as_ref().version() >= 2 { xdg_output.name(inner.name.clone()); xdg_output.description(inner.description.clone()); } if xdg_output.as_ref().version() < 3 { xdg_output.done(); } wl_output.done(); xdg_output.quick_assign(|_, _, _| {}); xdg_output.assign_destructor(Filter::new(|xdg_output: ZxdgOutputV1, _, _| { let inner = &xdg_output.as_ref().user_data().get::<XdgOutput>().unwrap().inner; inner .lock() .unwrap() .instances .retain(|o| 
!o.as_ref().equals(xdg_output.as_ref())); })); xdg_output.as_ref().user_data().set_threadsafe({ let xdg_output = self.clone(); move || xdg_output }); inner.instances.push(xdg_output.deref().clone()); } pub(super) fn change_current_state( &self, new_mode: Option<Mode>, new_scale: Option<i32>, new_location: Option<Point<i32, Logical>>, ) { let mut output = self.inner.lock().unwrap(); if let Some(new_mode) = new_mode { output.physical_size = Some(new_mode.size); } if let Some(new_scale) = new_scale { output.scale = new_scale; } if let Some(new_location) = new_location { output.logical_position = new_location; } for instance in output.instances.iter() { if new_mode.is_some() | new_scale.is_some() { if let Some(size) = output.physical_size { let logical_size = size.to_logical(output.scale); instance.logical_size(logical_size.w, logical_size.h); } } if new_location.is_some() { instance.logical_position(output.logical_position.x, output.logical_position.y); } if instance.as_ref().version() < 3 { instance.done(); } } } } pub fn init_xdg_output_manager<L>(display: &mut Display, logger: L) -> Global<ZxdgOutputManagerV1> where
L: Into<Option<::slog::Logger>>, { let log = crate::slog_or_fallback(logger).new(o!("smithay_module" => "xdg_output_handler")); display.create_global( 3, Filter::new(move |(manager, _version): (Main<ZxdgOutputManagerV1>, _), _, _| { let log = log.clone(); manager.quick_assign(move |_, req, _| match req { zxdg_output_manager_v1::Request::GetXdgOutput { id, output: wl_output, } => { let output = Output::from_resource(&wl_output).unwrap(); let mut inner = output.inner.0.lock().unwrap(); if inner.xdg_output.is_none() { inner.xdg_output = Some(XdgOutput::new(&inner, log.clone())); } inner.xdg_output.as_ref().unwrap().add_instance(id, &wl_output); } zxdg_output_manager_v1::Request::Destroy => { } _ => {} }); }), ) }
function_block-function_prefix_line
[ { "content": "/// Initialize a tablet manager global.\n\npub fn init_tablet_manager_global(display: &mut Display) -> Global<ZwpTabletManagerV2> {\n\n display.create_global::<ZwpTabletManagerV2, _>(\n\n MANAGER_VERSION,\n\n Filter::new(\n\n move |(manager, _version): (Main<ZwpTabletManagerV2>, u32), _, _| {\n\n manager.quick_assign(|_manager, req, _| match req {\n\n zwp_tablet_manager_v2::Request::GetTabletSeat { tablet_seat, seat } => {\n\n let seat = Seat::from_resource(&seat).unwrap();\n\n\n\n let user_data = seat.user_data();\n\n user_data.insert_if_missing(TabletSeatHandle::default);\n\n\n\n let instance = tablet_seat;\n\n let tablet_seat = user_data.get::<TabletSeatHandle>().unwrap();\n\n\n\n tablet_seat.add_instance(instance);\n\n }\n\n zwp_tablet_manager_v2::Request::Destroy => {\n\n // Nothing to do\n\n }\n\n _ => {}\n\n });\n\n },\n\n ),\n\n )\n\n}\n", "file_path": "src/wayland/tablet_manager/mod.rs", "rank": 1, "score": 322490.1365791962 }, { "content": "/// Creates new `xdg-activation` global.\n\npub fn init_xdg_activation_global<L, Impl>(\n\n display: &mut Display,\n\n implementation: Impl,\n\n logger: L,\n\n) -> (\n\n Arc<Mutex<XdgActivationState>>,\n\n Global<xdg_activation_v1::XdgActivationV1>,\n\n)\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n Impl: FnMut(&Mutex<XdgActivationState>, XdgActivationEvent, DispatchData<'_>) + 'static,\n\n{\n\n let log = crate::slog_or_fallback(logger);\n\n\n\n let implementation = Rc::new(RefCell::new(implementation));\n\n\n\n let activation_state = Arc::new(Mutex::new(XdgActivationState {\n\n _log: log.new(slog::o!(\"smithay_module\" => \"xdg_activation_handler\")),\n\n user_data: UserDataMap::new(),\n\n pending_tokens: HashMap::new(),\n", "file_path": "src/wayland/xdg_activation/mod.rs", "rank": 2, "score": 307968.5894422319 }, { "content": "pub fn run_winit(log: Logger) {\n\n let mut event_loop = EventLoop::try_new().unwrap();\n\n let display = Rc::new(RefCell::new(Display::new()));\n\n\n\n let (backend, mut 
winit) = match winit::init(log.clone()) {\n\n Ok(ret) => ret,\n\n Err(err) => {\n\n slog::crit!(log, \"Failed to initialize Winit backend: {}\", err);\n\n return;\n\n }\n\n };\n\n let backend = Rc::new(RefCell::new(backend));\n\n\n\n #[cfg(feature = \"egl\")]\n\n if backend\n\n .borrow_mut()\n\n .renderer()\n\n .bind_wl_display(&display.borrow())\n\n .is_ok()\n\n {\n", "file_path": "anvil/src/winit.rs", "rank": 3, "score": 284256.7791593457 }, { "content": "pub fn run_udev(log: Logger) {\n\n let mut event_loop = EventLoop::try_new().unwrap();\n\n let display = Rc::new(RefCell::new(Display::new()));\n\n\n\n /*\n\n * Initialize session\n\n */\n\n let (session, notifier) = match AutoSession::new(log.clone()) {\n\n Some(ret) => ret,\n\n None => {\n\n crit!(log, \"Could not initialize a session\");\n\n return;\n\n }\n\n };\n\n let session_signal = notifier.signaler();\n\n\n\n /*\n\n * Initialize the compositor\n\n */\n\n #[cfg(feature = \"egl\")]\n", "file_path": "anvil/src/udev.rs", "rank": 4, "score": 284256.7791593457 }, { "content": "pub fn run_x11(log: Logger) {\n\n let mut event_loop = EventLoop::try_new().unwrap();\n\n let display = Rc::new(RefCell::new(Display::new()));\n\n\n\n let backend = X11Backend::new(log.clone()).expect(\"Failed to initilize X11 backend\");\n\n let handle = backend.handle();\n\n\n\n // Obtain the DRM node the X server uses for direct rendering.\n\n let drm_node = handle\n\n .drm_node()\n\n .expect(\"Could not get DRM node used by X server\");\n\n\n\n // Create the gbm device for buffer allocation.\n\n let device = gbm::Device::new(drm_node).expect(\"Failed to create gbm device\");\n\n // Initialize EGL using the GBM device.\n\n let egl = EGLDisplay::new(&device, log.clone()).expect(\"Failed to create EGLDisplay\");\n\n // Create the OpenGL context\n\n let context = EGLContext::new(&egl, log.clone()).expect(\"Failed to create EGLContext\");\n\n\n\n let window = WindowBuilder::new()\n", "file_path": "anvil/src/x11.rs", "rank": 5, "score": 
284256.7791593457 }, { "content": "/// Create new [`wl_compositor`](wayland_server::protocol::wl_compositor)\n\n/// and [`wl_subcompositor`](wayland_server::protocol::wl_subcompositor) globals.\n\n///\n\n/// It returns the two global handles, in case you wish to remove these globals from\n\n/// the event loop in the future.\n\npub fn compositor_init<Impl, L>(\n\n display: &mut Display,\n\n implem: Impl,\n\n logger: L,\n\n) -> (\n\n Global<wl_compositor::WlCompositor>,\n\n Global<wl_subcompositor::WlSubcompositor>,\n\n)\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n Impl: for<'a> FnMut(WlSurface, DispatchData<'a>) + 'static,\n\n{\n\n let log = crate::slog_or_fallback(logger).new(slog::o!(\"smithay_module\" => \"compositor_handler\"));\n\n let implem = Rc::new(RefCell::new(implem));\n\n\n\n let compositor = display.create_global(\n\n 4,\n\n Filter::new(move |(new_compositor, _version), _, _| {\n\n self::handlers::implement_compositor::<Impl>(new_compositor, log.clone(), implem.clone());\n\n }),\n", "file_path": "src/wayland/compositor/mod.rs", "rank": 6, "score": 279617.22511754336 }, { "content": "/// Initialize a dmabuf global with a client filter.\n\n///\n\n/// You need to provide a vector of the supported formats, as well as a closure,\n\n/// that will validate the parameters provided by the client and tests the import as a dmabuf.\n\npub fn init_dmabuf_global_with_filter<H, F, L>(\n\n display: &mut Display,\n\n formats: Vec<Format>,\n\n handler: H,\n\n filter: F,\n\n logger: L,\n\n) -> Global<zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1>\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n H: for<'a> FnMut(&Dmabuf, DispatchData<'a>) -> bool + 'static,\n\n F: FnMut(Client) -> bool + 'static,\n\n{\n\n display.create_global_with_filter(DMABUF_VERSION, dmabuf_global(formats, handler, logger), filter)\n\n}\n\n\n", "file_path": "src/wayland/dmabuf/mod.rs", "rank": 7, "score": 274330.877164974 }, { "content": "/// Create a new `wl_shell` global\n\npub fn wl_shell_init<L, 
Impl>(\n\n display: &mut Display,\n\n implementation: Impl,\n\n logger: L,\n\n) -> (Arc<Mutex<ShellState>>, Global<wl_shell::WlShell>)\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n Impl: FnMut(ShellRequest, DispatchData<'_>) + 'static,\n\n{\n\n let _log = crate::slog_or_fallback(logger);\n\n\n\n let implementation = Rc::new(RefCell::new(implementation));\n\n\n\n let state = Arc::new(Mutex::new(ShellState {\n\n known_surfaces: Vec::new(),\n\n }));\n\n let state2 = state.clone();\n\n\n\n let global = display.create_global(\n\n 1,\n\n Filter::new(move |(shell, _version), _, _data| {\n\n self::wl_handlers::implement_shell(shell, implementation.clone(), state2.clone());\n\n }),\n\n );\n\n\n\n (state, global)\n\n}\n", "file_path": "src/wayland/shell/legacy/mod.rs", "rank": 8, "score": 271521.84801456495 }, { "content": "/// Create a new `xdg_shell` global\n\npub fn xdg_shell_init<L, Impl>(\n\n display: &mut Display,\n\n implementation: Impl,\n\n logger: L,\n\n) -> (Arc<Mutex<ShellState>>, Global<xdg_wm_base::XdgWmBase>)\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n Impl: FnMut(XdgRequest, DispatchData<'_>) + 'static,\n\n{\n\n let log = crate::slog_or_fallback(logger);\n\n let shell_state = Arc::new(Mutex::new(ShellState {\n\n known_toplevels: Vec::new(),\n\n known_popups: Vec::new(),\n\n }));\n\n\n\n let shell_data = ShellData {\n\n log: log.new(slog::o!(\"smithay_module\" => \"xdg_shell_handler\")),\n\n user_impl: Rc::new(RefCell::new(implementation)),\n\n shell_state: shell_state.clone(),\n\n };\n", "file_path": "src/wayland/shell/xdg/mod.rs", "rank": 9, "score": 271521.84801456495 }, { "content": "/// Create a new XDG Decoration Manager global\n\npub fn init_xdg_decoration_manager<L, Impl>(\n\n display: &mut Display,\n\n implementation: Impl,\n\n _logger: L,\n\n) -> Global<ZxdgDecorationManagerV1>\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n Impl: FnMut(XdgDecorationRequest, DispatchData<'_>) + 'static,\n\n{\n\n let cb = 
Rc::new(RefCell::new(implementation));\n\n display.create_global(\n\n 1,\n\n Filter::new(\n\n move |(manager, _version): (Main<ZxdgDecorationManagerV1>, _), _, _| {\n\n let cb = cb.clone();\n\n manager.quick_assign(move |_manager, request, ddata| {\n\n match request {\n\n zxdg_decoration_manager_v1::Request::Destroy => {\n\n // All is handled by destructor.\n\n }\n", "file_path": "src/wayland/shell/xdg/decoration.rs", "rank": 10, "score": 267752.75700838753 }, { "content": "pub fn window_loc(window: &Window, space_id: &usize) -> Point<i32, Logical> {\n\n window\n\n .user_data()\n\n .get::<RefCell<HashMap<usize, WindowState>>>()\n\n .unwrap()\n\n .borrow()\n\n .get(space_id)\n\n .unwrap()\n\n .location\n\n}\n\n\n\nimpl<R, F, E, T> SpaceElement<R, F, E, T> for Window\n\nwhere\n\n R: Renderer<Error = E, TextureId = T, Frame = F> + ImportAll,\n\n F: Frame<Error = E, TextureId = T>,\n\n E: std::error::Error,\n\n T: Texture + 'static,\n\n{\n\n fn id(&self) -> usize {\n\n self.0.id\n", "file_path": "src/desktop/space/window.rs", "rank": 11, "score": 265109.6500351428 }, { "content": "/// Create a new `wlr_layer_shell` globals\n\npub fn wlr_layer_shell_init<L, Impl>(\n\n display: &mut Display,\n\n implementation: Impl,\n\n logger: L,\n\n) -> (\n\n Arc<Mutex<LayerShellState>>,\n\n Global<zwlr_layer_shell_v1::ZwlrLayerShellV1>,\n\n)\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n Impl: FnMut(LayerShellRequest, DispatchData<'_>) + 'static,\n\n{\n\n let log = crate::slog_or_fallback(logger);\n\n let shell_state = Arc::new(Mutex::new(LayerShellState {\n\n known_layers: Vec::new(),\n\n }));\n\n\n\n let shell_data = ShellUserData {\n\n _log: log.new(slog::o!(\"smithay_module\" => \"layer_shell_handler\")),\n\n user_impl: Rc::new(RefCell::new(implementation)),\n", "file_path": "src/wayland/shell/wlr_layer/mod.rs", "rank": 12, "score": 264153.19411115476 }, { "content": "/// Retrieve a [`LayerMap`] for a given [`Output`].\n\n///\n\n/// If none existed before a new empty 
[`LayerMap`] is attached\n\n/// to the output and returned on subsequent calls.\n\n///\n\n/// Note: This function internally uses a [`RefCell`] per\n\n/// [`Output`] as exposed by its return type. Therefor\n\n/// trying to hold on to multiple references of a [`LayerMap`]\n\n/// of the same output using this function *will* result in a panic.\n\npub fn layer_map_for_output(o: &Output) -> RefMut<'_, LayerMap> {\n\n let userdata = o.user_data();\n\n let weak_output = Arc::downgrade(&o.inner);\n\n userdata.insert_if_missing(|| {\n\n RefCell::new(LayerMap {\n\n layers: IndexSet::new(),\n\n output: weak_output,\n\n zone: Rectangle::from_loc_and_size(\n\n (0, 0),\n\n o.current_mode()\n\n .map(|mode| mode.size.to_logical(o.current_scale()))\n\n .unwrap_or_else(|| (0, 0).into()),\n\n ),\n\n surfaces: Vec::new(),\n\n logger: (*o.inner.0.lock().unwrap())\n\n .log\n\n .new(slog::o!(\"smithay_module\" => \"layer_map\")),\n\n })\n\n });\n\n userdata.get::<RefCell<LayerMap>>().unwrap().borrow_mut()\n", "file_path": "src/desktop/layer.rs", "rank": 13, "score": 260126.3972022564 }, { "content": "/// Create a new SHM global advertizing given supported formats.\n\n///\n\n/// This global will always advertize `ARGB8888` and `XRGB8888` format\n\n/// as they are required by the protocol. 
Formats given as argument\n\n/// as additionally advertized.\n\n///\n\n/// The global is directly created on the provided [`Display`](wayland_server::Display),\n\n/// and this function returns the global handle, in case you wish to remove this global in\n\n/// the future.\n\npub fn init_shm_global<L>(\n\n display: &mut Display,\n\n mut formats: Vec<wl_shm::Format>,\n\n logger: L,\n\n) -> Global<wl_shm::WlShm>\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n{\n\n let log = crate::slog_or_fallback(logger);\n\n\n\n // always add the mandatory formats\n\n formats.push(wl_shm::Format::Argb8888);\n\n formats.push(wl_shm::Format::Xrgb8888);\n\n let data = ShmGlobalData {\n\n formats: formats.into(),\n\n log: log.new(slog::o!(\"smithay_module\" => \"shm_handler\")),\n\n };\n\n\n\n display.create_global::<wl_shm::WlShm, _>(\n\n 1,\n", "file_path": "src/wayland/shm/mod.rs", "rank": 14, "score": 258499.91785917128 }, { "content": "pub fn output_state(space: usize, o: &Output) -> RefMut<'_, OutputState> {\n\n let userdata = o.user_data();\n\n userdata.insert_if_missing(OutputUserdata::default);\n\n RefMut::map(userdata.get::<OutputUserdata>().unwrap().borrow_mut(), |m| {\n\n m.entry(space).or_default()\n\n })\n\n}\n", "file_path": "src/desktop/space/output.rs", "rank": 15, "score": 253758.17774789638 }, { "content": "/// Initialize the explicit synchronization global\n\n///\n\n/// See module-level documentation for its use.\n\npub fn init_explicit_synchronization_global<L>(\n\n display: &mut Display,\n\n logger: L,\n\n) -> Global<ZwpLinuxExplicitSynchronizationV1>\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n{\n\n let _log =\n\n crate::slog_or_fallback(logger).new(slog::o!(\"smithay_module\" => \"wayland_explicit_synchronization\"));\n\n\n\n display.create_global::<ZwpLinuxExplicitSynchronizationV1, _>(\n\n 2,\n\n Filter::new(\n\n move |(sync, _version): (Main<ZwpLinuxExplicitSynchronizationV1>, _), _, _| {\n\n sync.quick_assign(move |explicit_sync, req, _| {\n\n if let 
zwp_linux_explicit_synchronization_v1::Request::GetSynchronization {\n\n id,\n\n surface,\n\n } = req\n\n {\n", "file_path": "src/wayland/explicit_synchronization/mod.rs", "rank": 16, "score": 250385.51478246873 }, { "content": "/// Initialize a dmabuf global.\n\n///\n\n/// You need to provide a vector of the supported formats, as well as a closure,\n\n/// that will validate the parameters provided by the client and tests the import as a dmabuf.\n\npub fn init_dmabuf_global<F, L>(\n\n display: &mut Display,\n\n formats: Vec<Format>,\n\n handler: F,\n\n logger: L,\n\n) -> Global<zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1>\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n F: for<'a> FnMut(&Dmabuf, DispatchData<'a>) -> bool + 'static,\n\n{\n\n display.create_global(DMABUF_VERSION, dmabuf_global(formats, handler, logger))\n\n}\n\n\n", "file_path": "src/wayland/dmabuf/mod.rs", "rank": 17, "score": 247584.21984109556 }, { "content": "#[cfg(not(feature = \"slog-stdlog\"))]\n\n#[allow(dead_code)]\n\nfn slog_or_fallback<L>(logger: L) -> ::slog::Logger\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n{\n\n logger\n\n .into()\n\n .unwrap_or_else(|| ::slog::Logger::root(::slog::Discard, slog::o!()))\n\n}\n", "file_path": "src/lib.rs", "rank": 18, "score": 244268.27482336754 }, { "content": "/// Create a new [`WinitGraphicsBackend`], which implements the\n\n/// [`Renderer`](crate::backend::renderer::Renderer) trait and a corresponding\n\n/// [`WinitEventLoop`].\n\npub fn init<L>(logger: L) -> Result<(WinitGraphicsBackend, WinitEventLoop), Error>\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n{\n\n init_from_builder(\n\n WindowBuilder::new()\n\n .with_inner_size(LogicalSize::new(1280.0, 800.0))\n\n .with_title(\"Smithay\")\n\n .with_visible(true),\n\n logger,\n\n )\n\n}\n\n\n", "file_path": "src/backend/winit/mod.rs", "rank": 19, "score": 244044.33912810107 }, { "content": "#[cfg(feature = \"wayland_frontend\")]\n\npub fn buffer_dimensions(buffer: &wl_buffer::WlBuffer) -> 
Option<Size<i32, Buffer>> {\n\n use crate::backend::allocator::Buffer;\n\n\n\n if let Some(buf) = buffer.as_ref().user_data().get::<Dmabuf>() {\n\n return Some((buf.width() as i32, buf.height() as i32).into());\n\n }\n\n\n\n #[cfg(all(feature = \"backend_egl\", feature = \"use_system_lib\"))]\n\n if let Some(dim) = BUFFER_READER\n\n .lock()\n\n .unwrap()\n\n .as_ref()\n\n .and_then(|x| x.upgrade())\n\n .and_then(|x| x.egl_buffer_dimensions(buffer))\n\n {\n\n return Some(dim);\n\n }\n\n\n\n crate::wayland::shm::with_buffer_contents(buffer, |_, data| (data.width, data.height).into()).ok()\n\n}\n", "file_path": "src/backend/renderer/mod.rs", "rank": 20, "score": 224296.05411406478 }, { "content": "/// Create a new [`WinitGraphicsBackend`], which implements the\n\n/// [`Renderer`](crate::backend::renderer::Renderer) trait, from a given [`WindowBuilder`]\n\n/// struct and a corresponding [`WinitEventLoop`].\n\npub fn init_from_builder<L>(\n\n builder: WindowBuilder,\n\n logger: L,\n\n) -> Result<(WinitGraphicsBackend, WinitEventLoop), Error>\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n{\n\n init_from_builder_with_gl_attr(\n\n builder,\n\n GlAttributes {\n\n version: (3, 0),\n\n profile: None,\n\n debug: cfg!(debug_assertions),\n\n vsync: true,\n\n },\n\n logger,\n\n )\n\n}\n\n\n", "file_path": "src/backend/winit/mod.rs", "rank": 21, "score": 218554.1660246809 }, { "content": "/// Creates new `xdg-foreign` globals.\n\npub fn xdg_foreign_init<L>(\n\n display: &mut Display,\n\n xdg_shell_state: Arc<Mutex<ShellState>>,\n\n logger: L,\n\n) -> (\n\n Arc<Mutex<XdgForeignState>>,\n\n Global<zxdg_exporter_v2::ZxdgExporterV2>,\n\n Global<zxdg_importer_v2::ZxdgImporterV2>,\n\n)\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n{\n\n let log = crate::slog_or_fallback(logger);\n\n\n\n let state = Arc::new(Mutex::new(XdgForeignState {\n\n _log: log.new(slog::o!(\"smithay_module\" => \"xdg_foreign_handler\")),\n\n exports: vec![],\n\n }));\n\n\n\n // Borrow checking does not like 
us cloning the state inside the filter's closure so we clone\n", "file_path": "src/wayland/xdg_foreign/mod.rs", "rank": 23, "score": 212188.3959548495 }, { "content": "/// Create a new [`WinitGraphicsBackend`], which implements the\n\n/// [`Renderer`](crate::backend::renderer::Renderer) trait, from a given [`WindowBuilder`]\n\n/// struct, as well as given [`GlAttributes`] for further customization of the rendering pipeline and a\n\n/// corresponding [`WinitEventLoop`].\n\npub fn init_from_builder_with_gl_attr<L>(\n\n builder: WindowBuilder,\n\n attributes: GlAttributes,\n\n logger: L,\n\n) -> Result<(WinitGraphicsBackend, WinitEventLoop), Error>\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n{\n\n let log = crate::slog_or_fallback(logger).new(o!(\"smithay_module\" => \"backend_winit\"));\n\n info!(log, \"Initializing a winit backend\");\n\n\n\n let events_loop = EventLoop::new();\n\n let winit_window = builder.build(&events_loop).map_err(Error::InitFailed)?;\n\n\n\n debug!(log, \"Window created\");\n\n\n\n let reqs = Default::default();\n\n let (display, context, surface, is_x11) = {\n\n let display = EGLDisplay::new(&winit_window, log.clone())?;\n\n let context = EGLContext::new_with_config(&display, attributes, reqs, log.clone())?;\n", "file_path": "src/backend/winit/mod.rs", "rank": 24, "score": 212185.99903879728 }, { "content": "pub fn window_rect(window: &Window, space_id: &usize) -> Rectangle<i32, Logical> {\n\n let loc = window_loc(window, space_id);\n\n let mut wgeo = window.bbox();\n\n wgeo.loc += loc;\n\n wgeo\n\n}\n\n\n", "file_path": "src/desktop/space/window.rs", "rank": 25, "score": 207314.14488183925 }, { "content": "type Impl = dyn FnMut(&Mutex<XdgActivationState>, XdgActivationEvent, DispatchData<'_>);\n\n\n\n/// New xdg activation global\n\npub(super) fn implement_activation_global(\n\n global: Main<xdg_activation_v1::XdgActivationV1>,\n\n state: Arc<Mutex<XdgActivationState>>,\n\n implementation: Rc<RefCell<Impl>>,\n\n) {\n\n 
global.quick_assign(move |_, req, ddata| match req {\n\n xdg_activation_v1::Request::GetActivationToken { id } => {\n\n get_activation_token(id, state.clone(), implementation.clone());\n\n }\n\n xdg_activation_v1::Request::Activate { token, surface } => {\n\n activate(\n\n token.into(),\n\n surface,\n\n state.as_ref(),\n\n implementation.as_ref(),\n\n ddata,\n\n );\n\n }\n\n _ => {}\n\n });\n\n}\n\n\n", "file_path": "src/wayland/xdg_activation/handlers.rs", "rank": 26, "score": 204908.8859019748 }, { "content": "pub fn window_rect_with_popups(window: &Window, space_id: &usize) -> Rectangle<i32, Logical> {\n\n let loc = window_loc(window, space_id);\n\n let mut wgeo = window.bbox_with_popups();\n\n wgeo.loc += loc;\n\n wgeo\n\n}\n\n\n", "file_path": "src/desktop/space/window.rs", "rank": 27, "score": 204158.39214777964 }, { "content": "pub fn fixup_positions(space: &mut Space) {\n\n // fixup outputs\n\n let mut offset = Point::<i32, Logical>::from((0, 0));\n\n for output in space.outputs().cloned().collect::<Vec<_>>().into_iter() {\n\n let size = space\n\n .output_geometry(&output)\n\n .map(|geo| geo.size)\n\n .unwrap_or_else(|| Size::from((0, 0)));\n\n let scale = space.output_scale(&output).unwrap_or(1.0);\n\n space.map_output(&output, scale, offset);\n\n layer_map_for_output(&output).arrange();\n\n offset.x += size.w;\n\n }\n\n\n\n // fixup windows\n\n let mut orphaned_windows = Vec::new();\n\n let outputs = space\n\n .outputs()\n\n .flat_map(|o| {\n\n let geo = space.output_geometry(o)?;\n", "file_path": "anvil/src/shell.rs", "rank": 28, "score": 202894.08604378867 }, { "content": "/// Initialize the data device global\n\n///\n\n/// You can provide a callback to peek into the actions of your clients over the data devices\n\n/// (allowing you to retrieve the current selection buffer, or intercept DnD data). See the\n\n/// [`DataDeviceEvent`] type for details about what notifications you can receive. 
Note that this\n\n/// closure will not receive notifications about dnd actions the compositor initiated, see\n\n/// [`start_dnd`] for details about that.\n\n///\n\n/// You also need to provide a `(DndAction, DndAction) -> DndAction` closure that will arbitrate\n\n/// the choice of action resulting from a drag'n'drop session. Its first argument is the set of\n\n/// available actions (which is the intersection of the actions supported by the source and targets)\n\n/// and the second argument is the preferred action reported by the target. If no action should be\n\n/// chosen (and thus the drag'n'drop should abort on drop), return\n\n/// [`DndAction::empty()`](wayland_server::protocol::wl_data_device_manager::DndAction::empty).\n\npub fn init_data_device<F, C, L>(\n\n display: &mut Display,\n\n callback: C,\n\n action_choice: F,\n\n logger: L,\n\n) -> Global<wl_data_device_manager::WlDataDeviceManager>\n\nwhere\n\n F: FnMut(DndAction, DndAction) -> DndAction + 'static,\n\n C: FnMut(DataDeviceEvent) + 'static,\n\n L: Into<Option<::slog::Logger>>,\n\n{\n\n let log = crate::slog_or_fallback(logger).new(o!(\"smithay_module\" => \"data_device_mgr\"));\n\n let action_choice = Rc::new(RefCell::new(action_choice));\n\n let callback = Rc::new(RefCell::new(callback));\n\n display.create_global(\n\n 3,\n\n Filter::new(move |(ddm, _version), _, _| {\n\n implement_ddm(ddm, callback.clone(), action_choice.clone(), log.clone());\n\n }),\n\n )\n\n}\n\n\n", "file_path": "src/wayland/data_device/mod.rs", "rank": 29, "score": 193868.27579976583 }, { "content": "/// Returns the bounding box of a given surface and all its subsurfaces.\n\n///\n\n/// - `location` can be set to offset the returned bounding box.\n\npub fn bbox_from_surface_tree<P>(surface: &wl_surface::WlSurface, location: P) -> Rectangle<i32, Logical>\n\nwhere\n\n P: Into<Point<i32, Logical>>,\n\n{\n\n let location = location.into();\n\n let mut bounding_box = Rectangle::from_loc_and_size(location, (0, 0));\n\n 
with_surface_tree_downward(\n\n surface,\n\n location,\n\n |_, states, loc: &Point<i32, Logical>| {\n\n let mut loc = *loc;\n\n let data = states.data_map.get::<RefCell<SurfaceState>>();\n\n\n\n if let Some(size) = data.and_then(|d| d.borrow().surface_size()) {\n\n if states.role == Some(\"subsurface\") {\n\n let current = states.cached_state.current::<SubsurfaceCachedState>();\n\n loc += current.location;\n\n }\n\n\n\n // Update the bounding box.\n", "file_path": "src/desktop/utils.rs", "rank": 30, "score": 192429.12983161365 }, { "content": "pub fn render_output<R>(\n\n output: &Output,\n\n space: &mut Space,\n\n renderer: &mut R,\n\n age: usize,\n\n elements: &[DynamicRenderElements<R>],\n\n log: &slog::Logger,\n\n) -> Result<Option<Vec<Rectangle<i32, Logical>>>, RenderError<R>>\n\nwhere\n\n R: Renderer + ImportAll + 'static,\n\n R::Frame: 'static,\n\n R::TextureId: 'static,\n\n R::Error: 'static,\n\n{\n\n if let Some(window) = output\n\n .user_data()\n\n .get::<FullscreenSurface>()\n\n .and_then(|f| f.get())\n\n {\n\n let transform = output.current_transform().into();\n", "file_path": "anvil/src/render.rs", "rank": 31, "score": 191769.55556178966 }, { "content": "fn dmabuf_global<F, L>(\n\n formats: Vec<Format>,\n\n handler: F,\n\n logger: L,\n\n) -> Filter<(Main<zwp_linux_dmabuf_v1::ZwpLinuxDmabufV1>, u32)>\n\nwhere\n\n L: Into<Option<::slog::Logger>>,\n\n F: for<'a> FnMut(&Dmabuf, DispatchData<'a>) -> bool + 'static,\n\n{\n\n let log = crate::slog_or_fallback(logger).new(o!(\"smithay_module\" => \"dmabuf_handler\"));\n\n\n\n let formats = Rc::<[Format]>::from(formats);\n\n let handler = Rc::new(RefCell::new(handler));\n\n\n\n trace!(\n\n log,\n\n \"Initializing DMABUF handler with {} supported formats\",\n\n formats.len()\n\n );\n\n\n", "file_path": "src/wayland/dmabuf/mod.rs", "rank": 32, "score": 190382.46484964498 }, { "content": "// Launch an XWayland server\n\n//\n\n// Does nothing if there is already a launched instance\n\nfn launch<Data: Any>(inner: 
&Rc<RefCell<Inner<Data>>>) -> std::io::Result<()> {\n\n let mut guard = inner.borrow_mut();\n\n if guard.instance.is_some() {\n\n return Ok(());\n\n }\n\n\n\n info!(guard.log, \"Starting XWayland\");\n\n\n\n let (x_wm_x11, x_wm_me) = UnixStream::pair()?;\n\n let (wl_x11, wl_me) = UnixStream::pair()?;\n\n\n\n let (lock, x_fds) = prepare_x11_sockets(guard.log.clone())?;\n\n\n\n // we have now created all the required sockets\n\n\n\n // Setup the associated wayland client to be created in an idle callback, so that we don't need\n\n // to access the dispatch_data *right now*\n\n let idle_inner = inner.clone();\n\n guard.handle.insert_idle(move |data| {\n\n let mut guard = idle_inner.borrow_mut();\n", "file_path": "src/xwayland/xserver.rs", "rank": 33, "score": 190228.02862265805 }, { "content": "pub fn layer_state(space: usize, l: &LayerSurface) -> RefMut<'_, LayerState> {\n\n let userdata = l.user_data();\n\n userdata.insert_if_missing(LayerUserdata::default);\n\n RefMut::map(userdata.get::<LayerUserdata>().unwrap().borrow_mut(), |m| {\n\n m.entry(space).or_default()\n\n })\n\n}\n\n\n\nimpl<R, F, E, T> SpaceElement<R, F, E, T> for LayerSurface\n\nwhere\n\n R: Renderer<Error = E, TextureId = T, Frame = F> + ImportAll,\n\n F: Frame<Error = E, TextureId = T>,\n\n E: std::error::Error,\n\n T: Texture + 'static,\n\n{\n\n fn id(&self) -> usize {\n\n self.0.id\n\n }\n\n\n\n fn type_of(&self) -> TypeId {\n", "file_path": "src/desktop/space/layer.rs", "rank": 34, "score": 189625.73838349228 }, { "content": "/// Loads libEGL symbols, if not loaded already.\n\n/// This normally happens automatically during [`EGLDisplay`](super::EGLDisplay) initialization.\n\npub fn make_sure_egl_is_loaded() -> Result<Vec<String>, Error> {\n\n use std::{\n\n ffi::{CStr, CString},\n\n ptr,\n\n };\n\n\n\n fn constrain<F>(f: F) -> F\n\n where\n\n F: for<'a> Fn(&'a str) -> *const ::std::os::raw::c_void,\n\n {\n\n f\n\n }\n\n let proc_address = constrain(|sym| unsafe { super::get_proc_address(sym) 
});\n\n\n\n egl::LOAD.call_once(|| unsafe {\n\n egl::load_with(|sym| {\n\n let name = CString::new(sym).unwrap();\n\n let symbol = egl::LIB.get::<*mut c_void>(name.as_bytes());\n\n match symbol {\n\n Ok(x) => *x as *const _,\n", "file_path": "src/backend/egl/ffi.rs", "rank": 35, "score": 186410.3463489596 }, { "content": "type InnerType = Arc<(Mutex<Inner>, UserDataMap)>;\n\n\n\nimpl Inner {\n\n fn new_global(&mut self, output: WlOutput) {\n\n trace!(self.log, \"New global instantiated.\");\n\n\n\n if self.modes.is_empty() {\n\n warn!(self.log, \"Output is used with no modes set\"; \"name\" => &self.name);\n\n }\n\n if self.current_mode.is_none() {\n\n warn!(self.log, \"Output is used with no current mod set\"; \"name\" => &self.name);\n\n }\n\n if self.preferred_mode.is_none() {\n\n warn!(self.log, \"Output is used with not preferred mode set\"; \"name\" => &self.name);\n\n }\n\n\n\n self.send_geometry(&output);\n\n for &mode in &self.modes {\n\n let mut flags = WMode::empty();\n\n if Some(mode) == self.current_mode {\n", "file_path": "src/wayland/output/mod.rs", "rank": 36, "score": 185097.96476588224 }, { "content": "pub fn layer_state(layer: &LayerSurface) -> RefMut<'_, LayerState> {\n\n let userdata = layer.user_data();\n\n userdata.insert_if_missing(LayerUserdata::default);\n\n RefMut::map(userdata.get::<LayerUserdata>().unwrap().borrow_mut(), |opt| {\n\n if opt.is_none() {\n\n *opt = Some(LayerState::default());\n\n }\n\n opt.as_mut().unwrap()\n\n })\n\n}\n\n\n\n/// A [`LayerSurface`] represents a single layer surface as given by the wlr-layer-shell protocol.\n\n#[derive(Debug, Clone)]\n\npub struct LayerSurface(pub(crate) Rc<LayerSurfaceInner>);\n\n\n\nimpl PartialEq for LayerSurface {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.0.id == other.0.id\n\n }\n\n}\n", "file_path": "src/desktop/layer.rs", "rank": 37, "score": 181949.06340198938 }, { "content": "#[cfg(feature = \"image\")]\n\npub fn import_bitmap<C: std::ops::Deref<Target = [u8]>>(\n\n 
renderer: &mut Gles2Renderer,\n\n image: &ImageBuffer<Rgba<u8>, C>,\n\n) -> Result<Gles2Texture, Gles2Error> {\n\n use smithay::backend::renderer::gles2::ffi;\n\n\n\n renderer.with_context(|renderer, gl| unsafe {\n\n let mut tex = 0;\n\n gl.GenTextures(1, &mut tex);\n\n gl.BindTexture(ffi::TEXTURE_2D, tex);\n\n gl.TexParameteri(ffi::TEXTURE_2D, ffi::TEXTURE_WRAP_S, ffi::CLAMP_TO_EDGE as i32);\n\n gl.TexParameteri(ffi::TEXTURE_2D, ffi::TEXTURE_WRAP_T, ffi::CLAMP_TO_EDGE as i32);\n\n gl.TexImage2D(\n\n ffi::TEXTURE_2D,\n\n 0,\n\n ffi::RGBA as i32,\n\n image.width() as i32,\n\n image.height() as i32,\n\n 0,\n\n ffi::RGBA,\n", "file_path": "anvil/src/drawing.rs", "rank": 38, "score": 180854.18552739266 }, { "content": "fn frame(mut millis: u32, size: u32, images: &[Image]) -> Image {\n\n let total = nearest_images(size, images).fold(0, |acc, image| acc + image.delay);\n\n millis %= total;\n\n\n\n for img in nearest_images(size, images) {\n\n if millis < img.delay {\n\n return img.clone();\n\n }\n\n millis -= img.delay;\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "anvil/src/cursor.rs", "rank": 39, "score": 178929.7085326453 }, { "content": "fn main() {\n\n // A logger facility, here we use the terminal here\n\n let log = if std::env::var(\"ANVIL_MUTEX_LOG\").is_ok() {\n\n slog::Logger::root(std::sync::Mutex::new(slog_term::term_full().fuse()).fuse(), o!())\n\n } else {\n\n slog::Logger::root(\n\n slog_async::Async::default(slog_term::term_full().fuse()).fuse(),\n\n o!(),\n\n )\n\n };\n\n\n\n let _guard = slog_scope::set_global_logger(log.clone());\n\n slog_stdlog::init().expect(\"Could not setup log backend\");\n\n\n\n let arg = ::std::env::args().nth(1);\n\n match arg.as_ref().map(|s| &s[..]) {\n\n #[cfg(feature = \"winit\")]\n\n Some(\"--winit\") => {\n\n slog::info!(log, \"Starting anvil with winit backend\");\n\n anvil::winit::run_winit(log);\n", "file_path": "anvil/src/main.rs", "rank": 40, "score": 178891.4232956866 }, { "content": "fn 
nearest_images(size: u32, images: &[Image]) -> impl Iterator<Item = &Image> {\n\n // Follow the nominal size of the cursor to choose the nearest\n\n let nearest_image = images\n\n .iter()\n\n .min_by_key(|image| (size as i32 - image.size as i32).abs())\n\n .unwrap();\n\n\n\n images\n\n .iter()\n\n .filter(move |image| image.width == nearest_image.width && image.height == nearest_image.height)\n\n}\n\n\n", "file_path": "anvil/src/cursor.rs", "rank": 41, "score": 176198.76114178152 }, { "content": "/// Set a compositor-provided selection for this seat\n\n///\n\n/// You need to provide the available mime types for this selection.\n\n///\n\n/// Whenever a client requests to read the selection, your callback will\n\n/// receive a [`DataDeviceEvent::SendSelection`] event.\n\npub fn set_data_device_selection(seat: &Seat, mime_types: Vec<String>) {\n\n // TODO: same question as in set_data_device_focus\n\n seat.user_data().insert_if_missing(|| {\n\n RefCell::new(SeatData::new(\n\n seat.arc.log.new(o!(\"smithay_module\" => \"data_device_mgr\")),\n\n ))\n\n });\n\n let seat_data = seat.user_data().get::<RefCell<SeatData>>().unwrap();\n\n seat_data\n\n .borrow_mut()\n\n .set_selection(Selection::Compositor(SourceMetadata {\n\n mime_types,\n\n dnd_action: DndAction::empty(),\n\n }));\n\n}\n\n\n", "file_path": "src/wayland/data_device/mod.rs", "rank": 42, "score": 174621.77673258202 }, { "content": "fn main() {\n\n #[cfg(any(feature = \"backend_egl\", feature = \"renderer_gl\"))]\n\n gl_generate();\n\n\n\n #[cfg(feature = \"backend_session_logind\")]\n\n find_logind();\n\n}\n", "file_path": "build.rs", "rank": 43, "score": 173908.31807229063 }, { "content": "pub fn window_state(space: usize, w: &Window) -> RefMut<'_, WindowState> {\n\n let userdata = w.user_data();\n\n userdata.insert_if_missing(WindowUserdata::default);\n\n RefMut::map(userdata.get::<WindowUserdata>().unwrap().borrow_mut(), |m| {\n\n m.entry(space).or_default()\n\n })\n\n}\n\n\n", "file_path": 
"src/desktop/space/window.rs", "rank": 44, "score": 172104.91466322856 }, { "content": "pub fn run(channel: Channel<WlcsEvent>) {\n\n let mut event_loop =\n\n EventLoop::<AnvilState<TestState>>::try_new().expect(\"Failed to init the event loop.\");\n\n\n\n let display = Rc::new(RefCell::new(Display::new()));\n\n\n\n let logger = slog::Logger::root(slog::Discard, slog::o!());\n\n\n\n let test_state = TestState {\n\n clients: HashMap::new(),\n\n };\n\n\n\n let mut state = AnvilState::init(\n\n display.clone(),\n\n event_loop.handle(),\n\n test_state,\n\n logger.clone(),\n\n false,\n\n );\n\n\n", "file_path": "wlcs_anvil/src/main_loop.rs", "rank": 45, "score": 170899.61304658075 }, { "content": "fn main() {\n\n if var(\"CARGO_FEATURE_LOGIND\").ok().is_none() && var(\"CARGO_FEATURE_LIBSEAT\").ok().is_none() {\n\n println!(\"cargo:warning=You are compiling anvil without logind/libseat support.\");\n\n println!(\"cargo:warning=This means that you'll likely need to run it as root if you want to launch it from a tty.\");\n\n println!(\"cargo:warning=To enable logind support add `--feature logind` to your cargo invocation.\");\n\n println!(\"cargo:warning=$ cd anvil; cargo run --feature logind\");\n\n println!(\"cargo:warning=To enable libseat support add `--feature libseat` to your cargo invocation.\");\n\n println!(\"cargo:warning=$ cd anvil; cargo run --feature libseat\");\n\n }\n\n}\n", "file_path": "anvil/build.rs", "rank": 46, "score": 170896.0219417856 }, { "content": "fn get_dmabuf_formats(\n\n display: &ffi::egl::types::EGLDisplay,\n\n extensions: &[String],\n\n log: &::slog::Logger,\n\n) -> Result<(HashSet<DrmFormat>, HashSet<DrmFormat>), EGLError> {\n\n use std::convert::TryFrom;\n\n\n\n if !extensions.iter().any(|s| s == \"EGL_EXT_image_dma_buf_import\") {\n\n warn!(log, \"Dmabuf import extension not available\");\n\n return Ok((HashSet::new(), HashSet::new()));\n\n }\n\n\n\n let formats = {\n\n // when we only have the image_dmabuf_import extension we can't 
query\n\n // which formats are supported. These two are on almost always\n\n // supported; it's the intended way to just try to create buffers.\n\n // Just a guess but better than not supporting dmabufs at all,\n\n // given that the modifiers extension isn't supported everywhere.\n\n if !extensions\n\n .iter()\n", "file_path": "src/backend/egl/display.rs", "rank": 47, "score": 169282.00016493563 }, { "content": "fn main() {\n\n let log = slog::Logger::root(Mutex::new(slog_term::term_full().fuse()).fuse(), o!());\n\n\n\n /*\n\n * Initialize the drm backend\n\n */\n\n\n\n // \"Find\" a suitable drm device\n\n let mut options = OpenOptions::new();\n\n options.read(true);\n\n options.write(true);\n\n let fd = FdWrapper {\n\n file: Rc::new(options.open(\"/dev/dri/card0\").unwrap()),\n\n };\n\n\n\n let device = DrmDevice::new(fd.clone(), true, log.clone()).unwrap();\n\n\n\n // Get a set of all modesetting resource handles (excluding planes):\n\n let res_handles = ControlDevice::resource_handles(&device).unwrap();\n\n\n", "file_path": "examples/raw_drm.rs", "rank": 48, "score": 168053.443202481 }, { "content": "#[derive(Debug)]\n\nstruct Inner {\n\n pointer: Option<PointerHandle>,\n\n keyboard: Option<KeyboardHandle>,\n\n touch: Option<TouchHandle>,\n\n known_seats: Vec<wl_seat::WlSeat>,\n\n}\n\n\n\npub(crate) struct SeatRc {\n\n inner: RefCell<Inner>,\n\n user_data: UserDataMap,\n\n pub(crate) log: ::slog::Logger,\n\n name: String,\n\n}\n\n\n\n// UserDataMap does not implement debug, so we have to impl Debug manually\n\nimpl fmt::Debug for SeatRc {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"SeatRc\")\n\n .field(\"inner\", &self.inner)\n\n .field(\"user_data\", &\"...\")\n", "file_path": "src/wayland/seat/mod.rs", "rank": 49, "score": 167788.47916986962 }, { "content": "/// This thread reads X11 events from the connection and sends them on the channel.\n\n///\n\n/// This is run in an extra thread since sending an X11 request or 
waiting for the reply to an X11\n\n/// request can both read X11 events from the underlying socket which are then saved in the\n\n/// RustConnection. Thus, readability of the underlying socket is not enough to guarantee we do not\n\n/// miss wakeups.\n\n///\n\n/// This thread will call wait_for_event(). RustConnection then ensures internally to wake us up\n\n/// when an event arrives. So far, this seems to be the only safe way to integrate x11rb with\n\n/// calloop.\n\nfn run_event_thread(connection: Arc<RustConnection>, sender: SyncSender<Event>, log: slog::Logger) {\n\n loop {\n\n let event = match connection.wait_for_event() {\n\n Ok(event) => event,\n\n Err(err) => {\n\n // Connection errors are most likely permanent. Thus, exit the thread.\n\n slog::crit!(log, \"Event thread exiting due to connection error {}\", err);\n\n break;\n\n }\n\n };\n\n match sender.send(event) {\n\n Ok(()) => {}\n\n Err(_) => {\n\n // The only possible error is that the other end of the channel was dropped.\n\n // This happens in X11Source's Drop impl.\n\n break;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/utils/x11rb.rs", "rank": 50, "score": 163420.74665638298 }, { "content": "#[derive(Debug)]\n\nstruct Inner<Data> {\n\n sender: SyncSender<XWaylandEvent>,\n\n handle: LoopHandle<'static, Data>,\n\n wayland_display: Rc<RefCell<Display>>,\n\n instance: Option<XWaylandInstance>,\n\n log: ::slog::Logger,\n\n}\n\n\n", "file_path": "src/xwayland/xserver.rs", "rank": 51, "score": 161110.19189165905 }, { "content": "/// Returns the loaded driver for a device named by it's [`dev_t`](::nix::sys::stat::dev_t).\n\npub fn driver(dev: dev_t) -> io::Result<Option<OsString>> {\n\n let mut enumerator = Enumerator::new()?;\n\n enumerator.match_subsystem(\"drm\")?;\n\n enumerator.match_sysname(\"card[0-9]*\")?;\n\n Ok(enumerator\n\n .scan_devices()?\n\n .filter(|device| device.devnum() == Some(dev))\n\n .flat_map(|dev| {\n\n let mut device = Some(dev);\n\n while let Some(dev) = device {\n\n if 
dev.driver().is_some() {\n\n return dev.driver().map(std::ffi::OsStr::to_os_string);\n\n }\n\n device = dev.parent();\n\n }\n\n None\n\n })\n\n .next())\n\n}\n", "file_path": "src/backend/udev.rs", "rank": 52, "score": 159535.05817578186 }, { "content": "fn place_new_window(space: &mut Space, window: &Window, activate: bool) {\n\n // place the window at a random location on the primary output\n\n // or if there is not output in a [0;800]x[0;800] square\n\n use rand::distributions::{Distribution, Uniform};\n\n\n\n let output = space.outputs().next().cloned();\n\n let output_geometry = output\n\n .and_then(|o| {\n\n let geo = space.output_geometry(&o)?;\n\n let map = layer_map_for_output(&o);\n\n let zone = map.non_exclusive_zone();\n\n Some(Rectangle::from_loc_and_size(geo.loc + zone.loc, zone.size))\n\n })\n\n .unwrap_or_else(|| Rectangle::from_loc_and_size((0, 0), (800, 800)));\n\n\n\n let max_x = output_geometry.loc.x + (((output_geometry.size.w as f32) / 3.0) * 2.0) as i32;\n\n let max_y = output_geometry.loc.y + (((output_geometry.size.h as f32) / 3.0) * 2.0) as i32;\n\n let x_range = Uniform::new(output_geometry.loc.x, max_x);\n\n let y_range = Uniform::new(output_geometry.loc.y, max_y);\n\n let mut rng = rand::thread_rng();\n\n let x = x_range.sample(&mut rng);\n\n let y = y_range.sample(&mut rng);\n\n\n\n space.map_window(window, (x, y), activate);\n\n}\n\n\n", "file_path": "anvil/src/shell.rs", "rank": 53, "score": 158389.82673473767 }, { "content": "/// Open the two unix sockets an X server listens on\n\n///\n\n/// Should only be done after the associated lockfile is acquired!\n\nfn open_x11_sockets_for_display(display: u32) -> nix::Result<[UnixStream; 2]> {\n\n let path = format!(\"/tmp/.X11-unix/X{}\", display);\n\n let _ = ::std::fs::remove_file(&path);\n\n // We know this path is not to long, these unwrap cannot fail\n\n let fs_addr = socket::UnixAddr::new(path.as_bytes()).unwrap();\n\n let abs_addr = 
socket::UnixAddr::new_abstract(path.as_bytes()).unwrap();\n\n let fs_socket = open_socket(fs_addr)?;\n\n let abstract_socket = open_socket(abs_addr)?;\n\n Ok([fs_socket, abstract_socket])\n\n}\n\n\n", "file_path": "src/xwayland/x11_sockets.rs", "rank": 54, "score": 155431.09821059747 }, { "content": "pub fn run_raw(\n\n display: Rc<RefCell<Display>>,\n\n event_loop: &mut EventLoop<AnvilState>,\n\n path: impl AsRef<str>,\n\n log: Logger,\n\n) -> Result<(), ()> {\n\n let name = display\n\n .borrow_mut()\n\n .add_socket_auto()\n\n .unwrap()\n\n .into_string()\n\n .unwrap();\n\n info!(log, \"Listening on wayland socket\"; \"name\" => name.clone());\n\n ::std::env::set_var(\"WAYLAND_DISPLAY\", name);\n\n\n\n #[cfg(feature = \"egl\")]\n\n let egl_buffer_reader = Rc::new(RefCell::new(None));\n\n\n\n let output_map = Rc::new(RefCell::new(Vec::new()));\n\n\n", "file_path": "anvil/src/raw.rs", "rank": 55, "score": 154022.05622944926 }, { "content": "pub fn scan_connectors(\n\n device: &mut DrmDevice<FileWrapper>,\n\n gbm: &GbmDevice<FileWrapper>,\n\n egl: &EGLDisplay,\n\n context: &EGLContext,\n\n display: &mut Display,\n\n output_map: &mut Vec<MyOutput>,\n\n logger: &::slog::Logger,\n\n) -> HashMap<crtc::Handle, Rc<RefCell<RenderSurface>>> {\n\n // Get a set of all modesetting resource handles (excluding planes):\n\n let res_handles = device.resource_handles().unwrap();\n\n\n\n // Use first connected connector\n\n let connector_infos: Vec<ConnectorInfo> = res_handles\n\n .connectors()\n\n .iter()\n\n .map(|conn| device.get_connector(*conn).unwrap())\n\n .filter(|conn| conn.state() == ConnectorState::Connected)\n\n .inspect(|conn| info!(logger, \"Connected: {:?}\", conn.interface()))\n\n .collect();\n", "file_path": "anvil/src/raw.rs", "rank": 56, "score": 154022.05622944926 }, { "content": "fn implement_surface<Impl>(\n\n surface: Main<wl_surface::WlSurface>,\n\n log: ::slog::Logger,\n\n implem: Rc<RefCell<Impl>>,\n\n) -> wl_surface::WlSurface\n\nwhere\n\n Impl: for<'a> 
FnMut(wl_surface::WlSurface, DispatchData<'a>) + 'static,\n\n{\n\n surface.quick_assign({\n\n let mut implem = SurfaceImplem::make(log, implem);\n\n move |surface, req, ddata| implem.receive_surface_request(req, surface.deref().clone(), ddata)\n\n });\n\n surface.assign_destructor(Filter::new(|surface, _, _| PrivateSurfaceData::cleanup(&surface)));\n\n surface\n\n .as_ref()\n\n .user_data()\n\n .set_threadsafe(PrivateSurfaceData::new);\n\n PrivateSurfaceData::init(&surface);\n\n surface.deref().clone()\n\n}\n\n\n\n/*\n\n * wl_region\n\n */\n\n\n", "file_path": "src/wayland/compositor/handlers.rs", "rank": 57, "score": 153880.15806413052 }, { "content": "fn handle_event(event: WlcsEvent, state: &mut AnvilState<TestState>) {\n\n match event {\n\n WlcsEvent::Exit => state.running.store(false, Ordering::SeqCst),\n\n WlcsEvent::NewClient { stream, client_id } => {\n\n let display = state.display.clone();\n\n let client = unsafe { display.borrow_mut().create_client(stream.into_raw_fd(), state) };\n\n state.backend_data.clients.insert(client_id, client);\n\n }\n\n WlcsEvent::PositionWindow {\n\n client_id,\n\n surface_id,\n\n location,\n\n } => {\n\n // find the surface\n\n let client = state.backend_data.clients.get(&client_id);\n\n let mut space = state.space.borrow_mut();\n\n let toplevel = space.windows().find(|w| {\n\n let surface = w.toplevel().get_surface().unwrap();\n\n surface.as_ref().client().as_ref() == client && surface.as_ref().id() == surface_id\n\n });\n", "file_path": "wlcs_anvil/src/main_loop.rs", "rank": 58, "score": 153501.67384320378 }, { "content": "fn implement_shell_surface<Impl>(\n\n shell_surface: Main<wl_shell_surface::WlShellSurface>,\n\n surface: wl_surface::WlSurface,\n\n implementation: Rc<RefCell<Impl>>,\n\n state: Arc<Mutex<ShellState>>,\n\n) -> wl_shell_surface::WlShellSurface\n\nwhere\n\n Impl: FnMut(ShellRequest, DispatchData<'_>) + 'static,\n\n{\n\n use self::wl_shell_surface::Request;\n\n shell_surface.quick_assign(move 
|shell_surface, req, dispatch_data| {\n\n let data = shell_surface\n\n .as_ref()\n\n .user_data()\n\n .get::<ShellSurfaceUserData>()\n\n .unwrap();\n\n let mut user_impl = implementation.borrow_mut();\n\n match req {\n\n Request::Pong { serial } => {\n\n let serial = Serial::from(serial);\n", "file_path": "src/wayland/shell/legacy/wl_handlers.rs", "rank": 59, "score": 147341.8145009533 }, { "content": "/// Returns the (sub-)surface under a given position given a surface, if any.\n\n///\n\n/// - `point` has to be the position to query, relative to (0, 0) of the given surface + `location`.\n\n/// - `location` can be used to offset the returned point.\n\npub fn under_from_surface_tree<P>(\n\n surface: &wl_surface::WlSurface,\n\n point: Point<f64, Logical>,\n\n location: P,\n\n surface_type: WindowSurfaceType,\n\n) -> Option<(wl_surface::WlSurface, Point<i32, Logical>)>\n\nwhere\n\n P: Into<Point<i32, Logical>>,\n\n{\n\n let found = RefCell::new(None);\n\n with_surface_tree_downward(\n\n surface,\n\n location.into(),\n\n |wl_surface, states, location: &Point<i32, Logical>| {\n\n let mut location = *location;\n\n let data = states.data_map.get::<RefCell<SurfaceState>>();\n\n\n\n if states.role == Some(\"subsurface\") {\n\n let current = states.cached_state.current::<SubsurfaceCachedState>();\n\n location += current.location;\n", "file_path": "src/desktop/utils.rs", "rank": 60, "score": 146418.95702858025 }, { "content": "/// Returns the damage rectangles of the current buffer for a given surface and its subsurfaces.\n\n///\n\n/// - `location` can be set to offset the returned bounding box.\n\n/// - if a `key` is set the damage is only returned on the first call with the given key values.\n\n/// Subsequent calls will return an empty vector until the buffer is updated again and new\n\n/// damage values may be retrieved.\n\npub fn damage_from_surface_tree<P>(\n\n surface: &wl_surface::WlSurface,\n\n location: P,\n\n key: Option<(&Space, &Output)>,\n\n) -> 
Vec<Rectangle<i32, Logical>>\n\nwhere\n\n P: Into<Point<i32, Logical>>,\n\n{\n\n use super::space::SpaceOutputTuple;\n\n\n\n let mut damage = Vec::new();\n\n let key = key.map(|x| SpaceOutputTuple::from(x).owned_hash());\n\n with_surface_tree_upward(\n\n surface,\n\n location.into(),\n\n |_surface, states, location| {\n\n let mut location = *location;\n\n if let Some(data) = states.data_map.get::<RefCell<SurfaceState>>() {\n\n let data = data.borrow();\n\n if key\n", "file_path": "src/desktop/utils.rs", "rank": 61, "score": 144130.29800018208 }, { "content": "/// Start a drag'n'drop from a resource controlled by the compositor\n\n///\n\n/// You'll receive events generated by the interaction of clients with your\n\n/// drag'n'drop in the provided callback. See [`ServerDndEvent`] for details about\n\n/// which events can be generated and what response is expected from you to them.\n\npub fn start_dnd<C>(\n\n seat: &Seat,\n\n serial: Serial,\n\n start_data: PointerGrabStartData,\n\n metadata: SourceMetadata,\n\n callback: C,\n\n) where\n\n C: FnMut(ServerDndEvent) + 'static,\n\n{\n\n // TODO: same question as in set_data_device_focus\n\n seat.user_data().insert_if_missing(|| {\n\n RefCell::new(SeatData::new(\n\n seat.arc.log.new(o!(\"smithay_module\" => \"data_device_mgr\")),\n\n ))\n\n });\n\n if let Some(pointer) = seat.get_pointer() {\n\n pointer.set_grab(\n\n server_dnd_grab::ServerDnDGrab::new(\n\n start_data,\n\n metadata,\n\n seat.clone(),\n\n Rc::new(RefCell::new(callback)),\n\n ),\n\n serial,\n\n 0,\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/wayland/data_device/mod.rs", "rank": 62, "score": 141956.53896009212 }, { "content": "fn xwayland_ready<Data: 'static>(inner: &Rc<RefCell<Inner<Data>>>) {\n\n // Lots of re-borrowing to please the borrow-checker\n\n let mut guard = inner.borrow_mut();\n\n let guard = &mut *guard;\n\n // instance should never be None at this point\n\n let instance = guard.instance.as_mut().unwrap();\n\n // neither the child_stdout\n\n let 
child_stdout = instance.child_stdout.as_mut().unwrap();\n\n\n\n // This reads the one byte that is written when sh receives SIGUSR1\n\n let mut buffer = [0];\n\n let success = match child_stdout.read(&mut buffer) {\n\n Ok(len) => len > 0 && buffer[0] == b'S',\n\n Err(e) => {\n\n error!(guard.log, \"Checking launch status failed\"; \"err\" => format!(\"{:?}\", e));\n\n false\n\n }\n\n };\n\n\n\n if success {\n", "file_path": "src/xwayland/xserver.rs", "rank": 63, "score": 141896.90725206205 }, { "content": "#[cfg(feature = \"debug\")]\n\npub fn draw_fps<R, F, E, T>(texture: &T, value: u32) -> impl RenderElement<R, F, E, T>\n\nwhere\n\n R: Renderer<Error = E, TextureId = T, Frame = F> + ImportAll + 'static,\n\n F: Frame<Error = E, TextureId = T> + 'static,\n\n E: std::error::Error + Into<SwapBuffersError> + 'static,\n\n T: Texture + Clone + 'static,\n\n{\n\n FpsElement {\n\n value,\n\n texture: texture.clone(),\n\n }\n\n}\n\n\n", "file_path": "anvil/src/drawing.rs", "rank": 64, "score": 140582.2534482792 }, { "content": "pub fn set_connector_state<D>(\n\n dev: &D,\n\n connectors: impl Iterator<Item = connector::Handle>,\n\n enabled: bool,\n\n) -> Result<(), Error>\n\nwhere\n\n D: ControlDevice,\n\n{\n\n // for every connector...\n\n for conn in connectors {\n\n let info = dev.get_connector(conn).map_err(|source| Error::Access {\n\n errmsg: \"Failed to get connector infos\",\n\n dev: dev.dev_path(),\n\n source,\n\n })?;\n\n // that is currently connected ...\n\n if info.state() == connector::State::Connected {\n\n // get a list of it's properties.\n\n let props = dev.get_properties(conn).map_err(|source| Error::Access {\n\n errmsg: \"Failed to get properties for connector\",\n", "file_path": "src/backend/drm/device/legacy.rs", "rank": 65, "score": 139891.70249712295 }, { "content": "/// Register a commit hook to be invoked on surface commit\n\n///\n\n/// For its precise semantics, see module-level documentation.\n\npub fn add_commit_hook(surface: &WlSurface, hook: 
fn(&WlSurface)) {\n\n if !surface.as_ref().is_alive() {\n\n return;\n\n }\n\n PrivateSurfaceData::add_commit_hook(surface, hook)\n\n}\n\n\n", "file_path": "src/wayland/compositor/mod.rs", "rank": 66, "score": 138813.56452149161 }, { "content": "/// Allows errors to be described by an error number\n\npub trait AsErrno: ::std::fmt::Debug {\n\n /// Returns the error number representing this error if any\n\n fn as_errno(&self) -> Option<i32>;\n\n}\n\n\n\nimpl AsErrno for () {\n\n fn as_errno(&self) -> Option<i32> {\n\n None\n\n }\n\n}\n\n\n\npub mod auto;\n\npub mod direct;\n\n#[cfg(feature = \"backend_session_libseat\")]\n\npub mod libseat;\n\n\n\n#[cfg(feature = \"backend_session_logind\")]\n\nmod dbus;\n\n#[cfg(feature = \"backend_session_logind\")]\n\npub use self::dbus::*;\n", "file_path": "src/backend/session/mod.rs", "rank": 67, "score": 136404.36681193023 }, { "content": "// Called when a WlSurface commits.\n\npub fn commit_hook(surface: &WlSurface) {\n\n // Is this the Xwayland client?\n\n if let Some(client) = surface.as_ref().client() {\n\n if let Some(x11) = client.data_map().get::<Rc<RefCell<X11State>>>() {\n\n let mut inner = x11.borrow_mut();\n\n // Is the surface among the unpaired surfaces (see comment next to WL_SURFACE_ID\n\n // handling above)\n\n if let Some((window, location)) = inner.unpaired_surfaces.remove(&surface.as_ref().id()) {\n\n inner.new_window(window, surface.clone(), location);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "anvil/src/xwayland/mod.rs", "rank": 68, "score": 135495.57830509212 }, { "content": "/// Handler to let smithay take over buffer management.\n\n///\n\n/// Needs to be called first on the commit-callback of\n\n/// [`crate::wayland::compositor::compositor_init`].\n\n///\n\n/// Consumes the buffer of [`SurfaceAttributes`], the buffer will\n\n/// not be accessible anymore, but [`draw_surface_tree`] and other\n\n/// `draw_*` helpers of the [desktop module](`crate::desktop`) will\n\n/// become usable for surfaces handled this 
way.\n\npub fn on_commit_buffer_handler(surface: &WlSurface) {\n\n if !is_sync_subsurface(surface) {\n\n with_surface_tree_upward(\n\n surface,\n\n (),\n\n |_, _, _| TraversalAction::DoChildren(()),\n\n |_surf, states, _| {\n\n states\n\n .data_map\n\n .insert_if_missing(|| RefCell::new(SurfaceState::default()));\n\n let mut data = states\n\n .data_map\n\n .get::<RefCell<SurfaceState>>()\n\n .unwrap()\n\n .borrow_mut();\n\n data.update_buffer(&mut *states.cached_state.current::<SurfaceAttributes>());\n\n },\n\n |_, _, _| true,\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/backend/renderer/utils.rs", "rank": 69, "score": 133532.39347334346 }, { "content": "#[derive(Debug, Default, Clone)]\n\nstruct PopupTree(Arc<Mutex<Vec<PopupNode>>>);\n\n\n", "file_path": "src/desktop/popup/manager.rs", "rank": 70, "score": 133471.5638657244 }, { "content": "struct ParamsHandler<H: for<'a> FnMut(&Dmabuf, DispatchData<'a>) -> bool + 'static> {\n\n pending_planes: Vec<Plane>,\n\n max_planes: u32,\n\n used: bool,\n\n formats: Rc<[Format]>,\n\n handler: Rc<RefCell<H>>,\n\n log: ::slog::Logger,\n\n}\n\n\n\nimpl<H> ParamsHandler<H>\n\nwhere\n\n H: for<'a> FnMut(&Dmabuf, DispatchData<'a>) -> bool + 'static,\n\n{\n\n fn add(\n\n &mut self,\n\n params: &BufferParams,\n\n fd: RawFd,\n\n plane_idx: u32,\n\n offset: u32,\n\n stride: u32,\n", "file_path": "src/wayland/dmabuf/mod.rs", "rank": 71, "score": 133406.53783550733 }, { "content": "fn initial_render(surface: &mut RenderSurface, renderer: &mut Gles2Renderer) -> Result<(), SwapBuffersError> {\n\n let (dmabuf, _age) = surface.next_buffer()?;\n\n renderer.bind(dmabuf)?;\n\n // Does not matter if we render an empty frame\n\n renderer\n\n .render((1, 1).into(), Transform::Normal, |_, frame| {\n\n frame\n\n .clear(CLEAR_COLOR, &[Rectangle::from_loc_and_size((0, 0), (1, 1))])\n\n .map_err(Into::<SwapBuffersError>::into)\n\n })\n\n .map_err(Into::<SwapBuffersError>::into)\n\n .and_then(|x| x.map_err(Into::<SwapBuffersError>::into))?;\n\n 
surface.queue_buffer()?;\n\n surface.reset_buffers();\n\n Ok(())\n\n}\n", "file_path": "anvil/src/udev.rs", "rank": 72, "score": 133344.96664162 }, { "content": "pub fn init_shell<BackendData: Backend + 'static>(\n\n display: Rc<RefCell<Display>>,\n\n log: ::slog::Logger,\n\n) -> ShellHandles {\n\n // Create the compositor\n\n compositor_init(\n\n &mut *display.borrow_mut(),\n\n move |surface, mut ddata| {\n\n on_commit_buffer_handler(&surface);\n\n let anvil_state = ddata.get::<AnvilState<BackendData>>().unwrap();\n\n let mut popups = anvil_state.popups.borrow_mut();\n\n let space = anvil_state.space.as_ref();\n\n space.borrow_mut().commit(&surface);\n\n surface_commit(&surface, &*space, &mut *popups)\n\n },\n\n log.clone(),\n\n );\n\n\n\n let log_ref = log.clone();\n\n // init the xdg_shell\n", "file_path": "anvil/src/shell.rs", "rank": 73, "score": 131736.5735611817 }, { "content": "/// Returns true if the surface is toplevel equivalent.\n\n///\n\n/// This is method checks if the surface roles is one of `wl_shell_surface`, `xdg_toplevel`\n\n/// or `zxdg_toplevel`.\n\npub fn is_toplevel_equivalent(surface: &WlSurface) -> bool {\n\n // (z)xdg_toplevel and wl_shell_surface are toplevel like, so verify if the roles match.\n\n let role = compositor::get_role(surface);\n\n\n\n matches!(\n\n role,\n\n Some(xdg::XDG_TOPLEVEL_ROLE) | Some(xdg::ZXDG_TOPLEVEL_ROLE) | Some(legacy::WL_SHELL_SURFACE_ROLE)\n\n )\n\n}\n", "file_path": "src/wayland/shell/mod.rs", "rank": 74, "score": 129768.91092666896 }, { "content": "/// Check if this subsurface is a synchronized subsurface\n\n///\n\n/// Returns false if the surface is already dead\n\npub fn is_sync_subsurface(surface: &WlSurface) -> bool {\n\n if !surface.as_ref().is_alive() {\n\n return false;\n\n }\n\n self::handlers::is_effectively_sync(surface)\n\n}\n\n\n", "file_path": "src/wayland/compositor/mod.rs", "rank": 75, "score": 129768.91092666896 }, { "content": "/// Access the metadata of a data source\n\npub fn 
with_source_metadata<T, F: FnOnce(&SourceMetadata) -> T>(\n\n source: &WlDataSource,\n\n f: F,\n\n) -> Result<T, crate::utils::UnmanagedResource> {\n\n match source.as_ref().user_data().get::<RefCell<SourceMetadata>>() {\n\n Some(data) => Ok(f(&data.borrow())),\n\n None => Err(crate::utils::UnmanagedResource),\n\n }\n\n}\n", "file_path": "src/wayland/data_device/data_source.rs", "rank": 76, "score": 129478.91366675589 }, { "content": "struct MoveSurfaceGrab {\n\n start_data: PointerGrabStartData,\n\n space: Rc<RefCell<Space>>,\n\n window: Window,\n\n initial_window_location: Point<i32, Logical>,\n\n}\n\n\n\nimpl PointerGrab for MoveSurfaceGrab {\n\n fn motion(\n\n &mut self,\n\n handle: &mut PointerInnerHandle<'_>,\n\n location: Point<f64, Logical>,\n\n _focus: Option<(wl_surface::WlSurface, Point<i32, Logical>)>,\n\n serial: Serial,\n\n time: u32,\n\n ) {\n\n // While the grab is active, no client has pointer focus\n\n handle.motion(location, None, serial, time);\n\n\n\n let delta = location - self.start_data.location;\n", "file_path": "anvil/src/shell.rs", "rank": 77, "score": 128777.53645644014 }, { "content": "#[derive(Debug)]\n\nstruct XWaylandInstance {\n\n display_lock: X11Lock,\n\n wayland_client: Option<Client>,\n\n wm_fd: Option<UnixStream>,\n\n child_stdout: Option<ChildStdout>,\n\n}\n\n\n\n// Inner implementation of the XWayland manager\n", "file_path": "src/xwayland/xserver.rs", "rank": 78, "score": 128777.53645644014 }, { "content": "#[derive(Debug, PartialEq)]\n\nstruct UdevOutputId {\n\n device_id: dev_t,\n\n crtc: crtc::Handle,\n\n}\n\n\n\npub struct UdevData {\n\n pub session: AutoSession,\n\n #[cfg(feature = \"egl\")]\n\n primary_gpu: Option<PathBuf>,\n\n backends: HashMap<dev_t, BackendData>,\n\n signaler: Signaler<SessionSignal>,\n\n pointer_image: crate::cursor::Cursor,\n\n render_timer: TimerHandle<(u64, crtc::Handle)>,\n\n}\n\n\n\nimpl Backend for UdevData {\n\n fn seat_name(&self) -> String {\n\n self.session.seat()\n\n }\n\n\n", 
"file_path": "anvil/src/udev.rs", "rank": 79, "score": 128675.74694503527 }, { "content": "pub fn draw_cursor<R, F, E, T>(\n\n surface: wl_surface::WlSurface,\n\n location: impl Into<Point<i32, Logical>>,\n\n log: &Logger,\n\n) -> impl RenderElement<R, F, E, T>\n\nwhere\n\n R: Renderer<Error = E, TextureId = T, Frame = F> + ImportAll + 'static,\n\n F: Frame<Error = E, TextureId = T> + 'static,\n\n E: std::error::Error + Into<SwapBuffersError> + 'static,\n\n T: Texture + 'static,\n\n{\n\n let mut position = location.into();\n\n let ret = with_states(&surface, |states| {\n\n Some(\n\n states\n\n .data_map\n\n .get::<Mutex<CursorImageAttributes>>()\n\n .unwrap()\n\n .lock()\n\n .unwrap()\n", "file_path": "anvil/src/drawing.rs", "rank": 80, "score": 128485.5424313132 }, { "content": "pub fn draw_dnd_icon<R, F, E, T>(\n\n surface: wl_surface::WlSurface,\n\n location: impl Into<Point<i32, Logical>>,\n\n log: &Logger,\n\n) -> impl RenderElement<R, F, E, T>\n\nwhere\n\n R: Renderer<Error = E, TextureId = T, Frame = F> + ImportAll + 'static,\n\n F: Frame<Error = E, TextureId = T> + 'static,\n\n E: std::error::Error + Into<SwapBuffersError> + 'static,\n\n T: Texture + 'static,\n\n{\n\n if get_role(&surface) != Some(\"dnd_icon\") {\n\n warn!(\n\n log,\n\n \"Trying to display as a dnd icon a surface that does not have the DndIcon role.\"\n\n );\n\n }\n\n SurfaceTree {\n\n surface,\n\n position: location.into(),\n", "file_path": "anvil/src/drawing.rs", "rank": 81, "score": 126517.87979680047 }, { "content": "#[derive(Debug, Clone)]\n\nstruct ShmGlobalData {\n\n formats: Rc<[wl_shm::Format]>,\n\n log: ::slog::Logger,\n\n}\n\n\n", "file_path": "src/wayland/shm/mod.rs", "rank": 82, "score": 126340.83495436632 }, { "content": "struct TestState {\n\n clients: HashMap<i32, Client>,\n\n}\n\n\n\nimpl Backend for TestState {\n\n fn seat_name(&self) -> String {\n\n \"anvil_wlcs\".into()\n\n }\n\n\n\n fn reset_buffers(&mut self, _output: &Output) {}\n\n}\n\n\n", "file_path": 
"wlcs_anvil/src/main_loop.rs", "rank": 83, "score": 126300.70953462168 }, { "content": "fn fullscreen_output_geometry(\n\n wl_surface: &wl_surface::WlSurface,\n\n wl_output: Option<&wl_output::WlOutput>,\n\n space: &mut Space,\n\n) -> Option<Rectangle<i32, Logical>> {\n\n // First test if a specific output has been requested\n\n // if the requested output is not found ignore the request\n\n wl_output\n\n .and_then(Output::from_resource)\n\n .or_else(|| {\n\n let w = space.window_for_surface(wl_surface).cloned();\n\n w.and_then(|w| space.outputs_for_window(&w).get(0).cloned())\n\n })\n\n .and_then(|o| space.output_geometry(&o))\n\n}\n\n\n\n#[derive(Default)]\n\npub struct FullscreenSurface(RefCell<Option<Window>>);\n\n\n\nimpl FullscreenSurface {\n", "file_path": "anvil/src/shell.rs", "rank": 84, "score": 126266.49136982282 }, { "content": "struct SignalInner<S> {\n\n callbacks: RefCell<Vec<WeakCallback<S>>>,\n\n pending_callbacks: RefCell<Vec<WeakCallback<S>>>,\n\n pending_events: RefCell<VecDeque<S>>,\n\n}\n\n\n\n// WeakCallback does not implement debug, so we have to impl Debug manually\n\nimpl<S: fmt::Debug> fmt::Debug for SignalInner<S> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"SignalInner\")\n\n .field(\"callbacks::len()\", &self.callbacks.borrow().len())\n\n .field(\"pending_callbacks::len()\", &self.pending_callbacks.borrow().len())\n\n .field(\"pending_events\", &self.pending_events)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl<S> SignalInner<S> {\n\n fn new() -> SignalInner<S> {\n\n SignalInner {\n", "file_path": "src/utils/signaling.rs", "rank": 85, "score": 125412.67349012388 }, { "content": "/// Draws a surface and its subsurfaces using a given [`Renderer`] and [`Frame`].\n\n///\n\n/// - `scale` needs to be equivalent to the fractional scale the rendered result should have.\n\n/// - `location` is the position the surface should be drawn at.\n\n/// - `damage` is the set of regions of the surface that should be 
drawn.\n\n///\n\n/// Note: This element will render nothing, if you are not using\n\n/// [`crate::backend::renderer::utils::on_commit_buffer_handler`]\n\n/// to let smithay handle buffer management.\n\npub fn draw_surface_tree<R, E, F, T>(\n\n renderer: &mut R,\n\n frame: &mut F,\n\n surface: &WlSurface,\n\n scale: f64,\n\n location: Point<i32, Logical>,\n\n damage: &[Rectangle<i32, Logical>],\n\n log: &slog::Logger,\n\n) -> Result<(), R::Error>\n\nwhere\n\n R: Renderer<Error = E, TextureId = T, Frame = F> + ImportAll,\n\n F: Frame<Error = E, TextureId = T>,\n\n E: std::error::Error,\n\n T: Texture + 'static,\n\n{\n\n let mut result = Ok(());\n\n with_surface_tree_upward(\n\n surface,\n\n location,\n\n |_surface, states, location| {\n", "file_path": "src/backend/renderer/utils.rs", "rank": 86, "score": 124656.10165471508 }, { "content": "/// Access the data of a surface tree from bottom to top\n\n///\n\n/// You provide three closures, a \"filter\", a \"processor\" and a \"post filter\".\n\n///\n\n/// The first closure is initially called on a surface to determine if its children\n\n/// should be processed as well. It returns a `TraversalAction<T>` reflecting that.\n\n///\n\n/// The second closure is supposed to do the actual processing. The processing closure for\n\n/// a surface may be called after the processing closure of some of its children, depending\n\n/// on the stack ordering the client requested. Here the surfaces are processed in the same\n\n/// order as they are supposed to be drawn: from the farthest of the screen to the nearest.\n\n///\n\n/// The third closure is called once all the subtree of a node has been processed, and gives\n\n/// an opportunity for early-stopping. 
If it returns `true` the processing will continue,\n\n/// while if it returns `false` it'll stop.\n\n///\n\n/// The arguments provided to the closures are, in this order:\n\n///\n\n/// - The surface object itself\n\n/// - a mutable reference to its surface attribute data\n\n/// - a mutable reference to its role data,\n\n/// - a custom value that is passed in a fold-like manner, but only from the output of a parent\n\n/// to its children. See [`TraversalAction`] for details.\n\n///\n\n/// If the surface not managed by the `CompositorGlobal` that provided this token, this\n\n/// will panic (having more than one compositor is not supported).\n\npub fn with_surface_tree_upward<F1, F2, F3, T>(\n\n surface: &WlSurface,\n\n initial: T,\n\n filter: F1,\n\n processor: F2,\n\n post_filter: F3,\n\n) where\n\n F1: FnMut(&WlSurface, &SurfaceData, &T) -> TraversalAction<T>,\n\n F2: FnMut(&WlSurface, &SurfaceData, &T),\n\n F3: FnMut(&WlSurface, &SurfaceData, &T) -> bool,\n\n{\n\n PrivateSurfaceData::map_tree(surface, &initial, filter, processor, post_filter, false);\n\n}\n\n\n", "file_path": "src/wayland/compositor/mod.rs", "rank": 87, "score": 124651.23091930855 }, { "content": "/// Access the data of a surface tree from top to bottom\n\n///\n\n/// Behavior is the same as [`with_surface_tree_upward`], but the processing is done in the reverse order,\n\n/// from the nearest of the screen to the deepest.\n\n///\n\n/// This would typically be used to find out which surface of a subsurface tree has been clicked for example.\n\npub fn with_surface_tree_downward<F1, F2, F3, T>(\n\n surface: &WlSurface,\n\n initial: T,\n\n filter: F1,\n\n processor: F2,\n\n post_filter: F3,\n\n) where\n\n F1: FnMut(&WlSurface, &SurfaceData, &T) -> TraversalAction<T>,\n\n F2: FnMut(&WlSurface, &SurfaceData, &T),\n\n F3: FnMut(&WlSurface, &SurfaceData, &T) -> bool,\n\n{\n\n PrivateSurfaceData::map_tree(surface, &initial, filter, processor, post_filter, true);\n\n}\n\n\n", "file_path": 
"src/wayland/compositor/mod.rs", "rank": 88, "score": 124645.42127093309 }, { "content": "fn select_platform_display<N: EGLNativeDisplay + 'static>(\n\n native: &N,\n\n dp_extensions: &[String],\n\n log: &::slog::Logger,\n\n) -> Result<*const c_void, Error> {\n\n for platform in native.supported_platforms() {\n\n debug!(log, \"Trying EGL platform: {}\", platform.platform_name);\n\n\n\n let log = log.new(o!(\"platform\" => format!(\"{:?}\", platform)));\n\n\n\n let missing_extensions = platform\n\n .required_extensions\n\n .iter()\n\n .filter(|ext| !dp_extensions.iter().any(|x| x == *ext))\n\n .collect::<Vec<_>>();\n\n\n\n if !missing_extensions.is_empty() {\n\n info!(\n\n log,\n\n \"Skipping EGL platform because one or more required extensions are not supported. Missing extensions: {:?}\", missing_extensions\n", "file_path": "src/backend/egl/display.rs", "rank": 89, "score": 124373.13304996786 }, { "content": "/// Trait for generic functions every input device does provide\n\npub trait Device: PartialEq + Eq + std::hash::Hash {\n\n /// Unique id of a single device at a point in time.\n\n ///\n\n /// Note: This means ids may be re-used by the backend for later devices.\n\n fn id(&self) -> String;\n\n /// Human-readable name of the device\n\n fn name(&self) -> String;\n\n /// Test if this device has a specific capability\n\n fn has_capability(&self, capability: DeviceCapability) -> bool;\n\n\n\n /// Returns device USB (product,vendor) id\n\n fn usb_id(&self) -> Option<(u32, u32)>;\n\n\n\n /// Returns the syspath of the device.\n\n ///\n\n /// The path is an absolute path and includes the sys mount point.\n\n fn syspath(&self) -> Option<PathBuf>;\n\n}\n\n\n\n/// Set of input types a device may provide\n", "file_path": "src/backend/input/mod.rs", "rank": 90, "score": 124098.49831984723 }, { "content": "#[derive(Debug)]\n\nstruct LibSeatSessionImpl {\n\n seat: RefCell<Seat>,\n\n active: Arc<AtomicBool>,\n\n devices: RefCell<HashMap<RawFd, i32>>,\n\n logger: 
::slog::Logger,\n\n}\n\n\n\nimpl Drop for LibSeatSessionImpl {\n\n fn drop(&mut self) {\n\n debug!(self.logger, \"Closing seat\")\n\n }\n\n}\n\n\n\n/// [`Session`] via the libseat\n\n#[derive(Debug, Clone)]\n\npub struct LibSeatSession {\n\n internal: Weak<LibSeatSessionImpl>,\n\n seat_name: String,\n\n}\n\n\n", "file_path": "src/backend/session/libseat.rs", "rank": 91, "score": 124025.01459146998 }, { "content": "struct LogindSessionImpl {\n\n session_id: String,\n\n conn: RefCell<DBusConnection>,\n\n session_path: DbusPath<'static>,\n\n active: AtomicBool,\n\n signaler: Signaler<SessionSignal>,\n\n seat: String,\n\n logger: ::slog::Logger,\n\n}\n\n\n\n// DBusConnection does not implement debug, so we have to impl Debug manually\n\nimpl fmt::Debug for LogindSessionImpl {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"LogindSessionImpl\")\n\n .field(\"session_id\", &self.session_id)\n\n .field(\"conn\", &\"...\")\n\n .field(\"session_path\", &self.session_path)\n\n .field(\"active\", &self.active)\n\n .field(\"signaler\", &self.signaler)\n\n .field(\"seat\", &self.seat)\n", "file_path": "src/backend/session/dbus/logind.rs", "rank": 92, "score": 124025.01459146998 }, { "content": "struct DisplayServerHandle {\n\n wlcs_display_server: WlcsDisplayServer,\n\n server: Option<(Sender<WlcsEvent>, JoinHandle<()>)>,\n\n next_device_id: u32,\n\n}\n\n\n\nimpl DisplayServerHandle {\n\n fn new() -> DisplayServerHandle {\n\n DisplayServerHandle {\n\n wlcs_display_server: WlcsDisplayServer {\n\n version: 3,\n\n start: Self::start,\n\n stop: Self::stop,\n\n create_client_socket: Self::create_client_socket,\n\n position_window_absolute: Self::position_window_absolute,\n\n create_pointer: Self::create_pointer,\n\n create_touch: Self::create_touch,\n\n get_descriptor: Self::get_descriptor,\n\n start_on_this_thread: None,\n\n },\n", "file_path": "wlcs_anvil/src/ffi_wrappers.rs", "rank": 93, "score": 123928.57296356378 }, { "content": "/// Renders a 
given [`Window`] using a provided renderer and frame.\n\n///\n\n/// - `scale` needs to be equivalent to the fractional scale the rendered result should have.\n\n/// - `location` is the position the window should be drawn at.\n\n/// - `damage` is the set of regions of the window that should be drawn.\n\n///\n\n/// Note: This function will render nothing, if you are not using\n\n/// [`crate::backend::renderer::utils::on_commit_buffer_handler`]\n\n/// to let smithay handle buffer management.\n\npub fn draw_window<R, E, F, T, P>(\n\n renderer: &mut R,\n\n frame: &mut F,\n\n window: &Window,\n\n scale: f64,\n\n location: P,\n\n damage: &[Rectangle<i32, Logical>],\n\n log: &slog::Logger,\n\n) -> Result<(), R::Error>\n\nwhere\n\n R: Renderer<Error = E, TextureId = T, Frame = F> + ImportAll,\n\n F: Frame<Error = E, TextureId = T>,\n\n E: std::error::Error,\n\n T: Texture + 'static,\n\n P: Into<Point<i32, Logical>>,\n\n{\n\n let location = location.into();\n\n if let Some(surface) = window.toplevel().get_surface() {\n\n draw_surface_tree(renderer, frame, surface, scale, location, damage, log)?;\n\n for (popup, p_location) in PopupManager::popups_for_surface(surface)\n", "file_path": "src/desktop/window.rs", "rank": 94, "score": 123693.78424261426 }, { "content": "/// Retrieve the parent of this surface\n\n///\n\n/// Returns `None` is this surface is a root surface\n\npub fn get_parent(surface: &WlSurface) -> Option<WlSurface> {\n\n if !surface.as_ref().is_alive() {\n\n return None;\n\n }\n\n PrivateSurfaceData::get_parent(surface)\n\n}\n\n\n", "file_path": "src/wayland/compositor/mod.rs", "rank": 95, "score": 122847.87053306581 }, { "content": "/// Check if a (sub)surface is effectively sync\n\npub fn is_effectively_sync(surface: &wl_surface::WlSurface) -> bool {\n\n let is_direct_sync = PrivateSurfaceData::with_states(surface, |state| {\n\n state\n\n .data_map\n\n .get::<SubsurfaceState>()\n\n .map(|s| s.sync.load(Ordering::Acquire))\n\n .unwrap_or(false)\n\n });\n\n if 
is_direct_sync {\n\n return true;\n\n }\n\n if let Some(parent) = PrivateSurfaceData::get_parent(surface) {\n\n is_effectively_sync(&parent)\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/wayland/compositor/handlers.rs", "rank": 96, "score": 122847.87053306581 }, { "content": "/// Retrieve the children of this surface\n\npub fn get_children(surface: &WlSurface) -> Vec<WlSurface> {\n\n if !surface.as_ref().is_alive() {\n\n return Vec::new();\n\n }\n\n PrivateSurfaceData::get_children(surface)\n\n}\n\n\n", "file_path": "src/wayland/compositor/mod.rs", "rank": 97, "score": 122847.87053306581 }, { "content": "fn egl_init(_: &X11Inner) -> Result<DrmNode, EGLInitError> {\n\n let display = EGLDisplay::new(&X11DefaultDisplay, None)?;\n\n let device = EGLDevice::device_for_display(&display)?;\n\n let path = path_to_type(device.drm_device_path()?, NodeType::Render)?;\n\n fcntl::open(&path, OFlag::O_RDWR | OFlag::O_CLOEXEC, Mode::empty())\n\n .map_err(Into::<io::Error>::into)\n\n .and_then(|fd| {\n\n DrmNode::from_fd(fd).map_err(|err| match err {\n\n CreateDrmNodeError::Io(err) => err,\n\n _ => unreachable!(),\n\n })\n\n })\n\n .map_err(EGLInitError::IO)\n\n}\n\n\n", "file_path": "src/backend/x11/mod.rs", "rank": 98, "score": 122845.61338060393 } ]
Rust
src/list.rs
mantono/giss
867d2143196e631a875207684c5be45abb3feae7
use crate::search::{GraphQLQuery, SearchIssues, SearchQuery, Type}; use crate::{ api::ApiError, cfg::Config, issue::{Issue, Root}, project::Project, sort::Sorting, AppErr, }; use crate::{user::Username, Target}; use core::fmt; use std::{ sync::mpsc::{SendError, SyncSender}, time::Instant, }; #[derive(Debug)] pub struct FilterConfig { assigned_only: bool, pull_requests: bool, review_requests: bool, issues: bool, labels: Vec<String>, project: Option<Project>, sorting: Sorting, search: Option<String>, state: StateFilter, limit: u32, } impl FilterConfig { pub fn types(&self) -> Vec<Type> { let mut types: Vec<Type> = Vec::with_capacity(3); if self.issues { types.push(Type::Issue) } if self.pull_requests { types.push(Type::PullRequest) } if self.review_requests { types.push(Type::ReviewRequest) } types } } impl From<&Config> for FilterConfig { fn from(cfg: &Config) -> Self { FilterConfig { assigned_only: cfg.assigned_only(), pull_requests: cfg.pulls(), review_requests: cfg.reviews(), labels: cfg.label(), project: cfg.project(), sorting: cfg.sorting(), search: cfg.search(), issues: cfg.issues(), state: cfg.state(), limit: cfg.limit(), } } } #[derive(Eq, PartialEq, Debug, Copy, Clone)] pub enum StateFilter { Open, Closed, All, } impl std::fmt::Display for StateFilter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let output = match self { StateFilter::Open => "open", StateFilter::Closed => "closed", StateFilter::All => "all", }; write!(f, "{}", output) } } pub async fn list_issues( channel: SyncSender<Issue>, user: &Option<Username>, targets: &[Target], token: &str, config: &FilterConfig, ) -> Result<(), AppErr> { let user: Option<String> = user.clone().map(|u| u.0); log::debug!("Filter config: {:?}", config); let start = Instant::now(); let issues = async { if config.issues { req_and_send(Type::Issue, &channel, &user, targets, token, config).await?; } Ok::<(), AppErr>(()) }; let pulls = async { if config.pull_requests { req_and_send(Type::PullRequest, 
&channel, &user, targets, token, config).await?; } Ok::<(), AppErr>(()) }; let reviews = async { if config.review_requests { req_and_send(Type::ReviewRequest, &channel, &user, targets, token, config).await?; } Ok::<(), AppErr>(()) }; futures::try_join!(issues, pulls, reviews)?; let end = Instant::now(); let elapsed = end.duration_since(start); log::debug!("API execution took {:?}", elapsed); Ok(()) } async fn req_and_send( kind: Type, channel: &SyncSender<Issue>, user: &Option<String>, targets: &[Target], token: &str, config: &FilterConfig, ) -> Result<(), AppErr> { let query: SearchIssues = create_query(kind, &user, targets, config); let issues: Vec<Issue> = api_request(query, token).await?; for issue in issues { channel.send(issue)?; } Ok(()) } fn create_query(kind: Type, user: &Option<String>, targets: &[Target], config: &FilterConfig) -> SearchIssues { let assignee: Option<String> = match config.assigned_only { false => None, true => match kind { Type::Issue | Type::PullRequest => user.clone(), Type::ReviewRequest => None, }, }; let review_requested: Option<String> = match config.review_requests { false => None, true => match kind { Type::ReviewRequest => user.clone(), Type::Issue | Type::PullRequest => None, }, }; SearchIssues { archived: false, assignee, resource_type: Some(kind), review_requested, sort: config.sorting, state: config.state, labels: config.labels.clone(), project: config.project.clone(), targets: targets.to_vec(), search: config.search.clone(), limit: config.limit, } } impl From<SendError<Issue>> for AppErr { fn from(_: SendError<Issue>) -> Self { AppErr::ChannelError } } impl From<ApiError> for AppErr { fn from(err: ApiError) -> Self { log::error!("{:?}", err); match err { ApiError::NoResponse(_) => AppErr::ApiError, ApiError::Response(code) => match code { 429 => AppErr::RateLimited, _ => AppErr::ApiError, }, } } } async fn api_request(search: SearchIssues, token: &str) -> Result<Vec<Issue>, ApiError> { let query: GraphQLQuery = 
search.build(); let issues: Root = crate::api::v4::request(token, query).await?; let issues: Vec<Issue> = issues.data.search.edges.into_iter().map(|n| n.node).collect(); Ok(issues) }
use crate::search::{GraphQLQuery, SearchIssues, SearchQuery, Type}; use crate::{ api::ApiError, cfg::Config, issue::{Issue, Root}, project::Project, sort::Sorting, AppErr, }; use crate::{user::Username, Target}; use core::fmt; use std::{ sync::mpsc::{SendError, SyncSender}, time::Instant, }; #[derive(Debug)] pub struct FilterConfig { assigned_only: bool, pull_requests: bool, review_requests: bool, issues: bool, labels: Vec<String>, project: Option<Project>, sorting: Sorting, search: Option<String>, state: StateFilter, limit: u32, } impl FilterConfig { pub fn types(&self) -> Vec<Type> { let mut types: Vec<Type> = Vec::with_capacity(3); if self.issues { types.push(Type::Issue) } if self.pull_requests { types.push(Type::PullRequest) } if self.review_requests { types.push(Type::ReviewRequest) } types } } impl From<&Config> for FilterConfig { fn from(cfg: &Config) -> Self { FilterConfig { assigned_only: cfg.assigned_only(), pull_requests: cfg.pulls(), review_requests: cfg.reviews(), labels: cfg.label(), project: cfg.project(), sorting: cfg.sorting(), search: cfg.search(), issues: cfg.issues(), state: cfg.state(), limit: cfg.limit(), } } } #[derive(Eq, PartialEq, Debug, Copy, Clone)] pub enum StateFilter { Open, Closed, All, } impl std::fmt::Display for StateFilter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let output = match self { StateFilter::Open => "open", StateFilter::Closed => "closed", StateFilter::All => "all", }; write!(f, "{}", output) } } pub async fn list_issues( channel: SyncSender<Issue>, user: &Option<Username>, targets: &[Target], token: &str, config: &FilterConfig, ) -> Result<(), AppErr> { let user: Option<String> = user.clone().map(|u| u.0); log::debug!("Filter config: {:?}", config); let start = Instant::now(); let issues = async { if config.issues { req_and_send(Type::Issue, &channel, &user, targets, token, config).await?; } Ok::<(), AppErr>(()) }; let pulls = async { if config.pull_requests { req_and_send(Type::PullRequest, 
&channel, &user, targets, token, config).await?; } Ok::<(), AppErr>(()) }; let reviews = async { if config.review_requests { req_and_send(Type::ReviewRequest, &channel, &user, targets, token, config).await?; } Ok::<(), AppErr>(()) }; futures::try_join!(issues, pulls, reviews)?; let end = Instant::now(); let elapsed = end.duration_since(start); log::debug!("API execution took {:?}", elapsed); Ok(()) } async fn req_and_send( kind: Type, channel: &SyncSender<Issue>, user: &Option<String>, targets: &[Target], token: &str, config: &FilterConfig, ) -> Result<(), AppErr> { let query: SearchIssues = create_query(kind, &user, targets, config); let issues: Vec<Issue> = api_request(query, token).await?; for issue in issues { channel.send(issue)?; } Ok(()) }
impl From<SendError<Issue>> for AppErr { fn from(_: SendError<Issue>) -> Self { AppErr::ChannelError } } impl From<ApiError> for AppErr { fn from(err: ApiError) -> Self { log::error!("{:?}", err); match err { ApiError::NoResponse(_) => AppErr::ApiError, ApiError::Response(code) => match code { 429 => AppErr::RateLimited, _ => AppErr::ApiError, }, } } } async fn api_request(search: SearchIssues, token: &str) -> Result<Vec<Issue>, ApiError> { let query: GraphQLQuery = search.build(); let issues: Root = crate::api::v4::request(token, query).await?; let issues: Vec<Issue> = issues.data.search.edges.into_iter().map(|n| n.node).collect(); Ok(issues) }
fn create_query(kind: Type, user: &Option<String>, targets: &[Target], config: &FilterConfig) -> SearchIssues { let assignee: Option<String> = match config.assigned_only { false => None, true => match kind { Type::Issue | Type::PullRequest => user.clone(), Type::ReviewRequest => None, }, }; let review_requested: Option<String> = match config.review_requests { false => None, true => match kind { Type::ReviewRequest => user.clone(), Type::Issue | Type::PullRequest => None, }, }; SearchIssues { archived: false, assignee, resource_type: Some(kind), review_requested, sort: config.sorting, state: config.state, labels: config.labels.clone(), project: config.project.clone(), targets: targets.to_vec(), search: config.search.clone(), limit: config.limit, } }
function_block-full_function
[ { "content": "fn save_username(token: &str, username: &str) -> Result<(), std::io::Error> {\n\n let token_hash: String = hash_token(token);\n\n let mut path: PathBuf = get_users_dir();\n\n std::fs::create_dir_all(&path)?;\n\n path.push(token_hash);\n\n let mut file: File = File::create(&path)?;\n\n file.write_all(username.as_bytes())\n\n}\n\n\n", "file_path": "src/user.rs", "rank": 1, "score": 157065.61098110044 }, { "content": "pub fn display(channel: Receiver<Issue>, cfg: DisplayConfig) -> Result<(), AppErr> {\n\n let mut limit: u32 = cfg.limit * 3;\n\n let mut queue: Vec<Issue> = Vec::with_capacity(limit as usize);\n\n while limit > 0 {\n\n match channel.recv_timeout(Duration::from_secs(20)) {\n\n Ok(issue) => {\n\n queue.push(issue);\n\n limit -= 1;\n\n }\n\n Err(e) => match e {\n\n RecvTimeoutError::Timeout => return Err(AppErr::Timeout),\n\n RecvTimeoutError::Disconnected => limit = 0,\n\n },\n\n };\n\n }\n\n queue.sort_unstable_by(|i0, i1| cfg.sorting.sort(i0, i1));\n\n queue\n\n .into_iter()\n\n .unique_by(|i| i.id)\n\n .take(cfg.limit as usize)\n\n .for_each(|i| print_issue(i, true, &cfg));\n\n Ok(())\n\n}\n\n\n", "file_path": "src/ui.rs", "rank": 2, "score": 140182.60776143012 }, { "content": "fn hash_token(token: &str) -> String {\n\n let mut hasher = Sha256::new();\n\n hasher.input(token);\n\n format!(\"{:02x}\", hasher.result())\n\n}\n\n\n", "file_path": "src/user.rs", "rank": 3, "score": 124754.69995521293 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Root {\n\n pub data: Data,\n\n}\n\n\n", "file_path": "src/user.rs", "rank": 4, "score": 112218.36569377835 }, { "content": "fn get_saved_username(token: &str) -> Option<String> {\n\n let token_hash: String = hash_token(token);\n\n let mut path: PathBuf = get_users_dir();\n\n path.push(token_hash);\n\n if path.exists() {\n\n let content = std::fs::read_to_string(path).expect(\"Unable to read content of file\");\n\n Some(content)\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "src/user.rs", 
"rank": 5, "score": 107123.61767947319 }, { "content": "fn print_type(stream: &mut StandardStream, issue: &Issue, cfg: &DisplayConfig) {\n\n let kind: Type = match issue.kind {\n\n Type::Issue => Type::Issue,\n\n _ => match &cfg.user {\n\n Some(user) => match issue.has_review_request(&user.0) {\n\n true => Type::ReviewRequest,\n\n false => Type::PullRequest,\n\n },\n\n None => Type::PullRequest,\n\n },\n\n };\n\n\n\n match kind {\n\n crate::search::Type::Issue => write(stream, \"I \", Some(Color::Blue)),\n\n crate::search::Type::PullRequest => write(stream, \"P \", Some(Color::Magenta)),\n\n crate::search::Type::ReviewRequest => {\n\n write(stream, \"P\", Some(Color::Magenta));\n\n write(stream, \"R\", Some(Color::Yellow));\n\n }\n\n };\n\n write(stream, \"| \", Some(Color::Green));\n\n}\n\n\n", "file_path": "src/ui.rs", "rank": 6, "score": 99722.28050623492 }, { "content": "fn print_issue(issue: Issue, print_repo: bool, cfg: &DisplayConfig) {\n\n let use_colors: ColorChoice = cfg.colors;\n\n let title: String = truncate(issue.title.clone(), 50);\n\n let assignees: String = issue\n\n .assignees\n\n .nodes\n\n .iter()\n\n .map(|a: &UserFields| &a.login)\n\n .map(|s: &String| format!(\"{}{}\", \"@\", s))\n\n .collect::<Vec<String>>()\n\n .join(\", \");\n\n\n\n let repo: String = if print_repo {\n\n issue.repository.name_with_owner.clone()\n\n } else {\n\n String::from(\"\")\n\n };\n\n\n\n let labels: String = issue\n\n .labels\n", "file_path": "src/ui.rs", "rank": 7, "score": 93727.9287398302 }, { "content": "pub trait SearchQuery {\n\n fn search_type(&self) -> Option<String>;\n\n fn build(&self) -> GraphQLQuery;\n\n}\n\n\n\n#[derive(Debug, Deserialize, Copy, Clone)]\n\npub enum Type {\n\n Issue,\n\n PullRequest,\n\n ReviewRequest,\n\n}\n\n\n\nimpl Display for Type {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let tp: &str = match self {\n\n Type::Issue => \"I\",\n\n Type::PullRequest => \"P\",\n\n Type::ReviewRequest => \"R\",\n\n 
};\n\n write!(f, \"{}\", tp)\n", "file_path": "src/search.rs", "rank": 8, "score": 88282.82167919185 }, { "content": "fn formatter(buf: &mut Formatter, record: &Record) -> io::Result<()> {\n\n match record.level() {\n\n Level::Info => writeln!(buf, \"{}\", record.args()),\n\n Level::Warn => {\n\n let mut style = buf.style();\n\n style.set_color(Color::Yellow);\n\n writeln!(buf, \"{}: {}\", style.value(record.level()), record.args())\n\n }\n\n Level::Error => {\n\n let mut style = buf.style();\n\n style.set_color(Color::Red);\n\n writeln!(buf, \"{}: {}\", style.value(record.level()), record.args())\n\n }\n\n _ => writeln!(buf, \"{}: {}\", record.level(), record.args()),\n\n }\n\n}\n", "file_path": "src/logger.rs", "rank": 9, "score": 81205.27830643996 }, { "content": "fn write(stream: &mut StandardStream, content: &str, color: Option<Color>) {\n\n stream.set_color(ColorSpec::new().set_fg(color)).unwrap();\n\n write!(stream, \"{}\", content).unwrap();\n\n}\n", "file_path": "src/ui.rs", "rank": 10, "score": 75317.18605178379 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Viewer {\n\n login: String,\n\n id: String,\n\n}\n\n\n\nasync fn api_lookup_username(token: &str) -> Result<User, AppErr> {\n\n let query = GraphQLQuery {\n\n variables: serde_json::Value::Null,\n\n query: String::from(include_str!(\"../data/graphql/queries/get_user.graphql\")),\n\n operation_name: String::from(\"GetUser\"),\n\n };\n\n\n\n let root: Root = crate::api::v4::request(token, query).await?;\n\n let user = User {\n\n login: root.data.viewer.login,\n\n id: root.data.viewer.id,\n\n };\n\n Ok(user)\n\n}\n\n\n", "file_path": "src/user.rs", "rank": 11, "score": 67539.41088700936 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Data {\n\n pub viewer: Viewer,\n\n}\n\n\n", "file_path": "src/user.rs", "rank": 12, "score": 67539.41088700936 }, { "content": "pub fn dbg_info() -> String {\n\n format!(\n\n \"Crate version {}.\\nBuilt from commit {} by {} for target {} with profile '{}' 
and features = {:?}.\",\n\n crate_version!(),\n\n built_info::GIT_VERSION.unwrap(),\n\n built_info::RUSTC_VERSION,\n\n built_info::TARGET,\n\n built_info::PROFILE,\n\n built_info::FEATURES\n\n )\n\n}\n\n\n\n#[allow(dead_code)]\n\nmod built_info {\n\n include!(concat!(env!(\"OUT_DIR\"), \"/built.rs\"));\n\n}\n", "file_path": "src/dbg.rs", "rank": 13, "score": 66697.96164654478 }, { "content": "fn delimiter(stream: &mut StandardStream) {\n\n write(stream, \" | \", Some(Color::Green));\n\n}\n\n\n", "file_path": "src/ui.rs", "rank": 14, "score": 63466.203232665015 }, { "content": "pub fn setup_logging(verb: &Verbosity) {\n\n match std::env::var(\"RUST_LOG\") {\n\n Ok(_) => log_by_env_var(),\n\n Err(_) => log_by_cmd_arg(verb),\n\n }\n\n}\n\n\n", "file_path": "src/logger.rs", "rank": 15, "score": 62676.17569235645 }, { "content": "pub fn read_repo_from_file() -> Option<String> {\n\n let current_path: &Path = Path::new(\".\");\n\n let repo_root: PathBuf = match traverse(&current_path) {\n\n Some(root) => root,\n\n None => return None,\n\n };\n\n let config_file: PathBuf = repo_root.join(\".git\").join(\"config\");\n\n log::debug!(\"Using Git config file: '{:?}'\", config_file);\n\n let file_content: String = fs::read_to_string(config_file).expect(\"Could not find a git config\");\n\n\n\n let lines: Vec<&str> = file_content.lines().filter(|f| f.contains(\"github.com\")).collect();\n\n\n\n let repo: &str = lines\n\n .first()\n\n .expect(\"No Github repository found\")\n\n .split_terminator(':')\n\n .last()\n\n .expect(\"No match\");\n\n\n\n Some(repo.trim_end_matches(\".git\").to_string())\n\n}\n\n\n", "file_path": "src/args.rs", "rank": 16, "score": 60879.14007121088 }, { "content": "fn get_users_dir() -> PathBuf {\n\n let mut path: PathBuf = dirs_next::home_dir().expect(\"Cannot find home dir\");\n\n path.push([\".config\", \"giss\", \"usernames\"].join(\"/\"));\n\n path\n\n}\n\n\n", "file_path": "src/user.rs", "rank": 17, "score": 57259.23870240766 }, { "content": 
"#[derive(Debug, Copy, Clone)]\n\nenum Flag {\n\n True,\n\n False,\n\n Auto,\n\n}\n\n\n\nimpl FromStr for Flag {\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s.to_lowercase().as_str() {\n\n \"true\" | \"on\" => Ok(Flag::True),\n\n \"false\" | \"off\" => Ok(Flag::False),\n\n \"auto\" => Ok(Flag::Auto),\n\n _ => Err(format!(\"Unrecognized option {}\", s)),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/cfg.rs", "rank": 18, "score": 44687.48677511643 }, { "content": " pub trait Closeable {\n\n fn is_open(&self) -> bool;\n\n }\n\n\n", "file_path": "src/github_resources.rs", "rank": 19, "score": 38385.07138523609 }, { "content": "fn main() {\n\n let mut default: Options = Options::default();\n\n let options = default\n\n .set_compiler(true)\n\n .set_cfg(true)\n\n .set_ci(false)\n\n .set_dependencies(false)\n\n .set_git(true)\n\n .set_env(true)\n\n .set_features(true);\n\n\n\n let src: PathBuf = std::env::var(\"CARGO_MANIFEST_DIR\").unwrap().into();\n\n let dst: PathBuf = Path::new(&std::env::var(\"OUT_DIR\").unwrap()).join(\"built.rs\");\n\n\n\n built::write_built_file_with_opts(&options, &src, &dst).expect(\"Failed to acquire build-time information\");\n\n}\n", "file_path": "build.rs", "rank": 20, "score": 38258.21592191409 }, { "content": " pub trait ModDate: Ord {\n\n fn mod_time(&self) -> u64;\n\n }\n\n}\n", "file_path": "src/github_resources.rs", "rank": 21, "score": 34865.080134137745 }, { "content": "fn log_by_env_var() {\n\n env_logger::Builder::from_default_env().format(formatter).init()\n\n}\n\n\n", "file_path": "src/logger.rs", "rank": 22, "score": 34103.21838571075 }, { "content": "fn log_by_cmd_arg(verb: &Verbosity) {\n\n let filter: LevelFilter = match verb.level() {\n\n 0 => LevelFilter::Off,\n\n 1 => LevelFilter::Error,\n\n 2 => LevelFilter::Warn,\n\n 3 => LevelFilter::Info,\n\n 4 => LevelFilter::Debug,\n\n 5 => LevelFilter::Trace,\n\n _ => panic!(\"Invalid verbosity level: {}\", 
verb.level()),\n\n };\n\n\n\n env_logger::builder().format(formatter).filter_level(filter).init()\n\n}\n\n\n", "file_path": "src/logger.rs", "rank": 23, "score": 29843.959036800145 }, { "content": "fn traverse(path: &Path) -> Option<PathBuf> {\n\n let path_full: PathBuf = path\n\n .to_path_buf()\n\n .canonicalize()\n\n .expect(\"Could not create the canonical path\");\n\n\n\n let git_config: PathBuf = path_full.join(\".git\").join(\"config\");\n\n if git_config.exists() {\n\n Some(path_full)\n\n } else {\n\n match path_full.parent() {\n\n Some(parent) => traverse(parent),\n\n None => None,\n\n }\n\n }\n\n}\n", "file_path": "src/args.rs", "rank": 24, "score": 27372.573084289885 }, { "content": "fn truncate(string: String, max_length: usize) -> String {\n\n let new_length: usize = std::cmp::min(string.len(), max_length);\n\n if new_length < string.len() {\n\n string[..new_length].to_string()\n\n } else {\n\n string\n\n }\n\n}\n\n\n", "file_path": "src/ui.rs", "rank": 25, "score": 25929.6093662218 }, { "content": "use std::{fmt::Display, str::FromStr};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Project {\n\n owner: String,\n\n repo: Option<String>,\n\n id: u32,\n\n}\n\n\n\nimpl FromStr for Project {\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let parts: Vec<&str> = s.split('/').collect();\n\n match parts.len() {\n\n 2 => {\n\n let owner: &str = parts.first().expect(\"Exactly two should be present\");\n\n let id: u32 = parts\n\n .last()\n\n .expect(\"Exactly two should be present\")\n", "file_path": "src/project.rs", "rank": 26, "score": 25604.88062433183 }, { "content": "use lazy_static::lazy_static;\n\nuse regex::Regex;\n\nuse std::{fmt, str::FromStr};\n\n\n\n#[derive(Debug)]\n\npub enum Target {\n\n Organization(String),\n\n Repository(String, String),\n\n}\n\n\n\nlazy_static! 
{\n\n static ref TARGET: Regex = Regex::new(r\"[\\w\\-\\.]+\").unwrap();\n\n}\n\n\n\nimpl FromStr for Target {\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let parts: Vec<String> = TARGET\n\n .find_iter(s)\n", "file_path": "src/target.rs", "rank": 27, "score": 25602.065267969752 }, { "content": " .into_iter()\n\n .map(|x| x.as_str().to_string())\n\n .collect();\n\n match parts.len() {\n\n 1 => Ok(Target::Organization(parts[0].clone())),\n\n 2 => Ok(Target::Repository(parts[0].clone(), parts[1].clone())),\n\n _ => Err(format!(\"Could not resolve a valid target from '{}'\", s)),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Target {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Target::Organization(name) => write!(f, \"org:{}\", name),\n\n Target::Repository(owner, name) => write!(f, \"repo:{}/{}\", owner, name),\n\n }\n\n }\n\n}\n\n\n\nimpl Clone for Target {\n\n fn clone(&self) -> Target {\n\n match self {\n\n Target::Organization(name) => Target::Organization(name.clone()),\n\n Target::Repository(owner, name) => Target::Repository(owner.clone(), name.clone()),\n\n }\n\n }\n\n}\n", "file_path": "src/target.rs", "rank": 28, "score": 25600.05264457683 }, { "content": " owner: owner.to_string(),\n\n repo: Some(repo.to_string()),\n\n id,\n\n };\n\n Ok(project)\n\n }\n\n _ => Err(format!(\n\n \"Invalid argument for project '{}', must have format org/repo/number or org/number\",\n\n s\n\n )),\n\n }\n\n }\n\n}\n\n\n\nimpl Display for Project {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self.repo.clone() {\n\n Some(repo) => write!(f, \"{}/{}/{}\", self.owner, repo, self.id)?,\n\n None => write!(f, \"{}/{}\", self.owner, self.id)?,\n\n };\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/project.rs", "rank": 29, "score": 25599.120080223744 }, { "content": " .parse::<u32>()\n\n .map_err(|_| String::from(\"id must be a number\"))?;\n\n\n\n let project = Project 
{\n\n owner: owner.to_string(),\n\n repo: None,\n\n id,\n\n };\n\n Ok(project)\n\n }\n\n 3 => {\n\n let owner: &str = parts.first().expect(\"Exactly three should be present\");\n\n let repo: &str = parts.get(1).expect(\"Exactly three should be present\");\n\n let id: u32 = parts\n\n .last()\n\n .expect(\"Exactly three should be present\")\n\n .parse::<u32>()\n\n .map_err(|_| String::from(\"id must be a number\"))?;\n\n\n\n let project = Project {\n", "file_path": "src/project.rs", "rank": 30, "score": 25593.480330580725 }, { "content": "use std::{cmp::Ordering, fmt, str::FromStr};\n\n\n\nuse fmt::Display;\n\n\n\nuse crate::issue::Issue;\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Sorting(pub Property, pub Order);\n\n\n\nimpl Display for Sorting {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}-{}\", self.0, self.1)\n\n }\n\n}\n\n\n\nimpl Sorting {\n\n pub fn sort(&self, i0: &Issue, i1: &Issue) -> Ordering {\n\n let Sorting(prop, order) = self;\n\n order.order(prop.sort(i0, i1))\n\n }\n", "file_path": "src/sort.rs", "rank": 31, "score": 25428.292971056962 }, { "content": "}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum Order {\n\n Descending,\n\n Ascending,\n\n}\n\n\n\nimpl Default for Order {\n\n fn default() -> Self {\n\n Order::Descending\n\n }\n\n}\n\n\n\nimpl FromStr for Order {\n\n type Err = &'static str;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s.to_lowercase().as_str() {\n\n \"asc\" | \"ascending\" => Ok(Order::Ascending),\n", "file_path": "src/sort.rs", "rank": 32, "score": 25423.0590109574 }, { "content": " \"desc\" | \"descending\" => Ok(Order::Descending),\n\n _ => Err(\"Invalid sort order\"),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Order {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let output: &str = match self {\n\n Order::Ascending => \"asc\",\n\n Order::Descending => \"desc\",\n\n };\n\n write!(f, \"{}\", output)\n\n }\n\n}\n", "file_path": 
"src/sort.rs", "rank": 33, "score": 25418.30600970682 }, { "content": " match s.to_lowercase().as_str() {\n\n \"created\" => Ok(Property::Created),\n\n \"updated\" => Ok(Property::Updated),\n\n \"comments\" => Ok(Property::Comments),\n\n \"reactions\" => Ok(Property::Reactions),\n\n _ => Err(\"Invalid property\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Display for Property {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let s: &str = match self {\n\n Property::Created => \"created\",\n\n Property::Updated => \"updated\",\n\n Property::Comments => \"comments;\",\n\n Property::Reactions => \"reactions\",\n\n };\n\n write!(f, \"{}\", s)\n\n }\n", "file_path": "src/sort.rs", "rank": 34, "score": 25416.528882674556 }, { "content": "\n\nimpl Order {\n\n pub fn order(&self, order: Ordering) -> Ordering {\n\n match self {\n\n Order::Ascending => order,\n\n Order::Descending => order.reverse(),\n\n }\n\n }\n\n}\n\n\n\nimpl Default for Property {\n\n fn default() -> Self {\n\n Property::Updated\n\n }\n\n}\n\n\n\nimpl FromStr for Property {\n\n type Err = &'static str;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n", "file_path": "src/sort.rs", "rank": 35, "score": 25416.520035738515 }, { "content": "}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum Property {\n\n Created,\n\n Updated,\n\n Comments,\n\n Reactions,\n\n}\n\n\n\nimpl Property {\n\n pub fn sort(&self, i0: &Issue, i1: &Issue) -> Ordering {\n\n match self {\n\n Property::Created => i0.created_at.cmp(&i1.created_at),\n\n Property::Updated => i0.updated_at.cmp(&i1.updated_at),\n\n Property::Comments => i0.comments.total_count.cmp(&i1.comments.total_count),\n\n Property::Reactions => i0.reactions.total_count.cmp(&i1.reactions.total_count),\n\n }\n\n }\n\n}\n", "file_path": "src/sort.rs", "rank": 36, "score": 25414.630064006644 }, { "content": " }\n\n}\n\n\n\npub struct SearchIssues {\n\n pub state: StateFilter,\n\n pub assignee: Option<String>,\n\n pub review_requested: 
Option<String>,\n\n pub archived: bool,\n\n pub labels: Vec<String>,\n\n pub project: Option<Project>,\n\n pub resource_type: Option<Type>,\n\n pub targets: Vec<Target>,\n\n pub sort: Sorting,\n\n pub search: Option<String>,\n\n pub limit: u32,\n\n}\n\n\n\nimpl SearchQuery for SearchIssues {\n\n fn search_type(&self) -> Option<String> {\n\n match self.resource_type {\n", "file_path": "src/search.rs", "rank": 37, "score": 25247.178530706307 }, { "content": "use crate::{list::StateFilter, project::Project};\n\nuse crate::{sort::Sorting, Target};\n\nuse itertools::Itertools;\n\nuse serde::Deserialize;\n\nuse serde::Serialize;\n\nuse serde_json::json;\n\nuse std::fmt::Display;\n\n\n\n#[derive(Serialize, Debug)]\n\npub struct GraphQLQuery {\n\n pub query: String,\n\n pub variables: serde_json::Value,\n\n pub operation_name: String,\n\n}\n\n\n", "file_path": "src/search.rs", "rank": 38, "score": 25238.16974679323 }, { "content": " Some(Type::Issue) => Some(String::from(\"type:issue\")),\n\n Some(Type::PullRequest) => Some(String::from(\"type:pr\")),\n\n Some(Type::ReviewRequest) => {\n\n let reviewer = self.review_requested.as_ref().expect(\"Reviewer was not sent\");\n\n let query: String = format!(\"type:pr review-requested:{}\", reviewer);\n\n Some(query)\n\n }\n\n None => None,\n\n }\n\n }\n\n\n\n fn build(&self) -> GraphQLQuery {\n\n let parts: Vec<String> = [\n\n self.search_type(),\n\n self.state(),\n\n self.assignee(),\n\n Some(self.archived()),\n\n self.users(),\n\n self.labels(),\n\n self.project(),\n", "file_path": "src/search.rs", "rank": 39, "score": 25237.987507704656 }, { "content": " Some(self.sort()),\n\n self.search(),\n\n ]\n\n .iter()\n\n .filter_map(|v| v.clone())\n\n .collect();\n\n\n\n let search_query: String = parts.join(\" \");\n\n\n\n log::debug!(\"Search query: '{}'\", search_query);\n\n GraphQLQuery {\n\n variables: json!({\n\n \"searchQuery\": search_query,\n\n \"limit\": self.limit\n\n }),\n\n query: 
String::from(include_str!(\"../data/graphql/queries/search_issues.graphql\")),\n\n operation_name: String::from(\"SearchIssues\"),\n\n }\n\n }\n\n}\n", "file_path": "src/search.rs", "rank": 40, "score": 25234.97824979303 }, { "content": "\n\nimpl SearchIssues {\n\n fn state(&self) -> Option<String> {\n\n match self.state {\n\n StateFilter::All => None,\n\n StateFilter::Open => Some(String::from(\"state:open\")),\n\n StateFilter::Closed => Some(String::from(\"state:closed\")),\n\n }\n\n }\n\n\n\n fn assignee(&self) -> Option<String> {\n\n match &self.assignee {\n\n Some(name) => Some(format!(\"assignee:{}\", name)),\n\n None => None,\n\n }\n\n }\n\n\n\n fn archived(&self) -> String {\n\n String::from(\"archived:false\")\n\n }\n", "file_path": "src/search.rs", "rank": 41, "score": 25233.013313412088 }, { "content": "\n\n fn users(&self) -> Option<String> {\n\n if self.targets.is_empty() {\n\n None\n\n } else {\n\n let users: String = self.targets.iter().map(|user| user.to_string()).join(\" \");\n\n Some(users)\n\n }\n\n }\n\n\n\n fn labels(&self) -> Option<String> {\n\n if self.labels.is_empty() {\n\n None\n\n } else {\n\n Some(self.labels.iter().map(|l| format!(\"label:{}\", l)).join(\" \"))\n\n }\n\n }\n\n\n\n fn project(&self) -> Option<String> {\n\n self.project.clone().map(|p| format!(\"project:{}\", p))\n", "file_path": "src/search.rs", "rank": 42, "score": 25230.758158942754 }, { "content": " }\n\n\n\n fn sort(&self) -> String {\n\n format!(\"sort:{}\", self.sort)\n\n }\n\n\n\n fn search(&self) -> Option<String> {\n\n self.search.clone().map(|s| format!(\"in:title,body {}\", s))\n\n }\n\n}\n", "file_path": "src/search.rs", "rank": 43, "score": 25226.330684952238 }, { "content": "use crate::{search::GraphQLQuery, AppErr};\n\nuse serde::Deserialize;\n\nuse sha2::{Digest, Sha256};\n\nuse std::io::Write;\n\nuse std::path::PathBuf;\n\nuse std::{fs::File, str::FromStr};\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct User {\n\n pub login: String,\n\n pub id: 
String,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Username(pub String);\n\n\n\nimpl Username {\n\n pub async fn from_token(token: &str) -> Result<Username, AppErr> {\n\n match get_saved_username(token) {\n\n Some(username) => Ok(Username(username)),\n", "file_path": "src/user.rs", "rank": 44, "score": 25117.57950487824 }, { "content": " None => {\n\n let username: String = api_lookup_username(token).await?.login;\n\n save_username(token, &username)?;\n\n Ok(Username(username))\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for Username {\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n Ok(Username(s.to_string()))\n\n }\n\n}\n\n\n\nimpl From<std::io::Error> for AppErr {\n\n fn from(_: std::io::Error) -> Self {\n\n AppErr::TokenWriteError\n", "file_path": "src/user.rs", "rank": 45, "score": 25107.342628256516 }, { "content": " }\n\n}\n\n\n\nimpl From<reqwest::Error> for AppErr {\n\n fn from(e: reqwest::Error) -> Self {\n\n log::error!(\"Request failed {}\", e);\n\n AppErr::ApiError\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n", "file_path": "src/user.rs", "rank": 46, "score": 25097.621171059618 }, { "content": "}\n\n\n\nimpl Issue {\n\n pub fn has_review_request(&self, user: &str) -> bool {\n\n match &self.review_requets {\n\n Some(req) => req\n\n .nodes\n\n .iter()\n\n .any(|n| n.requested_reviewer.has_login(user)),\n\n None => false,\n\n }\n\n }\n\n\n\n pub fn link(&self) -> String {\n\n let repo: &String = &self.repository.name_with_owner;\n\n let kind: &str = match self.kind {\n\n Type::Issue => \"issues\",\n\n Type::PullRequest | Type::ReviewRequest => \"pull\",\n\n };\n\n let number: u32 = self.number;\n", "file_path": "src/issue.rs", "rank": 47, "score": 24987.673942340658 }, { "content": " pub reactions: Reactions,\n\n pub assignees: AssigneeNode,\n\n #[serde(alias = \"reviewRequests\")]\n\n pub review_requets: Option<ReviewRequestNode>,\n\n pub labels: LabelNode,\n\n pub repository: Repository,\n\n #[serde(alias = 
\"__typename\")]\n\n pub kind: Type,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Team {\n\n pub name: String,\n\n pub members: Vec<UserFields>,\n\n}\n\n\n\nimpl PartialEq for Issue {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.id == other.id\n\n }\n", "file_path": "src/issue.rs", "rank": 48, "score": 24980.942960092925 }, { "content": " }\n\n}\n\n\n\nimpl ghrs::Closeable for Issue {\n\n fn is_open(&self) -> bool {\n\n match self.state {\n\n ghrs::State::Open => true,\n\n ghrs::State::Closed => false,\n\n }\n\n }\n\n}\n", "file_path": "src/issue.rs", "rank": 49, "score": 24979.641372841892 }, { "content": "use crate::{github_resources::ghrs, search::Type};\n\nuse serde::Deserialize;\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Root {\n\n pub data: Data,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Data {\n\n pub search: Search,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Search {\n\n pub edges: Vec<Node>,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Node {\n", "file_path": "src/issue.rs", "rank": 50, "score": 24978.202732217396 }, { "content": " pub members: Option<AssigneeNode>,\n\n}\n\n\n\nimpl Assignable {\n\n pub fn has_login(&self, login: &str) -> bool {\n\n match self.assignable_type.as_str() {\n\n \"Team\" => self\n\n .members\n\n .as_ref()\n\n .expect(\"Type is Team so there must be members\")\n\n .nodes\n\n .iter()\n\n .any(|member| member.login == login),\n\n \"User\" => self\n\n .login\n\n .as_ref()\n\n .expect(\"Type is User so there must be a login\")\n\n .eq(login),\n\n _ => panic!(\"Unrecognized type: {}\", self.assignable_type),\n\n }\n", "file_path": "src/issue.rs", "rank": 51, "score": 24976.081774453876 }, { "content": " pub node: Issue,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Issue {\n\n pub url: String,\n\n #[serde(alias = \"databaseId\")]\n\n pub id: u64,\n\n pub number: u32,\n\n pub title: String,\n\n #[serde(alias = \"bodyText\")]\n\n pub body: Option<String>,\n\n 
#[serde(alias = \"createdAt\")]\n\n pub created_at: String,\n\n #[serde(alias = \"updatedAt\")]\n\n pub updated_at: String,\n\n #[serde(alias = \"issueState\")]\n\n #[serde(alias = \"pullRequestState\")]\n\n pub state: ghrs::State,\n\n pub comments: Comments,\n", "file_path": "src/issue.rs", "rank": 52, "score": 24974.07799957401 }, { "content": " format!(\"https://github.com/{}/{}/{}\", repo, kind, number)\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct LabelNode {\n\n pub nodes: Vec<Label>,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Label {\n\n pub name: String,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct UserFields {\n\n pub login: String,\n\n pub id: String,\n\n}\n\n\n", "file_path": "src/issue.rs", "rank": 53, "score": 24974.04580462662 }, { "content": " pub nodes: Vec<UserFields>,\n\n}\n\n#[derive(Debug, Deserialize)]\n\npub struct ReviewRequestNode {\n\n pub nodes: Vec<RequestedReviewer>,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct RequestedReviewer {\n\n #[serde(alias = \"requestedReviewer\")]\n\n pub requested_reviewer: Assignable,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Assignable {\n\n #[serde(alias = \"__typename\")]\n\n pub assignable_type: String,\n\n pub login: Option<String>,\n\n pub id: Option<String>,\n\n pub name: Option<String>,\n", "file_path": "src/issue.rs", "rank": 54, "score": 24973.26132415156 }, { "content": "#[derive(Debug, Deserialize)]\n\npub struct Repository {\n\n #[serde(alias = \"nameWithOwner\")]\n\n pub name_with_owner: String,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Comments {\n\n #[serde(alias = \"totalCount\")]\n\n pub total_count: u32,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Reactions {\n\n #[serde(alias = \"totalCount\")]\n\n pub total_count: u32,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct AssigneeNode {\n", "file_path": "src/issue.rs", "rank": 55, "score": 24970.142723126515 }, { "content": "use 
std::str::FromStr;\n\n\n\nuse crate::{\n\n args::read_repo_from_file,\n\n list::StateFilter,\n\n project::Project,\n\n sort::{Order, Property, Sorting},\n\n target::Target,\n\n user::Username,\n\n AppErr,\n\n};\n\nuse structopt::StructOpt;\n\nuse termcolor::ColorChoice;\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(name = \"giss\", author, about)]\n\npub struct Config {\n\n /// Name of target(s)\n\n ///\n\n /// Name of the targets for the action. Can be a combination of one or several repositories,\n", "file_path": "src/cfg.rs", "rank": 64, "score": 23.534325152419026 }, { "content": "use std::{io::Write, sync::mpsc::RecvTimeoutError};\n\nuse std::{sync::mpsc::Receiver, time::Duration};\n\n\n\nuse itertools::Itertools;\n\nuse termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor};\n\nuse user::Username;\n\n\n\nuse crate::{\n\n cfg::Config,\n\n issue::{Issue, Label, UserFields},\n\n search::Type,\n\n sort::Sorting,\n\n user, AppErr,\n\n};\n\n\n\npub struct DisplayConfig {\n\n colors: ColorChoice,\n\n sorting: Sorting,\n\n user: Option<Username>,\n\n limit: u32,\n", "file_path": "src/ui.rs", "rank": 65, "score": 22.87724846242108 }, { "content": " Some(repo) => match repo.parse::<Target>() {\n\n Ok(target) => Ok(vec![target]),\n\n Err(_) => Err(AppErr::InvalidTarget(repo)),\n\n },\n\n None => Err(AppErr::NoTarget),\n\n }\n\n } else {\n\n Ok(self.target.clone())\n\n }\n\n }\n\n\n\n pub fn limit(&self) -> u32 {\n\n self.limit\n\n }\n\n\n\n pub fn state(&self) -> StateFilter {\n\n if self.open && self.closed {\n\n StateFilter::All\n\n } else if self.closed {\n\n StateFilter::Closed\n", "file_path": "src/cfg.rs", "rank": 66, "score": 22.63712529763702 }, { "content": "}\n\n\n\nimpl Config {\n\n pub fn token(&self) -> Result<String, AppErr> {\n\n self.token.clone().ok_or(AppErr::MissingToken)\n\n }\n\n\n\n pub fn username(&self) -> Option<Username> {\n\n match &self.user {\n\n Some(user) => Some(user.clone()),\n\n None => match self.token.clone() {\n\n 
Some(token) => futures::executor::block_on(Username::from_token(&token)).ok(),\n\n None => None,\n\n },\n\n }\n\n }\n\n\n\n pub fn target(&self) -> Result<Vec<Target>, AppErr> {\n\n if self.target.is_empty() {\n\n match read_repo_from_file() {\n", "file_path": "src/cfg.rs", "rank": 67, "score": 22.598003616035754 }, { "content": " Assigned only\n\n\n\n Only include issues and pull requests assigned to user\n\n -c, --closed\n\n Show closed issues or pull requests\n\n\n\n Include issues, pull request or review requests that are closed or merged\n\n -D, --debug\n\n Prind debug information\n\n\n\n Print debug information about current build for binary, useful for when an issue is encountered and reported\n\n -h, --help\n\n Prints help information\n\n\n\n -i, --issues\n\n List issues\n\n\n\n -L, --links\n\n Show links\n\n\n\n Show links to each issue or pull request in the output\n\n -o, --open\n\n Show open issues or pull requests\n\n\n\n Include issues, pull request or review requests that are open. If neither this flag nor --closed/-c is\n\n given, default behavior will be to display open issues or pull requests.\n\n -p, --pull-requests\n\n List pull requests\n\n\n\n -r, --review-requests\n\n List review requests\n\n\n\n -V, --version\n\n Prints version information\n\n\n\n\n\nOPTIONS:\n\n --colors <colors>\n\n Set use of colors\n\n\n\n Enable or disable output with colors. By default, the application will try to figure out if colors are\n\n supported by the terminal in the current context, and use it if possible. Possible values are \"on\", \"true\",\n\n \"off\", \"false\", \"auto\". 
[default: auto]\n\n -l, --labels <labels>...\n\n Filter by label\n\n\n\n Only include issues, pull requests or review reuests which has (all) the given label(s).\n\n -n, --limit <limit>\n\n Limit the number of issues or pull requests to list [default: 10]\n\n\n\n -O, --order <order>\n\n Ordering\n\n\n\n Can be either ascending (asc|ascending) or decending (desc|descending)\n\n -P, --project <project>\n\n Filter by project\n\n\n\n Only include isses, pull request or review requests which is assoicated with the given project.\n\n -S, --search <search>\n", "file_path": "README.md", "rank": 68, "score": 21.936496800800526 }, { "content": "\n\n pub fn pulls(&self) -> bool {\n\n self.pull_requests || self.all()\n\n }\n\n\n\n pub fn sorting(&self) -> Sorting {\n\n Sorting(self.sort_by.unwrap_or_default(), self.order.unwrap_or_default())\n\n }\n\n\n\n pub fn search(&self) -> Option<String> {\n\n self.search.clone()\n\n }\n\n\n\n pub fn label(&self) -> Vec<String> {\n\n self.labels.clone()\n\n }\n\n\n\n pub fn project(&self) -> Option<Project> {\n\n self.project.clone()\n\n }\n", "file_path": "src/cfg.rs", "rank": 69, "score": 20.998676161517583 }, { "content": " return Ok(());\n\n }\n\n\n\n setup_logging(cfg.verbosity());\n\n\n\n let token: String = cfg.token()?;\n\n let targets: Vec<Target> = cfg.target()?;\n\n let user: Option<Username> = cfg.username();\n\n log::debug!(\"Config: {:?}\", &cfg);\n\n\n\n let filter: FilterConfig = (&cfg).into();\n\n let display: DisplayConfig = (&cfg).into();\n\n\n\n let bounds: usize = cfg.limit() as usize * 2;\n\n let (send, recv) = std::sync::mpsc::sync_channel::<Issue>(bounds);\n\n\n\n let rt = Runtime::new().unwrap();\n\n rt.spawn(async move {\n\n match list::list_issues(send, &user, &targets, &token, &filter).await {\n\n Ok(_) => log::debug!(\"API requests completed\"),\n", "file_path": "src/main.rs", "rank": 71, "score": 20.326495933253454 }, { "content": "mod ui;\n\nmod user;\n\n\n\nuse crate::structopt::StructOpt;\n\nuse 
cfg::Config;\n\nuse dbg::dbg_info;\n\nuse issue::Issue;\n\nuse list::FilterConfig;\n\nuse logger::setup_logging;\n\nuse target::Target;\n\nuse tokio::runtime::Runtime;\n\nuse ui::DisplayConfig;\n\nuse user::Username;\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), AppErr> {\n\n let cfg: Config = Config::from_args();\n\n\n\n if cfg.print_debug() {\n\n println!(\"{}\", dbg_info());\n", "file_path": "src/main.rs", "rank": 72, "score": 18.768171497460454 }, { "content": " } else {\n\n StateFilter::Open\n\n }\n\n }\n\n\n\n pub fn assigned_only(&self) -> bool {\n\n self.assigned\n\n }\n\n\n\n fn all(&self) -> bool {\n\n !self.issues && !self.pull_requests && !self.review_requests\n\n }\n\n\n\n pub fn issues(&self) -> bool {\n\n self.issues || self.all()\n\n }\n\n\n\n pub fn reviews(&self) -> bool {\n\n self.review_requests || self.all()\n\n }\n", "file_path": "src/cfg.rs", "rank": 73, "score": 18.48496277765093 }, { "content": "\n\n /// Show open issues or pull requests\n\n ///\n\n /// Include issues, pull request or review requests that are open. 
If neither this flag nor\n\n /// --closed/-c is given, default behavior will be to display open issues or pull requests.\n\n #[structopt(short, long)]\n\n open: bool,\n\n\n\n /// Show closed issues or pull requests\n\n ///\n\n /// Include issues, pull request or review requests that are closed or merged\n\n #[structopt(short, long)]\n\n closed: bool,\n\n\n\n /// Filter by label\n\n ///\n\n /// Only include issues, pull requests or review reuests which has (all) the given label(s).\n\n #[structopt(short, long)]\n\n labels: Vec<String>,\n\n\n", "file_path": "src/cfg.rs", "rank": 74, "score": 17.912587113988955 }, { "content": "pub struct Verbosity(u8);\n\n\n\nimpl Verbosity {\n\n pub fn level(&self) -> u8 {\n\n self.0\n\n }\n\n}\n\n\n\nimpl FromStr for Verbosity {\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s.parse::<u8>() {\n\n Ok(n) => match n {\n\n 0..=5 => Ok(Verbosity(n)),\n\n _ => Err(format!(\"Unsupported verbosity level '{}'\", n)),\n\n },\n\n Err(e) => Err(e.to_string()),\n\n }\n\n }\n", "file_path": "src/cfg.rs", "rank": 75, "score": 16.50338996891022 }, { "content": " links: bool,\n\n}\n\n\n\nimpl From<&Config> for DisplayConfig {\n\n fn from(cfg: &Config) -> Self {\n\n DisplayConfig {\n\n colors: cfg.colors(),\n\n limit: cfg.limit(),\n\n user: cfg.username(),\n\n sorting: cfg.sorting(),\n\n links: cfg.show_links(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ui.rs", "rank": 76, "score": 16.145323933798952 }, { "content": " /// organizations or users. Any repository specified must be qualified with the owner or\n\n /// organization name. For example 'org/repo'. 
When no target is specified, repository in\n\n /// current directory will be used, if possible.\n\n target: Vec<Target>,\n\n\n\n /// GitHub API token\n\n ///\n\n /// API token that will be used when authenticating towards GitHub's API\n\n #[structopt(short, long, env = \"GITHUB_TOKEN\", hide_env_values = true)]\n\n token: Option<String>,\n\n\n\n /// Assigned only\n\n ///\n\n /// Only include issues and pull requests assigned to user\n\n #[structopt(short, long)]\n\n assigned: bool,\n\n\n\n /// Limit the number of issues or pull requests to list\n\n #[structopt(short = \"n\", long, default_value = \"10\")]\n\n limit: u32,\n", "file_path": "src/cfg.rs", "rank": 77, "score": 15.831608128183195 }, { "content": " Search\n\n\n\n Search by a string, which must be present either in the title or the body of an issue or pull request.\n\n -s, --sort-by <sort-by>\n\n Sort by\n\n\n\n Sort by any of the following properties; \"created\", \"updated\", \"comments\", \"reactions\"\n\n -t, --token <token>\n\n GitHub API token\n\n\n\n API token that will be used when authenticating towards GitHub's API [env: GITHUB_TOKEN]\n\n -u, --user <user>\n\n Username\n\n\n\n Username to use for the query. Will default to the username for the user of the token.\n\n -v, --verbosity <verbosity>\n\n Set verbosity level, 0 - 5\n\n\n\n Set the verbosity level, from 0 (least amount of output) to 5 (most verbose). Note that logging level\n\n configured via RUST_LOG overrides this setting. [default: 1]\n\n\n\nARGS:\n\n <target>...\n\n Name of target(s)\n\n\n\n Name of the targets for the action. Can be a combination of one or several repositories, organizations or\n\n users. Any repository specified must be qualified with the owner or organization name. For example\n\n 'org/repo'. 
When no target is specified, repository in current directory will be used, if possible.\n\n```\n\n\n", "file_path": "README.md", "rank": 78, "score": 15.57958965922326 }, { "content": " query: crate::search::GraphQLQuery,\n\n ) -> Result<T, ApiError> {\n\n log::debug!(\"{}\", query.variables);\n\n\n\n let request: reqwest::Request = CLIENT\n\n .post(GITHUB_API_V4_URL)\n\n .header(\"User-Agent\", USER_AGENT)\n\n .bearer_auth(token)\n\n .json(&query)\n\n .build()\n\n .expect(\"Failed to build query\");\n\n\n\n let response: reqwest::Response = CLIENT.execute(request).await?;\n\n let status_code: u16 = response.status().as_u16();\n\n match status_code {\n\n 200 => {\n\n log::debug!(\"GitHub API: {}\", status_code);\n\n Ok(response.json().await?)\n\n }\n\n _ => {\n", "file_path": "src/api.rs", "rank": 79, "score": 15.303398479874302 }, { "content": " /// Filter by project\n\n ///\n\n /// Only include isses, pull request or review requests which is assoicated with the\n\n /// given project.\n\n #[structopt(short = \"P\", long)]\n\n project: Option<Project>,\n\n\n\n /// List issues\n\n #[structopt(short, long)]\n\n issues: bool,\n\n\n\n /// List pull requests\n\n #[structopt(short, long)]\n\n pull_requests: bool,\n\n\n\n /// List review requests\n\n #[structopt(short, long)]\n\n review_requests: bool,\n\n\n\n /// Sort by\n", "file_path": "src/cfg.rs", "rank": 80, "score": 14.514155637085835 }, { "content": "pub(crate) mod v4 {\n\n use lazy_static::lazy_static;\n\n use reqwest::Client;\n\n use std::time::Duration;\n\n\n\n use super::ApiError;\n\n\n\n const GITHUB_API_V4_URL: &str = \"https://api.github.com/graphql\";\n\n const USER_AGENT: &str = \"giss\";\n\n\n\n lazy_static! 
{\n\n pub static ref CLIENT: Client = Client::builder()\n\n .connect_timeout(Duration::from_secs(10))\n\n .timeout(std::time::Duration::from_secs(15))\n\n .build()\n\n .unwrap();\n\n }\n\n\n\n pub async fn request<T: serde::de::DeserializeOwned>(\n\n token: &str,\n", "file_path": "src/api.rs", "rank": 81, "score": 14.055198896332527 }, { "content": " Err(e) => log::error!(\"{:?}\", e),\n\n }\n\n });\n\n\n\n ui::display(recv, display)?;\n\n rt.shutdown_background();\n\n\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum AppErr {\n\n MissingToken,\n\n TokenWriteError,\n\n NoTarget,\n\n InvalidTarget(String),\n\n ApiError,\n\n Timeout,\n\n ChannelError,\n\n RateLimited,\n\n}\n", "file_path": "src/main.rs", "rank": 82, "score": 13.816154229891449 }, { "content": "pub mod ghrs {\n\n use serde::Deserialize;\n\n\n\n #[derive(Debug, Deserialize, PartialEq)]\n\n pub enum State {\n\n #[serde(rename(deserialize = \"open\"))]\n\n #[serde(alias = \"OPEN\")]\n\n Open,\n\n #[serde(rename(deserialize = \"closed\"))]\n\n #[serde(alias = \"CLOSED\")]\n\n #[serde(alias = \"MERGED\")]\n\n Closed,\n\n }\n\n\n", "file_path": "src/github_resources.rs", "rank": 83, "score": 13.51233362362677 }, { "content": " .nodes\n\n .iter()\n\n .map(|l: &Label| &l.name)\n\n .map(|s: &String| format!(\"{}{}\", \"#\", s))\n\n .collect::<Vec<String>>()\n\n .join(\", \");\n\n\n\n let mut stdout = StandardStream::stdout(use_colors);\n\n\n\n print_type(&mut stdout, &issue, cfg);\n\n\n\n let target: String = if print_repo {\n\n format!(\"#{} {}\", issue.number, repo)\n\n } else {\n\n format!(\"#{}\", issue.number)\n\n };\n\n\n\n write(&mut stdout, target.as_str(), None);\n\n delimiter(&mut stdout);\n\n write(&mut stdout, &title, None);\n", "file_path": "src/ui.rs", "rank": 84, "score": 13.32972720478257 }, { "content": " /// Username to use for the query. 
Will default to the username for the user of the token.\n\n #[structopt(short, long)]\n\n user: Option<Username>,\n\n\n\n /// Show links\n\n ///\n\n /// Show links to each issue or pull request in the output\n\n #[structopt(short = \"L\", long)]\n\n links: bool,\n\n\n\n /// Set use of colors\n\n ///\n\n /// Enable or disable output with colors. By default, the application will\n\n /// try to figure out if colors are supported by the terminal in the current context, and use it\n\n /// if possible.\n\n /// Possible values are \"on\", \"true\", \"off\", \"false\", \"auto\".\n\n #[structopt(long = \"colors\", default_value = \"auto\")]\n\n colors: Flag,\n\n\n\n /// Set verbosity level, 0 - 5\n", "file_path": "src/cfg.rs", "rank": 85, "score": 12.631760744798353 }, { "content": "#[macro_use]\n\nextern crate clap;\n\nextern crate dirs_next;\n\nextern crate lazy_static;\n\nextern crate log;\n\nextern crate regex;\n\nextern crate structopt;\n\n\n\nmod api;\n\nmod args;\n\nmod cfg;\n\nmod dbg;\n\nmod github_resources;\n\nmod issue;\n\nmod list;\n\nmod logger;\n\nmod project;\n\nmod search;\n\nmod sort;\n\nmod target;\n", "file_path": "src/main.rs", "rank": 86, "score": 12.029253875800848 }, { "content": "# giss\n\n*giss* is command line client to list GitHub issues and pull requests.\n\n\n\n## Usage\n\nAll commands requires a valid [GitHub API token](https://github.com/settings/tokens). The application will automatically read the environment variable\n\n`GITHUB_TOKEN`, but it can also be given when invoking the application with the `-t` or `--token` flag. 
The token does not need any permission for reading public repositories, but for private repositories is the `repo` permission required.\n\n\n\n### List Issues & Pull Requests\n\nBy default, simply invoking the name of the binary, `giss`, will list tickets that are either\n\n- issues\n\n- pull requets\n\n- review requests\n\n\n\nin the current repo.\n\nIf the command is not invoked from a Git repository, an explicit repository will have to given as an argument.\n\n\n\n- `giss` - List open tickets in current repo\n\n- `giss mantono/giss` - List open tickets in repository _giss_ that belongs to user/organization _mantono_\n\n- `giss apple` - List open tickets in any repository in organization _apple_\n\n- `giss apple microsoft google` - List tickets in any repository in organizations _apple_, _microsoft_ and _google_\n\n- `giss rust-lang/rust apple/swift golang/go` - List open tickets in repositories for rust, swift and go\n\n- `giss -c` - List only closed tickets in current repo\n\n- `giss -oc` - List both open and closed tickets in current repo\n\n- `giss -a` - List only open tickets assigned to user\\* in current repo\n\n- `giss -i` - List only open issues in current repo\n\n- `giss -p` - List only open pull requests in current repo\n\n- `giss -r` - List only review requests for user\\*\n\n- `giss -a kotlin` - List all open tickets assigned to user in any repository in orgranization _kotlin_\n\n\n\n\\*the user is determined by the owner of the token, unless overriden with the `--user` flag.\n\n\n\nSee `giss --help` for all available options.\n\n\n\n```\n\nUSAGE:\n\n giss [FLAGS] [OPTIONS] [--] [target]...\n\n\n\nFLAGS:\n\n -a, --assigned\n", "file_path": "README.md", "rank": 87, "score": 11.875219318293308 }, { "content": " ///\n\n /// Set the verbosity level, from 0 (least amount of output) to 5 (most verbose). 
Note that\n\n /// logging level configured via RUST_LOG overrides this setting.\n\n #[structopt(short, long, default_value = \"1\")]\n\n verbosity: Verbosity,\n\n\n\n /// Prind debug information\n\n ///\n\n /// Print debug information about current build for binary, useful for when an issue is\n\n /// encountered and reported\n\n #[structopt(short = \"D\", long)]\n\n debug: bool,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n", "file_path": "src/cfg.rs", "rank": 88, "score": 11.74514217544301 }, { "content": "\n\n pub fn show_links(&self) -> bool {\n\n self.links\n\n }\n\n\n\n pub fn verbosity(&self) -> &Verbosity {\n\n &self.verbosity\n\n }\n\n\n\n pub fn colors(&self) -> ColorChoice {\n\n match self.colors {\n\n Flag::True => ColorChoice::Always,\n\n Flag::False => ColorChoice::Never,\n\n Flag::Auto => ColorChoice::Auto,\n\n }\n\n }\n\n\n\n pub fn print_debug(&self) -> bool {\n\n self.debug\n\n }\n\n}\n", "file_path": "src/cfg.rs", "rank": 89, "score": 9.211539108715613 }, { "content": "use env_logger::fmt::{Color, Formatter};\n\nuse log::{Level, LevelFilter, Record};\n\nuse std::io;\n\nuse std::io::Write;\n\n\n\nuse crate::cfg::Verbosity;\n\n\n", "file_path": "src/logger.rs", "rank": 90, "score": 8.156064781479841 }, { "content": " ///\n\n /// Sort by any of the following properties; \"created\", \"updated\", \"comments\", \"reactions\"\n\n #[structopt(short, long)]\n\n sort_by: Option<Property>,\n\n\n\n /// Ordering\n\n ///\n\n /// Can be either ascending (asc|ascending) or decending (desc|descending)\n\n #[structopt(short = \"O\", long)]\n\n order: Option<Order>,\n\n\n\n /// Search\n\n ///\n\n /// Search by a string, which must be present either in the title or the body of an\n\n /// issue or pull request.\n\n #[structopt(short = \"S\", long)]\n\n search: Option<String>,\n\n\n\n /// Username\n\n ///\n", "file_path": "src/cfg.rs", "rank": 91, "score": 7.56499949624342 }, { "content": "\n\n if !assignees.is_empty() {\n\n delimiter(&mut stdout);\n\n write(&mut 
stdout, &assignees, Some(Color::Cyan));\n\n }\n\n\n\n if !labels.is_empty() {\n\n delimiter(&mut stdout);\n\n write(&mut stdout, &labels, Some(Color::Magenta));\n\n }\n\n\n\n if cfg.links {\n\n delimiter(&mut stdout);\n\n write(&mut stdout, &issue.link(), Some(Color::Blue));\n\n }\n\n\n\n write(&mut stdout, \"\\n\", None);\n\n}\n\n\n", "file_path": "src/ui.rs", "rank": 92, "score": 7.429854059463642 }, { "content": " let error: String = response.text().await?;\n\n log::error!(\"GitHub API: {} - {}\", status_code, error);\n\n Err(ApiError::Response(status_code))\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl From<reqwest::Error> for ApiError {\n\n fn from(e: reqwest::Error) -> Self {\n\n match e.status() {\n\n Some(code) => ApiError::Response(code.as_u16()),\n\n None => ApiError::NoResponse(e.to_string()),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum ApiError {\n\n NoResponse(String),\n\n Response(u16),\n\n}\n", "file_path": "src/api.rs", "rank": 93, "score": 7.052539669195494 }, { "content": "use std::fs;\n\nuse std::path::{Path, PathBuf};\n\n\n", "file_path": "src/args.rs", "rank": 94, "score": 6.298453021613701 }, { "content": "use built::Options;\n\nuse std::path::{Path, PathBuf};\n\n\n", "file_path": "build.rs", "rank": 95, "score": 5.718782755186433 }, { "content": "## Building\n\nThe application is built with cargo. Simply run the following command in the project directory.\n\n```bash\n\ncargo build --release\n\n```\n\nA binary will be created and put in directory `target/release`.\n\n\n\n\n\n## Install\n\nRun `cargo install --path .`\n", "file_path": "README.md", "rank": 96, "score": 3.879981821898303 } ]
Rust
osm2lanes/src/transform/tags_to_lanes/counts.rs
a-b-street/osm2lanes
e24d9762dc50f8d83b57f0a0737e2626823133a4
use super::{Infer, Oneway}; use crate::locale::Locale; use crate::tag::{Highway, TagKey, Tags}; use crate::transform::tags_to_lanes::modes::BusLaneCount; use crate::transform::{RoadWarnings, TagsToLanesMsg}; #[derive(Debug)] pub enum Counts { One, Directional { forward: Infer<usize>, backward: Infer<usize>, centre_turn_lane: Infer<bool>, }, } impl Counts { #[allow( clippy::integer_arithmetic, clippy::integer_division, clippy::too_many_lines )] pub(super) fn new( tags: &Tags, oneway: Oneway, highway: &Highway, centre_turn_lane: &CentreTurnLaneScheme, bus: &BusLaneCount, locale: &Locale, warnings: &mut RoadWarnings, ) -> Self { let lanes = LanesDirectionScheme::from_tags(tags, oneway, locale, warnings); let centre_turn_lane = match (lanes.both_ways, centre_turn_lane.some()) { (Some(()), None | Some(true)) => Infer::Direct(true), (None, Some(true)) => Infer::Calculated(true), (None, Some(false)) => Infer::Calculated(false), (None, None) => Infer::Default(false), (Some(()), Some(false)) => { warnings.push(TagsToLanesMsg::ambiguous_tags( tags.subset(&[LANES + "both_ways", CENTRE_TURN_LANE]), )); Infer::Default(true) }, }; let both_ways: usize = if centre_turn_lane.some().unwrap_or(false) { 1 } else { 0 }; if oneway.into() { if lanes.both_ways.is_some() || lanes.backward.is_some() { warnings.push(TagsToLanesMsg::ambiguous_tags(tags.subset(&[ "oneway", "lanes:both_ways", "lanes:backward", ]))); } if let Some(total) = lanes.total { let forward = total - both_ways - bus.backward; let result = Self::Directional { forward: Infer::Calculated(forward), backward: Infer::Calculated(bus.backward), centre_turn_lane, }; if lanes.forward.map_or(false, |direct| direct != forward) { warnings.push(TagsToLanesMsg::ambiguous_tags(tags.subset(&[ "oneway", "lanes", "lanes:forward", ]))); } result } else if let Some(f) = lanes.forward { Self::Directional { forward: Infer::Direct(f), backward: Infer::Default(0), centre_turn_lane, } } else { let assumed_forward = 1; Self::Directional { 
forward: Infer::Default(assumed_forward + bus.forward), backward: Infer::Default(0), centre_turn_lane, } } } else { match (lanes.total, lanes.forward, lanes.backward) { (Some(l), Some(f), Some(b)) => { if l != f + b + both_ways { warnings.push(TagsToLanesMsg::ambiguous_tags(tags.subset(&[ "lanes", "lanes:forward", "lanes:backward", "lanes:both_ways", "center_turn_lanes", ]))); } Self::Directional { forward: Infer::Direct(f), backward: Infer::Direct(b), centre_turn_lane, } }, (None, Some(f), Some(b)) => Self::Directional { forward: Infer::Direct(f), backward: Infer::Direct(b), centre_turn_lane, }, (Some(l), Some(f), None) => Self::Directional { forward: Infer::Direct(f), backward: Infer::Calculated(l - f - both_ways), centre_turn_lane, }, (Some(l), None, Some(b)) => Self::Directional { forward: Infer::Calculated(l - b - both_ways), backward: Infer::Direct(b), centre_turn_lane, }, (Some(1), None, None) => Self::One, (Some(l), None, None) => { if l % 2 == 0 && centre_turn_lane.some().unwrap_or(false) { Self::Directional { forward: Infer::Default(l / 2), backward: Infer::Default(l / 2), centre_turn_lane, } } else { let remaining_lanes = l - both_ways - bus.forward - bus.backward; if remaining_lanes % 2 != 0 { warnings.push(TagsToLanesMsg::ambiguous_str("Total lane count cannot be evenly divided between the forward and backward")); } let half = (remaining_lanes + 1) / 2; Self::Directional { forward: Infer::Default(half + bus.forward), backward: Infer::Default( remaining_lanes - half - both_ways + bus.backward, ), centre_turn_lane, } } }, (None, None, None) => { if locale.has_split_lanes(highway.r#type()) || bus.forward > 0 || bus.backward > 0 { Self::Directional { forward: Infer::Default(1 + bus.forward), backward: Infer::Default(1 + bus.backward), centre_turn_lane, } } else { Self::One } }, (None, _, _) => { if locale.has_split_lanes(highway.r#type()) { let forward = Infer::from(lanes.forward).or_default(1 + bus.forward); let backward = 
Infer::from(lanes.backward).or_default(1 + bus.forward); Self::Directional { forward, backward, centre_turn_lane, } } else { Self::One } }, } } } } const LANES: TagKey = TagKey::from("lanes"); pub(in crate::transform::tags_to_lanes) struct LanesDirectionScheme { total: Option<usize>, forward: Option<usize>, backward: Option<usize>, both_ways: Option<()>, } impl LanesDirectionScheme { pub fn from_tags( tags: &Tags, _oneway: Oneway, _locale: &Locale, warnings: &mut RoadWarnings, ) -> Self { let both_ways = tags .get_parsed(LANES + "both_ways", warnings) .filter(|&v: &usize| { if v == 1 { true } else { warnings.push(TagsToLanesMsg::unsupported( "lanes:both_ways must be 1", tags.subset(&[LANES + "both_ways"]), )); false } }) .map(|_v| {}); Self { total: tags.get_parsed(LANES, warnings), forward: tags.get_parsed(LANES + "forward", warnings), backward: tags.get_parsed(LANES + "backward", warnings), both_ways, } } } const CENTRE_TURN_LANE: TagKey = TagKey::from("centre_turn_lane"); pub(in crate::transform::tags_to_lanes) struct CentreTurnLaneScheme(pub Option<bool>); impl CentreTurnLaneScheme { pub fn from_tags( tags: &Tags, _oneway: Oneway, _locale: &Locale, warnings: &mut RoadWarnings, ) -> Self { if let Some(v) = tags.get(CENTRE_TURN_LANE) { warnings.push(TagsToLanesMsg::deprecated_tags( tags.subset(&[CENTRE_TURN_LANE]), )); match v { "yes" => Self(Some(true)), "no" => Self(Some(false)), _ => { warnings.push(TagsToLanesMsg::unsupported_tags( tags.subset(&[CENTRE_TURN_LANE]), )); Self(None) }, } } else { Self(None) } } pub fn some(&self) -> Option<bool> { self.0 } }
use super::{Infer, Oneway}; use crate::locale::Locale; use crate::tag::{Highway, TagKey, Tags}; use crate::transform::tags_to_lanes::modes::BusLaneCount; use crate::transform::{RoadWarnings, TagsToLanesMsg}; #[derive(Debug)] pub enum Counts { One, Directional { forward: Infer<usize>, backward: Infer<usize>, centre_turn_lane: Infer<bool>, }, } impl Counts { #[allow( clippy::integer_arithmetic, clippy::integer_division, clippy::too_many_lines )] pub(super) fn new( tags: &Tags, oneway: Oneway, highway: &Highway, centre_turn_lane: &CentreTurnLaneScheme, bus: &BusLaneCount, locale: &Locale, warnings: &mut RoadWarnings, ) -> Self { let lanes = LanesDirectionScheme::from_tags(tags, oneway, locale, warnings); let centre_turn_lane = match (lanes.both_ways, centre_turn_lane.some()) { (Some(()), None | Some(true)) => Infer::Direct(true), (None, Some(true)) => Infer::Calculated(true), (None, Some(false)) => Infer::Calculated(false), (None, None) => Infer::Default(false), (Some(()), Some(false)) => { warnings.push(TagsToLanesMsg::ambiguous_tags( tags.subset(&[LANES + "both_ways", CENTRE_TURN_LANE]), )); Infer::Default(true) }, }; let both_ways: usize = if centre_turn_lane.some().unwrap_or(false) { 1 } else { 0 }; if oneway.into() { if lanes.both_ways.is_some() || lanes.backward.is_some() { warnings.push(TagsToLanesMsg::ambiguous_tags(tags.subset(&[ "oneway",
l / 2), centre_turn_lane, } } else { let remaining_lanes = l - both_ways - bus.forward - bus.backward; if remaining_lanes % 2 != 0 { warnings.push(TagsToLanesMsg::ambiguous_str("Total lane count cannot be evenly divided between the forward and backward")); } let half = (remaining_lanes + 1) / 2; Self::Directional { forward: Infer::Default(half + bus.forward), backward: Infer::Default( remaining_lanes - half - both_ways + bus.backward, ), centre_turn_lane, } } }, (None, None, None) => { if locale.has_split_lanes(highway.r#type()) || bus.forward > 0 || bus.backward > 0 { Self::Directional { forward: Infer::Default(1 + bus.forward), backward: Infer::Default(1 + bus.backward), centre_turn_lane, } } else { Self::One } }, (None, _, _) => { if locale.has_split_lanes(highway.r#type()) { let forward = Infer::from(lanes.forward).or_default(1 + bus.forward); let backward = Infer::from(lanes.backward).or_default(1 + bus.forward); Self::Directional { forward, backward, centre_turn_lane, } } else { Self::One } }, } } } } const LANES: TagKey = TagKey::from("lanes"); pub(in crate::transform::tags_to_lanes) struct LanesDirectionScheme { total: Option<usize>, forward: Option<usize>, backward: Option<usize>, both_ways: Option<()>, } impl LanesDirectionScheme { pub fn from_tags( tags: &Tags, _oneway: Oneway, _locale: &Locale, warnings: &mut RoadWarnings, ) -> Self { let both_ways = tags .get_parsed(LANES + "both_ways", warnings) .filter(|&v: &usize| { if v == 1 { true } else { warnings.push(TagsToLanesMsg::unsupported( "lanes:both_ways must be 1", tags.subset(&[LANES + "both_ways"]), )); false } }) .map(|_v| {}); Self { total: tags.get_parsed(LANES, warnings), forward: tags.get_parsed(LANES + "forward", warnings), backward: tags.get_parsed(LANES + "backward", warnings), both_ways, } } } const CENTRE_TURN_LANE: TagKey = TagKey::from("centre_turn_lane"); pub(in crate::transform::tags_to_lanes) struct CentreTurnLaneScheme(pub Option<bool>); impl CentreTurnLaneScheme { pub fn from_tags( 
tags: &Tags, _oneway: Oneway, _locale: &Locale, warnings: &mut RoadWarnings, ) -> Self { if let Some(v) = tags.get(CENTRE_TURN_LANE) { warnings.push(TagsToLanesMsg::deprecated_tags( tags.subset(&[CENTRE_TURN_LANE]), )); match v { "yes" => Self(Some(true)), "no" => Self(Some(false)), _ => { warnings.push(TagsToLanesMsg::unsupported_tags( tags.subset(&[CENTRE_TURN_LANE]), )); Self(None) }, } } else { Self(None) } } pub fn some(&self) -> Option<bool> { self.0 } }
"lanes:both_ways", "lanes:backward", ]))); } if let Some(total) = lanes.total { let forward = total - both_ways - bus.backward; let result = Self::Directional { forward: Infer::Calculated(forward), backward: Infer::Calculated(bus.backward), centre_turn_lane, }; if lanes.forward.map_or(false, |direct| direct != forward) { warnings.push(TagsToLanesMsg::ambiguous_tags(tags.subset(&[ "oneway", "lanes", "lanes:forward", ]))); } result } else if let Some(f) = lanes.forward { Self::Directional { forward: Infer::Direct(f), backward: Infer::Default(0), centre_turn_lane, } } else { let assumed_forward = 1; Self::Directional { forward: Infer::Default(assumed_forward + bus.forward), backward: Infer::Default(0), centre_turn_lane, } } } else { match (lanes.total, lanes.forward, lanes.backward) { (Some(l), Some(f), Some(b)) => { if l != f + b + both_ways { warnings.push(TagsToLanesMsg::ambiguous_tags(tags.subset(&[ "lanes", "lanes:forward", "lanes:backward", "lanes:both_ways", "center_turn_lanes", ]))); } Self::Directional { forward: Infer::Direct(f), backward: Infer::Direct(b), centre_turn_lane, } }, (None, Some(f), Some(b)) => Self::Directional { forward: Infer::Direct(f), backward: Infer::Direct(b), centre_turn_lane, }, (Some(l), Some(f), None) => Self::Directional { forward: Infer::Direct(f), backward: Infer::Calculated(l - f - both_ways), centre_turn_lane, }, (Some(l), None, Some(b)) => Self::Directional { forward: Infer::Calculated(l - b - both_ways), backward: Infer::Direct(b), centre_turn_lane, }, (Some(1), None, None) => Self::One, (Some(l), None, None) => { if l % 2 == 0 && centre_turn_lane.some().unwrap_or(false) { Self::Directional { forward: Infer::Default(l / 2), backward: Infer::Default(
random
[ { "content": "fn set_cycleway(lanes: &[Lane], tags: &mut Tags, oneway: bool) -> Result<(), LanesToTagsMsg> {\n\n let left_cycle_lane: Option<Direction> = lanes\n\n .iter()\n\n .take_while(|lane| !lane.is_motor())\n\n .find(|lane| lane.is_bicycle())\n\n .and_then(Lane::direction);\n\n let right_cycle_lane: Option<Direction> = lanes\n\n .iter()\n\n .rev()\n\n .take_while(|lane| !lane.is_motor())\n\n .find(|lane| lane.is_bicycle())\n\n .and_then(Lane::direction);\n\n match (left_cycle_lane.is_some(), right_cycle_lane.is_some()) {\n\n (false, false) => {},\n\n (true, false) => tags.checked_insert(\"cycleway:left\", \"lane\")?,\n\n (false, true) => tags.checked_insert(\"cycleway:right\", \"lane\")?,\n\n (true, true) => tags.checked_insert(\"cycleway:both\", \"lane\")?,\n\n }\n\n\n\n // if the way has oneway=yes and you are allowed to cycle against that oneway flow\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 0, "score": 188591.67045499737 }, { "content": "fn set_busway(lanes: &[Lane], tags: &mut Tags, oneway: bool) -> Result<(), LanesToTagsMsg> {\n\n let left_bus_lane = lanes\n\n .iter()\n\n .take_while(|lane| !lane.is_motor())\n\n .find(|lane| lane.is_bus());\n\n let right_bus_lane = lanes\n\n .iter()\n\n .rev()\n\n .take_while(|lane| !lane.is_motor())\n\n .find(|lane| lane.is_bus());\n\n if left_bus_lane.is_none() && right_bus_lane.is_none() && lanes.iter().any(Lane::is_bus) {\n\n tags.checked_insert(\n\n \"bus:lanes\",\n\n lanes\n\n .iter()\n\n .map(|lane| if lane.is_bus() { \"designated\" } else { \"\" })\n\n .collect::<Vec<_>>()\n\n .as_slice()\n\n .join(\"|\"),\n\n )?;\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 1, "score": 188591.67045499737 }, { "content": "fn set_lanes(lanes: &[Lane], tags: &mut Tags) -> Result<usize, LanesToTagsMsg> {\n\n let lane_count = lanes\n\n .iter()\n\n .filter(|lane| {\n\n matches!(\n\n lane,\n\n Lane::Travel {\n\n designated: Designated::Motor | Designated::Bus,\n\n ..\n\n 
}\n\n )\n\n })\n\n .count();\n\n tags.checked_insert(\"lanes\", lane_count.to_string())?;\n\n Ok(lane_count)\n\n}\n\n\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 2, "score": 183422.33327873377 }, { "content": "/// From an OpenStreetMap way's tags,\n\n/// determine the lanes along the road from left to right.\n\n///\n\n/// # Errors\n\n///\n\n/// Warnings or errors are produced for situations that may make the lanes inaccurate, such as:\n\n///\n\n/// - Unimplemented or unsupported tags\n\n/// - Ambiguous tags\n\n/// - Unknown internal errors\n\n///\n\n/// If the issue may be recoverable, a warning is preferred.\n\n/// A config option allows all warnings to be treated as errors.\n\n///\n\npub fn tags_to_lanes(\n\n tags: &Tags,\n\n locale: &Locale,\n\n config: &Config,\n\n) -> Result<RoadFromTags, RoadError> {\n\n let mut warnings = RoadWarnings::default();\n\n\n\n // Early return if we find unimplemented or unsupported tags.\n\n unsupported(tags, locale, &mut warnings)?;\n\n\n\n // Create the road builder and start giving it schemes.\n\n let mut road: RoadBuilder = RoadBuilder::from(tags, locale, &mut warnings)?;\n\n\n\n modes::non_motorized(tags, locale, &mut road, &mut warnings)?;\n\n\n\n modes::bus(tags, locale, &mut road, &mut warnings)?;\n\n\n\n modes::bicycle(tags, locale, &mut road, &mut warnings)?;\n\n\n\n modes::parking(tags, locale, &mut road)?;\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/mod.rs", "rank": 3, "score": 168708.35291415447 }, { "content": "/// Convert Lanes back to Tags\n\n///\n\n/// # Errors\n\n///\n\n/// Any of:\n\n/// - internal error\n\n/// - unimplemented or unsupported functionality\n\n/// - the OSM tag spec cannot represent the lanes\n\npub fn lanes_to_tags(\n\n road: &Road,\n\n locale: &Locale,\n\n config: &Config,\n\n) -> Result<Tags, LanesToTagsMsg> {\n\n let mut tags = Tags::default();\n\n\n\n if !road\n\n .lanes\n\n .iter()\n\n .any(|lane| lane.is_motor() || lane.is_bus())\n\n {\n\n 
tags.checked_insert(\"highway\", \"path\")?;\n\n return Ok(tags);\n\n }\n\n\n\n tags.checked_insert(\"highway\", road.highway.r#type().to_string())?;\n\n if road.highway.is_construction() {\n\n return Err(LanesToTagsMsg::unimplemented(\"construction=*\"));\n\n }\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 4, "score": 168702.54180826427 }, { "content": "fn set_parking(lanes: &[Lane], tags: &mut Tags) -> Result<(), LanesToTagsMsg> {\n\n match (\n\n lanes\n\n .iter()\n\n .take_while(|lane| !lane.is_motor())\n\n .any(|lane| matches!(lane, Lane::Parking { .. })),\n\n lanes\n\n .iter()\n\n .skip_while(|lane| !lane.is_motor())\n\n .any(|lane| matches!(lane, Lane::Parking { .. })),\n\n ) {\n\n (false, false) => {},\n\n (true, false) => tags.checked_insert(\"parking:lane:left\", \"parallel\")?,\n\n (false, true) => tags.checked_insert(\"parking:lane:right\", \"parallel\")?,\n\n (true, true) => tags.checked_insert(\"parking:lane:both\", \"parallel\")?,\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 5, "score": 161815.1222212746 }, { "content": "fn set_shoulder(lanes: &[Lane], tags: &mut Tags) -> Result<(), LanesToTagsMsg> {\n\n match (\n\n lanes.first().unwrap().is_shoulder(),\n\n lanes.last().unwrap().is_shoulder(),\n\n ) {\n\n (false, false) => {\n\n // TODO do we want to always be explicit about this?\n\n tags.checked_insert(\"shoulder\", \"no\")?;\n\n },\n\n (true, false) => {\n\n tags.checked_insert(\"shoulder\", \"left\")?;\n\n },\n\n (false, true) => {\n\n tags.checked_insert(\"shoulder\", \"right\")?;\n\n },\n\n (true, true) => tags.checked_insert(\"shoulder\", \"both\")?,\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 6, "score": 161815.1222212746 }, { "content": "fn set_pedestrian(lanes: &[Lane], tags: &mut Tags) -> Result<(), LanesToTagsMsg> {\n\n match (\n\n lanes.first().unwrap().is_foot(),\n\n lanes.last().unwrap().is_foot(),\n\n 
) {\n\n (false, false) => {\n\n // TODO do we want to always be explicit about this?\n\n tags.checked_insert(\"sidewalk\", \"no\")?;\n\n },\n\n (true, false) => tags.checked_insert(\"sidewalk\", \"left\")?,\n\n (false, true) => tags.checked_insert(\"sidewalk\", \"right\")?,\n\n (true, true) => tags.checked_insert(\"sidewalk\", \"both\")?,\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 7, "score": 161815.1222212746 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\npub fn unsupported(\n\n tags: &Tags,\n\n _locale: &Locale,\n\n warnings: &mut RoadWarnings,\n\n) -> Result<(), TagsToLanesMsg> {\n\n // https://wiki.openstreetmap.org/wiki/Key:access#Transport_mode_restrictions\n\n const ACCESS_KEYS: [&str; 43] = [\n\n \"access\",\n\n \"dog\",\n\n \"ski\",\n\n \"inline_skates\",\n\n \"horse\",\n\n \"vehicle\",\n\n \"bicycle\",\n\n \"electric_bicycle\",\n\n \"carriage\",\n\n \"hand_cart\",\n\n \"quadracycle\",\n\n \"trailer\",\n\n \"caravan\",\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/unsupported.rs", "rank": 8, "score": 154782.54031689954 }, { "content": "fn get_max_speed(lanes: &[Lane], tags: &mut Tags) -> Result<Option<Speed>, LanesToTagsMsg> {\n\n let max_speeds: Vec<Speed> = lanes\n\n .iter()\n\n .filter_map(|lane| match lane {\n\n Lane::Travel { max_speed, .. 
} => *max_speed,\n\n _ => None,\n\n })\n\n .collect();\n\n if let Some(max_speed) = max_speeds.first() {\n\n // Check if all are the same\n\n // See benches/benchmark_all_same.rs\n\n if max_speeds.windows(2).all(|w| {\n\n let speeds: &[Speed; 2] = w.try_into().unwrap();\n\n speeds[0] == speeds[1]\n\n }) {\n\n tags.checked_insert(\"maxspeed\", max_speed.to_string())?;\n\n Ok(Some(*max_speed))\n\n } else {\n\n Err(LanesToTagsMsg::unimplemented(\n\n \"different max speeds per lane\",\n\n ))\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 9, "score": 151795.8031380189 }, { "content": "enum Lane {\n\n None,\n\n Lane,\n\n Opposite,\n\n}\n\n\n\nimpl Tags {\n\n fn get_bus_lane<T>(&self, key: T, warnings: &mut RoadWarnings) -> Lane\n\n where\n\n T: AsRef<str>,\n\n TagKey: From<T>,\n\n {\n\n match self.get(&key) {\n\n None => Lane::None,\n\n Some(\"lane\") => Lane::Lane,\n\n Some(\"opposite_lane\") => Lane::Opposite,\n\n Some(v) => {\n\n warnings.push(TagsToLanesMsg::unsupported_tag(key, v));\n\n Lane::None\n\n },\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/modes/bus/busway.rs", "rank": 10, "score": 139123.98948326544 }, { "content": "#[derive(Clone, Copy)]\n\nenum DirectionChange {\n\n // One of the sides is bidirectional\n\n None,\n\n Same,\n\n Opposite,\n\n}\n\n\n\n/// Given a pair of lanes, inside to outside\n\n/// what should the semantic separator between them be\n\n#[allow(clippy::unnecessary_wraps)]\n\npub(in crate::transform::tags_to_lanes) fn lane_pair_to_semantic_separator(\n\n lanes: [&LaneBuilder; 2],\n\n road: &RoadBuilder,\n\n tags: &Tags,\n\n locale: &Locale,\n\n warnings: &mut RoadWarnings,\n\n) -> Option<Separator> {\n\n let [inside, outside] = lanes;\n\n let direction_change = match [inside.direction.some(), outside.direction.some()] {\n\n [None | Some(Direction::Both), _] | [_, None | Some(Direction::Both)] => {\n", "file_path": 
"osm2lanes/src/transform/tags_to_lanes/separator/mod.rs", "rank": 11, "score": 136973.81947586592 }, { "content": "/// Returns oneway\n\nfn set_oneway(\n\n lanes: &[Lane],\n\n tags: &mut Tags,\n\n locale: &Locale,\n\n lane_count: usize,\n\n) -> Result<bool, LanesToTagsMsg> {\n\n if lanes.iter().filter(|lane| lane.is_motor()).all(|lane| {\n\n matches!(\n\n lane,\n\n Lane::Travel {\n\n direction: Some(Direction::Forward),\n\n ..\n\n }\n\n )\n\n }) {\n\n tags.checked_insert(\"oneway\", \"yes\")?;\n\n Ok(true)\n\n } else {\n\n // Forward\n\n let forward_lanes = lanes\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 12, "score": 136272.96738890602 }, { "content": "fn locale_additions(\n\n max_speed: Option<Speed>,\n\n locale: &Locale,\n\n tags: &mut Tags,\n\n) -> Result<(), LanesToTagsMsg> {\n\n if max_speed == Some(Speed::Kph(100.0)) && locale.country == Some(Country::the_netherlands()) {\n\n tags.checked_insert(\"motorroad\", \"yes\")?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 13, "score": 136081.48258924222 }, { "content": "pub fn benchmark_tests(c: &mut Criterion) {\n\n let tests = get_tests();\n\n let mut group = c.benchmark_group(\"tests\");\n\n for test in &tests {\n\n let locale = Locale::builder()\n\n .driving_side(test.driving_side)\n\n .iso_3166_option(test.iso_3166_2.as_deref())\n\n .build();\n\n let config = TagsToLanesConfig::new(\n\n !test.test_has_warnings(),\n\n test.test_include_separators() && test.expected_has_separators(),\n\n );\n\n group.measurement_time(std::time::Duration::from_millis(1000));\n\n group.warm_up_time(std::time::Duration::from_millis(500));\n\n group.bench_with_input(BenchmarkId::from_parameter(test), test, |b, test| {\n\n b.iter(|| {\n\n assert!(tags_to_lanes(&test.tags, &locale, &config).is_ok());\n\n });\n\n });\n\n }\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(benches, benchmark_tests);\n\ncriterion_main!(benches);\n", "file_path": 
"osm2lanes/benches/benchmark_tests.rs", "rank": 14, "score": 135010.3343704145 }, { "content": "pub fn benchmark_tests(c: &mut Criterion) {\n\n let tests = get_tests();\n\n let mut group = c.benchmark_group(\"tests\");\n\n for test in tests.iter().filter(|t| {\n\n let mut hasher = std::collections::hash_map::DefaultHasher::new();\n\n std::hash::Hasher::write(&mut hasher, t.to_string().as_bytes());\n\n let hash = std::hash::Hasher::finish(&hasher);\n\n hash % SAMPLE_RATE == 0\n\n }) {\n\n let locale = Locale::builder()\n\n .driving_side(test.driving_side)\n\n .iso_3166_option(test.iso_3166_2.as_deref())\n\n .build();\n\n let config = TagsToLanesConfig::new(\n\n !test.test_has_warnings(),\n\n test.test_include_separators() && test.expected_has_separators(),\n\n );\n\n group.bench_with_input(BenchmarkId::from_parameter(test), test, |b, test| {\n\n b.iter(|| {\n\n assert!(tags_to_lanes(&test.tags, &locale, &config).is_ok());\n\n });\n\n });\n\n }\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(benches, benchmark_tests);\n\ncriterion_main!(benches);\n", "file_path": "osm2lanes/benches/benchmark_tests_sample.rs", "rank": 15, "score": 132274.82549741282 }, { "content": "#[wasm_bindgen]\n\npub fn js_tags_to_lanes(val: &JsValue) -> JsValue {\n\n utils::set_panic_hook();\n\n\n\n let input: Input = val.into_serde().unwrap();\n\n\n\n let mut config = TagsToLanesConfig::default();\n\n config.error_on_warnings = false;\n\n config.include_separators = true;\n\n\n\n let locale = Locale::builder()\n\n .driving_side(if input.drive_on_right {\n\n DrivingSide::Right\n\n } else {\n\n DrivingSide::Left\n\n })\n\n .build();\n\n\n\n let mut tags = Tags::default();\n\n for (key, value) in input.key_values {\n\n tags.checked_insert(key, value).unwrap();\n\n }\n\n let lanes = tags_to_lanes(&tags, &locale, &config).unwrap();\n\n JsValue::from_serde(&lanes).unwrap()\n\n}\n", "file_path": "osm2lanes-npm/src/lib.rs", "rank": 16, "score": 129621.98163370913 }, { "content": "pub fn lanes<R: 
RenderContext>(\n\n rc: &mut R,\n\n (canvas_width, canvas_height): (u32, u32),\n\n road: &Road,\n\n locale: &Locale,\n\n) -> Result<(), RenderError> {\n\n let canvas_width = canvas_width as f64;\n\n let canvas_height = canvas_height as f64;\n\n let default_lane_width = Lane::DEFAULT_WIDTH;\n\n\n\n let grassy_verge = Metre::new(1.0);\n\n let asphalt_buffer = Metre::new(0.1);\n\n\n\n let scale = Scale(\n\n canvas_width / (road.width(locale) + 2.0 * grassy_verge + 2.0 * asphalt_buffer).val(),\n\n );\n\n\n\n // Background\n\n rc.clear(None, PietColor::OLIVE);\n\n\n", "file_path": "osm2lanes-web/src/draw.rs", "rank": 17, "score": 118483.67843725506 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\nfn lanes_bus(\n\n tags: &Tags,\n\n _locale: &Locale,\n\n _road: &mut RoadBuilder,\n\n warnings: &mut RoadWarnings,\n\n) -> Result<(), TagsToLanesMsg> {\n\n warnings.push(TagsToLanesMsg::unimplemented_tags(tags.subset(&[\n\n LANES + \"psv\",\n\n LANES + \"psv\" + \"forward\",\n\n LANES + \"psv\" + \"backward\",\n\n LANES + \"psv\" + \"left\",\n\n LANES + \"psv\" + \"right\",\n\n LANES + \"bus\",\n\n LANES + \"bus\" + \"forward\",\n\n LANES + \"bus\" + \"backward\",\n\n LANES + \"bus\" + \"left\",\n\n LANES + \"bus\" + \"right\",\n\n ])));\n\n Ok(())\n\n}\n\n\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/modes/bus/mod.rs", "rank": 18, "score": 108928.06387143032 }, { "content": "fn bus_lanes(\n\n tags: &Tags,\n\n locale: &Locale,\n\n road: &mut RoadBuilder,\n\n _warnings: &mut RoadWarnings,\n\n) -> Result<(), TagsToLanesMsg> {\n\n match (\n\n tags.get(\"bus:lanes\"),\n\n (\n\n tags.get(\"bus:lanes:forward\"),\n\n tags.get(\"bus:lanes:backward\"),\n\n ),\n\n tags.get(\"psv:lanes\"),\n\n (\n\n tags.get(\"psv:lanes:forward\"),\n\n tags.get(\"psv:lanes:backward\"),\n\n ),\n\n ) {\n\n // lanes:bus or lanes:psv\n\n (Some(lanes), (None, None), None, (None, None))\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/modes/bus/mod.rs", "rank": 19, "score": 
108923.91523490721 }, { "content": "enum Sidewalk {\n\n None,\n\n No,\n\n Yes,\n\n Separate,\n\n}\n\n\n\nimpl Sidewalk {\n\n /// This processes sidewalk tags by the OSM spec.\n\n /// No can be implied, e.g. we assume that sidewalk:left=yes implies sidewalk:right=no\n\n /// None is when information may be incomplete and should be inferred,\n\n /// e.g. when sidewalk=* is missing altogether,\n\n /// but this may later become a No when combined with data from shoulder=*\n\n /// We catch any tag combinations that violate the OSM spec\n\n #[allow(clippy::unnested_or_patterns)]\n\n fn from_tags(\n\n tags: &Tags,\n\n locale: &Locale,\n\n warnings: &mut RoadWarnings,\n\n ) -> Result<(Self, Self), TagsToLanesMsg> {\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/modes/foot_shoulder.rs", "rank": 20, "score": 106779.6047560379 }, { "content": "enum Shoulder {\n\n None,\n\n Yes,\n\n No,\n\n}\n\n\n\nimpl Shoulder {\n\n fn from_tags(\n\n tags: &Tags,\n\n locale: &Locale,\n\n _warnings: &mut RoadWarnings,\n\n ) -> Result<(Self, Self), TagsToLanesMsg> {\n\n Ok(match tags.get(SHOULDER) {\n\n None => (Shoulder::None, Shoulder::None),\n\n Some(\"no\") => (Shoulder::No, Shoulder::No),\n\n Some(\"yes\" | \"both\") => (Shoulder::Yes, Shoulder::Yes),\n\n Some(s) if s == locale.driving_side.tag().as_str() => (Shoulder::Yes, Shoulder::No),\n\n Some(s) if s == locale.driving_side.opposite().tag().as_str() => {\n\n (Shoulder::No, Shoulder::Yes)\n\n },\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/modes/foot_shoulder.rs", "rank": 21, "score": 106779.6047560379 }, { "content": "fn check_roundtrip(\n\n config: &Config,\n\n tags: &Tags,\n\n locale: &Locale,\n\n lanes: &[Lane],\n\n) -> Result<(), LanesToTagsMsg> {\n\n if config.check_roundtrip {\n\n let rountrip = tags_to_lanes(\n\n tags,\n\n locale,\n\n &TagsToLanesConfig {\n\n error_on_warnings: true,\n\n ..TagsToLanesConfig::default()\n\n },\n\n )?;\n\n if lanes != rountrip.road.lanes {\n\n return 
Err(LanesToTagsMsg::roundtrip());\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 22, "score": 105124.61618837451 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\nfn motor_lane_pair_to_semantic_separator(\n\n [inside, _outside]: [&LaneBuilder; 2],\n\n direction_change: DirectionChange,\n\n road: &RoadBuilder,\n\n _tags: &Tags,\n\n locale: &Locale,\n\n _warnings: &mut RoadWarnings,\n\n) -> Option<Separator> {\n\n match road\n\n .lanes_ltr(locale)\n\n .filter(|lane| {\n\n matches!(lane.r#type.some(), Some(LaneType::Travel))\n\n && matches!(\n\n lane.designated.some(),\n\n Some(Designated::Motor | Designated::Bus),\n\n )\n\n })\n\n .count()\n\n {\n\n 2 => Some(Separator::Centre {\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/separator/mod.rs", "rank": 23, "score": 104429.75397617435 }, { "content": "#[derive(Clone)]\n\nenum TagKeyEnum {\n\n Static(&'static str),\n\n String(String),\n\n}\n\n\n\nimpl TagKey {\n\n #[must_use]\n\n pub const fn from(string: &'static str) -> Self {\n\n TagKey(TagKeyEnum::Static(string))\n\n }\n\n\n\n #[must_use]\n\n pub fn as_str(&self) -> &str {\n\n match &self.0 {\n\n TagKeyEnum::Static(v) => v,\n\n TagKeyEnum::String(v) => v.as_str(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "osm2lanes/src/tag/key.rs", "rank": 24, "score": 101579.49271624362 }, { "content": "pub fn set_panic_hook() {\n\n // When the `console_error_panic_hook` feature is enabled, we can call the\n\n // `set_panic_hook` function at least once during initialization, and then\n\n // we will get better error messages if our code ever panics.\n\n //\n\n // For more details see\n\n // https://github.com/rustwasm/console_error_panic_hook#readme\n\n #[cfg(feature = \"console_error_panic_hook\")]\n\n console_error_panic_hook::set_once();\n\n}\n", "file_path": "osm2lanes-npm/src/utils.rs", "rank": 25, "score": 99790.793425665 }, { "content": "fn bench_all_same(c: &mut Criterion) {\n\n let mut group = 
c.benchmark_group(\"slice_all_same\");\n\n let small_vec = vec![20; 5];\n\n group.bench_with_input(\n\n BenchmarkId::new(\"all_same_vec\", \"small_vec\"),\n\n &small_vec,\n\n |b, small_vec| b.iter(|| all_same_vec(black_box(small_vec))),\n\n );\n\n group.bench_with_input(\n\n BenchmarkId::new(\"all_same_hashmap\", \"small_vec\"),\n\n &small_vec,\n\n |b, small_vec| b.iter(|| all_same_hashmap(black_box(small_vec))),\n\n );\n\n let big_vec = vec![20; 500];\n\n group.bench_with_input(\n\n BenchmarkId::new(\"all_same_vec\", \"big_vec\"),\n\n &big_vec,\n\n |b, big_vec| b.iter(|| all_same_vec(black_box(big_vec))),\n\n );\n\n group.bench_with_input(\n\n BenchmarkId::new(\"all_same_hashmap\", \"big_vec\"),\n\n &big_vec,\n\n |b, big_vec| b.iter(|| all_same_hashmap(black_box(big_vec))),\n\n );\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(benches, bench_all_same);\n\ncriterion_main!(benches);\n", "file_path": "osm2lanes/benches/benchmark_slice_all_same.rs", "rank": 26, "score": 95652.8814312807 }, { "content": "/// Get Test Cases from tests.yml\n\npub fn get_tests() -> Vec<TestCase> {\n\n let tests: Vec<TestCase> =\n\n serde_yaml::from_reader(BufReader::new(File::open(\"../data/tests.yml\").unwrap()))\n\n .expect(\"invalid yaml in data/tests.yml\");\n\n let tests: Vec<TestCase> = tests\n\n .into_iter()\n\n .filter(|test| test.test_enabled())\n\n .collect();\n\n tests\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use assert_json_diff::assert_json_eq;\n\n\n\n use super::*;\n\n use crate::locale::{DrivingSide, Locale};\n\n use crate::road::{Lane, Marking, Printable, Road};\n\n use crate::tag::Highway;\n", "file_path": "osm2lanes/src/test.rs", "rank": 41, "score": 91433.10467558751 }, { "content": "pub fn draw_arrow<R: RenderContext>(\n\n rc: &mut R,\n\n mid: Point,\n\n direction: Direction,\n\n) -> Result<(), RenderError> {\n\n fn draw_point<R: RenderContext>(\n\n rc: &mut R,\n\n mid: Point,\n\n direction: Direction,\n\n ) -> Result<(), RenderError> {\n\n let dir_sign = 
match direction {\n\n Direction::Forward => -1.0,\n\n Direction::Backward => 1.0,\n\n _ => unreachable!(),\n\n };\n\n for x in [-10.0, 10.0] {\n\n rc.stroke(\n\n Line::new(\n\n Point {\n\n x: mid.x,\n", "file_path": "osm2lanes-web/src/draw.rs", "rank": 42, "score": 89612.78754187629 }, { "content": "pub trait TagsWrite {\n\n ///\n\n /// # Errors\n\n ///\n\n /// If duplicate key is inserted.\n\n ///\n\n fn checked_insert<K: Into<TagKey>, V: Into<String>>(\n\n &mut self,\n\n k: K,\n\n v: V,\n\n ) -> Result<(), DuplicateKeyError>;\n\n}\n\n\n\nimpl TagsWrite for Tags {\n\n ///\n\n /// # Errors\n\n ///\n\n /// If duplicate key is inserted. \n\n ///\n\n fn checked_insert<K: Into<TagKey>, V: Into<String>>(\n", "file_path": "osm2lanes/src/tag/mod.rs", "rank": 43, "score": 89044.24387704078 }, { "content": "/// Display lane detail as printable characters\n\npub trait Printable {\n\n fn as_ascii(&self) -> char;\n\n fn as_utf8(&self) -> char;\n\n}\n\n\n\nimpl Printable for Lane {\n\n fn as_ascii(&self) -> char {\n\n match self {\n\n Self::Travel {\n\n designated: Designated::Foot,\n\n ..\n\n } => 's',\n\n Self::Travel {\n\n designated: Designated::Bicycle,\n\n ..\n\n } => 'b',\n\n Self::Travel {\n\n designated: Designated::Motor,\n\n ..\n\n } => 'd',\n", "file_path": "osm2lanes/src/road/lane.rs", "rank": 44, "score": 83639.13730555204 }, { "content": "fn is_default<T>(v: &T) -> bool\n\nwhere\n\n T: PartialEq + Default,\n\n{\n\n T::default().eq(v)\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\n\npub struct Highway {\n\n #[serde(\n\n serialize_with = \"serialize_display\",\n\n deserialize_with = \"deserialize_from_str\"\n\n )]\n\n highway: HighwayType,\n\n #[serde(default, skip_serializing_if = \"is_default\")]\n\n lifecycle: Lifecycle,\n\n}\n", "file_path": "osm2lanes/src/tag/osm.rs", "rank": 45, "score": 71157.7143367976 }, { "content": "#[derive(Debug)]\n\npub(in crate::transform::tags_to_lanes) enum Access {\n\n None,\n\n No,\n\n Yes,\n\n 
Designated,\n\n}\n\n\n\nimpl std::str::FromStr for Access {\n\n type Err = String;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"\" => Ok(Self::None),\n\n \"no\" => Ok(Self::No),\n\n \"yes\" => Ok(Self::Yes),\n\n \"designated\" => Ok(Self::Designated),\n\n _ => Err(s.to_owned()),\n\n }\n\n }\n\n}\n\n\n\nimpl Access {\n\n pub(in crate::transform::tags_to_lanes) fn split(lanes: &str) -> Result<Vec<Self>, String> {\n\n lanes.split('|').map(str::parse).collect()\n\n }\n\n}\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/access_by_lane.rs", "rank": 46, "score": 65535.32836551226 }, { "content": " pub oneway: Oneway,\n\n}\n\n\n\nimpl RoadBuilder {\n\n #[allow(clippy::items_after_statements, clippy::too_many_lines)]\n\n pub fn from(\n\n tags: &Tags,\n\n locale: &Locale,\n\n warnings: &mut RoadWarnings,\n\n ) -> Result<Self, RoadError> {\n\n let highway = Highway::from_tags(tags);\n\n let highway = match highway {\n\n Err(None) => return Err(TagsToLanesMsg::unsupported_str(\"way is not highway\").into()),\n\n Err(Some(s)) => return Err(TagsToLanesMsg::unsupported_tag(HIGHWAY, &s).into()),\n\n Ok(highway) => match highway {\n\n highway if highway.is_supported() => highway,\n\n _ => {\n\n return Err(TagsToLanesMsg::unimplemented_tags(tags.subset(&LIFECYCLE)).into());\n\n },\n\n },\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 47, "score": 58834.55257312102 }, { "content": "\n\n impl Oneway {\n\n pub fn from_tags(\n\n tags: &Tags,\n\n _locale: &Locale,\n\n _warnings: &mut RoadWarnings,\n\n ) -> Result<Self, TagsToLanesMsg> {\n\n Ok(\n\n match (tags.get(ONEWAY), tags.is(\"junction\", \"roundabout\")) {\n\n (Some(\"yes\"), _) => Self::Yes,\n\n (Some(\"no\"), false) => Self::No,\n\n (Some(\"no\"), true) => {\n\n return Err(TagsToLanesMsg::ambiguous_tags(\n\n tags.subset(&[\"oneway\", \"junction\"]),\n\n ));\n\n },\n\n (Some(value), _) => {\n\n return Err(TagsToLanesMsg::unimplemented_tag(ONEWAY, value));\n\n },\n\n 
(None, roundabout) => Self::from(roundabout),\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/mod.rs", "rank": 48, "score": 58831.124140354324 }, { "content": " Err(_e) => {\n\n warnings.push(TagsToLanesMsg::unsupported_tags(tags.subset(&[MAXSPEED])));\n\n None\n\n },\n\n };\n\n\n\n let width = locale.travel_width(&designated, highway.r#type());\n\n let width = Width {\n\n min: Infer::None,\n\n target: Infer::Default(width),\n\n max: Infer::None,\n\n };\n\n\n\n let bus_lane_counts = BusLaneCount::from_tags(tags, locale, oneway, warnings)?;\n\n let centre_turn_lanes = CentreTurnLaneScheme::from_tags(tags, oneway, locale, warnings);\n\n let lane_counts = Counts::new(\n\n tags,\n\n oneway,\n\n &highway,\n\n &centre_turn_lanes,\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 49, "score": 58829.58223445052 }, { "content": " )]\n\n pub fn into_ltr(\n\n mut self,\n\n tags: &Tags,\n\n locale: &Locale,\n\n include_separators: bool,\n\n warnings: &mut RoadWarnings,\n\n ) -> Result<(Vec<Lane>, Highway, Oneway), RoadError> {\n\n let lanes: Vec<Lane> = if include_separators {\n\n let forward_edge = self\n\n .forward_outside()\n\n .and_then(lane_to_outer_edge_separator);\n\n let backward_edge = self\n\n .backward_outside()\n\n .and_then(lane_to_outer_edge_separator);\n\n let middle_separator = match [self.forward_inside(), self.backward_inside()] {\n\n [Some(forward), Some(backward)] => lane_pair_to_semantic_separator(\n\n [forward, backward],\n\n &self,\n\n tags,\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 50, "score": 58827.2506640268 }, { "content": "\n\nimpl Default for Config {\n\n fn default() -> Self {\n\n Self {\n\n error_on_warnings: false,\n\n include_separators: true,\n\n }\n\n }\n\n}\n\n\n\nmod oneway {\n\n use super::TagsToLanesMsg;\n\n use crate::locale::Locale;\n\n use crate::tag::{Tags, ONEWAY};\n\n use crate::transform::RoadWarnings;\n\n\n\n #[derive(Clone, Copy, PartialEq)]\n\n pub enum Oneway 
{\n\n Yes,\n\n No,\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/mod.rs", "rank": 51, "score": 58827.22956342795 }, { "content": "#![allow(clippy::module_name_repetitions)] // TODO: fix upstream\n\n\n\nuse celes::Country;\n\n\n\npub use self::error::LanesToTagsMsg;\n\nuse super::{tags_to_lanes, TagsToLanesConfig};\n\nuse crate::locale::Locale;\n\nuse crate::metric::Speed;\n\nuse crate::road::{Designated, Direction, Lane, Road};\n\nuse crate::tag::{Tags, TagsWrite};\n\n\n\n#[non_exhaustive]\n\npub struct Config {\n\n pub check_roundtrip: bool,\n\n}\n\n\n\nimpl Config {\n\n #[must_use]\n\n pub fn new(check_roundtrip: bool) -> Self {\n\n Config { check_roundtrip }\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 52, "score": 58826.755609323794 }, { "content": "use std::collections::VecDeque;\n\nuse std::iter;\n\n\n\nuse super::infer::Infer;\n\nuse super::oneway::Oneway;\n\nuse super::separator::{\n\n lane_pair_to_semantic_separator, lane_to_inner_edge_separator, lane_to_outer_edge_separator,\n\n semantic_separator_to_lane,\n\n};\n\nuse super::TagsToLanesMsg;\n\nuse crate::locale::{DrivingSide, Locale};\n\nuse crate::metric::{Metre, Speed};\n\nuse crate::road::{Access as LaneAccess, Designated, Direction, Lane};\n\nuse crate::tag::{Access as AccessValue, Highway, TagKey, Tags, HIGHWAY, LIFECYCLE};\n\nuse crate::transform::error::{RoadError, RoadWarnings};\n\nuse crate::transform::tags_to_lanes::counts::{CentreTurnLaneScheme, Counts};\n\nuse crate::transform::tags_to_lanes::modes::BusLaneCount;\n\n\n\n#[derive(Debug)]\n\npub(in crate::transform) struct LaneBuilderError(pub &'static str);\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 53, "score": 58825.258445887484 }, { "content": " None => {},\n\n }\n\n match right_cycle_lane {\n\n Some(Direction::Forward) => {\n\n tags.checked_insert(\"cycleway:right:oneway\", \"yes\")?;\n\n },\n\n Some(Direction::Backward) => {\n\n 
tags.checked_insert(\"cycleway:right:oneway\", \"-1\")?;\n\n },\n\n Some(Direction::Both) => tags.checked_insert(\"cycleway:right:oneway\", \"no\")?,\n\n None => {},\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 54, "score": 58824.965451151795 }, { "content": " /// Push new inner-most forward lane\n\n pub fn _push_forward_inside(&mut self, lane: LaneBuilder) {\n\n self.forward_lanes.push_front(lane);\n\n }\n\n /// Push new outer-most forward lane\n\n pub fn push_forward_outside(&mut self, lane: LaneBuilder) {\n\n self.forward_lanes.push_back(lane);\n\n }\n\n /// Push new inner-most backward lane\n\n pub fn _push_backward_inside(&mut self, lane: LaneBuilder) {\n\n self.backward_lanes.push_front(lane);\n\n }\n\n /// Push new outer-most backward lane\n\n pub fn push_backward_outside(&mut self, lane: LaneBuilder) {\n\n self.backward_lanes.push_back(lane);\n\n }\n\n /// Get lanes left to right\n\n pub fn lanes_ltr<'a>(&'a self, locale: &Locale) -> Box<dyn Iterator<Item = &LaneBuilder> + 'a> {\n\n match locale.driving_side {\n\n DrivingSide::Left => Box::new(\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 55, "score": 58824.493515169735 }, { "content": " if road.highway.is_proposed() {\n\n return Err(LanesToTagsMsg::unimplemented(\"construction=*\"));\n\n }\n\n\n\n let lanes = &road.lanes;\n\n\n\n let lane_count = set_lanes(lanes, &mut tags)?;\n\n let oneway = set_oneway(lanes, &mut tags, locale, lane_count)?;\n\n\n\n set_shoulder(lanes, &mut tags)?;\n\n set_pedestrian(lanes, &mut tags)?;\n\n set_parking(lanes, &mut tags)?;\n\n set_cycleway(lanes, &mut tags, oneway)?;\n\n set_busway(lanes, &mut tags, oneway)?;\n\n\n\n let max_speed = get_max_speed(lanes, &mut tags)?;\n\n\n\n locale_additions(max_speed, locale, &mut tags)?;\n\n\n\n check_roundtrip(config, &tags, locale, lanes)?;\n\n\n\n Ok(tags)\n\n}\n\n\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 56, "score": 
58823.336976202234 }, { "content": "\n\n modes::foot_and_shoulder(tags, locale, &mut road, &mut warnings)?;\n\n\n\n let (lanes, highway, _oneway) =\n\n road.into_ltr(tags, locale, config.include_separators, &mut warnings)?;\n\n\n\n let road_from_tags = RoadFromTags {\n\n road: Road { lanes, highway },\n\n warnings,\n\n };\n\n\n\n if config.error_on_warnings && !road_from_tags.warnings.is_empty() {\n\n return Err(road_from_tags.warnings.into());\n\n }\n\n\n\n Ok(road_from_tags)\n\n}\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/mod.rs", "rank": 57, "score": 58823.06163842919 }, { "content": " self.forward_lanes\n\n .iter_mut()\n\n .rev()\n\n .chain(self.backward_lanes.iter_mut()),\n\n ),\n\n DrivingSide::Right => Box::new(\n\n self.backward_lanes\n\n .iter_mut()\n\n .rev()\n\n .chain(self.forward_lanes.iter_mut()),\n\n ),\n\n }\n\n }\n\n /// Get forward lanes left to right\n\n pub fn _forward_ltr<'a>(\n\n &'a self,\n\n locale: &Locale,\n\n ) -> Box<dyn Iterator<Item = &LaneBuilder> + 'a> {\n\n match locale.driving_side {\n\n DrivingSide::Left => Box::new(self.forward_lanes.iter().rev()),\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 58, "score": 58822.76939566554 }, { "content": " DrivingSide::Right => Box::new(self.backward_lanes.iter()),\n\n }\n\n }\n\n /// Get backward lanes left to right\n\n pub fn backward_ltr_mut<'a>(\n\n &'a mut self,\n\n locale: &Locale,\n\n ) -> Box<dyn Iterator<Item = &mut LaneBuilder> + 'a> {\n\n match locale.driving_side {\n\n DrivingSide::Left => Box::new(self.backward_lanes.iter_mut().rev()),\n\n DrivingSide::Right => Box::new(self.backward_lanes.iter_mut()),\n\n }\n\n }\n\n\n\n /// Consume Road Builder to return Lanes left to right\n\n // TODO: a refactor...\n\n #[allow(\n\n clippy::needless_collect,\n\n clippy::unnecessary_wraps,\n\n clippy::too_many_lines\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 59, "score": 58822.760586205004 }, { "content": " DrivingSide::Right 
=> Box::new(self.forward_lanes.iter()),\n\n }\n\n }\n\n /// Get forward lanes left to right\n\n pub fn forward_ltr_mut<'a>(\n\n &'a mut self,\n\n locale: &Locale,\n\n ) -> Box<dyn Iterator<Item = &mut LaneBuilder> + 'a> {\n\n match locale.driving_side {\n\n DrivingSide::Left => Box::new(self.forward_lanes.iter_mut().rev()),\n\n DrivingSide::Right => Box::new(self.forward_lanes.iter_mut()),\n\n }\n\n }\n\n /// Get backward lanes left to right\n\n pub fn _backward_ltr<'a>(\n\n &'a self,\n\n locale: &Locale,\n\n ) -> Box<dyn Iterator<Item = &LaneBuilder> + 'a> {\n\n match locale.driving_side {\n\n DrivingSide::Left => Box::new(self.backward_lanes.iter().rev()),\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 60, "score": 58822.3085445379 }, { "content": " // also add oneway:bicycle=no to make it easier\n\n // for bicycle routers to see that the way can be used in two directions.\n\n if oneway\n\n && (left_cycle_lane.map_or(false, |direction| direction == Direction::Backward)\n\n || right_cycle_lane.map_or(false, |direction| direction == Direction::Backward))\n\n {\n\n tags.checked_insert(\"oneway:bicycle\", \"no\")?;\n\n }\n\n // indicate cycling traffic direction relative to the direction the osm way is oriented\n\n // yes: same direction\n\n // -1: contraflow\n\n // no: bidirectional\n\n match left_cycle_lane {\n\n Some(Direction::Forward) => {\n\n tags.checked_insert(\"cycleway:left:oneway\", \"yes\")?;\n\n },\n\n Some(Direction::Backward) => {\n\n tags.checked_insert(\"cycleway:left:oneway\", \"-1\")?;\n\n },\n\n Some(Direction::Both) => tags.checked_insert(\"cycleway:left:oneway\", \"no\")?,\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 61, "score": 58822.177825714614 }, { "content": "#![allow(clippy::module_name_repetitions)] // TODO: fix upstream\n\n\n\nuse crate::locale::Locale;\n\nuse crate::road::Road;\n\nuse crate::tag::Tags;\n\nuse crate::transform::error::{RoadError, RoadWarnings};\n\nuse 
crate::transform::RoadFromTags;\n\n\n\nmod error;\n\npub use error::TagsToLanesMsg;\n\n\n\nmod access_by_lane;\n\n\n\nmod counts;\n\n\n\nmod modes;\n\n\n\nmod separator;\n\n\n\nmod road;\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/mod.rs", "rank": 62, "score": 58821.46285791204 }, { "content": " .iter()\n\n .filter(|lane| {\n\n matches!(\n\n lane,\n\n Lane::Travel {\n\n designated: Designated::Motor | Designated::Bus,\n\n direction: Some(Direction::Forward),\n\n ..\n\n }\n\n )\n\n })\n\n .count();\n\n tags.checked_insert(\"lanes:forward\", forward_lanes.to_string())?;\n\n // Backward\n\n let backward_lanes = lanes\n\n .iter()\n\n .filter(|lane| {\n\n matches!(\n\n lane,\n\n Lane::Travel {\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 63, "score": 58821.28715788879 }, { "content": "use road::{LaneBuilder, LaneBuilderError, LaneType, RoadBuilder};\n\n\n\nmod unsupported;\n\nuse unsupported::unsupported;\n\n\n\n#[non_exhaustive]\n\npub struct Config {\n\n pub error_on_warnings: bool,\n\n pub include_separators: bool,\n\n}\n\n\n\nimpl Config {\n\n #[must_use]\n\n pub fn new(error_on_warnings: bool, include_separators: bool) -> Self {\n\n Self {\n\n error_on_warnings,\n\n include_separators,\n\n }\n\n }\n\n}\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/mod.rs", "rank": 64, "score": 58821.17934279497 }, { "content": " &bus_lane_counts,\n\n locale,\n\n warnings,\n\n );\n\n log::trace!(\"lane counts: {lane_counts:?}\");\n\n\n\n let road = if let Counts::Directional {\n\n forward,\n\n backward,\n\n centre_turn_lane,\n\n } = lane_counts\n\n {\n\n // These are ordered from the road center, going outwards. 
Most of the members of fwd_side will\n\n // have Direction::Forward, but there can be exceptions with two-way cycletracks.\n\n let mut forward_lanes: VecDeque<_> = iter::repeat_with(|| LaneBuilder {\n\n r#type: Infer::Default(LaneType::Travel),\n\n direction: Infer::Default(Direction::Forward),\n\n designated: Infer::Default(designated),\n\n max_speed: Infer::direct(max_speed),\n\n width: width.clone(),\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 65, "score": 58821.0701874852 }, { "content": " backward_lanes: VecDeque::new(),\n\n highway,\n\n oneway,\n\n }\n\n };\n\n\n\n Ok(road)\n\n }\n\n\n\n /// Number of lanes\n\n ///\n\n /// # Panics\n\n ///\n\n /// Too many lanes\n\n pub fn len(&self) -> usize {\n\n self.forward_len()\n\n .checked_add(self.backward_len())\n\n .expect(\"too many lanes\")\n\n }\n\n\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 66, "score": 58820.946674516024 }, { "content": " self.forward_lanes\n\n .iter()\n\n .rev()\n\n .chain(self.backward_lanes.iter()),\n\n ),\n\n DrivingSide::Right => Box::new(\n\n self.backward_lanes\n\n .iter()\n\n .rev()\n\n .chain(self.forward_lanes.iter()),\n\n ),\n\n }\n\n }\n\n /// Get lanes left to right\n\n pub fn lanes_ltr_mut<'a>(\n\n &'a mut self,\n\n locale: &Locale,\n\n ) -> Box<dyn Iterator<Item = &mut LaneBuilder> + 'a> {\n\n match locale.driving_side {\n\n DrivingSide::Left => Box::new(\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 67, "score": 58820.93631730273 }, { "content": " impl LanesToTagsMsg {\n\n #[must_use]\n\n #[track_caller]\n\n pub fn unimplemented(description: &str) -> Self {\n\n LanesToTagsMsg {\n\n location: Location::caller(),\n\n issue: LanesToTagsIssue::Unimplemented(description.to_owned()),\n\n }\n\n }\n\n\n\n #[must_use]\n\n #[track_caller]\n\n pub fn roundtrip() -> Self {\n\n LanesToTagsMsg {\n\n location: Location::caller(),\n\n issue: LanesToTagsIssue::Roundtrip(None),\n\n }\n\n }\n\n }\n\n\n", 
"file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 68, "score": 58820.598307738976 }, { "content": " } else {\n\n let value = |lane: &Lane| -> &'static str {\n\n if oneway && lane.direction() == Some(Direction::Backward) {\n\n \"opposite_lane\"\n\n } else {\n\n \"lane\"\n\n }\n\n };\n\n match (left_bus_lane, right_bus_lane) {\n\n (None, None) => {},\n\n (Some(left), None) => tags.checked_insert(\"busway:left\", value(left))?,\n\n (None, Some(right)) => tags.checked_insert(\"busway:right\", value(right))?,\n\n (Some(_left), Some(_right)) => tags.checked_insert(\"busway:both\", \"lane\")?,\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 69, "score": 58820.229762766445 }, { "content": " where\n\n F: FnOnce(T) -> U,\n\n {\n\n match self {\n\n Infer::None => Infer::None,\n\n Infer::Default(x) => Infer::Default(f(x)),\n\n Infer::Calculated(x) => Infer::Calculated(f(x)),\n\n Infer::Direct(x) => Infer::Direct(f(x)),\n\n }\n\n }\n\n\n\n /// If `Infer::None`, replaces with `Infer::Default(d)`\n\n #[must_use]\n\n pub fn or_default(self, d: T) -> Self {\n\n match self {\n\n Infer::None => Infer::Default(d),\n\n other => other,\n\n }\n\n }\n\n}\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/infer.rs", "rank": 70, "score": 58819.75679135394 }, { "content": " },\n\n )\n\n }\n\n }\n\n}\n\nuse oneway::Oneway;\n\n\n\nmod infer;\n\npub use infer::Infer;\n\n\n\n/// From an OpenStreetMap way's tags,\n\n/// determine the lanes along the road from left to right.\n\n///\n\n/// # Errors\n\n///\n\n/// Warnings or errors are produced for situations that may make the lanes inaccurate, such as:\n\n///\n\n/// - Unimplemented or unsupported tags\n\n/// - Ambiguous tags\n\n/// - Unknown internal errors\n\n///\n\n/// If the issue may be recoverable, a warning is preferred.\n\n/// A config option allows all warnings to be treated as errors.\n\n///\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/mod.rs", 
"rank": 71, "score": 58819.616836209425 }, { "content": " designated: Designated::Motor | Designated::Bus,\n\n direction: Some(Direction::Backward),\n\n ..\n\n }\n\n )\n\n })\n\n .count();\n\n tags.checked_insert(\"lanes:backward\", backward_lanes.to_string())?;\n\n // Both ways\n\n if lanes.iter().any(|lane| {\n\n matches!(\n\n lane,\n\n Lane::Travel {\n\n designated: Designated::Motor,\n\n direction: Some(Direction::Both),\n\n ..\n\n }\n\n )\n\n }) {\n\n tags.checked_insert(\"lanes:both_ways\", \"1\")?;\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 72, "score": 58819.538286276664 }, { "content": " }\n\n\n\n impl std::fmt::Display for LanesToTagsMsg {\n\n #[allow(clippy::panic_in_result_fn)]\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match &self.issue {\n\n LanesToTagsIssue::Unimplemented(description) => {\n\n write!(f, \"unimplemented: '{}' - {}\", description, self.location)\n\n },\n\n LanesToTagsIssue::TagsDuplicateKey(e) => write!(f, \"{} - {}\", e, self.location),\n\n LanesToTagsIssue::Roundtrip(None) => write!(f, \"roundtrip - {}\", self.location),\n\n LanesToTagsIssue::Roundtrip(Some(e)) => {\n\n write!(f, \"roundtrip: {} - {}\", e, self.location)\n\n },\n\n }\n\n }\n\n }\n\n\n\n impl std::error::Error for LanesToTagsMsg {}\n\n\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 73, "score": 58819.530511303616 }, { "content": " locale,\n\n warnings,\n\n )\n\n .and_then(|separator| {\n\n semantic_separator_to_lane(\n\n [forward, backward],\n\n &separator,\n\n &self,\n\n tags,\n\n locale,\n\n warnings,\n\n )\n\n }),\n\n [Some(lane), None] | [None, Some(lane)] => {\n\n lane_to_inner_edge_separator(lane.mirror()).map(Lane::mirror)\n\n },\n\n [None, None] => return Err(RoadError::Msg(TagsToLanesMsg::internal(\"no lanes\"))),\n\n };\n\n\n\n self.forward_lanes.make_contiguous();\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 74, "score": 58819.39033259688 }, { 
"content": "use crate::locale::Locale;\n\nuse crate::tag::{TagKey, Tags};\n\nuse crate::transform::{RoadWarnings, TagsToLanesMsg};\n\n\n\n/// Unsupported\n\n/// Catch-all for unsupported or unimplemented but known constructs\n\n#[allow(clippy::unnecessary_wraps)]\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/unsupported.rs", "rank": 75, "score": 58818.40098357407 }, { "content": " #[track_caller]\n\n pub fn ambiguous_tags(tags: Tags) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n\n issue: TagsToLanesIssue::Ambiguous {\n\n description: None,\n\n tags: Some(tags),\n\n },\n\n }\n\n }\n\n\n\n #[must_use]\n\n #[track_caller]\n\n pub fn ambiguous_str(description: &str) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n\n issue: TagsToLanesIssue::Ambiguous {\n\n description: Some(description.to_owned()),\n\n tags: None,\n\n },\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/error.rs", "rank": 76, "score": 58818.01967499554 }, { "content": " width,\n\n },\n\n Some(LaneType::Shoulder) => Lane::Shoulder { width },\n\n None => panic!(),\n\n }\n\n }\n\n\n\n /// Create a mirrored version of the lane\n\n #[must_use]\n\n fn mirror(&self) -> &Self {\n\n // TODO: this doesn't need to do anything for now\n\n // check back after v1.0.0 to see if this is still the case\n\n self\n\n }\n\n}\n\n\n\npub(in crate::transform) struct RoadBuilder {\n\n forward_lanes: VecDeque<LaneBuilder>,\n\n backward_lanes: VecDeque<LaneBuilder>,\n\n pub highway: Highway,\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 77, "score": 58817.93614285955 }, { "content": " }\n\n }\n\n\n\n #[must_use]\n\n #[track_caller]\n\n pub fn separator_locale_unused(inside: LaneBuilder, outside: LaneBuilder) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n\n issue: TagsToLanesIssue::SeparatorLocaleUnused { inside, outside },\n\n }\n\n }\n\n\n\n #[must_use]\n\n #[track_caller]\n\n pub fn separator_unknown(inside: LaneBuilder, 
outside: LaneBuilder) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n\n issue: TagsToLanesIssue::SeparatorUnknown { inside, outside },\n\n }\n\n }\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/error.rs", "rank": 78, "score": 58817.76912126023 }, { "content": " match self {\n\n Self::None => None,\n\n Self::Default(v) | Self::Calculated(v) | Self::Direct(v) => Some(v),\n\n }\n\n }\n\n\n\n /// `Infer::Direct` or `Infer::None` from Option\n\n pub fn direct(some: Option<T>) -> Self {\n\n match some {\n\n None => Self::None,\n\n Some(v) => Self::Direct(v),\n\n }\n\n }\n\n /// Conditionally replaces value.\n\n ///\n\n /// # Replaces\n\n /// - The same value at a higher confidence\n\n /// - A different value at a higher confidence\n\n ///\n\n /// # Ignores\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/infer.rs", "rank": 79, "score": 58817.49004636207 }, { "content": " }\n\n }\n\n\n\n #[must_use]\n\n #[track_caller]\n\n pub fn unimplemented_tag<K: Into<TagKey>>(key: K, val: &str) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n\n issue: TagsToLanesIssue::Unimplemented {\n\n description: None,\n\n tags: Some(Tags::from_str_pair([key.into().as_str(), val])),\n\n },\n\n }\n\n }\n\n\n\n #[must_use]\n\n #[track_caller]\n\n pub fn unimplemented_tags(tags: Tags) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/error.rs", "rank": 80, "score": 58817.4827935734 }, { "content": " issue: TagsToLanesIssue::Unsupported {\n\n description: None,\n\n tags: Some(tags),\n\n },\n\n }\n\n }\n\n\n\n #[must_use]\n\n #[track_caller]\n\n pub fn unsupported_tag<K: Into<TagKey>>(key: K, val: &str) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n\n issue: TagsToLanesIssue::Unsupported {\n\n description: None,\n\n tags: Some(Tags::from_str_pair([key.into().as_str(), val])),\n\n },\n\n }\n\n }\n\n\n\n #[must_use]\n", "file_path": 
"osm2lanes/src/transform/tags_to_lanes/error.rs", "rank": 81, "score": 58817.084072060294 }, { "content": " issue: TagsToLanesIssue::Unimplemented {\n\n description: None,\n\n tags: Some(tags),\n\n },\n\n }\n\n }\n\n\n\n #[must_use]\n\n #[track_caller]\n\n pub fn ambiguous_tag<K: Into<TagKey>>(key: K, val: &str) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n\n issue: TagsToLanesIssue::Ambiguous {\n\n description: None,\n\n tags: Some(Tags::from_str_pair([key.into().as_str(), val])),\n\n },\n\n }\n\n }\n\n\n\n #[must_use]\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/error.rs", "rank": 82, "score": 58817.084072060294 }, { "content": " #[track_caller]\n\n pub fn deprecated_tags(tags: Tags) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n\n issue: TagsToLanesIssue::Deprecated {\n\n deprecated_tags: tags,\n\n suggested_tags: None,\n\n },\n\n }\n\n }\n\n\n\n #[must_use]\n\n #[track_caller]\n\n pub fn deprecated_tag<K: Into<TagKey>>(key: K, val: &str) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n\n issue: TagsToLanesIssue::Deprecated {\n\n deprecated_tags: Tags::from_str_pair([key.into().as_str(), val]),\n\n suggested_tags: None,\n\n },\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/error.rs", "rank": 83, "score": 58816.91332699105 }, { "content": " #[track_caller]\n\n pub fn unsupported_str(description: &str) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n\n issue: TagsToLanesIssue::Unsupported {\n\n description: Some(description.to_owned()),\n\n tags: None,\n\n },\n\n }\n\n }\n\n\n\n #[must_use]\n\n #[track_caller]\n\n pub fn unimplemented(description: &str, tags: Tags) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n\n issue: TagsToLanesIssue::Unimplemented {\n\n description: Some(description.to_owned()),\n\n tags: Some(tags),\n\n },\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/error.rs", "rank": 84, "score": 58816.77184772844 }, { "content": 
"\n\nimpl<T> Default for Infer<T> {\n\n fn default() -> Self {\n\n Self::None\n\n }\n\n}\n\n\n\nimpl<T> From<Option<T>> for Infer<T> {\n\n fn from(some: Option<T>) -> Self {\n\n match some {\n\n Some(val) => Self::Direct(val),\n\n None => Self::None,\n\n }\n\n }\n\n}\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/infer.rs", "rank": 85, "score": 58816.632669067876 }, { "content": "\n\nimpl std::fmt::Display for TagsToLanesMsg {\n\n #[allow(clippy::panic_in_result_fn)]\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match &self.issue {\n\n TagsToLanesIssue::Deprecated {\n\n deprecated_tags, ..\n\n } => write!(\n\n f,\n\n \"deprecated: '{}' - {}\",\n\n deprecated_tags.to_vec().as_slice().join(\" \"),\n\n self.location,\n\n ),\n\n TagsToLanesIssue::Unsupported { description, tags }\n\n | TagsToLanesIssue::Unimplemented { description, tags }\n\n | TagsToLanesIssue::Ambiguous { description, tags } => {\n\n let tags = tags.as_ref().map(|tags| {\n\n let tags = tags.to_vec();\n\n if tags.is_empty() {\n\n String::from(\"no tags\")\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/error.rs", "rank": 86, "score": 58816.328547029174 }, { "content": "use super::TagsToLanesMsg;\n\n\n\n#[derive(Debug)]\n\npub struct InferConflict;\n\n\n\nimpl std::fmt::Display for InferConflict {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"inferred values conflict\")\n\n }\n\n}\n\n\n\nimpl std::error::Error for InferConflict {}\n\n\n\nimpl From<InferConflict> for TagsToLanesMsg {\n\n fn from(_conflict: InferConflict) -> Self {\n\n TagsToLanesMsg::internal(\"infer conflict\")\n\n }\n\n}\n\n\n\n// TODO: implement try when this is closed: https://github.com/rust-lang/rust/issues/84277\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/infer.rs", "rank": 87, "score": 58816.25443359413 }, { "content": "\n\n #[must_use]\n\n #[track_caller]\n\n pub fn internal(e: &'static str) -> Self {\n\n TagsToLanesMsg {\n\n 
location: Location::caller(),\n\n issue: TagsToLanesIssue::Internal(e),\n\n }\n\n }\n\n}\n\n\n\nimpl std::convert::From<DuplicateKeyError> for TagsToLanesMsg {\n\n #[track_caller]\n\n fn from(e: DuplicateKeyError) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n\n issue: TagsToLanesIssue::TagsDuplicateKey(e),\n\n }\n\n }\n\n}\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/error.rs", "rank": 88, "score": 58816.093437017815 }, { "content": " }\n\n }\n\n\n\n #[must_use]\n\n #[track_caller]\n\n pub fn unsupported(description: &str, tags: Tags) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n\n issue: TagsToLanesIssue::Unsupported {\n\n description: Some(description.to_owned()),\n\n tags: Some(tags),\n\n },\n\n }\n\n }\n\n\n\n #[must_use]\n\n #[track_caller]\n\n pub fn unsupported_tags(tags: Tags) -> Self {\n\n TagsToLanesMsg {\n\n location: Location::caller(),\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/error.rs", "rank": 89, "score": 58815.94222188425 }, { "content": "/// A value with various levels of inference\n\n#[derive(Copy, Clone, Debug)]\n\npub enum Infer<T> {\n\n None,\n\n Default(T),\n\n Calculated(T),\n\n Direct(T),\n\n}\n\n\n\nimpl<T> Infer<T>\n\nwhere\n\n T: PartialEq<T>,\n\n{\n\n /// `Infer::None`\n\n pub fn is_none(&self) -> bool {\n\n matches!(self, Self::None)\n\n }\n\n\n\n /// Convert any non-`Infer::None` value into `Option::Some`\n\n pub fn some(self) -> Option<T> {\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/infer.rs", "rank": 90, "score": 58815.88309640493 }, { "content": " description: Option<String>,\n\n tags: Option<Tags>,\n\n },\n\n /// Locale not used\n\n SeparatorLocaleUnused {\n\n inside: LaneBuilder,\n\n outside: LaneBuilder,\n\n },\n\n /// Locale not used\n\n SeparatorUnknown {\n\n inside: LaneBuilder,\n\n outside: LaneBuilder,\n\n },\n\n /// Internal errors\n\n TagsDuplicateKey(DuplicateKeyError),\n\n Internal(&'static str),\n\n}\n\n\n\nimpl TagsToLanesMsg {\n\n 
#[must_use]\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/error.rs", "rank": 91, "score": 58815.797410118386 }, { "content": " pub r#type: Infer<LaneType>,\n\n // note: direction is always relative to the way\n\n pub direction: Infer<Direction>,\n\n pub designated: Infer<Designated>,\n\n pub width: Width,\n\n pub max_speed: Infer<Speed>,\n\n pub access: Access,\n\n}\n\n\n\nimpl LaneBuilder {\n\n #[allow(clippy::panic)]\n\n #[must_use]\n\n fn build(self) -> Lane {\n\n let width = self.width.target.some();\n\n assert!(\n\n width.unwrap_or(Lane::DEFAULT_WIDTH).val()\n\n >= self.width.min.some().unwrap_or(Metre::MIN).val()\n\n );\n\n assert!(\n\n width.unwrap_or(Lane::DEFAULT_WIDTH).val()\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 92, "score": 58815.70085600384 }, { "content": " locale,\n\n warnings,\n\n )\n\n })\n\n })\n\n .collect();\n\n\n\n self.backward_lanes.make_contiguous();\n\n let backward_separators: Vec<Option<Lane>> = self\n\n .backward_lanes\n\n .as_slices()\n\n .0\n\n .windows(2)\n\n .map(|window| {\n\n let lanes: &[LaneBuilder; 2] = window.try_into().unwrap();\n\n lane_pair_to_semantic_separator(\n\n [&lanes[0], &lanes[1]],\n\n &self,\n\n tags,\n\n locale,\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 93, "score": 58815.679769901406 }, { "content": " /// Get outer-most backward lane\n\n pub fn backward_outside(&self) -> Option<&LaneBuilder> {\n\n self.backward_lanes.back()\n\n }\n\n /// Get inner-most forward lane\n\n pub fn forward_inside_mut(&mut self) -> Option<&mut LaneBuilder> {\n\n self.forward_lanes.front_mut()\n\n }\n\n /// Get outer-most forward lane\n\n pub fn forward_outside_mut(&mut self) -> Option<&mut LaneBuilder> {\n\n self.forward_lanes.back_mut()\n\n }\n\n /// Get inner-most backward lane\n\n pub fn _backward_inside_mut(&mut self) -> Option<&mut LaneBuilder> {\n\n self.backward_lanes.front_mut()\n\n }\n\n /// Get outer-most backward lane\n\n pub fn 
backward_outside_mut(&mut self) -> Option<&mut LaneBuilder> {\n\n self.backward_lanes.back_mut()\n\n }\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 94, "score": 58815.5857926672 }, { "content": " }\n\n}\n\n\n\nimpl Default for Config {\n\n fn default() -> Self {\n\n Self {\n\n check_roundtrip: true,\n\n }\n\n }\n\n}\n\n\n\nimpl Lane {\n\n fn is_shoulder(&self) -> bool {\n\n matches!(self, Lane::Shoulder { .. })\n\n }\n\n}\n\n\n\nmod error {\n\n use std::panic::Location;\n\n\n", "file_path": "osm2lanes/src/transform/lanes_to_tags/mod.rs", "rank": 95, "score": 58815.52504018748 }, { "content": "\n\nimpl std::fmt::Display for LaneBuilderError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl std::error::Error for LaneBuilderError {}\n\n\n\nimpl std::convert::From<LaneBuilderError> for RoadError {\n\n fn from(error: LaneBuilderError) -> Self {\n\n Self::Msg(TagsToLanesMsg::internal(error.0))\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum LaneType {\n\n Travel,\n\n Parking,\n\n Shoulder,\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 96, "score": 58814.96030771168 }, { "content": " /// Number of forward lanes\n\n pub fn forward_len(&self) -> usize {\n\n self.forward_lanes.len()\n\n }\n\n /// Number of backward lanes\n\n pub fn backward_len(&self) -> usize {\n\n self.backward_lanes.len()\n\n }\n\n /// Get inner-most forward lane\n\n pub fn forward_inside(&self) -> Option<&LaneBuilder> {\n\n self.forward_lanes.front()\n\n }\n\n /// Get outer-most forward lane\n\n pub fn forward_outside(&self) -> Option<&LaneBuilder> {\n\n self.forward_lanes.back()\n\n }\n\n /// Get inner-most backward lane\n\n pub fn backward_inside(&self) -> Option<&LaneBuilder> {\n\n self.backward_lanes.front()\n\n }\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 97, "score": 58814.04787840534 }, { "content": " }\n\n\n\n impl 
std::convert::From<bool> for Oneway {\n\n fn from(oneway: bool) -> Self {\n\n if oneway {\n\n Oneway::Yes\n\n } else {\n\n Oneway::No\n\n }\n\n }\n\n }\n\n\n\n impl std::convert::From<Oneway> for bool {\n\n fn from(oneway: Oneway) -> Self {\n\n match oneway {\n\n Oneway::Yes => true,\n\n Oneway::No => false,\n\n }\n\n }\n\n }\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/mod.rs", "rank": 98, "score": 58813.66010816574 }, { "content": " width,\n\n ..Default::default()\n\n });\n\n }\n\n\n\n RoadBuilder {\n\n forward_lanes,\n\n backward_lanes,\n\n highway,\n\n oneway,\n\n }\n\n } else {\n\n RoadBuilder {\n\n forward_lanes: VecDeque::from(vec![LaneBuilder {\n\n r#type: Infer::Default(LaneType::Travel),\n\n direction: Infer::Default(Direction::Both),\n\n designated: Infer::Default(designated),\n\n width,\n\n ..Default::default()\n\n }]),\n", "file_path": "osm2lanes/src/transform/tags_to_lanes/road.rs", "rank": 99, "score": 58813.65200401711 } ]
Rust
src/options.rs
sunsided/realsense-rust
6908a3d6ca172b8d7f388c10434b1800ceefafbe
use crate::{ common::*, error::{ErrorChecker, Result as RsResult}, kind::Rs2Option, }; pub trait ToOptions { fn to_options(&self) -> RsResult<HashMap<Rs2Option, OptionHandle>> { let options_ptr = self.get_options_ptr(); unsafe { let list_ptr = { let mut checker = ErrorChecker::new(); let list_ptr = realsense_sys::rs2_get_options_list( options_ptr.as_ptr(), checker.inner_mut_ptr(), ); checker.check()?; list_ptr }; let len = { let mut checker = ErrorChecker::new(); let len = realsense_sys::rs2_get_options_list_size(list_ptr, checker.inner_mut_ptr()); checker.check()?; len }; let handles = (0..len) .map(|index| { let mut checker = ErrorChecker::new(); let val = realsense_sys::rs2_get_option_from_list( list_ptr, index, checker.inner_mut_ptr(), ); checker.check()?; let option = Rs2Option::from_u32(val).unwrap(); let handle = OptionHandle { ptr: options_ptr, option, }; RsResult::Ok((option, handle)) }) .collect::<RsResult<HashMap<_, _>>>()?; Ok(handles) } } fn get_options_ptr(&self) -> NonNull<realsense_sys::rs2_options>; } #[derive(Debug, Clone)] pub struct OptionHandle { ptr: NonNull<realsense_sys::rs2_options>, option: Rs2Option, } impl OptionHandle { pub fn get_value(&self) -> RsResult<f32> { unsafe { let mut checker = ErrorChecker::new(); let val = realsense_sys::rs2_get_option( self.ptr.as_ptr(), self.option as realsense_sys::rs2_option, checker.inner_mut_ptr(), ); checker.check()?; Ok(val) } } pub fn set_value(&self, value: f32) -> RsResult<()> { unsafe { let mut checker = ErrorChecker::new(); realsense_sys::rs2_set_option( self.ptr.as_ptr(), self.option as realsense_sys::rs2_option, value, checker.inner_mut_ptr(), ); checker.check()?; Ok(()) } } pub fn is_read_only(&self) -> RsResult<bool> { unsafe { let mut checker = ErrorChecker::new(); let val = realsense_sys::rs2_is_option_read_only( self.ptr.as_ptr(), self.option as realsense_sys::rs2_option, checker.inner_mut_ptr(), ); checker.check()?; Ok(val != 0) } } pub fn name<'a>(&'a self) -> RsResult<&'a str> { 
unsafe { let mut checker = ErrorChecker::new(); let ptr = realsense_sys::rs2_get_option_name( self.ptr.as_ptr(), self.option as realsense_sys::rs2_option, checker.inner_mut_ptr(), ); checker.check()?; let desc = CStr::from_ptr(ptr).to_str().unwrap(); Ok(desc) } } pub fn option_description<'a>(&'a self) -> RsResult<&'a str> { unsafe { let mut checker = ErrorChecker::new(); let ptr = realsense_sys::rs2_get_option_description( self.ptr.as_ptr(), self.option as realsense_sys::rs2_option, checker.inner_mut_ptr(), ); checker.check()?; let desc = CStr::from_ptr(ptr).to_str().unwrap(); Ok(desc) } } pub fn value_description<'a>(&'a self, value: f32) -> RsResult<&'a str> { unsafe { let mut checker = ErrorChecker::new(); let ptr = realsense_sys::rs2_get_option_value_description( self.ptr.as_ptr(), self.option as realsense_sys::rs2_option, value, checker.inner_mut_ptr(), ); checker.check()?; let desc = CStr::from_ptr(ptr).to_str().unwrap(); Ok(desc) } } }
use crate::{ common::*, error::{ErrorChecker, Result as RsResult}, kind::Rs2Option, }; pub trait ToOptions { fn to_options(&self) -> RsResult<HashMap<Rs2Option, OptionHandle>> { let options_ptr = self.get_options_ptr(); unsafe { let list_ptr = { let mut checker = ErrorChecker::new(); let list_ptr = realsense_sys::rs2_get_options_list( options_ptr.as_ptr(), checker.inner_mut_ptr(), ); checker.check()?; list_ptr }; let len = { let mut checker = ErrorChecker::new(); let le
let mut checker = ErrorChecker::new(); let val = realsense_sys::rs2_is_option_read_only( self.ptr.as_ptr(), self.option as realsense_sys::rs2_option, checker.inner_mut_ptr(), ); checker.check()?; Ok(val != 0) } } pub fn name<'a>(&'a self) -> RsResult<&'a str> { unsafe { let mut checker = ErrorChecker::new(); let ptr = realsense_sys::rs2_get_option_name( self.ptr.as_ptr(), self.option as realsense_sys::rs2_option, checker.inner_mut_ptr(), ); checker.check()?; let desc = CStr::from_ptr(ptr).to_str().unwrap(); Ok(desc) } } pub fn option_description<'a>(&'a self) -> RsResult<&'a str> { unsafe { let mut checker = ErrorChecker::new(); let ptr = realsense_sys::rs2_get_option_description( self.ptr.as_ptr(), self.option as realsense_sys::rs2_option, checker.inner_mut_ptr(), ); checker.check()?; let desc = CStr::from_ptr(ptr).to_str().unwrap(); Ok(desc) } } pub fn value_description<'a>(&'a self, value: f32) -> RsResult<&'a str> { unsafe { let mut checker = ErrorChecker::new(); let ptr = realsense_sys::rs2_get_option_value_description( self.ptr.as_ptr(), self.option as realsense_sys::rs2_option, value, checker.inner_mut_ptr(), ); checker.check()?; let desc = CStr::from_ptr(ptr).to_str().unwrap(); Ok(desc) } } }
n = realsense_sys::rs2_get_options_list_size(list_ptr, checker.inner_mut_ptr()); checker.check()?; len }; let handles = (0..len) .map(|index| { let mut checker = ErrorChecker::new(); let val = realsense_sys::rs2_get_option_from_list( list_ptr, index, checker.inner_mut_ptr(), ); checker.check()?; let option = Rs2Option::from_u32(val).unwrap(); let handle = OptionHandle { ptr: options_ptr, option, }; RsResult::Ok((option, handle)) }) .collect::<RsResult<HashMap<_, _>>>()?; Ok(handles) } } fn get_options_ptr(&self) -> NonNull<realsense_sys::rs2_options>; } #[derive(Debug, Clone)] pub struct OptionHandle { ptr: NonNull<realsense_sys::rs2_options>, option: Rs2Option, } impl OptionHandle { pub fn get_value(&self) -> RsResult<f32> { unsafe { let mut checker = ErrorChecker::new(); let val = realsense_sys::rs2_get_option( self.ptr.as_ptr(), self.option as realsense_sys::rs2_option, checker.inner_mut_ptr(), ); checker.check()?; Ok(val) } } pub fn set_value(&self, value: f32) -> RsResult<()> { unsafe { let mut checker = ErrorChecker::new(); realsense_sys::rs2_set_option( self.ptr.as_ptr(), self.option as realsense_sys::rs2_option, value, checker.inner_mut_ptr(), ); checker.check()?; Ok(()) } } pub fn is_read_only(&self) -> RsResult<bool> { unsafe {
random
[ { "content": "#[cfg(all(feature = \"with-image\", feature = \"with-nalgebra\"))]\n\nfn main() -> Result<()> {\n\n example::main()\n\n}\n\n\n", "file_path": "examples/capture_images.rs", "rank": 1, "score": 91129.60982840325 }, { "content": "fn main() -> Result<()> {\n\n println!(\"Looking for RealSense devices\");\n\n let ctx = realsense_rust::Context::new()?;\n\n let devices = ctx.query_devices(None)?;\n\n let mut devices_found: bool = false;\n\n for device in devices {\n\n let device = device.unwrap();\n\n let name = device.name().unwrap().unwrap();\n\n let sn = device.serial_number().unwrap().unwrap();\n\n println!(\"Found {} SN {}\", name, sn);\n\n devices_found = true;\n\n device.hardware_reset()?;\n\n }\n\n ensure!(devices_found, \"No devices found\");\n\n Ok(())\n\n}\n", "file_path": "examples/reset_devices.rs", "rank": 2, "score": 91129.60982840325 }, { "content": "fn main() -> Result<()> {\n\n if cfg!(feature = \"doc-only\") {\n\n return Ok(());\n\n }\n\n\n\n // Probe libary\n\n let library = probe_library(\"realsense2\")?;\n\n\n\n // Verify version\n\n let (include_dir, version) = library\n\n .include_paths\n\n .iter()\n\n .collect::<HashSet<_>>()\n\n .into_iter()\n\n .filter_map(|path| {\n\n let dir = Path::new(path).join(\"librealsense2\");\n\n if dir.is_dir() {\n\n match get_version_from_header_dir(&dir) {\n\n Some(version) => Some((dir, version)),\n\n None => None,\n", "file_path": "realsense-sys/build.rs", "rank": 3, "score": 91129.60982840325 }, { "content": "/// The trait provides common methods on frames of all kinds.\n\npub trait GenericFrame\n\nwhere\n\n Self: Sized,\n\n{\n\n /// Obtains the metadata of frame.\n\n fn metadata(&self, kind: FrameMetaDataValue) -> RsResult<u64> {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let val = realsense_sys::rs2_get_frame_metadata(\n\n self.ptr().as_ptr(),\n\n kind as realsense_sys::rs2_frame_metadata_value,\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n Ok(val as u64)\n\n 
}\n\n }\n\n\n\n /// Gets frame number.\n\n fn number(&self) -> RsResult<u64> {\n", "file_path": "src/frame.rs", "rank": 4, "score": 90997.36923635322 }, { "content": "/// The trait provides methods on frames with disparity data.\n\n///\n\n/// Frame types with this trait also implements [DepthFrame](DepthFrame) trait.\n\npub trait DisparityFrame\n\nwhere\n\n Self: DepthFrame,\n\n{\n\n /// Retrieves the distance between the two IR sensors.\n\n fn baseline(&self) -> RsResult<f32> {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let baseline = realsense_sys::rs2_depth_stereo_frame_get_baseline(\n\n self.ptr().as_ptr(),\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n Ok(baseline)\n\n }\n\n }\n\n}\n\n\n\n/// The type returned by [Frame::<Any>::try_extend](Frame::try_extend).\n\n///\n", "file_path": "src/frame.rs", "rank": 5, "score": 90993.78631342805 }, { "content": "/// The trait provides methods on frames with depth data.\n\n///\n\n/// Frame types with this trait also implements [VideoFrame](VideoFrame) trait.\n\npub trait DepthFrame\n\nwhere\n\n Self: VideoFrame,\n\n{\n\n /// Gets distance at given coordinates.\n\n fn distance(&self, x: usize, y: usize) -> RsResult<f32> {\n\n let distance = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let distance = realsense_sys::rs2_depth_frame_get_distance(\n\n self.ptr().as_ptr(),\n\n x as c_int,\n\n y as c_int,\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n distance\n\n };\n\n Ok(distance)\n\n }\n\n\n\n /// Gets the length in meter per distance unit.\n\n fn depth_units(&self) -> RsResult<f32> {\n\n let sensor = self.sensor()?;\n\n let sensor = sensor.try_extend_to::<sensor_marker::Depth>()?.unwrap();\n\n let depth_units = sensor.depth_units()?;\n\n Ok(depth_units)\n\n }\n\n}\n\n\n", "file_path": "src/frame.rs", "rank": 6, "score": 90993.78631342805 }, { "content": " /// The marker trait for frame kinds.\n\n pub trait FrameKind {}\n\n\n", "file_path": "src/frame.rs", "rank": 7, 
"score": 90993.1435737421 }, { "content": " /// The marker traits of all kinds of sensor.\n\n pub trait SensorKind {}\n\n\n", "file_path": "src/sensor.rs", "rank": 8, "score": 90993.1435737421 }, { "content": " /// Marker trait for pipeline marker types.\n\n pub trait PipelineState {\n\n /// Clone the state with the underlying pointer. It is intended for internal use only.\n\n ///\n\n /// # Safety\n\n /// You can to prevent [Drop::drop] to be called twice by calling this method.\n\n unsafe fn unsafe_clone(&self) -> Self;\n\n }\n\n\n\n /// A marker type indicating the [Pipeline] is started.\n\n #[derive(Debug)]\n\n pub struct Active {\n\n pub profile: PipelineProfile,\n\n pub config: Option<Config>,\n\n }\n\n\n\n impl PipelineState for Active {\n\n unsafe fn unsafe_clone(&self) -> Self {\n\n Self {\n\n profile: self.profile.unsafe_clone(),\n\n config: self.config.as_ref().map(|config| config.unsafe_clone()),\n", "file_path": "src/pipeline.rs", "rank": 9, "score": 90993.07852916833 }, { "content": "/// The trait provides methods on frames with video data.\n\npub trait VideoFrame\n\nwhere\n\n Self: GenericFrame,\n\n{\n\n /// Gets image resolution.\n\n fn resolution(&self) -> RsResult<Resolution> {\n\n let width = self.width()?;\n\n let height = self.height()?;\n\n let resolution = Resolution { width, height };\n\n Ok(resolution)\n\n }\n\n\n\n /// Gets image width in pixels.\n\n fn width(&self) -> RsResult<usize> {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let val =\n\n realsense_sys::rs2_get_frame_width(self.ptr().as_ptr(), checker.inner_mut_ptr());\n\n checker.check()?;\n\n Ok(val as usize)\n", "file_path": "src/frame.rs", "rank": 10, "score": 90993.01534192843 }, { "content": "#[test]\n\nfn async_test() -> Result<()> {\n\n // lock global mutex\n\n let mut counter = GLOBAL_MUTEX.lock().unwrap();\n\n\n\n // init async runtime\n\n let mut runtime = Runtime::new()?;\n\n\n\n runtime.block_on(async {\n\n // init pipeline\n\n let pipeline = 
Pipeline::new()?;\n\n let config = Config::new()?\n\n .enable_stream(StreamKind::Depth, 0, 640, 0, Format::Z16, 30)?\n\n .enable_stream(StreamKind::Color, 0, 640, 0, Format::Rgb8, 30)?;\n\n let mut pipeline = pipeline.start_async(Some(config)).await?;\n\n\n\n // show stream info\n\n let profile = pipeline.profile();\n\n for (idx, stream_result) in profile.streams()?.try_into_iter()?.enumerate() {\n\n let stream = stream_result?;\n\n println!(\"stream data {}: {:#?}\", idx, stream.get_data()?);\n", "file_path": "tests/device_test.rs", "rank": 11, "score": 88024.08673446285 }, { "content": "#[test]\n\nfn sync_test() -> Result<()> {\n\n // lock global mutex\n\n let mut counter = GLOBAL_MUTEX.lock().unwrap();\n\n\n\n // init pipeline\n\n let pipeline = Pipeline::new()?;\n\n let config = Config::new()?\n\n .enable_stream(StreamKind::Depth, 0, 640, 0, Format::Z16, 30)?\n\n .enable_stream(StreamKind::Color, 0, 640, 0, Format::Rgb8, 30)?;\n\n let mut pipeline = pipeline.start(Some(config))?;\n\n let profile = pipeline.profile();\n\n\n\n // show stream info\n\n for (idx, stream_result) in profile.streams()?.try_into_iter()?.enumerate() {\n\n let stream = stream_result?;\n\n println!(\"stream data {}: {:#?}\", idx, stream.get_data()?);\n\n }\n\n\n\n // process frames\n\n for _ in 0..16 {\n", "file_path": "tests/device_test.rs", "rank": 12, "score": 88024.08673446285 }, { "content": " /// The marker traits of all kinds of sensor except [Any](Any).\n\n pub trait NonAnySensorKind\n\n where\n\n Self: SensorKind,\n\n {\n\n const EXTENSION: Extension;\n\n }\n\n\n\n #[derive(Debug)]\n\n pub struct Any;\n\n impl SensorKind for Any {}\n\n\n\n #[derive(Debug)]\n\n pub struct Tm2;\n\n impl SensorKind for Tm2 {}\n\n impl NonAnySensorKind for Tm2 {\n\n const EXTENSION: Extension = Extension::Tm2Sensor;\n\n }\n\n\n\n #[derive(Debug)]\n\n pub struct Pose;\n", "file_path": "src/sensor.rs", "rank": 13, "score": 87888.47911420702 }, { "content": " /// The marker traits for frame kinds except 
[Any](Any).\n\n pub trait NonAnyFrameKind\n\n where\n\n Self: FrameKind,\n\n {\n\n const EXTENSION: Extension;\n\n }\n\n\n\n #[derive(Debug)]\n\n pub struct Composite;\n\n\n\n impl FrameKind for Composite {}\n\n impl NonAnyFrameKind for Composite {\n\n const EXTENSION: Extension = Extension::CompositeFrame;\n\n }\n\n\n\n #[derive(Debug)]\n\n pub struct Any;\n\n\n\n impl FrameKind for Any {}\n\n\n", "file_path": "src/frame.rs", "rank": 14, "score": 87888.47911420702 }, { "content": " /// The marker traits of all kinds of StreamProfile.\n\n pub trait StreamProfileKind {}\n\n\n", "file_path": "src/stream_profile.rs", "rank": 15, "score": 85096.1650204905 }, { "content": " pub trait ProcessingBlockKind {}\n", "file_path": "src/processing_block.rs", "rank": 16, "score": 85091.73933574633 }, { "content": " /// The marker traits of all kinds of StreamProfile except [Any](Any).\n\n pub trait NonAnyStreamProfileKind\n\n where\n\n Self: StreamProfileKind,\n\n {\n\n const EXTENSION: Extension;\n\n }\n\n\n\n #[derive(Debug)]\n\n pub struct Any;\n\n impl StreamProfileKind for Any {}\n\n\n\n #[derive(Debug)]\n\n pub struct Video;\n\n impl StreamProfileKind for Video {}\n\n impl NonAnyStreamProfileKind for Video {\n\n const EXTENSION: Extension = Extension::VideoProfile;\n\n }\n\n\n\n #[derive(Debug)]\n\n pub struct Motion;\n", "file_path": "src/stream_profile.rs", "rank": 17, "score": 82571.21113076212 }, { "content": " pub trait ExtendableProcessingBlockKind\n\n where\n\n Self: ProcessingBlockKind,\n\n {\n\n const EXTENSION: Extension;\n\n }\n\n\n\n #[derive(Debug)]\n\n pub struct Any;\n\n impl ProcessingBlockKind for Any {}\n\n\n\n #[derive(Debug)]\n\n pub struct DecimationFilter;\n\n impl ProcessingBlockKind for DecimationFilter {}\n\n impl ExtendableProcessingBlockKind for DecimationFilter {\n\n const EXTENSION: Extension = Extension::DecimationFilter;\n\n }\n\n\n\n #[derive(Debug)]\n\n pub struct ThresholdFilter;\n", "file_path": "src/processing_block.rs", "rank": 18, 
"score": 82566.84863325785 }, { "content": "fn probe_library(pkg_name: &str) -> Result<Library> {\n\n let package = pkg_config::probe_library(pkg_name)?;\n\n let lib = Library {\n\n pkg_name: pkg_name.to_owned(),\n\n libs: package.libs,\n\n link_paths: package.link_paths,\n\n framework_paths: package.framework_paths,\n\n include_paths: package.include_paths,\n\n version: package.version,\n\n prefix: PathBuf::from(pkg_config::get_variable(pkg_name, \"prefix\")?),\n\n libdir: PathBuf::from(pkg_config::get_variable(pkg_name, \"libdir\")?),\n\n };\n\n Ok(lib)\n\n}\n\n\n", "file_path": "realsense-sys/build.rs", "rank": 19, "score": 68549.45348149781 }, { "content": "#[cfg(not(all(feature = \"with-image\", feature = \"with-nalgebra\")))]\n\nfn main() {\n\n panic!(\"please enable with-image and with-nalgebra features to run this example\");\n\n}\n", "file_path": "examples/capture_images.rs", "rank": 20, "score": 49205.12664882545 }, { "content": "#[cfg(not(all(feature = \"with-image\", feature = \"with-nalgebra\")))]\n\nfn main() {\n\n panic!(\"please enable with-image and with-nalgebra features to run this example\");\n\n}\n", "file_path": "examples/capture_images_async.rs", "rank": 21, "score": 47471.60192298738 }, { "content": "#[test]\n\nfn bindgen_test_layout_rs2_quaternion() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rs2_quaternion>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(rs2_quaternion))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rs2_quaternion>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(rs2_quaternion))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rs2_quaternion>())).x as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rs2_quaternion),\n\n \"::\",\n\n stringify!(x)\n\n )\n", "file_path": "realsense-sys/bindings/bindings.rs", "rank": 22, "score": 42121.853583637974 }, { "content": "#[test]\n\nfn bindgen_test_layout_rs2_vertex() {\n\n assert_eq!(\n\n 
::std::mem::size_of::<rs2_vertex>(),\n\n 12usize,\n\n concat!(\"Size of: \", stringify!(rs2_vertex))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rs2_vertex>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(rs2_vertex))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rs2_vertex>())).xyz as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rs2_vertex),\n\n \"::\",\n\n stringify!(xyz)\n\n )\n\n );\n\n}\n\n#[doc = \" \\\\brief Pixel location within 2D image. (0,0) is the topmost, left corner. Positive X is right, positive Y is down\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct rs2_pixel {\n\n pub ij: [::std::os::raw::c_int; 2usize],\n\n}\n", "file_path": "realsense-sys/bindings/bindings.rs", "rank": 23, "score": 42121.853583637974 }, { "content": "#[test]\n\nfn bindgen_test_layout_rs2_pixel() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rs2_pixel>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(rs2_pixel))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rs2_pixel>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(rs2_pixel))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rs2_pixel>())).ij as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rs2_pixel),\n\n \"::\",\n\n stringify!(ij)\n\n )\n\n );\n\n}\n\n#[doc = \" \\\\brief 3D vector in Euclidean coordinate space\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct rs2_vector {\n\n pub x: f32,\n\n pub y: f32,\n\n pub z: f32,\n\n}\n", "file_path": "realsense-sys/bindings/bindings.rs", "rank": 24, "score": 42121.853583637974 }, { "content": "#[test]\n\nfn bindgen_test_layout_rs2_intrinsics() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rs2_intrinsics>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(rs2_intrinsics))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rs2_intrinsics>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(rs2_intrinsics))\n\n );\n\n 
assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rs2_intrinsics>())).width as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rs2_intrinsics),\n\n \"::\",\n\n stringify!(width)\n\n )\n", "file_path": "realsense-sys/bindings/bindings.rs", "rank": 25, "score": 42121.853583637974 }, { "content": "#[test]\n\nfn bindgen_test_layout_rs2_extrinsics() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rs2_extrinsics>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(rs2_extrinsics))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rs2_extrinsics>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(rs2_extrinsics))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rs2_extrinsics>())).rotation as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rs2_extrinsics),\n\n \"::\",\n\n stringify!(rotation)\n\n )\n", "file_path": "realsense-sys/bindings/bindings.rs", "rank": 26, "score": 42121.853583637974 }, { "content": "#[test]\n\nfn bindgen_test_layout_rs2_pose() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rs2_pose>(),\n\n 84usize,\n\n concat!(\"Size of: \", stringify!(rs2_pose))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rs2_pose>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(rs2_pose))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rs2_pose>())).translation as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rs2_pose),\n\n \"::\",\n\n stringify!(translation)\n\n )\n", "file_path": "realsense-sys/bindings/bindings.rs", "rank": 27, "score": 42121.853583637974 }, { "content": "#[test]\n\nfn bindgen_test_layout_rs2_vector() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rs2_vector>(),\n\n 12usize,\n\n concat!(\"Size of: \", stringify!(rs2_vector))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rs2_vector>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(rs2_vector))\n\n );\n\n assert_eq!(\n\n unsafe { 
&(*(::std::ptr::null::<rs2_vector>())).x as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rs2_vector),\n\n \"::\",\n\n stringify!(x)\n\n )\n", "file_path": "realsense-sys/bindings/bindings.rs", "rank": 28, "score": 42121.853583637974 }, { "content": "#[test]\n\nfn bindgen_test_layout_rs2_dsm_params() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rs2_dsm_params>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(rs2_dsm_params))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rs2_dsm_params>(),\n\n 8usize,\n\n concat!(\"Alignment of \", stringify!(rs2_dsm_params))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rs2_dsm_params>())).timestamp as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rs2_dsm_params),\n\n \"::\",\n\n stringify!(timestamp)\n\n )\n", "file_path": "realsense-sys/bindings/bindings.rs", "rank": 29, "score": 41076.64905350607 }, { "content": "#[test]\n\nfn bindgen_test_layout_rs2_motion_device_intrinsic() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rs2_motion_device_intrinsic>(),\n\n 72usize,\n\n concat!(\"Size of: \", stringify!(rs2_motion_device_intrinsic))\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rs2_motion_device_intrinsic>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(rs2_motion_device_intrinsic))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::std::ptr::null::<rs2_motion_device_intrinsic>())).data as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rs2_motion_device_intrinsic),\n\n \"::\",\n", "file_path": "realsense-sys/bindings/bindings.rs", "rank": 30, "score": 40118.97124578969 }, { "content": "pub use crate::kind::{Format, StreamKind};\n\n#[cfg(feature = \"with-image\")]\n\npub use image::{\n\n buffer::ConvertBuffer,\n\n flat::{FlatSamples, SampleLayout},\n\n Bgr, Bgra, ColorType, DynamicImage, ImageBuffer, Luma, Rgb, Rgba,\n\n};\n\n#[cfg(feature = \"with-nalgebra\")]\n\npub use 
nalgebra::{\n\n Isometry3, MatrixMN, Quaternion, Translation3, Unit, UnitQuaternion, Vector3, U3,\n\n};\n\npub use num_derive::FromPrimitive;\n\npub use num_traits::FromPrimitive;\n\npub use safe_transmute::guard::PedanticGuard;\n\n#[cfg(any(unix))]\n\npub use std::os::unix::ffi::OsStrExt;\n\n#[cfg(any(windows))]\n\npub use std::os::windows::ffi::OsStrExt;\n\npub use std::{\n\n borrow::Borrow,\n", "file_path": "src/common.rs", "rank": 31, "score": 35362.54713902267 }, { "content": " collections::HashMap,\n\n convert::{AsMut, AsRef},\n\n error::Error as StdError,\n\n ffi::{CStr, CString},\n\n fmt::{Debug, Display, Formatter, Result as FormatResult},\n\n iter::FusedIterator,\n\n marker::PhantomData,\n\n mem::MaybeUninit,\n\n ops::{Deref, DerefMut},\n\n os::raw::{c_int, c_uchar, c_uint, c_void},\n\n path::Path,\n\n ptr::NonNull,\n\n slice,\n\n sync::atomic::{AtomicPtr, Ordering},\n\n time::Duration,\n\n};\n", "file_path": "src/common.rs", "rank": 32, "score": 35356.408999145795 }, { "content": "type Fallible<T> = Result<T, anyhow::Error>;\n\n\n\nlazy_static! 
{\n\n /// this lock prevenst multiple tests control RealSense device concurrently\n\n static ref GLOBAL_MUTEX: Mutex<usize> = Mutex::new(0);\n\n}\n\n\n", "file_path": "tests/device_test.rs", "rank": 33, "score": 34742.85980222489 }, { "content": "fn get_version_from_header_dir<P>(dir: P) -> Option<Version>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let header_path = dir.as_ref().join(\"rs.h\");\n\n\n\n let mut major_opt: Option<String> = None;\n\n let mut minor_opt: Option<String> = None;\n\n let mut patch_opt: Option<String> = None;\n\n let mut build_opt: Option<String> = None;\n\n\n\n let mut reader = BufReader::new(File::open(header_path).ok()?);\n\n loop {\n\n let mut line = String::new();\n\n match reader.read_line(&mut line) {\n\n Ok(0) | Err(_) => return None,\n\n _ => (),\n\n }\n\n\n\n const PREFIX: &str = \"#define RS2_API_\";\n", "file_path": "realsense-sys/build.rs", "rank": 34, "score": 30947.767777003733 }, { "content": "fn get_error_message<'a>(ptr: NonNull<realsense_sys::rs2_error>) -> &'a str {\n\n unsafe {\n\n let ptr = realsense_sys::rs2_get_error_message(ptr.as_ptr());\n\n CStr::from_ptr(ptr).to_str().unwrap()\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 35, "score": 28607.447005463226 }, { "content": "//! Defines the sensor context.\n\n\n\nuse crate::{\n\n common::*,\n\n device_hub::DeviceHub,\n\n device_list::DeviceList,\n\n error::{ErrorChecker, Result as RsResult},\n\n};\n\n\n\n#[derive(Debug)]\n\npub struct Context {\n\n pub(crate) ptr: NonNull<realsense_sys::rs2_context>,\n\n}\n\n\n\nimpl Context {\n\n /// Create an instance.\n\n pub fn new() -> RsResult<Self> {\n\n let ptr = {\n\n let mut checker = ErrorChecker::new();\n\n let context = unsafe {\n", "file_path": "src/context.rs", "rank": 36, "score": 25.49384916712922 }, { "content": "//! 
Configuration type for [Pipeline](crate::pipeline::Pipeline).\n\n\n\nuse crate::{\n\n common::*,\n\n error::{ErrorChecker, Result as RsResult},\n\n kind::{Format, StreamKind},\n\n};\n\n\n\n/// The pipeline configuration that will be consumed by [Pipeline::start()](crate::pipeline::Pipeline::start).\n\n#[derive(Debug)]\n\npub struct Config {\n\n pub(crate) ptr: NonNull<realsense_sys::rs2_config>,\n\n}\n\n\n\nimpl Config {\n\n /// Create an instance.\n\n pub fn new() -> RsResult<Self> {\n\n let ptr = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_create_config(checker.inner_mut_ptr());\n", "file_path": "src/config.rs", "rank": 37, "score": 25.24437967186782 }, { "content": "//! Defines the device types.\n\n\n\nuse crate::{\n\n common::*,\n\n error::{ErrorChecker, Result as RsResult},\n\n kind::CameraInfo,\n\n sensor_list::SensorList,\n\n};\n\n\n\n/// Represents a device instance.\n\n#[derive(Debug)]\n\npub struct Device {\n\n pub(crate) ptr: NonNull<realsense_sys::rs2_device>,\n\n}\n\n\n\nimpl Device {\n\n /// Discover available sensors on device.\n\n pub fn query_sensors(&self) -> RsResult<SensorList> {\n\n let list = unsafe {\n\n let mut checker = ErrorChecker::new();\n", "file_path": "src/device.rs", "rank": 38, "score": 24.846672472726983 }, { "content": "//! Defines the type of device hubs.\n\n\n\nuse crate::{\n\n common::*,\n\n device::Device,\n\n error::{ErrorChecker, Result as RsResult},\n\n};\n\n\n\n/// Represents a collection of devices.\n\n#[derive(Debug)]\n\npub struct DeviceHub {\n\n pub(crate) ptr: NonNull<realsense_sys::rs2_device_hub>,\n\n}\n\n\n\nimpl DeviceHub {\n\n /// Block and wait until a device is available.\n\n pub fn wait_for_device(&self) -> RsResult<Device> {\n\n let device = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_device_hub_wait_for_device(\n", "file_path": "src/device_hub.rs", "rank": 39, "score": 23.93959396326739 }, { "content": "//! 
Defines the iterable list of sensors.\n\n\n\nuse crate::{\n\n common::*,\n\n error::{ErrorChecker, Result as RsResult},\n\n sensor::{marker as sensor_marker, Sensor},\n\n};\n\n\n\n/// An iterable list of sensors.\n\n#[derive(Debug)]\n\npub struct SensorList {\n\n ptr: NonNull<realsense_sys::rs2_sensor_list>,\n\n}\n\n\n\nimpl SensorList {\n\n /// Gets the sensor instance at given index.\n\n ///\n\n /// It returns error if index is out of bound given by [SensorList::len].\n\n pub fn get(&mut self, index: usize) -> RsResult<Self> {\n\n let sensor = unsafe {\n", "file_path": "src/sensor_list.rs", "rank": 40, "score": 23.435473695922372 }, { "content": "//! Defines the error type used by the crate.\n\n\n\nuse crate::common::*;\n\n\n\n#[derive(Debug)]\n\npub(crate) struct ErrorChecker {\n\n checked: bool,\n\n ptr: *mut realsense_sys::rs2_error,\n\n}\n\n\n\nimpl ErrorChecker {\n\n pub fn new() -> ErrorChecker {\n\n ErrorChecker {\n\n checked: false,\n\n ptr: std::ptr::null_mut(),\n\n }\n\n }\n\n\n\n pub fn inner_mut_ptr(&mut self) -> *mut *mut realsense_sys::rs2_error {\n\n &mut self.ptr as *mut _\n", "file_path": "src/error.rs", "rank": 41, "score": 23.02431715322398 }, { "content": "//! Defines the profile type of streams.\n\n\n\nuse crate::{\n\n base::{Extrinsics, Intrinsics, MotionIntrinsics, Resolution, StreamProfileData},\n\n common::*,\n\n error::{ErrorChecker, Result as RsResult},\n\n kind::{Extension, Format, StreamKind},\n\n};\n\n\n\n/// Marker traits and types for [StreamProfile].\n\npub mod marker {\n\n use super::*;\n\n\n\n /// The marker traits of all kinds of StreamProfile.\n", "file_path": "src/stream_profile.rs", "rank": 42, "score": 22.640295552958193 }, { "content": "//! 
Defines the profile type of pipeline.\n\n\n\nuse crate::{\n\n common::*,\n\n device::Device,\n\n error::{ErrorChecker, Result as RsResult},\n\n stream_profile_list::StreamProfileList,\n\n};\n\n\n\n#[derive(Debug)]\n\npub struct PipelineProfile {\n\n ptr: NonNull<realsense_sys::rs2_pipeline_profile>,\n\n}\n\n\n\nimpl PipelineProfile {\n\n /// Gets corresponding device of pipeline.\n\n pub fn device(&self) -> RsResult<Device> {\n\n let ptr = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_pipeline_profile_get_device(\n", "file_path": "src/pipeline_profile.rs", "rank": 43, "score": 22.414286163409034 }, { "content": "//! Defines the pipeline type.\n\n\n\nuse crate::{\n\n base::DEFAULT_TIMEOUT,\n\n common::*,\n\n config::Config,\n\n context::Context,\n\n error::{Error as RsError, ErrorChecker, Result as RsResult},\n\n frame::{marker::Composite, Frame, GenericFrame},\n\n pipeline_profile::PipelineProfile,\n\n};\n\n\n\n/// Marker traits and types for [Pipeline].\n\npub mod marker {\n\n use super::*;\n\n\n\n /// Marker trait for pipeline marker types.\n", "file_path": "src/pipeline.rs", "rank": 44, "score": 22.10847033905469 }, { "content": " pub fn len(&self) -> RsResult<usize> {\n\n let len = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let len = realsense_sys::rs2_embedded_frames_count(\n\n self.ptr.as_ptr(),\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n len as usize\n\n };\n\n Ok(len)\n\n }\n\n\n\n /// Checks if the composite-frame contains no sub-frames.\n\n pub fn is_empty(&self) -> RsResult<bool> {\n\n Ok(self.len()? == 0)\n\n }\n\n\n\n /// Gets the frame in frameset by index.\n\n ///\n", "file_path": "src/frame.rs", "rank": 45, "score": 22.05774146166725 }, { "content": "//! 
Defines the iterable list of devices.\n\n\n\nuse crate::{\n\n common::*,\n\n device::Device,\n\n error::{ErrorChecker, Result as RsResult},\n\n};\n\n\n\n/// An iterable list of devices.\n\n#[derive(Debug)]\n\npub struct DeviceList {\n\n ptr: NonNull<realsense_sys::rs2_device_list>,\n\n}\n\n\n\nimpl DeviceList {\n\n /// Gets the device at given index.\n\n ///\n\n /// The method returns error if index is out of bound given by [DeviceList::len].\n\n pub fn get(&self, index: usize) -> RsResult<Device> {\n\n let device = unsafe {\n", "file_path": "src/device_list.rs", "rank": 46, "score": 21.917227394923227 }, { "content": "//! Defines the sensor type.\n\n\n\nuse crate::{\n\n common::*,\n\n device::Device,\n\n error::{ErrorChecker, Result as RsResult},\n\n kind::{CameraInfo, Extension, Rs2Option},\n\n options::ToOptions,\n\n processing_block_list::ProcessingBlockList,\n\n stream_profile_list::StreamProfileList,\n\n};\n\n\n\n/// Marker traits and types for [Sensor].\n\npub mod marker {\n\n use super::*;\n\n\n\n /// The marker traits of all kinds of sensor.\n", "file_path": "src/sensor.rs", "rank": 47, "score": 21.768054868743278 }, { "content": "//! Defines the iterable list of stream profiles.\n\n\n\nuse crate::{\n\n common::*,\n\n error::{ErrorChecker, Result as RsResult},\n\n stream_profile::{marker as stream_marker, StreamProfile},\n\n};\n\n\n\n/// An iterable list of streams.\n\n#[derive(Debug)]\n\npub struct StreamProfileList {\n\n ptr: NonNull<realsense_sys::rs2_stream_profile_list>,\n\n}\n\n\n\nimpl StreamProfileList {\n\n /// Gets the stream profile at given index.\n\n ///\n\n /// The method returns error if the index is out of bound given by [StreamProfileList::len].\n\n pub fn get(&mut self, index: usize) -> RsResult<StreamProfile<stream_marker::Any>> {\n\n let profile = unsafe {\n", "file_path": "src/stream_profile_list.rs", "rank": 48, "score": 21.71003153184544 }, { "content": "//! 
Defines the iterable list of processing blocks.\n\n\n\nuse crate::{\n\n common::*,\n\n error::{ErrorChecker, Result as RsResult},\n\n processing_block::{marker as processing_block_marker, ProcessingBlock},\n\n};\n\n\n\n/// The iterable list of [ProcessingBlock](ProcessingBlock)s.\n\n#[derive(Debug)]\n\npub struct ProcessingBlockList {\n\n ptr: NonNull<realsense_sys::rs2_processing_block_list>,\n\n}\n\n\n\nimpl ProcessingBlockList {\n\n /// Retrieves the [ProcessingBlock](ProcessingBlock) instance at index.\n\n pub fn get(&mut self, index: usize) -> RsResult<ProcessingBlock<processing_block_marker::Any>> {\n\n let block = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_get_processing_block(\n", "file_path": "src/processing_block_list.rs", "rank": 49, "score": 20.743449783837963 }, { "content": "//! Defines the frame type including sensor data.\n\n\n\n#[cfg(feature = \"with-image\")]\n\nuse crate::base::Rs2Image;\n\nuse crate::{\n\n base::{PoseData, Resolution, StreamProfileData},\n\n common::*,\n\n error::{ErrorChecker, Result as RsResult},\n\n kind::{Extension, Format, FrameMetaDataValue, StreamKind, TimestampDomain},\n\n sensor::{marker as sensor_marker, Sensor},\n\n stream_profile::{marker as stream_marker, StreamProfile},\n\n};\n\n\n\n/// Marker types and traits for [Frame].\n\npub mod marker {\n\n use super::*;\n\n\n\n /// The marker trait for frame kinds.\n", "file_path": "src/frame.rs", "rank": 50, "score": 20.637821954092356 }, { "content": " }\n\n }\n\n\n\n /// Checks if the list is empty.\n\n pub fn is_empty(&mut self) -> RsResult<bool> {\n\n Ok(self.len()? 
== 0)\n\n }\n\n\n\n /// Converts to iterator type.\n\n pub fn try_into_iter(mut self) -> RsResult<ProcessingBlockListIntoIter> {\n\n let len = self.len()?;\n\n let ptr = unsafe { self.take() };\n\n let iter = ProcessingBlockListIntoIter { len, index: 0, ptr };\n\n Ok(iter)\n\n }\n\n\n\n pub(crate) unsafe fn take(self) -> NonNull<realsense_sys::rs2_processing_block_list> {\n\n let ptr = self.ptr;\n\n std::mem::forget(self);\n\n ptr\n", "file_path": "src/processing_block_list.rs", "rank": 51, "score": 20.437097007026217 }, { "content": " P: AsRef<Path>,\n\n {\n\n let cstring = CString::new(file.as_ref().as_os_str().as_bytes()).unwrap();\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n realsense_sys::rs2_context_add_device(\n\n self.ptr.as_ptr(),\n\n cstring.as_ptr(),\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n }\n\n Ok(())\n\n }\n\n\n\n pub(crate) unsafe fn unsafe_clone(&self) -> Self {\n\n Self { ptr: self.ptr }\n\n }\n\n\n\n // /// Remove device file from context. (unimplemented)\n", "file_path": "src/context.rs", "rank": 53, "score": 20.331107957673183 }, { "content": " checker.check()?;\n\n Ok(len as usize)\n\n }\n\n }\n\n\n\n /// Checks if the profile list is empty.\n\n pub fn is_empty(&mut self) -> RsResult<bool> {\n\n Ok(self.len()? 
== 0)\n\n }\n\n\n\n /// Turns into iterable [StreamProfileListIntoIter] instance.\n\n pub fn try_into_iter(mut self) -> RsResult<StreamProfileListIntoIter> {\n\n let len = self.len()?;\n\n let ptr = unsafe { self.take() };\n\n let iter = StreamProfileListIntoIter {\n\n len,\n\n index: 0,\n\n ptr,\n\n fused: len == 0,\n\n };\n", "file_path": "src/stream_profile_list.rs", "rank": 54, "score": 20.149839849627188 }, { "content": " let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_create_sensor(\n\n self.ptr.as_ptr(),\n\n index as c_int,\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n Self::from_ptr(NonNull::new(ptr as *mut _).unwrap())\n\n };\n\n Ok(sensor)\n\n }\n\n\n\n /// Gets the number of sensors in list.\n\n pub fn len(&mut self) -> RsResult<usize> {\n\n let len = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let len =\n\n realsense_sys::rs2_get_sensors_count(self.ptr.as_ptr(), checker.inner_mut_ptr());\n\n checker.check()?;\n\n len\n", "file_path": "src/sensor_list.rs", "rank": 55, "score": 20.02637980751939 }, { "content": " let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_create_device(\n\n self.ptr.as_ptr(),\n\n index as c_int,\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n Device::from_ptr(NonNull::new(ptr).unwrap())\n\n };\n\n Ok(device)\n\n }\n\n\n\n /// Gets the length of the list.\n\n pub fn len(&self) -> RsResult<usize> {\n\n let len = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let len =\n\n realsense_sys::rs2_get_device_count(self.ptr.as_ptr(), checker.inner_mut_ptr());\n\n checker.check()?;\n\n len\n", "file_path": "src/device_list.rs", "rank": 56, "score": 19.89928311327434 }, { "content": " let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_config_enable_device(\n\n self.ptr.as_ptr(),\n\n serial.as_ptr(),\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n ptr\n\n };\n\n Ok(self)\n\n }\n\n\n\n /// Enable device from a file 
path.\n\n pub fn enable_device_from_file<P>(self, file: &CStr) -> RsResult<Self> {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_config_enable_device_from_file(\n\n self.ptr.as_ptr(),\n\n file.as_ptr(),\n\n checker.inner_mut_ptr(),\n", "file_path": "src/config.rs", "rank": 57, "score": 19.42501160946089 }, { "content": " let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_get_stream_profile(\n\n self.ptr.as_ptr(),\n\n index as c_int,\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n StreamProfile::from_parts(NonNull::new(ptr as *mut _).unwrap(), false)\n\n };\n\n Ok(profile)\n\n }\n\n\n\n /// Gets the length of list.\n\n pub fn len(&mut self) -> RsResult<usize> {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let len = realsense_sys::rs2_get_stream_profiles_count(\n\n self.ptr.as_ptr(),\n\n checker.inner_mut_ptr(),\n\n );\n", "file_path": "src/stream_profile_list.rs", "rank": 58, "score": 19.023896757498388 }, { "content": " checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n DeviceList::from_ptr(NonNull::new(list).unwrap())\n\n },\n\n None => unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let list =\n\n realsense_sys::rs2_query_devices(self.ptr.as_ptr(), checker.inner_mut_ptr());\n\n checker.check()?;\n\n DeviceList::from_ptr(NonNull::new(list).unwrap())\n\n },\n\n };\n\n\n\n Ok(list)\n\n }\n\n\n\n /// Add device file to context.\n\n pub fn add_device<P>(&mut self, file: P) -> RsResult<()>\n\n where\n", "file_path": "src/context.rs", "rank": 59, "score": 18.992720306174746 }, { "content": " /// The method throws error if index is out of bound given by [Frame::len].\n\n pub fn get(&self, index: usize) -> RsResult<Option<Frame<marker::Any>>> {\n\n let len = self.len()?;\n\n if index >= len {\n\n return Ok(None);\n\n }\n\n\n\n let frame = unsafe {\n\n // extract frame\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_extract_frame(\n\n 
self.ptr.as_ptr(),\n\n index as c_int,\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n\n\n Frame::from_ptr(NonNull::new(ptr).unwrap())\n\n };\n\n Ok(Some(frame))\n", "file_path": "src/frame.rs", "rank": 60, "score": 18.88649502685722 }, { "content": "\n\n pub fn is_info_supported(&self, kind: CameraInfo) -> RsResult<bool> {\n\n let val = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let val = realsense_sys::rs2_supports_sensor_info(\n\n self.ptr.as_ptr(),\n\n kind as realsense_sys::rs2_camera_info,\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n val\n\n };\n\n Ok(val != 0)\n\n }\n\n\n\n pub(crate) unsafe fn take(self) -> NonNull<realsense_sys::rs2_sensor> {\n\n let ptr = self.ptr;\n\n std::mem::forget(self);\n\n ptr\n\n }\n", "file_path": "src/sensor.rs", "rank": 61, "score": 18.721994657434596 }, { "content": "\n\n pub(crate) unsafe fn from_ptr(ptr: NonNull<realsense_sys::rs2_sensor>) -> Self {\n\n Self {\n\n ptr,\n\n _phantom: PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl Sensor<marker::Any> {\n\n pub fn is_extendable_to<Kind>(&self) -> RsResult<bool>\n\n where\n\n Kind: marker::NonAnySensorKind,\n\n {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let val = realsense_sys::rs2_is_sensor_extendable_to(\n\n self.ptr.as_ptr(),\n\n Kind::EXTENSION as realsense_sys::rs2_extension,\n\n checker.inner_mut_ptr(),\n", "file_path": "src/sensor.rs", "rank": 62, "score": 18.48120837014998 }, { "content": "//! 
Defines the queue type of frames.\n\n\n\nuse crate::{\n\n base::DEFAULT_TIMEOUT,\n\n common::*,\n\n error::{Error as RsError, ErrorChecker, Result as RsResult},\n\n frame::{\n\n marker::{Any, FrameKind},\n\n Frame, GenericFrame,\n\n },\n\n};\n\n\n\n/// The queue of frames.\n\n#[derive(Debug)]\n\npub struct FrameQueue {\n\n pub(crate) ptr: NonNull<realsense_sys::rs2_frame_queue>,\n\n}\n\n\n\nimpl FrameQueue {\n\n /// Creates an instance with given capacity.\n", "file_path": "src/frame_queue.rs", "rank": 63, "score": 18.216917871658488 }, { "content": " self.ptr.as_ptr(),\n\n index as c_int,\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n ProcessingBlock::new_from_ptr(NonNull::new(ptr).unwrap())?\n\n };\n\n Ok(block)\n\n }\n\n\n\n /// Returns the length of list.\n\n pub fn len(&mut self) -> RsResult<usize> {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let val = realsense_sys::rs2_get_recommended_processing_blocks_count(\n\n self.ptr.as_ptr(),\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n Ok(val as usize)\n", "file_path": "src/processing_block_list.rs", "rank": 64, "score": 18.12916660314189 }, { "content": " };\n\n Ok(len as usize)\n\n }\n\n\n\n /// Checks if the list is empty.\n\n pub fn is_empty(&mut self) -> RsResult<bool> {\n\n Ok(self.len()? 
== 0)\n\n }\n\n\n\n /// Turns into [SensorListIntoIter] iterable type.\n\n pub fn try_into_iter(mut self) -> RsResult<SensorListIntoIter> {\n\n let len = self.len()?;\n\n let ptr = unsafe { self.take() };\n\n let iter = SensorListIntoIter {\n\n len,\n\n index: 0,\n\n ptr,\n\n fused: len == 0,\n\n };\n\n Ok(iter)\n", "file_path": "src/sensor_list.rs", "rank": 66, "score": 18.025169538143324 }, { "content": "\n\n/// The mod collects common used traits from this crate.\n\npub mod prelude {\n\n pub use crate::frame::{DepthFrame, DisparityFrame, GenericFrame, VideoFrame};\n\n}\n\n\n\n#[cfg(feature = \"with-image\")]\n\npub use base::Rs2Image;\n\npub use base::{Extrinsics, Intrinsics, MotionIntrinsics, PoseData, Resolution, StreamProfileData};\n\npub use config::Config;\n\npub use context::Context;\n\npub use device::Device;\n\npub use device_hub::DeviceHub;\n\npub use device_list::{DeviceList, DeviceListIntoIter};\n\npub use error::{Error, Result};\n\npub use frame::{\n\n CompositeFrameIntoIter, DepthFrame, DisparityFrame, ExtendedFrame, Frame, GenericFrame,\n\n VideoFrame,\n\n};\n\npub use frame_queue::FrameQueue;\n", "file_path": "src/lib.rs", "rank": 67, "score": 17.6872527708836 }, { "content": " Ok(block)\n\n }\n\n\n\n pub(crate) unsafe fn new_from_ptr(\n\n ptr: NonNull<realsense_sys::rs2_processing_block>,\n\n ) -> RsResult<Self> {\n\n Self::new_from_ptr_and_capacity(ptr, 1)\n\n }\n\n}\n\n\n\nimpl ProcessingBlock<marker::Any> {\n\n pub fn is_extendable_to<Kind>(&self) -> RsResult<bool>\n\n where\n\n Kind: marker::ExtendableProcessingBlockKind,\n\n {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let val = realsense_sys::rs2_is_processing_block_extendable_to(\n\n self.ptr.as_ptr(),\n\n Kind::EXTENSION as realsense_sys::rs2_extension,\n", "file_path": "src/processing_block.rs", "rank": 68, "score": 17.428943038498264 }, { "content": " };\n\n Ok(processing_block)\n\n }\n\n}\n\n\n\nimpl ProcessingBlock<marker::Colorizer> {\n\n pub fn create() -> 
RsResult<Self> {\n\n let processing_block = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_create_colorizer(checker.inner_mut_ptr());\n\n checker.check()?;\n\n Self::new_from_ptr(NonNull::new(ptr).unwrap())?\n\n };\n\n Ok(processing_block)\n\n }\n\n\n\n pub fn with_options(color_scheme: ColorScheme) -> RsResult<Self> {\n\n let processing_block = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_create_colorizer(checker.inner_mut_ptr());\n", "file_path": "src/processing_block.rs", "rank": 69, "score": 17.417082666300438 }, { "content": "//! Defines the processing block type.\n\n\n\nuse crate::{\n\n base::StreamProfileData,\n\n common::*,\n\n error::{ErrorChecker, Result as RsResult},\n\n frame::{marker as frame_marker, ExtendedFrame, Frame, GenericFrame},\n\n frame_queue::FrameQueue,\n\n kind::{ColorScheme, Extension, HoleFillingMode, PersistenceControl, Rs2Option, StreamKind},\n\n options::ToOptions,\n\n};\n\n\n\npub mod marker {\n\n use super::*;\n\n\n", "file_path": "src/processing_block.rs", "rank": 70, "score": 17.353503124892047 }, { "content": " };\n\n Ok(len as usize)\n\n }\n\n\n\n /// Turns into [DeviceListIntoIter] instance that implements [IntoIterator] trait.\n\n pub fn try_into_iter(self) -> RsResult<DeviceListIntoIter> {\n\n let len = self.len()?;\n\n let ptr = unsafe { self.take() };\n\n let iter = DeviceListIntoIter {\n\n index: 0,\n\n len,\n\n ptr,\n\n fused: len == 0,\n\n };\n\n Ok(iter)\n\n }\n\n\n\n /// Checks if the device list is empty.\n\n pub fn is_empty(&self) -> RsResult<bool> {\n\n Ok(self.len()? 
== 0)\n", "file_path": "src/device_list.rs", "rank": 71, "score": 17.283661517290074 }, { "content": " }\n\n\n\n pub fn count(&self) -> RsResult<&CStr> {\n\n self.info(CameraInfo::Count)\n\n }\n\n\n\n pub fn info(&self, kind: CameraInfo) -> RsResult<&CStr> {\n\n let ptr = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_get_sensor_info(\n\n self.ptr.as_ptr(),\n\n kind as realsense_sys::rs2_camera_info,\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n ptr\n\n };\n\n let string = unsafe { CStr::from_ptr(ptr) };\n\n Ok(string)\n\n }\n", "file_path": "src/sensor.rs", "rank": 72, "score": 17.199689447272174 }, { "content": " );\n\n let tcs =\n\n safe_transmute::transmute_many::<TextureCoordinate, PedanticGuard>(bytes).unwrap();\n\n debug_assert_eq!(tcs.len(), n_points);\n\n Ok(tcs)\n\n }\n\n }\n\n\n\n /// Gets number of points in frame.\n\n pub fn points_count(&self) -> RsResult<usize> {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let val = realsense_sys::rs2_get_frame_points_count(\n\n self.ptr.as_ptr(),\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n Ok(val as usize)\n\n }\n\n }\n", "file_path": "src/frame.rs", "rank": 73, "score": 17.10812725433125 }, { "content": "\n\n Ok(ExtendedProcessingBlock::Other(frame_any))\n\n }\n\n}\n\n\n\nimpl ProcessingBlock<marker::ThresholdFilter> {\n\n pub fn create() -> RsResult<Self> {\n\n let processing_block = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_create_threshold(checker.inner_mut_ptr());\n\n checker.check()?;\n\n Self::new_from_ptr(NonNull::new(ptr).unwrap())?\n\n };\n\n Ok(processing_block)\n\n }\n\n\n\n pub fn with_options(min_dist: Option<f32>, max_dist: Option<f32>) -> RsResult<Self> {\n\n let processing_block = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_create_threshold(checker.inner_mut_ptr());\n", "file_path": "src/processing_block.rs", "rank": 74, "score": 
16.924966727641475 }, { "content": " let ptr = realsense_sys::rs2_query_sensors(self.ptr.as_ptr(), checker.inner_mut_ptr());\n\n checker.check()?;\n\n SensorList::from_ptr(NonNull::new(ptr).unwrap())\n\n };\n\n Ok(list)\n\n }\n\n\n\n pub fn hardware_reset(&self) -> RsResult<()> {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n realsense_sys::rs2_hardware_reset(self.ptr.as_ptr(), checker.inner_mut_ptr());\n\n checker.check()?;\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn name(&self) -> RsResult<Option<&str>> {\n\n self.info(CameraInfo::Name)\n\n }\n\n\n", "file_path": "src/device.rs", "rank": 75, "score": 16.87076719010233 }, { "content": "}\n\n\n\nimpl ProcessingBlock<marker::DecimationFilter> {\n\n pub fn create() -> RsResult<Self> {\n\n let processing_block = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_create_decimation_filter_block(checker.inner_mut_ptr());\n\n checker.check()?;\n\n Self::new_from_ptr(NonNull::new(ptr).unwrap())?\n\n };\n\n Ok(processing_block)\n\n }\n\n\n\n pub fn with_options(magnitude: f32) -> RsResult<Self> {\n\n let processing_block = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_create_decimation_filter_block(checker.inner_mut_ptr());\n\n checker.check()?;\n\n Self::new_from_ptr(NonNull::new(ptr).unwrap())?\n\n };\n", "file_path": "src/processing_block.rs", "rank": 76, "score": 16.85415982587299 }, { "content": "impl Frame<marker::Any> {\n\n pub fn is_extendable_to<Kind>(&self) -> RsResult<bool>\n\n where\n\n Kind: marker::NonAnyFrameKind,\n\n {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let val = realsense_sys::rs2_is_frame_extendable_to(\n\n self.ptr.as_ptr(),\n\n Kind::EXTENSION as realsense_sys::rs2_extension,\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n Ok(val != 0)\n\n }\n\n }\n\n\n\n pub fn try_extend_to<Kind>(self) -> RsResult<Result<Frame<Kind>, Self>>\n\n where\n\n Kind: marker::NonAnyFrameKind,\n", "file_path": 
"src/frame.rs", "rank": 77, "score": 16.74732218492432 }, { "content": " // pub fn remove_device<P>(&mut self, file: P) -> RsResult<()>\n\n // where\n\n // P: AsRef<Path>,\n\n // {\n\n // todo!();\n\n // }\n\n}\n\n\n\nimpl Drop for Context {\n\n fn drop(&mut self) {\n\n unsafe { realsense_sys::rs2_delete_context(self.ptr.as_ptr()) }\n\n }\n\n}\n\n\n\nunsafe impl Send for Context {}\n", "file_path": "src/context.rs", "rank": 79, "score": 16.597666531006162 }, { "content": " /// It will return error if the attribute is not available on sensor.\n\n pub fn get_option(&self, option: Rs2Option) -> RsResult<f32> {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let val = realsense_sys::rs2_get_option(\n\n self.ptr.as_ptr().cast::<realsense_sys::rs2_options>(),\n\n option as realsense_sys::rs2_option,\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n Ok(val)\n\n }\n\n }\n\n\n\n // pub fn set_option(&mut self, option: Rs2Option, value: f32) -> RsResult<()> {\n\n // unsafe {\n\n // let mut checker = ErrorChecker::new();\n\n // let val = realsense_sys::rs2_set_option(\n\n // self.ptr.as_ptr().cast::<realsense_sys::rs2_options>(),\n\n // option as realsense_sys::rs2_option,\n", "file_path": "src/sensor.rs", "rank": 80, "score": 16.559494352981467 }, { "content": " );\n\n checker.check()?;\n\n ptr\n\n };\n\n Ok(self)\n\n }\n\n\n\n pub(crate) unsafe fn unsafe_clone(&self) -> Self {\n\n Self { ptr: self.ptr }\n\n }\n\n}\n\n\n\nimpl Drop for Config {\n\n fn drop(&mut self) {\n\n unsafe {\n\n realsense_sys::rs2_delete_config(self.ptr.as_ptr());\n\n }\n\n }\n\n}\n\n\n\nunsafe impl Send for Config {}\n", "file_path": "src/config.rs", "rank": 81, "score": 16.515865569668794 }, { "content": "impl ProcessingBlock<marker::HuffmanDepthDecompress> {\n\n pub fn create() -> RsResult<Self> {\n\n let processing_block = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr =\n\n 
realsense_sys::rs2_create_huffman_depth_decompress_block(checker.inner_mut_ptr());\n\n checker.check()?;\n\n Self::new_from_ptr(NonNull::new(ptr).unwrap())?\n\n };\n\n Ok(processing_block)\n\n }\n\n}\n\n\n\nimpl ProcessingBlock<marker::RatesPrinter> {\n\n pub fn create() -> RsResult<Self> {\n\n let processing_block = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_create_rates_printer_block(checker.inner_mut_ptr());\n\n checker.check()?;\n\n Self::new_from_ptr(NonNull::new(ptr).unwrap())?\n", "file_path": "src/processing_block.rs", "rank": 82, "score": 16.470919094368757 }, { "content": " self.ptr.as_ptr(),\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n ptr\n\n };\n\n\n\n let device = unsafe { Device::from_ptr(NonNull::new(ptr).unwrap()) };\n\n Ok(device)\n\n }\n\n\n\n /// Gets iterable list of streams of pipeline.\n\n pub fn streams(&self) -> RsResult<StreamProfileList> {\n\n let ptr = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_pipeline_profile_get_streams(\n\n self.ptr.as_ptr(),\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n", "file_path": "src/pipeline_profile.rs", "rank": 83, "score": 16.368726057396152 }, { "content": " /// This method consumes the pipeline instance and returns pipeline markered inactive.\n\n pub fn stop(self) -> RsResult<Pipeline<marker::Inactive>> {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n realsense_sys::rs2_pipeline_stop(self.ptr.as_ptr(), checker.inner_mut_ptr());\n\n checker.check()?;\n\n }\n\n\n\n let pipeline = unsafe {\n\n let (ptr, context, _) = self.take();\n\n Pipeline {\n\n ptr,\n\n context,\n\n state: marker::Inactive,\n\n }\n\n };\n\n\n\n Ok(pipeline)\n\n }\n\n}\n", "file_path": "src/pipeline.rs", "rank": 84, "score": 16.333082852186987 }, { "content": " pub fn firmware_update_id(&self) -> RsResult<Option<&str>> {\n\n self.info(CameraInfo::FirmwareUpdateId)\n\n }\n\n\n\n pub fn count(&self) -> 
RsResult<Option<&str>> {\n\n self.info(CameraInfo::Count)\n\n }\n\n\n\n pub fn info(&self, kind: CameraInfo) -> RsResult<Option<&str>> {\n\n if !self.is_info_supported(kind)? {\n\n return Ok(None);\n\n }\n\n\n\n let ptr = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_get_device_info(\n\n self.ptr.as_ptr(),\n\n kind as realsense_sys::rs2_camera_info,\n\n checker.inner_mut_ptr(),\n\n );\n", "file_path": "src/device.rs", "rank": 85, "score": 16.16404470111525 }, { "content": " /// Retrieves list of recommended processing blocks.\n\n pub fn recommended_processing_blocks(&self) -> RsResult<ProcessingBlockList> {\n\n let list = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_get_recommended_processing_blocks(\n\n self.ptr.as_ptr(),\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n ProcessingBlockList::from_ptr(NonNull::new(ptr).unwrap())\n\n };\n\n Ok(list)\n\n }\n\n\n\n pub fn name(&self) -> RsResult<&CStr> {\n\n self.info(CameraInfo::Name)\n\n }\n\n\n\n pub fn serial_number(&self) -> RsResult<&CStr> {\n\n self.info(CameraInfo::SerialNumber)\n", "file_path": "src/sensor.rs", "rank": 86, "score": 16.06514705561071 }, { "content": " pub fn with_capacity(capacity: usize) -> RsResult<Self> {\n\n let queue = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr =\n\n realsense_sys::rs2_create_frame_queue(capacity as c_int, checker.inner_mut_ptr());\n\n checker.check()?;\n\n Self::from_ptr(NonNull::new(ptr).unwrap())\n\n };\n\n Ok(queue)\n\n }\n\n\n\n /// Push a frame to the queue.\n\n pub fn enqueue<Kind>(&mut self, frame: Frame<Kind>)\n\n where\n\n Kind: FrameKind,\n\n {\n\n unsafe {\n\n realsense_sys::rs2_enqueue_frame(\n\n frame.ptr.as_ptr(),\n\n self.ptr.cast::<c_void>().as_ptr(),\n", "file_path": "src/frame_queue.rs", "rank": 87, "score": 16.012643351871002 }, { "content": "//! 
Common types and functions.\n\n\n\nuse crate::common::*;\n\n\n\npub const DEFAULT_TIMEOUT: Duration =\n\n Duration::from_millis(realsense_sys::RS2_DEFAULT_TIMEOUT as u64);\n\n\n\n/// The intrinsic parameters for motion devices.\n\npub struct MotionIntrinsics(pub realsense_sys::rs2_motion_device_intrinsic);\n\n\n\nimpl Deref for MotionIntrinsics {\n\n type Target = realsense_sys::rs2_motion_device_intrinsic;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl DerefMut for MotionIntrinsics {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n", "file_path": "src/base.rs", "rank": 88, "score": 15.883169603524792 }, { "content": " checker.check()?;\n\n ptr\n\n };\n\n\n\n // TODO: deallicate this CStr?\n\n let string = unsafe { CStr::from_ptr(ptr).to_str().unwrap() };\n\n Ok(Some(string))\n\n }\n\n\n\n pub fn is_info_supported(&self, kind: CameraInfo) -> RsResult<bool> {\n\n let val = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let val = realsense_sys::rs2_supports_device_info(\n\n self.ptr.as_ptr(),\n\n kind as realsense_sys::rs2_camera_info,\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n val\n\n };\n", "file_path": "src/device.rs", "rank": 89, "score": 15.6872374137107 }, { "content": "\n\n pub fn color_frame(&self) -> RsResult<Option<Frame<marker::Video>>> {\n\n self.first_of::<marker::Video>(StreamKind::Color)\n\n }\n\n\n\n pub fn depth_frame(&self) -> RsResult<Option<Frame<marker::Depth>>> {\n\n self.first_of::<marker::Depth>(StreamKind::Depth)\n\n }\n\n\n\n pub fn pose_frame(&self) -> RsResult<Option<Frame<marker::Pose>>> {\n\n self.first_of::<marker::Pose>(StreamKind::Pose)\n\n }\n\n}\n\n\n\nimpl Frame<marker::Pose> {\n\n /// Gets the pose data.\n\n pub fn pose(&self) -> RsResult<PoseData> {\n\n let pose_data = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let mut pose_data = MaybeUninit::uninit();\n", "file_path": "src/frame.rs", "rank": 90, "score": 15.633358367808315 }, { "content": " // 
value,\n\n // checker.inner_mut_ptr(),\n\n // );\n\n // checker.check()?;\n\n // }\n\n // Ok(())\n\n // }\n\n\n\n /// List stream profiles on sensor.\n\n pub fn stream_profiles(&self) -> RsResult<StreamProfileList> {\n\n let list = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr =\n\n realsense_sys::rs2_get_stream_profiles(self.ptr.as_ptr(), checker.inner_mut_ptr());\n\n checker.check()?;\n\n StreamProfileList::from_ptr(NonNull::new(ptr).unwrap())\n\n };\n\n Ok(list)\n\n }\n\n\n", "file_path": "src/sensor.rs", "rank": 91, "score": 15.543155960873543 }, { "content": " let mut checker = ErrorChecker::new();\n\n let val = realsense_sys::rs2_is_stream_profile_default(\n\n self.ptr.as_ptr(),\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n Ok(val != 0)\n\n }\n\n }\n\n\n\n /// Gets the attributes of stream.\n\n pub fn get_data(&self) -> RsResult<StreamProfileData> {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let mut stream = MaybeUninit::uninit();\n\n let mut format = MaybeUninit::uninit();\n\n let mut index = MaybeUninit::uninit();\n\n let mut unique_id = MaybeUninit::uninit();\n\n let mut framerate = MaybeUninit::uninit();\n\n\n", "file_path": "src/stream_profile.rs", "rank": 92, "score": 15.51983939515084 }, { "content": " Self::new_from_ptr(NonNull::new(ptr).unwrap())?\n\n };\n\n Ok(processing_block)\n\n }\n\n}\n\n\n\nimpl ProcessingBlock<marker::UnitsTransform> {\n\n pub fn create() -> RsResult<Self> {\n\n let processing_block = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_create_units_transform(checker.inner_mut_ptr());\n\n checker.check()?;\n\n Self::new_from_ptr(NonNull::new(ptr).unwrap())?\n\n };\n\n Ok(processing_block)\n\n }\n\n}\n\n\n\nimpl ProcessingBlock<marker::Syncer> {\n\n pub fn create() -> RsResult<Self> {\n", "file_path": "src/processing_block.rs", "rank": 93, "score": 15.494699096404094 }, { "content": " );\n\n }\n\n }\n\n\n\n /// Pops a frame from queue.\n\n 
///\n\n /// The method blocks until a frame is available.\n\n pub fn wait(&mut self, timeout: Option<Duration>) -> RsResult<Frame<Any>> {\n\n let timeout_ms = timeout.unwrap_or(DEFAULT_TIMEOUT).as_millis() as c_uint;\n\n\n\n let frame = loop {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = unsafe {\n\n realsense_sys::rs2_wait_for_frame(\n\n self.ptr.as_ptr(),\n\n timeout_ms,\n\n checker.inner_mut_ptr(),\n\n )\n\n };\n\n\n", "file_path": "src/frame_queue.rs", "rank": 94, "score": 15.483832454731767 }, { "content": " Ok(ExtendedStreamProfile::Other(profile_any))\n\n }\n\n}\n\n\n\nimpl StreamProfile<marker::Video> {\n\n /// Gets the resolution of stream.\n\n pub fn resolution(&self) -> RsResult<Resolution> {\n\n let mut width = MaybeUninit::uninit();\n\n let mut height = MaybeUninit::uninit();\n\n let resolution = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n realsense_sys::rs2_get_video_stream_resolution(\n\n self.ptr.as_ptr(),\n\n width.as_mut_ptr(),\n\n height.as_mut_ptr(),\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n\n\n Resolution {\n", "file_path": "src/stream_profile.rs", "rank": 95, "score": 15.468320983320346 }, { "content": " }\n\n\n\n /// Gets the timestamp.\n\n fn timestamp(&self) -> RsResult<f64> {\n\n unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let val = realsense_sys::rs2_get_frame_timestamp(\n\n self.ptr().as_ptr(),\n\n checker.inner_mut_ptr(),\n\n );\n\n checker.check()?;\n\n Ok(val as f64)\n\n }\n\n }\n\n\n\n /// Gets the domain of timestamp.\n\n fn timestamp_domain(&self) -> RsResult<TimestampDomain> {\n\n let val = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let val = realsense_sys::rs2_get_frame_timestamp_domain(\n", "file_path": "src/frame.rs", "rank": 97, "score": 15.404326326571628 }, { "content": " );\n\n checker.check()?;\n\n Self::new_from_ptr(NonNull::new(ptr).unwrap())?\n\n };\n\n Ok(processing_block)\n\n }\n\n}\n\n\n\nimpl ProcessingBlock<marker::PointCloud> {\n\n pub fn 
create() -> RsResult<Self> {\n\n let processing_block = unsafe {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = realsense_sys::rs2_create_pointcloud(checker.inner_mut_ptr());\n\n checker.check()?;\n\n Self::new_from_ptr(NonNull::new(ptr).unwrap())?\n\n };\n\n Ok(processing_block)\n\n }\n\n\n\n pub fn calculate(\n", "file_path": "src/processing_block.rs", "rank": 98, "score": 15.348979704549732 }, { "content": "{\n\n ptr: NonNull<realsense_sys::rs2_pipeline>,\n\n context: Context,\n\n state: State,\n\n}\n\n\n\nimpl Pipeline<marker::Inactive> {\n\n /// Creates an instance.\n\n pub fn new() -> RsResult<Self> {\n\n let context = Context::new()?;\n\n let pipeline = Self::from_context(context)?;\n\n Ok(pipeline)\n\n }\n\n\n\n /// Consumes a context and creates an instance.\n\n pub fn from_context(context: Context) -> RsResult<Self> {\n\n let ptr = {\n\n let mut checker = ErrorChecker::new();\n\n let ptr = unsafe {\n\n realsense_sys::rs2_create_pipeline(context.ptr.as_ptr(), checker.inner_mut_ptr())\n", "file_path": "src/pipeline.rs", "rank": 99, "score": 15.29573660388652 } ]
Rust
src/main.rs
mbaumfalk/scheme
a00669a59d127d24b7bab87c5abe369f7a2a8ddf
extern crate nom; use nom::{ branch::alt, bytes::streaming::{tag, take_until, take_while1}, character::streaming::{ anychar, char, digit1, hex_digit1, line_ending, multispace0, none_of, oct_digit1, }, combinator::opt, error::{Error, ErrorKind::Char}, multi::{many0, many1}, sequence::{delimited, preceded, terminated}, Err::{self, Incomplete}, IResult, }; use std::{ fmt, io::{self, BufRead, Write}, ops::Neg, }; #[derive(Debug)] enum LispData { Nil, Bool(bool), Num(i64), Symbol(String), LispString(String), Vector(Vec<LispData>), Cons(Box<LispData>, Box<LispData>), } use LispData::*; fn write_cdr(data: &LispData, f: &mut fmt::Formatter<'_>) -> fmt::Result { match data { Nil => Ok(()), Cons(a, b) => { write!(f, " {}", a)?; write_cdr(b, f) } _ => write!(f, " . {}", data), } } impl fmt::Display for LispData { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Nil => write!(f, "()"), Bool(b) => write!(f, "#{}", if *b { 't' } else { 'f' }), Num(n) => n.fmt(f), Symbol(s) => s.fmt(f), LispString(s) => write!(f, "{:?}", s), Vector(v) => { write!(f, "#(")?; if let Some(val) = v.get(0) { val.fmt(f)?; } for val in v.iter().skip(1) { write!(f, " {}", val)?; } write!(f, ")") } Cons(a, b) => { write!(f, "({}", a)?; write_cdr(&b, f)?; write!(f, ")") } } } } fn lisptoken(input: &str) -> IResult<&str, char> { none_of("'()# \"\r\n")(input) } fn cons(input: &str) -> IResult<&str, LispData> { let (input, _) = char('(')(input)?; let (input, _) = multispace0(input)?; let (input, middle) = many0(terminated(lisp_data, multispace0))(input)?; let (input, dot) = opt(preceded( terminated(char('.'), multispace0), terminated(lisp_data, multispace0), ))(input)?; let (input, _) = char(')')(input)?; Ok(( input, middle .into_iter() .rev() .fold(dot.unwrap_or(Nil), |a, b| Cons(Box::new(b), Box::new(a))), )) } fn quote(input: &str) -> IResult<&str, LispData> { let (input, _) = char('\'')(input)?; let (input, data) = lisp_data(input)?; Ok(( input, Cons( 
Box::new(Symbol("quote".to_string())), Box::new(Cons(Box::new(data), Box::new(Nil))), ), )) } fn symbol(input: &str) -> IResult<&str, LispData> { let (input, a) = many1(lisptoken)(input)?; let b: String = a.into_iter().collect(); match b.as_str() { "." => Err(Err::Error(Error::new("dot", Char))), _ => Ok((input, Symbol(b))), } } fn parse_num<'a>( f: fn(&'a str) -> IResult<&'a str, &'a str>, input: &'a str, radix: u32, ) -> IResult<&'a str, LispData> { let (input, negate) = opt(char('-'))(input)?; let (input, data) = f(input)?; match i64::from_str_radix(data, radix) { Ok(n) => Ok((input, Num(if negate.is_some() { n.neg() } else { n }))), Err(_) => Err(Err::Error(Error::new("parseint", Char))), } } fn num(input: &str) -> IResult<&str, LispData> { parse_num(digit1, input, 10) } fn bin_digit1(input: &str) -> IResult<&str, &str> { take_while1(|a| a == '0' || a == '1')(input) } fn sharp(input: &str) -> IResult<&str, LispData> { let (input, _) = char('#')(input)?; let (input, c) = anychar(input)?; match c.to_lowercase().next().unwrap() { 'f' => Ok((input, Bool(false))), 't' => Ok((input, Bool(true))), 'b' => parse_num(bin_digit1, input, 2), 'o' => parse_num(oct_digit1, input, 8), 'd' => num(input), 'x' => parse_num(hex_digit1, input, 16), '(' => { let (input, _) = multispace0(input)?; let (input, vals) = many0(terminated(lisp_data, multispace0))(input)?; let (input, _) = char(')')(input)?; Ok((input, Vector(vals))) } _ => Err(Err::Error(Error::new("#", Char))), } } fn block_comment(input: &str) -> IResult<&str, ()> { let (input, _) = delimited(tag("#|"), take_until("|#"), tag("|#"))(input)?; Ok((input, ())) } fn line_comment(input: &str) -> IResult<&str, ()> { let (input, _) = delimited(char(';'), take_until("\n"), line_ending)(input)?; Ok((input, ())) } fn datum_comment(input: &str) -> IResult<&str, ()> { let (input, _) = preceded(tag("#;"), lisp_data)(input)?; Ok((input, ())) } fn comment(input: &str) -> IResult<&str, LispData> { let (input, _) = alt((block_comment, 
line_comment, datum_comment))(input)?; lisp_data(input) } fn escaped_char(input: &str) -> IResult<&str, char> { let (input, _) = char('\\')(input)?; let (input, seq) = anychar(input)?; let c = match seq { 'a' => '\x07', 'b' => '\x08', 'n' => '\n', 'r' => '\r', 't' => '\t', '"' => '\"', '\\' => '\\', '|' => '|', _ => return Err(Err::Error(Error::new("escape", Char))), }; Ok((input, c)) } fn string(input: &str) -> IResult<&str, LispData> { let (input, data) = delimited( char('"'), many0(alt((none_of("\\\""), escaped_char))), char('"'), )(input)?; Ok((input, LispString(data.into_iter().collect()))) } fn lisp_data(input: &str) -> IResult<&str, LispData> { let (input, _) = multispace0(input)?; alt((quote, cons, comment, sharp, num, string, symbol))(input) } fn main() -> io::Result<()> { let stdin = io::stdin(); let mut stdin = stdin.lock(); let mut buffer = String::new(); let mut stdout = io::stdout(); loop { if buffer.is_empty() { print!("> "); stdout.flush()?; } match lisp_data(&buffer) { Ok((rest, val)) => { buffer = rest.to_string(); println!("{}", val); print!("> "); stdout.flush()?; } Err(Incomplete(_)) => { if stdin.read_line(&mut buffer)? == 0 { println!(""); return Ok(()); } } err => { println!("{:?}", err); buffer.clear() } } } }
extern crate nom; use nom::{ branch::alt, bytes::streaming::{tag, take_until, take_while1}, character::streaming::{ anychar, char, digit1, hex_digit1, line_ending, multispace0, none_of, oct_digit1, }, combinator::opt, error::{Error, ErrorKind::Char}, multi::{many0, many1}, sequence::{delimited, preceded, terminated}, Err::{self, Incomplete}, IResult, }; use std::{ fmt, io::{self, BufRead, Write}, ops::Neg, }; #[derive(Debug)] enum LispData { Nil, Bool(bool), Num(i64), Symbol(String), LispString(String), Vector(Vec<LispData>), Cons(Box<LispData>, Box<LispData>), } use LispData::*; fn write_cdr(data: &LispData, f: &mut fmt::Formatter<'_>) -> fmt::Result { match data { Nil => Ok(()), Cons(a, b) => { write!(f, " {}", a)?; write_cdr(b, f) } _ => write!(f, " . {}", data), } } impl fmt::Display for LispData { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Nil => write!(f, "()"), Bool(b) => write!(f, "#{}", if *b { 't' } else { 'f' }), Num(n) => n.fmt(f), Symbol(s) => s.fmt(f), LispString(s) => write!(f, "{:?}", s), Vector(v) => { write!(f, "#(")?; if let Some(val) = v.get(0) { val.fmt(f)?; } for val in v.iter().skip(1) { write!(f, " {}", val)?; } write!(f, ")") } Cons(
} fn lisptoken(input: &str) -> IResult<&str, char> { none_of("'()# \"\r\n")(input) } fn cons(input: &str) -> IResult<&str, LispData> { let (input, _) = char('(')(input)?; let (input, _) = multispace0(input)?; let (input, middle) = many0(terminated(lisp_data, multispace0))(input)?; let (input, dot) = opt(preceded( terminated(char('.'), multispace0), terminated(lisp_data, multispace0), ))(input)?; let (input, _) = char(')')(input)?; Ok(( input, middle .into_iter() .rev() .fold(dot.unwrap_or(Nil), |a, b| Cons(Box::new(b), Box::new(a))), )) } fn quote(input: &str) -> IResult<&str, LispData> { let (input, _) = char('\'')(input)?; let (input, data) = lisp_data(input)?; Ok(( input, Cons( Box::new(Symbol("quote".to_string())), Box::new(Cons(Box::new(data), Box::new(Nil))), ), )) } fn symbol(input: &str) -> IResult<&str, LispData> { let (input, a) = many1(lisptoken)(input)?; let b: String = a.into_iter().collect(); match b.as_str() { "." => Err(Err::Error(Error::new("dot", Char))), _ => Ok((input, Symbol(b))), } } fn parse_num<'a>( f: fn(&'a str) -> IResult<&'a str, &'a str>, input: &'a str, radix: u32, ) -> IResult<&'a str, LispData> { let (input, negate) = opt(char('-'))(input)?; let (input, data) = f(input)?; match i64::from_str_radix(data, radix) { Ok(n) => Ok((input, Num(if negate.is_some() { n.neg() } else { n }))), Err(_) => Err(Err::Error(Error::new("parseint", Char))), } } fn num(input: &str) -> IResult<&str, LispData> { parse_num(digit1, input, 10) } fn bin_digit1(input: &str) -> IResult<&str, &str> { take_while1(|a| a == '0' || a == '1')(input) } fn sharp(input: &str) -> IResult<&str, LispData> { let (input, _) = char('#')(input)?; let (input, c) = anychar(input)?; match c.to_lowercase().next().unwrap() { 'f' => Ok((input, Bool(false))), 't' => Ok((input, Bool(true))), 'b' => parse_num(bin_digit1, input, 2), 'o' => parse_num(oct_digit1, input, 8), 'd' => num(input), 'x' => parse_num(hex_digit1, input, 16), '(' => { let (input, _) = multispace0(input)?; let 
(input, vals) = many0(terminated(lisp_data, multispace0))(input)?; let (input, _) = char(')')(input)?; Ok((input, Vector(vals))) } _ => Err(Err::Error(Error::new("#", Char))), } } fn block_comment(input: &str) -> IResult<&str, ()> { let (input, _) = delimited(tag("#|"), take_until("|#"), tag("|#"))(input)?; Ok((input, ())) } fn line_comment(input: &str) -> IResult<&str, ()> { let (input, _) = delimited(char(';'), take_until("\n"), line_ending)(input)?; Ok((input, ())) } fn datum_comment(input: &str) -> IResult<&str, ()> { let (input, _) = preceded(tag("#;"), lisp_data)(input)?; Ok((input, ())) } fn comment(input: &str) -> IResult<&str, LispData> { let (input, _) = alt((block_comment, line_comment, datum_comment))(input)?; lisp_data(input) } fn escaped_char(input: &str) -> IResult<&str, char> { let (input, _) = char('\\')(input)?; let (input, seq) = anychar(input)?; let c = match seq { 'a' => '\x07', 'b' => '\x08', 'n' => '\n', 'r' => '\r', 't' => '\t', '"' => '\"', '\\' => '\\', '|' => '|', _ => return Err(Err::Error(Error::new("escape", Char))), }; Ok((input, c)) } fn string(input: &str) -> IResult<&str, LispData> { let (input, data) = delimited( char('"'), many0(alt((none_of("\\\""), escaped_char))), char('"'), )(input)?; Ok((input, LispString(data.into_iter().collect()))) } fn lisp_data(input: &str) -> IResult<&str, LispData> { let (input, _) = multispace0(input)?; alt((quote, cons, comment, sharp, num, string, symbol))(input) } fn main() -> io::Result<()> { let stdin = io::stdin(); let mut stdin = stdin.lock(); let mut buffer = String::new(); let mut stdout = io::stdout(); loop { if buffer.is_empty() { print!("> "); stdout.flush()?; } match lisp_data(&buffer) { Ok((rest, val)) => { buffer = rest.to_string(); println!("{}", val); print!("> "); stdout.flush()?; } Err(Incomplete(_)) => { if stdin.read_line(&mut buffer)? == 0 { println!(""); return Ok(()); } } err => { println!("{:?}", err); buffer.clear() } } } }
a, b) => { write!(f, "({}", a)?; write_cdr(&b, f)?; write!(f, ")") } } }
function_block-function_prefixed
[]
Rust
day-12/src/main.rs
kecors/AoC-2018
dd213e9087639ace4788c94daa4a85fedc635162
use pom::parser::*; use pom::Error; use std::collections::HashMap; use std::collections::VecDeque; use std::io::{stdin, Read}; #[derive(Debug)] struct Pot { number: i32, plant: bool, } #[derive(Debug)] struct Note { neighbors: Vec<bool>, next_generation: bool, } #[derive(Debug)] struct Engine { pots: VecDeque<Pot>, note_hm: HashMap<Vec<bool>, bool>, } impl Engine { fn new(initial_state: Vec<bool>, notes: Vec<Note>) -> Engine { let mut pots = VecDeque::new(); for (n, plant) in initial_state.into_iter().enumerate() { pots.push_back(Pot { number: n as i32, plant, }); } let mut note_hm = HashMap::new(); for note in notes { note_hm.insert(note.neighbors, note.next_generation); } Engine { pots, note_hm } } fn next_generation(&mut self) { let front_number = if let Some(pot) = self.pots.front() { pot.number } else { unreachable!("Impossible if any plants remain"); }; let back_number = if let Some(pot) = self.pots.back() { pot.number } else { unreachable!("Impossible if any plants remain"); }; for x in 1..=4 { self.pots.push_front(Pot { number: front_number - x, plant: false, }); self.pots.push_back(Pot { number: back_number + x, plant: false, }); } let mut new_pots = VecDeque::new(); for j in 0..self.pots.len() - 4 { let mut neighbor_key = Vec::new(); for k in 0..5 { neighbor_key.push(self.pots[j + k].plant); } let plant = if let Some(plant) = self.note_hm.get(&neighbor_key) { *plant } else { false }; new_pots.push_back(Pot { number: self.pots[j + 2].number, plant, }); } let pot_to_restore = loop { if let Some(pot) = new_pots.pop_front() { if pot.plant { break pot; } } }; new_pots.push_front(pot_to_restore); let pot_to_restore = loop { if let Some(pot) = new_pots.pop_back() { if pot.plant { break pot; } } }; new_pots.push_back(pot_to_restore); self.pots = new_pots; } fn sum(&self) -> i32 { let mut sum = 0; for pot in self.pots.iter() { if pot.plant { sum += pot.number; } } sum } fn range(&self) -> (i32, i32) { let front_number = if let Some(pot) = self.pots.front() { 
pot.number } else { unreachable!("Impossible if any plants remain"); }; let back_number = if let Some(pot) = self.pots.back() { pot.number } else { unreachable!("Impossible if any plants remain"); }; (front_number, back_number) } fn pattern(&self) -> String { let mut pattern = String::new(); for pot in self.pots.iter() { pattern.push(if pot.plant { '#' } else { '.' }); } pattern } #[allow(dead_code)] fn display(&self) { for pot in self.pots.iter() { print!("{} ", pot.number); } println!(); for pot in self.pots.iter() { print!("{}", if pot.plant { "#" } else { "." }); } println!(); } } fn space<'a>() -> Parser<'a, u8, ()> { one_of(b" \t\r\n").repeat(0..).discard() } fn plant<'a>() -> Parser<'a, u8, bool> { sym(b'#').map(|_| true) | sym(b'.').map(|_| false) } fn initial_state<'a>() -> Parser<'a, u8, Vec<bool>> { let prefix = seq(b"initial state: ").discard(); let plants = plant().repeat(1..); prefix * plants } fn note<'a>() -> Parser<'a, u8, Note> { (plant().repeat(5) + skip(4) * plant()).map(|(neighbors, next_generation)| Note { neighbors, next_generation, }) } fn engine<'a>() -> Parser<'a, u8, Engine> { let notes = (space() * note()).repeat(1..); (initial_state() + notes).map(|(initial_state, notes)| Engine::new(initial_state, notes)) } fn main() -> Result<(), Error> { let mut input = String::new(); stdin().read_to_string(&mut input).unwrap(); let mut engine_p1 = engine().parse(input.as_bytes())?; for _ in 0..20 { engine_p1.next_generation(); } let part1 = engine_p1.sum(); println!( "Part 1: After 20 generations, the sum of the numbers of all pots which contain a plant is {}", part1 ); let mut engine_p2 = engine().parse(input.as_bytes())?; let mut patterns = HashMap::new(); for x in 0..200 { if x > 157 { let p = patterns.entry(engine_p2.pattern()).or_insert_with(Vec::new); p.push((x, engine_p2.range(), engine_p2.sum())); } engine_p2.next_generation(); println!("[{}] sum = {}", x, engine_p2.sum()); engine_p2.display(); println!(); } println!("patterns = {:#?}", 
patterns); let part2: u64 = (50_000_000_000 - 158) * 86 + 16002; println!( "Part 2: After fifty billion generations, the sum of the numbers of all pots which contain a plant is {}", part2 ); Ok(()) }
use pom::parser::*; use pom::Error; use std::collections::HashMap; use std::collections::VecDeque; use std::io::{stdin, Read}; #[derive(Debug)] struct Pot { number: i32, plant: bool, } #[derive(Debug)] struct Note { neighbors: Vec<bool>, next_generation: bool, } #[derive(Debug)] struct Engine { pots: VecDeque<Pot>, note_hm: HashMap<Vec<bool>, bool>, } impl Engine { fn new(initial_state: Vec<bool>, notes: Vec<Note>) -> Engine { let mut pots = VecDeque::new(); for (n, plant) in initial_state.into_iter().enumerate() { pots.push_back(Pot { number: n as i32, plant, }); } let mut note_hm = HashMap::new(); for note in notes { note_hm.insert(note.neighbors, note.next_generation); } Engine { pots, note_hm } } fn next_generation(&mut self) { let front_number = if let Some(pot) = self.pots.front() { pot.number } else { unreachable!("Impossible if any plants remain"); }; let back_number = if let Some(pot) = self.pots.back() { pot.number } else { unreachable!("Impossible if any plants remain"); }; for x in 1..=4 { self.pots.push_front(Pot {
engine_p2.next_generation(); println!("[{}] sum = {}", x, engine_p2.sum()); engine_p2.display(); println!(); } println!("patterns = {:#?}", patterns); let part2: u64 = (50_000_000_000 - 158) * 86 + 16002; println!( "Part 2: After fifty billion generations, the sum of the numbers of all pots which contain a plant is {}", part2 ); Ok(()) }
number: front_number - x, plant: false, }); self.pots.push_back(Pot { number: back_number + x, plant: false, }); } let mut new_pots = VecDeque::new(); for j in 0..self.pots.len() - 4 { let mut neighbor_key = Vec::new(); for k in 0..5 { neighbor_key.push(self.pots[j + k].plant); } let plant = if let Some(plant) = self.note_hm.get(&neighbor_key) { *plant } else { false }; new_pots.push_back(Pot { number: self.pots[j + 2].number, plant, }); } let pot_to_restore = loop { if let Some(pot) = new_pots.pop_front() { if pot.plant { break pot; } } }; new_pots.push_front(pot_to_restore); let pot_to_restore = loop { if let Some(pot) = new_pots.pop_back() { if pot.plant { break pot; } } }; new_pots.push_back(pot_to_restore); self.pots = new_pots; } fn sum(&self) -> i32 { let mut sum = 0; for pot in self.pots.iter() { if pot.plant { sum += pot.number; } } sum } fn range(&self) -> (i32, i32) { let front_number = if let Some(pot) = self.pots.front() { pot.number } else { unreachable!("Impossible if any plants remain"); }; let back_number = if let Some(pot) = self.pots.back() { pot.number } else { unreachable!("Impossible if any plants remain"); }; (front_number, back_number) } fn pattern(&self) -> String { let mut pattern = String::new(); for pot in self.pots.iter() { pattern.push(if pot.plant { '#' } else { '.' }); } pattern } #[allow(dead_code)] fn display(&self) { for pot in self.pots.iter() { print!("{} ", pot.number); } println!(); for pot in self.pots.iter() { print!("{}", if pot.plant { "#" } else { "." 
}); } println!(); } } fn space<'a>() -> Parser<'a, u8, ()> { one_of(b" \t\r\n").repeat(0..).discard() } fn plant<'a>() -> Parser<'a, u8, bool> { sym(b'#').map(|_| true) | sym(b'.').map(|_| false) } fn initial_state<'a>() -> Parser<'a, u8, Vec<bool>> { let prefix = seq(b"initial state: ").discard(); let plants = plant().repeat(1..); prefix * plants } fn note<'a>() -> Parser<'a, u8, Note> { (plant().repeat(5) + skip(4) * plant()).map(|(neighbors, next_generation)| Note { neighbors, next_generation, }) } fn engine<'a>() -> Parser<'a, u8, Engine> { let notes = (space() * note()).repeat(1..); (initial_state() + notes).map(|(initial_state, notes)| Engine::new(initial_state, notes)) } fn main() -> Result<(), Error> { let mut input = String::new(); stdin().read_to_string(&mut input).unwrap(); let mut engine_p1 = engine().parse(input.as_bytes())?; for _ in 0..20 { engine_p1.next_generation(); } let part1 = engine_p1.sum(); println!( "Part 1: After 20 generations, the sum of the numbers of all pots which contain a plant is {}", part1 ); let mut engine_p2 = engine().parse(input.as_bytes())?; let mut patterns = HashMap::new(); for x in 0..200 { if x > 157 { let p = patterns.entry(engine_p2.pattern()).or_insert_with(Vec::new); p.push((x, engine_p2.range(), engine_p2.sum())); }
random
[ { "content": "fn number<'a>() -> Parser<'a, u8, i32> {\n\n let integer = (one_of(b\"123456789\") - one_of(b\"0123456789\").repeat(0..)) | sym(b'0');\n\n let number = sym(b'-').opt() + integer;\n\n number\n\n .collect()\n\n .convert(str::from_utf8)\n\n .convert(|s| i32::from_str_radix(s, 10))\n\n}\n\n\n", "file_path": "day-23/src/main.rs", "rank": 1, "score": 151395.08536500327 }, { "content": "fn number<'a>() -> Parser<'a, u8, i32> {\n\n let integer = (one_of(b\"123456789\") - one_of(b\"0123456789\").repeat(0..)) | sym(b'0');\n\n let number = sym(b'-').opt() + integer;\n\n number\n\n .collect()\n\n .convert(str::from_utf8)\n\n .convert(|s| i32::from_str_radix(s, 10))\n\n}\n\n\n", "file_path": "day-25/src/main.rs", "rank": 2, "score": 151395.08536500327 }, { "content": "fn solve(serial_number: i32, min_dial: usize, max_dial: usize) -> ((usize, usize, usize)) {\n\n let mut grid = Vec::new();\n\n\n\n for y0 in 0..300 {\n\n let mut row = Vec::new();\n\n for x0 in 0..300 {\n\n let rack_id = (x0 as i32 + 1) + 10;\n\n let mut power_level = rack_id * (y0 as i32 + 1);\n\n power_level += serial_number;\n\n power_level *= rack_id;\n\n power_level = (power_level / 100) % 10;\n\n power_level -= 5;\n\n row.push(power_level);\n\n }\n\n grid.push(row);\n\n }\n\n\n\n let mut memoized_total_power = vec![vec![None; 300]; 300];\n\n let mut largest_total_power = i32::min_value();\n\n let mut solution = None;\n", "file_path": "day-11/src/main.rs", "rank": 6, "score": 101764.35227276338 }, { "content": "fn number<'a>() -> Parser<'a, u8, usize> {\n\n let number = (one_of(b\"123456789\") - one_of(b\"0123456789\").repeat(0..)) | sym(b'0');\n\n number\n\n .collect()\n\n .convert(str::from_utf8)\n\n .convert(|s| usize::from_str_radix(s, 10))\n\n}\n\n\n", "file_path": "day-19/src/main.rs", "rank": 7, "score": 99358.6324742207 }, { "content": "fn number<'a>() -> Parser<'a, u8, usize> {\n\n let number = (one_of(b\"123456789\") - one_of(b\"0123456789\").repeat(0..)) | sym(b'0');\n\n number\n\n 
.collect()\n\n .convert(str::from_utf8)\n\n .convert(|s| usize::from_str_radix(s, 10))\n\n}\n\n\n", "file_path": "day-17/src/main.rs", "rank": 8, "score": 99358.6324742207 }, { "content": "fn number<'a>() -> Parser<'a, u8, usize> {\n\n let number = (one_of(b\"123456789\") - one_of(b\"0123456789\").repeat(0..)) | sym(b'0');\n\n number\n\n .collect()\n\n .convert(str::from_utf8)\n\n .convert(|s| usize::from_str_radix(s, 10))\n\n}\n\n\n", "file_path": "day-22/src/main.rs", "rank": 9, "score": 99358.6324742207 }, { "content": "fn number<'a>() -> Parser<'a, u8, u32> {\n\n let number = (one_of(b\"123456789\") - one_of(b\"0123456789\").repeat(0..)) | sym(b'0');\n\n number\n\n .collect()\n\n .convert(str::from_utf8)\n\n .convert(|s| u32::from_str_radix(s, 10))\n\n}\n\n\n", "file_path": "day-24/src/main.rs", "rank": 10, "score": 99358.6324742207 }, { "content": "fn number<'a>() -> Parser<'a, u8, u32> {\n\n let number = (one_of(b\"123456789\") - one_of(b\"0123456789\").repeat(0..2)) | sym(b'0');\n\n number\n\n .collect()\n\n .convert(str::from_utf8)\n\n .convert(|s| u32::from_str_radix(s, 10))\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 11, "score": 99358.6324742207 }, { "content": "#[derive(Debug)]\n\nstruct Engine {\n\n map: Vec<Vec<Terrain>>,\n\n unit_locations: HashMap<Location, Unit>,\n\n round_number: u32,\n\n targets_remain: bool,\n\n}\n\n\n\nimpl Engine {\n\n fn new(input: &str) -> Engine {\n\n let mut map = Vec::new();\n\n let mut unit_locations = HashMap::new();\n\n\n\n for (y, line) in input.lines().enumerate() {\n\n let mut row = Vec::new();\n\n for (x, ch) in line.chars().enumerate() {\n\n match ch {\n\n '#' => {\n\n row.push(Terrain::Wall);\n\n }\n\n '.' 
=> {\n", "file_path": "day-15/src/main.rs", "rank": 13, "score": 99265.48346207541 }, { "content": "#[derive(Debug)]\n\nstruct Engine {\n\n board: Vec<u32>,\n\n elf_a: usize,\n\n elf_b: usize,\n\n}\n\n\n\nimpl Engine {\n\n fn new() -> Engine {\n\n Engine {\n\n board: vec![3, 7],\n\n elf_a: 0,\n\n elf_b: 1,\n\n }\n\n }\n\n\n\n fn generate_new_recipes(&mut self) -> Vec<u32> {\n\n let mut new_recipes = Vec::new();\n\n\n\n let sum = self.board[self.elf_a] + self.board[self.elf_b];\n\n if sum >= 10 {\n", "file_path": "day-14/src/main.rs", "rank": 14, "score": 99265.48346207541 }, { "content": "#[derive(Debug)]\n\nstruct Engine {\n\n ip_register: usize,\n\n ip: usize,\n\n registers: Vec<usize>,\n\n instructions: Vec<Instruction>,\n\n}\n\n\n\nimpl Engine {\n\n fn new(ip_register: usize, instructions: Vec<Instruction>) -> Engine {\n\n let ip = 0;\n\n let registers = vec![0; 6];\n\n\n\n Engine {\n\n ip_register,\n\n ip,\n\n registers,\n\n instructions,\n\n }\n\n }\n\n\n", "file_path": "day-19/src/main.rs", "rank": 15, "score": 99265.48346207541 }, { "content": "#[derive(Debug)]\n\nstruct Engine {\n\n tokens: Vec<Token>,\n\n sides: HashMap<Room, Vec<Room>>,\n\n distances: HashMap<Room, u32>,\n\n}\n\n\n\nimpl Engine {\n\n fn determine_sides(&mut self) {\n\n use Token::*;\n\n\n\n let mut branches: Vec<Room> = Vec::new();\n\n let mut room = Room { x: 0, y: 0 };\n\n\n\n for token in self.tokens.iter() {\n\n match token {\n\n North => {\n\n let sides = self.sides.entry(room).or_insert_with(Vec::new);\n\n room.y += 1;\n\n sides.push(room);\n\n }\n", "file_path": "day-20/src/main.rs", "rank": 16, "score": 99265.48346207541 }, { "content": "#[derive(Debug)]\n\nstruct Engine {\n\n nanobots: Vec<Nanobot>,\n\n}\n\n\n\nimpl Engine {\n\n fn in_range_of_strongest(&self) -> usize {\n\n let strongest = self\n\n .nanobots\n\n .iter()\n\n .max_by(|a, b| a.r.cmp(&b.r))\n\n .expect(\"One or more nanobots required\");\n\n\n\n let mut in_range_count = 0;\n\n\n\n for nanobot in 
self.nanobots.iter() {\n\n if nanobot.in_range_of_nanobot(&strongest) {\n\n in_range_count += 1;\n\n }\n\n }\n\n\n", "file_path": "day-23/src/main.rs", "rank": 17, "score": 99265.48346207541 }, { "content": "struct Engine {\n\n numbers: Vec<u32>,\n\n numbers_index: usize,\n\n nodes: Vec<Node>,\n\n next_node_index: usize,\n\n}\n\n\n\nimpl Engine {\n\n fn new(numbers: Vec<u32>) -> Engine {\n\n let numbers_index = 0;\n\n let nodes = Vec::new();\n\n let next_node_index = 0;\n\n Engine {\n\n numbers,\n\n numbers_index,\n\n nodes,\n\n next_node_index,\n\n }\n\n }\n\n\n", "file_path": "day-08/src/main.rs", "rank": 18, "score": 99265.48346207541 }, { "content": "#[derive(Debug)]\n\nstruct Engine {\n\n samples: Vec<Sample>,\n\n sample_valid_operators: HashMap<usize, HashSet<Operator>>,\n\n opcodes: Vec<Option<Operator>>,\n\n program: Program,\n\n}\n\n\n\nimpl Engine {\n\n fn generate_sample_valid_operators(&mut self) {\n\n use self::Operator::*;\n\n\n\n let operators = [\n\n Addr, Addi, Mulr, Muli, Banr, Bani, Borr, Bori, Setr, Seti, Gtir, Gtri, Gtrr, Eqir,\n\n Eqri, Eqrr,\n\n ];\n\n\n\n for (index, sample) in self.samples.iter().enumerate() {\n\n for operator in operators.iter() {\n\n if operate(*operator, &sample.before, &sample.instruction) == sample.after {\n\n let svo = self\n", "file_path": "day-16/src/main.rs", "rank": 19, "score": 99265.48346207541 }, { "content": "#[derive(Debug)]\n\nstruct Engine {\n\n area: Vec<Vec<Terrain>>,\n\n}\n\n\n\nimpl Engine {\n\n fn new(input: &str) -> Engine {\n\n let mut area: Vec<Vec<Terrain>> = Vec::new();\n\n for line in input.lines() {\n\n let mut row = Vec::new();\n\n for ch in line.chars() {\n\n row.push(match ch {\n\n '.' 
=> Terrain::Open,\n\n '|' => Terrain::Tree,\n\n '#' => Terrain::Yard,\n\n _ => {\n\n panic!(\"Unexpected input\");\n\n }\n\n });\n\n }\n\n area.push(row);\n", "file_path": "day-18/src/main.rs", "rank": 20, "score": 99265.48346207541 }, { "content": "#[derive(Debug)]\n\nstruct Engine {\n\n bounds: Bounds,\n\n squares: Vec<Vec<Material>>,\n\n}\n\n\n\nimpl Engine {\n\n fn new(veins: &[Vein]) -> Engine {\n\n let bounds = veins.iter().fold(Bounds::new(), |mut bounds, vein| {\n\n vein.bound(&mut bounds);\n\n bounds\n\n });\n\n\n\n let width = bounds.max_x - bounds.min_x + 1 + 2;\n\n let height = bounds.max_y + 1;\n\n let mut squares = vec![vec![Material::Sand; width]; height];\n\n\n\n let well_x = 500 - bounds.min_x + 1;\n\n squares[0][well_x] = Material::Well;\n\n\n\n for vein in veins.iter() {\n", "file_path": "day-17/src/main.rs", "rank": 21, "score": 99265.48346207541 }, { "content": "#[derive(Debug)]\n\nstruct Engine {\n\n groups: HashMap<u32, (GroupType, Group)>,\n\n}\n\n\n\nimpl Engine {\n\n fn new(immune_system: Vec<Group>, infection: Vec<Group>) -> Engine {\n\n let mut groups = HashMap::new();\n\n\n\n let mut group_id = 100;\n\n for group in immune_system {\n\n groups.insert(group_id, (GroupType::ImmuneSystem, group));\n\n group_id += 1;\n\n }\n\n\n\n group_id = 200;\n\n for group in infection {\n\n groups.insert(group_id, (GroupType::Infection, group));\n\n group_id += 1;\n\n }\n\n\n", "file_path": "day-24/src/main.rs", "rank": 22, "score": 99265.48346207541 }, { "content": "#[derive(Debug)]\n\nstruct Engine {\n\n points: Vec<Point>,\n\n constellations: Vec<HashSet<Point>>,\n\n}\n\n\n\nimpl Engine {\n\n fn run(&mut self) -> u32 {\n\n while let Some(point) = self.points.pop() {\n\n let mut new_constellations = Vec::new();\n\n let mut new_constellation = HashSet::new();\n\n\n\n while let Some(constellation) = self.constellations.pop() {\n\n let mut point_is_in_constellation = false;\n\n for constellation_point in constellation.iter() {\n\n if (point.w - 
constellation_point.w).abs()\n\n + (point.x - constellation_point.x).abs()\n\n + (point.y - constellation_point.y).abs()\n\n + (point.z - constellation_point.z).abs()\n\n <= 3\n\n {\n", "file_path": "day-25/src/main.rs", "rank": 23, "score": 99265.48346207541 }, { "content": "#[derive(Debug)]\n\nstruct Engine {\n\n grid: Vec<Vec<Terrain>>,\n\n cart_locations: HashMap<Location, Cart>,\n\n}\n\n\n\nimpl Engine {\n\n fn new(input: &str) -> Engine {\n\n let mut grid = Vec::new();\n\n let mut cart_locations = HashMap::new();\n\n\n\n for (y, line) in input.lines().enumerate() {\n\n let mut row = Vec::new();\n\n for (x, ch) in line.chars().enumerate() {\n\n match ch {\n\n ' ' => {\n\n row.push(Terrain::Unpassable);\n\n }\n\n '-' => {\n\n row.push(Terrain::Horizontal);\n\n }\n", "file_path": "day-13/src/main.rs", "rank": 24, "score": 99265.48346207541 }, { "content": "fn operate(instruction: &Instruction, registers: &mut [usize]) {\n\n use self::Operator::*;\n\n\n\n match instruction.operator {\n\n Addr => {\n\n let a = registers[instruction.a];\n\n let b = registers[instruction.b];\n\n registers[instruction.c] = a + b;\n\n }\n\n Addi => {\n\n let a = registers[instruction.a];\n\n let b = instruction.b;\n\n registers[instruction.c] = a + b;\n\n }\n\n Mulr => {\n\n let a = registers[instruction.a];\n\n let b = registers[instruction.b];\n\n registers[instruction.c] = a * b;\n\n }\n\n Muli => {\n", "file_path": "day-19/src/main.rs", "rank": 25, "score": 95664.57508536361 }, { "content": "fn do_react(left: char, right: char) -> bool {\n\n if left == right {\n\n return false;\n\n }\n\n if left.to_ascii_uppercase() == right.to_ascii_uppercase() {\n\n return true;\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "day-05/src/main.rs", "rank": 26, "score": 95625.67056828375 }, { "content": "fn solve(mut events: Vec<Event>) -> (u32, u32) {\n\n events.sort_by(|a, b| {\n\n if a.timestamp.year > b.timestamp.year {\n\n return Ordering::Greater;\n\n }\n\n if a.timestamp.year < 
b.timestamp.year {\n\n return Ordering::Less;\n\n }\n\n if a.timestamp.month > b.timestamp.month {\n\n return Ordering::Greater;\n\n }\n\n if a.timestamp.month < b.timestamp.month {\n\n return Ordering::Less;\n\n }\n\n if a.timestamp.day > b.timestamp.day {\n\n return Ordering::Greater;\n\n }\n\n if a.timestamp.day < b.timestamp.day {\n\n return Ordering::Less;\n\n }\n", "file_path": "day-04/src/main.rs", "rank": 28, "score": 91474.14018018627 }, { "content": "fn engine<'a>() -> Parser<'a, u8, Engine> {\n\n (immune_system() + infection())\n\n .map(|(immune_system, infection)| Engine::new(immune_system, infection))\n\n}\n\n\n", "file_path": "day-24/src/main.rs", "rank": 29, "score": 88914.61844142537 }, { "content": "fn engine<'a>() -> Parser<'a, u8, Engine> {\n\n (sym(b'^') * token().repeat(1..) - sym(b'$')).map(|tokens| Engine {\n\n tokens,\n\n sides: HashMap::new(),\n\n distances: HashMap::new(),\n\n })\n\n}\n\n\n", "file_path": "day-20/src/main.rs", "rank": 31, "score": 88914.61844142537 }, { "content": "fn engine<'a>() -> Parser<'a, u8, Engine> {\n\n (samples() + program()).map(|(samples, program)| Engine {\n\n samples,\n\n sample_valid_operators: HashMap::new(),\n\n opcodes: vec![None; 16],\n\n program,\n\n })\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 32, "score": 88914.61844142537 }, { "content": "fn engine<'a>() -> Parser<'a, u8, Engine> {\n\n (ip_register() + (space() * instruction()).repeat(1..))\n\n .map(|(ip_register, instructions)| Engine::new(ip_register, instructions))\n\n}\n\n\n", "file_path": "day-19/src/main.rs", "rank": 33, "score": 88914.61844142537 }, { "content": "fn engine<'a>() -> Parser<'a, u8, Engine> {\n\n (space() * point()).repeat(1..).map(|points| Engine {\n\n points,\n\n constellations: vec![],\n\n })\n\n}\n\n\n", "file_path": "day-25/src/main.rs", "rank": 34, "score": 88914.61844142537 }, { "content": "fn engine<'a>() -> Parser<'a, u8, Engine> {\n\n (space() * nanobot())\n\n .repeat(1..)\n\n .map(|nanobots| Engine { 
nanobots })\n\n}\n\n\n", "file_path": "day-23/src/main.rs", "rank": 35, "score": 88914.61844142537 }, { "content": "fn engine<'a>() -> Parser<'a, u8, Engine> {\n\n (horizontal() | vertical())\n\n .repeat(1..)\n\n .map(|veins| Engine::new(&veins))\n\n}\n\n\n", "file_path": "day-17/src/main.rs", "rank": 36, "score": 88914.61844142537 }, { "content": "#[derive(Debug, Default, Clone, Eq, PartialEq)]\n\nstruct Contents {\n\n registers: Vec<u32>,\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 37, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Link {\n\n left: Option<usize>,\n\n right: Option<usize>,\n\n}\n\n\n", "file_path": "day-05/src/main.rs", "rank": 38, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Nanobot {\n\n x: i32,\n\n y: i32,\n\n z: i32,\n\n r: i32,\n\n}\n\n\n\nimpl Nanobot {\n\n fn in_range_of_nanobot(&self, nanobot: &Nanobot) -> bool {\n\n let distance =\n\n (nanobot.x - self.x).abs() + (nanobot.y - self.y).abs() + (nanobot.z - self.z).abs();\n\n\n\n distance <= nanobot.r\n\n }\n\n\n\n fn in_range_of_cube(&self, cube: &Cube) -> bool {\n\n let distance_x = if self.x < cube.base_x {\n\n (cube.base_x - self.x).abs()\n\n } else if self.x > (cube.base_x + cube.extent - 1) {\n\n (self.x - (cube.base_x + cube.extent - 1)).abs()\n", "file_path": "day-23/src/main.rs", "rank": 39, "score": 63851.36795064181 }, { "content": "#[derive(Debug, Copy, Clone, Hash, Eq, PartialEq)]\n\nstruct Point {\n\n w: i32,\n\n x: i32,\n\n y: i32,\n\n z: i32,\n\n}\n\n\n", "file_path": "day-25/src/main.rs", "rank": 40, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Instruction {\n\n opcode: usize,\n\n a: usize,\n\n b: usize,\n\n c: usize,\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 41, "score": 63851.36795064181 }, { "content": "#[derive(Debug, Default)]\n\nstruct Timestamp {\n\n year: u32,\n\n month: u8,\n\n day: u8,\n\n hour: u8,\n\n minute: u8,\n\n}\n\n\n", "file_path": "day-04/src/main.rs", 
"rank": 42, "score": 63851.36795064181 }, { "content": "#[derive(Debug, Hash, Eq, PartialEq, Copy, Clone)]\n\nstruct Room {\n\n x: i32,\n\n y: i32,\n\n}\n\n\n", "file_path": "day-20/src/main.rs", "rank": 43, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Requirement {\n\n before: char,\n\n after: char,\n\n}\n\n\n", "file_path": "day-07/src/main.rs", "rank": 44, "score": 63851.36795064181 }, { "content": "#[derive(Debug, Hash, PartialEq, Eq, Copy, Clone)]\n\nstruct Location {\n\n x: u32,\n\n y: u32,\n\n}\n\n\n", "file_path": "day-06/src/main.rs", "rank": 45, "score": 63851.36795064181 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct Unit {\n\n allegiance: Allegiance,\n\n attack_power: u32,\n\n hit_points: u32,\n\n}\n\n\n", "file_path": "day-15/src/main.rs", "rank": 46, "score": 63851.36795064181 }, { "content": "#[derive(Debug, Default)]\n\nstruct Claim {\n\n id: u32,\n\n left_offset: u32,\n\n top_offset: u32,\n\n width: u32,\n\n height: u32,\n\n}\n\n\n", "file_path": "day-03/src/main.rs", "rank": 47, "score": 63851.36795064181 }, { "content": "#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Copy, Clone)]\n\nstruct Coordinate {\n\n x: usize,\n\n y: usize,\n\n}\n\n\n\nimpl Coordinate {\n\n fn adjacents(&self) -> Vec<Coordinate> {\n\n let mut adjacents = Vec::new();\n\n\n\n adjacents.push(Coordinate {\n\n x: self.x + 1,\n\n y: self.y,\n\n });\n\n adjacents.push(Coordinate {\n\n x: self.x,\n\n y: self.y + 1,\n\n });\n\n if self.x > 0 {\n\n adjacents.push(Coordinate {\n\n x: self.x - 1,\n", "file_path": "day-22/src/main.rs", "rank": 48, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Group {\n\n units: u32,\n\n hit_points: u32,\n\n attack_damage: u32,\n\n attack_type: AttackType,\n\n initiative: u32,\n\n weaknesses: Vec<AttackType>,\n\n immunities: Vec<AttackType>,\n\n}\n\n\n\nimpl Group {\n\n fn effective_power(&self) -> u32 {\n\n self.units * self.attack_damage\n\n }\n\n\n\n fn damage_multiplier(&self, 
attack_type: AttackType) -> u32 {\n\n if self.weaknesses.contains(&attack_type) {\n\n 2\n\n } else if self.immunities.contains(&attack_type) {\n\n 0\n\n } else {\n\n 1\n\n }\n\n }\n\n}\n\n\n", "file_path": "day-24/src/main.rs", "rank": 49, "score": 63851.36795064181 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct Bounds {\n\n min_x: usize,\n\n max_x: usize,\n\n min_y: usize,\n\n max_y: usize,\n\n}\n\n\n\nimpl Bounds {\n\n fn new() -> Bounds {\n\n let min_x = usize::max_value();\n\n let max_x = 0;\n\n let min_y = usize::max_value();\n\n let max_y = 0;\n\n\n\n Bounds {\n\n min_x,\n\n max_x,\n\n min_y,\n\n max_y,\n\n }\n\n }\n\n}\n\n\n", "file_path": "day-17/src/main.rs", "rank": 50, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Scanner {\n\n depth: u32,\n\n target: Coordinate,\n\n erosion_levels: Vec<Vec<u32>>,\n\n}\n\n\n\nimpl Scanner {\n\n fn new(depth: u32, target: Coordinate) -> Scanner {\n\n let mut erosion_levels: Vec<Vec<u32>> = Vec::new();\n\n\n\n for y in 0..=(target.y + target.x + (7 * 2)) {\n\n let mut row = Vec::new();\n\n for x in 0..=(target.x + target.y + (7 * 2)) {\n\n let geologic_index = if x == 0 && y == 0 {\n\n 0\n\n } else if x == target.x && y == target.y {\n\n 0\n\n } else if y == 0 {\n\n x as u32 * 16807\n\n } else if x == 0 {\n", "file_path": "day-22/src/main.rs", "rank": 51, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Delay {\n\n seconds: u32,\n\n}\n\n\n\nimpl Into<Delay> for char {\n\n fn into(self) -> Delay {\n\n let seconds = match self {\n\n 'A' => 61,\n\n 'B' => 62,\n\n 'C' => 63,\n\n 'D' => 64,\n\n 'E' => 65,\n\n 'F' => 66,\n\n 'G' => 67,\n\n 'H' => 68,\n\n 'I' => 69,\n\n 'J' => 70,\n\n 'K' => 71,\n\n 'L' => 72,\n\n 'M' => 73,\n", "file_path": "day-07/src/main.rs", "rank": 52, "score": 63851.36795064181 }, { "content": "#[derive(Debug, Eq, PartialEq, Ord, PartialOrd)]\n\nstruct Cube {\n\n extent: i32,\n\n base_x: i32,\n\n base_y: i32,\n\n base_z: i32,\n\n}\n\n\n\nimpl Cube 
{\n\n fn new(extent: i32) -> Cube {\n\n Cube {\n\n extent,\n\n base_x: -(extent / 2),\n\n base_y: -(extent / 2),\n\n base_z: -(extent / 2),\n\n }\n\n }\n\n\n\n fn subdivide(&self) -> Vec<Cube> {\n\n let deltas = [\n\n (0, 0, 0),\n", "file_path": "day-23/src/main.rs", "rank": 53, "score": 63851.36795064181 }, { "content": "#[derive(Debug, Default)]\n\nstruct Shift {\n\n timestamp: Timestamp,\n\n guard_id: u32,\n\n sleeping_minutes: Vec<bool>,\n\n}\n\n\n\nimpl Shift {\n\n fn new(timestamp: Timestamp, guard_id: u32) -> Shift {\n\n let sleeping_minutes = vec![false; 60];\n\n Shift {\n\n timestamp,\n\n guard_id,\n\n sleeping_minutes,\n\n }\n\n }\n\n}\n\n\n", "file_path": "day-04/src/main.rs", "rank": 54, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Instruction {\n\n operator: Operator,\n\n a: usize,\n\n b: usize,\n\n c: usize,\n\n}\n\n\n", "file_path": "day-19/src/main.rs", "rank": 55, "score": 63851.36795064181 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct Location {\n\n x: usize,\n\n y: usize,\n\n}\n\n\n\nimpl Ord for Location {\n\n fn cmp(&self, other: &Location) -> Ordering {\n\n match self.y.cmp(&other.y) {\n\n Ordering::Less => Ordering::Less,\n\n Ordering::Equal => self.x.cmp(&other.x),\n\n Ordering::Greater => Ordering::Greater,\n\n }\n\n }\n\n}\n\n\n\nimpl PartialOrd for Location {\n\n fn partial_cmp(&self, other: &Location) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n", "file_path": "day-15/src/main.rs", "rank": 56, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Point {\n\n x: i32,\n\n y: i32,\n\n dx: i32,\n\n dy: i32,\n\n}\n\n\n", "file_path": "day-10/src/main.rs", "rank": 57, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Sky {\n\n points: Vec<Point>,\n\n min_x: i32,\n\n max_x: i32,\n\n min_y: i32,\n\n max_y: i32,\n\n width: i32,\n\n height: i32,\n\n}\n\n\n\nimpl Sky {\n\n fn new(points: Vec<Point>) -> Sky {\n\n Sky {\n\n points,\n\n min_x: 0,\n\n max_x: 
0,\n\n min_y: 0,\n\n max_y: 0,\n\n width: 0,\n\n height: 0,\n", "file_path": "day-10/src/main.rs", "rank": 58, "score": 63851.36795064181 }, { "content": "#[derive(Debug, Hash, Eq, PartialEq, Copy, Clone)]\n\nstruct Location {\n\n x: usize,\n\n y: usize,\n\n}\n\n\n", "file_path": "day-13/src/main.rs", "rank": 59, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Program {\n\n instructions: Vec<Instruction>,\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 60, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Cart {\n\n facing: Facing,\n\n next_turn: Turn,\n\n}\n\n\n", "file_path": "day-13/src/main.rs", "rank": 61, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Event {\n\n timestamp: Timestamp,\n\n action: Action,\n\n}\n\n\n", "file_path": "day-04/src/main.rs", "rank": 62, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Node {\n\n index: usize,\n\n child_quantity: u32,\n\n metadata_quantity: u32,\n\n children: Vec<usize>,\n\n metadata: Vec<u32>,\n\n}\n\n\n\nimpl Node {\n\n fn new(index: usize, child_quantity: u32, metadata_quantity: u32) -> Node {\n\n let children = Vec::new();\n\n let metadata = Vec::new();\n\n Node {\n\n index,\n\n child_quantity,\n\n metadata_quantity,\n\n children,\n\n metadata,\n\n }\n\n }\n\n}\n\n\n", "file_path": "day-08/src/main.rs", "rank": 63, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Cave {\n\n regions: Vec<Vec<RegionType>>,\n\n}\n\n\n\nimpl Cave {\n\n #[allow(dead_code)]\n\n fn display(&self, target: &Coordinate) {\n\n for y in 0..self.regions.len() {\n\n for x in 0..self.regions[0].len() {\n\n if x == 0 && y == 0 {\n\n print!(\"M\");\n\n continue;\n\n }\n\n if x == target.x && y == target.y {\n\n print!(\"T\");\n\n continue;\n\n }\n\n print!(\n\n \"{}\",\n\n match self.regions[y][x] {\n", "file_path": "day-22/src/main.rs", "rank": 64, "score": 63851.36795064181 }, { "content": "#[derive(Debug)]\n\nstruct Sample 
{\n\n before: Contents,\n\n instruction: Instruction,\n\n after: Contents,\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 65, "score": 63851.36795064181 }, { "content": "#[derive(Parser)]\n\n#[grammar = \"claim.pest\"]\n\nstruct ClaimParser;\n\n\n", "file_path": "day-03/src/main.rs", "rank": 66, "score": 62611.547733650914 }, { "content": "#[derive(Parser)]\n\n#[grammar = \"event.pest\"]\n\nstruct EventParser;\n\n\n", "file_path": "day-04/src/main.rs", "rank": 67, "score": 62611.547733650914 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut requirements = Vec::new();\n\n\n\n let rule = r\"Step (\\D) must be finished before step (\\D) can begin.\";\n\n let re = Regex::new(rule).unwrap();\n\n\n\n for capture in re.captures_iter(input.trim()) {\n\n let before = capture[1].chars().next().unwrap();\n\n let after = capture[2].chars().next().unwrap();\n\n requirements.push(Requirement { before, after });\n\n }\n\n\n\n let (order, _seconds) = solve(1, &requirements);\n\n println!(\n\n \"Part 1: the steps should be completed in this order: {}\",\n\n order\n\n );\n\n\n\n let (_order, seconds) = solve(5, &requirements);\n\n println!(\"Part 2: It will take {} seconds to complete\", seconds);\n\n}\n", "file_path": "day-07/src/main.rs", "rank": 68, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let rule = r\"^position=<\\s*(-?\\d+),\\s*(-?\\d+)> velocity=<\\s*(-?\\d+),\\s*(-?\\d+)>$\";\n\n let re = Regex::new(rule).unwrap();\n\n\n\n let mut points = Vec::new();\n\n for line in input.trim().lines() {\n\n if let Some(captures) = re.captures(line) {\n\n let x: i32 = captures[1].parse().unwrap();\n\n let y: i32 = captures[2].parse().unwrap();\n\n let dx: i32 = captures[3].parse().unwrap();\n\n let dy: i32 = captures[4].parse().unwrap();\n\n points.push(Point { x, y, dx, dy });\n\n }\n\n }\n\n\n\n 
let mut sky = Sky::new(points);\n\n\n\n for j in 1..10500 {\n\n sky.tick();\n\n if sky.min_x >= 0 && sky.min_y >= 0 {\n\n println!(\"j = {}\", j);\n\n sky.display();\n\n }\n\n }\n\n}\n", "file_path": "day-10/src/main.rs", "rank": 69, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let claims: Vec<Claim> = input.lines().map(|x| parse(x)).collect();\n\n\n\n let (part1, part2) = solve(&claims);\n\n println!(\n\n \"Part 1: {} square inches of fabric are within two or more claims\",\n\n part1\n\n );\n\n println!(\"Part 2: the only claim that doesn't overlap is {}\", part2);\n\n}\n", "file_path": "day-03/src/main.rs", "rank": 70, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut engine = Engine::new();\n\n let recipe_count: usize = input.trim().parse().unwrap();\n\n let part1 = engine.solve_part1(recipe_count);\n\n println!(\n\n \"Part 1: after the first {} recipes, the scores of the next ten recipes are '{}'\",\n\n recipe_count, part1\n\n );\n\n\n\n let mut engine = Engine::new();\n\n let score_sequence: Vec<u32> = input\n\n .trim()\n\n .chars()\n\n .map(|x| x.to_digit(10).unwrap())\n\n .collect();\n\n let part2 = engine.solve_part2(&score_sequence);\n\n println!(\n", "file_path": "day-14/src/main.rs", "rank": 71, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let rule = r\"^(\\d+) players; last marble is worth (\\d+) points$\";\n\n let re = Regex::new(rule).unwrap();\n\n\n\n let captures = re.captures(&input.trim()).unwrap();\n\n let players: usize = captures[1].parse().unwrap();\n\n let last_marble: u32 = captures[2].parse().unwrap();\n\n\n\n let part1 = solve(players, last_marble);\n\n println!(\"Part 1: the winning elf's score is {}\", part1);\n\n\n\n let part2 
= solve(players, last_marble * 100);\n\n println!(\"Part 1: the winning elf's score is {}\", part2);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "day-09/src/main.rs", "rank": 72, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let serial_number: i32 = input.trim().parse().unwrap();\n\n\n\n let (x, y, _dial) = solve(serial_number, 3, 3);\n\n println!(\"Part 1: the X,Y coordinate of the top-left fuel cell of the 3x3 square with the largest total power is {},{}\", x, y);\n\n\n\n let (x, y, dial) = solve(serial_number, 1, 300);\n\n println!(\n\n \"Part 2: the X,Y,size identifier of the square with the largest total power is {},{},{}\",\n\n x, y, dial\n\n );\n\n}\n", "file_path": "day-11/src/main.rs", "rank": 73, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let coordinates = input.lines().fold(Vec::new(), |mut acc, line| {\n\n let mut fields = line.trim().split(\", \");\n\n let x = fields.next().unwrap().parse().unwrap();\n\n let y = fields.next().unwrap().parse().unwrap();\n\n acc.push(Location { x, y });\n\n acc\n\n });\n\n\n\n let (part1, part2) = solve(&coordinates);\n\n println!(\n\n \"Part 1: the size of the largest area that isn't infinite is {}\",\n\n part1\n\n );\n\n println!(\"Part 2: the size of the region is {}\", part2);\n\n}\n", "file_path": "day-06/src/main.rs", "rank": 74, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut engine = Engine::new(&input);\n\n let location = loop {\n\n if let Some(location) = engine.tick(true) {\n\n break location;\n\n }\n\n };\n\n println!(\n\n \"Part 1: the location of the first crash is {},{}\",\n\n location.x, location.y\n\n );\n\n\n\n let mut engine = Engine::new(&input);\n\n let location = loop {\n\n 
if let Some(location) = engine.tick(false) {\n\n break location;\n\n }\n\n };\n\n println!(\n\n \"Part 2: the location of the last cart is {},{}\",\n\n location.x, location.y\n\n );\n\n}\n", "file_path": "day-13/src/main.rs", "rank": 75, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let events: Vec<Event> = input.lines().map(|x| parse(x)).collect();\n\n\n\n let (part1, part2) = solve(events);\n\n println!(\n\n \"Part 1: the product of the chosen guard ID and the minute is {}\",\n\n part1\n\n );\n\n println!(\n\n \"Part 2: the product of the chosen guard ID and the minute is {}\",\n\n part2\n\n );\n\n}\n", "file_path": "day-04/src/main.rs", "rank": 76, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let units: Vec<char> = input.trim().chars().collect();\n\n\n\n let remaining_unit_count = react_polymer(&units);\n\n println!(\n\n \"Part 1: {} units remain after fully reacting the polymer\",\n\n remaining_unit_count\n\n );\n\n\n\n let mut unit_types: Vec<char> = units.iter().map(|x| x.to_ascii_lowercase()).collect();\n\n unit_types.sort();\n\n unit_types.dedup();\n\n\n\n let mut min_remaining_unit_count = u32::max_value();\n\n for unit_type in unit_types {\n\n let filtered_units: Vec<char> = units\n\n .clone()\n", "file_path": "day-05/src/main.rs", "rank": 77, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let ids: Vec<&str> = input.lines().collect();\n\n\n\n println!(\"Part 1: the checksum is {}\", solve_part1(&ids));\n\n println!(\"Part 2: the common letters are {}\", solve_part2(&ids));\n\n}\n", "file_path": "day-02/src/main.rs", "rank": 78, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut 
input).unwrap();\n\n\n\n let numbers: Vec<u32> = input\n\n .trim()\n\n .split(' ')\n\n .map(|x| x.parse().unwrap())\n\n .collect();\n\n\n\n let mut engine = Engine::new(numbers);\n\n engine.read_node();\n\n\n\n let part1 = engine.metadata_sum();\n\n println!(\"Part 1: the sum of all metadata entries is {}\", part1);\n\n\n\n let part2 = engine.root_node_value();\n\n println!(\"Part 2: the value of the root node is {}\", part2);\n\n}\n", "file_path": "day-08/src/main.rs", "rank": 79, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n // Part 1\n\n\n\n let mut engine = Engine::new(input.trim());\n\n engine.display();\n\n\n\n while engine.round() {\n\n println!(\"After {} rounds:\", engine.round_number);\n\n engine.display();\n\n }\n\n println!(\"Final:\");\n\n engine.display();\n\n\n\n println!(\n\n \"Part 1: Combat ends after {} full rounds\",\n\n engine.round_number\n\n );\n", "file_path": "day-15/src/main.rs", "rank": 80, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let deltas: Vec<i32> = input\n\n .lines()\n\n .map(|x| x.trim().parse::<i32>().unwrap())\n\n .collect();\n\n\n\n let result: i32 = deltas.iter().sum();\n\n println!(\"Part 1: the resulting frequency is {}\", result);\n\n\n\n let mut frequency = 0;\n\n let mut hs = HashSet::new();\n\n\n\n hs.insert(frequency);\n\n\n\n for delta in deltas.iter().cycle() {\n\n frequency += delta;\n\n if !hs.insert(frequency) {\n\n break;\n\n }\n\n }\n\n\n\n println!(\n\n \"Part 2: the first frequency the device reaches twice is {}\",\n\n frequency\n\n );\n\n}\n", "file_path": "day-01/src/main.rs", "rank": 81, "score": 60005.543595791256 }, { "content": "fn main() {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n // Part 1\n\n let mut engine = Engine::new(&input);\n\n\n\n 
engine.display(0);\n\n for minute in 1..=10 {\n\n engine.tick();\n\n engine.display(minute);\n\n }\n\n let terrains: Vec<Terrain> = engine.area.iter().cloned().flatten().collect();\n\n let (trees, yards) = terrain_counts(&terrains);\n\n println!(\n\n \"Part 1: the total resource value of the area after 10 minutes is {}\",\n\n trees * yards\n\n );\n\n\n\n // Part 2\n", "file_path": "day-18/src/main.rs", "rank": 82, "score": 60005.543595791256 }, { "content": "fn main() -> Result<(), Error> {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut engine = engine().parse(input.as_bytes())?;\n\n engine.flow(500 - engine.bounds.min_x + 1, 1);\n\n engine.display();\n\n\n\n let (flowing, settled) = engine.water_tile_count();\n\n println!(\"Part 1: the water can reach {} tiles\", flowing + settled);\n\n println!(\"Part 2: {} tiles are left\", settled);\n\n\n\n Ok(())\n\n}\n", "file_path": "day-17/src/main.rs", "rank": 83, "score": 53040.997125210386 }, { "content": "fn main() -> Result<(), Error> {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let engine = engine().parse(input.as_bytes())?;\n\n println!(\n\n \"Part 1: {} nanobots are in range of the strongest nanobot\",\n\n engine.in_range_of_strongest()\n\n );\n\n\n\n let cube = engine.run();\n\n let shortest_distance = cube.base_x.abs() + cube.base_y.abs() + cube.base_z.abs();\n\n println!(\n\n \"Part 2: the shortest manhattan distance is {}\",\n\n shortest_distance\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "day-23/src/main.rs", "rank": 84, "score": 53040.997125210386 }, { "content": "fn main() -> Result<(), Error> {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut engine = engine().parse(input.as_bytes())?;\n\n\n\n engine.generate_sample_valid_operators();\n\n\n\n // For part 1, count samples with three or more valid operators\n\n let mut count = 0;\n\n for (_, operators) in 
engine.sample_valid_operators.iter() {\n\n if operators.len() >= 3 {\n\n count += 1;\n\n }\n\n }\n\n println!(\n\n \"Part 1: {} samples behave like three or more opcodes\",\n\n count\n\n );\n\n\n\n engine.generate_opcodes();\n\n let contents = engine.run_program();\n\n println!(\n\n \"Part 2: the value contained in register 0 after running the test program is {}\",\n\n contents.registers[0]\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "day-16/src/main.rs", "rank": 85, "score": 53040.997125210386 }, { "content": "fn main() -> Result<(), Error> {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let scanner = scanner().parse(input.as_bytes())?;\n\n\n\n let cave = scanner.cave();\n\n //cave.display(&scanner.target);\n\n\n\n println!(\"Part 1: the total risk level is {}\", scanner.risk_level());\n\n\n\n println!(\n\n \"Part 2: the fewest minutes needed to reach the target is {}\",\n\n cave.shortest_path(&scanner.target)\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "day-22/src/main.rs", "rank": 86, "score": 53040.997125210386 }, { "content": "fn main() -> Result<(), Error> {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut engine = engine().parse(input.as_bytes())?;\n\n\n\n let constellation_count = engine.run();\n\n println!(\"Part 1: {} constellations are formed\", constellation_count);\n\n\n\n Ok(())\n\n}\n", "file_path": "day-25/src/main.rs", "rank": 87, "score": 53040.997125210386 }, { "content": "fn main() -> Result<(), Error> {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut engine = engine().parse(input.as_bytes())?;\n\n engine.determine_sides();\n\n engine.determine_distances();\n\n\n\n println!(\n\n \"Part 1: the largest number of required doors is {}\",\n\n engine.max_distance()\n\n );\n\n\n\n println!(\n\n \"Part 2: {} rooms require passing through 1000 doors\",\n\n engine.one_thousand_doors()\n\n );\n\n\n\n 
Ok(())\n\n}\n", "file_path": "day-20/src/main.rs", "rank": 89, "score": 53040.997125210386 }, { "content": "fn main() -> Result<(), Error> {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut engine_p1 = engine().parse(input.as_bytes())?;\n\n\n\n if let Some((_, winning_army_units)) = engine_p1.fight() {\n\n println!(\"Part 1: the winning army has {} units\", winning_army_units);\n\n }\n\n\n\n let mut boost = 1;\n\n loop {\n\n let mut engine_p2 = engine().parse(input.as_bytes())?;\n\n engine_p2.boost(boost);\n\n if let Some((winning_army_group_type, winning_army_units)) = engine_p2.fight() {\n\n if winning_army_group_type == GroupType::ImmuneSystem {\n\n println!(\n\n \"Part 2: the immune system has {} units left\",\n\n winning_army_units\n\n );\n\n break;\n\n }\n\n }\n\n\n\n boost += 1;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "day-24/src/main.rs", "rank": 90, "score": 53040.997125210386 }, { "content": "fn main() -> Result<(), Error> {\n\n let mut input = String::new();\n\n stdin().read_to_string(&mut input).unwrap();\n\n\n\n let mut engine = engine().parse(input.as_bytes())?;\n\n\n\n engine.run();\n\n println!(\n\n \"Part 1: the value left in register 0 is {}\",\n\n engine.registers[0]\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "day-19/src/main.rs", "rank": 91, "score": 53040.997125210386 }, { "content": "fn parse(line: &str) -> Claim {\n\n let pairs = ClaimParser::parse(Rule::claim, line).unwrap_or_else(|e| panic!(\"{}\", e));\n\n\n\n let mut claim = Claim::default();\n\n\n\n for pair in pairs {\n\n match pair.as_rule() {\n\n Rule::id => {\n\n claim.id = pair.as_str().parse().unwrap();\n\n }\n\n Rule::left_offset => {\n\n claim.left_offset = pair.as_str().parse().unwrap();\n\n }\n\n Rule::top_offset => {\n\n claim.top_offset = pair.as_str().parse().unwrap();\n\n }\n\n Rule::width => {\n\n claim.width = pair.as_str().parse().unwrap();\n\n }\n\n Rule::height => {\n\n claim.height = 
pair.as_str().parse().unwrap();\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n claim\n\n}\n\n\n", "file_path": "day-03/src/main.rs", "rank": 92, "score": 50244.474291204206 }, { "content": "fn parse(line: &str) -> Event {\n\n let pairs = EventParser::parse(Rule::event, line).unwrap_or_else(|e| panic!(\"{}\", e));\n\n\n\n let mut timestamp = Timestamp::default();\n\n\n\n for pair in pairs {\n\n match pair.as_rule() {\n\n Rule::timestamp => {\n\n for inner_pair in pair.into_inner() {\n\n match inner_pair.as_rule() {\n\n Rule::year => {\n\n timestamp.year = inner_pair.as_str().parse().unwrap();\n\n }\n\n Rule::month => {\n\n timestamp.month = inner_pair.as_str().parse().unwrap();\n\n }\n\n Rule::day => {\n\n timestamp.day = inner_pair.as_str().parse().unwrap();\n\n }\n\n Rule::hour => {\n", "file_path": "day-04/src/main.rs", "rank": 93, "score": 50244.474291204206 }, { "content": "fn space<'a>() -> Parser<'a, u8, ()> {\n\n one_of(b\" \\t\\r\\n\").repeat(0..).discard()\n\n}\n\n\n", "file_path": "day-17/src/main.rs", "rank": 94, "score": 49815.058873954076 }, { "content": "fn space<'a>() -> Parser<'a, u8, ()> {\n\n one_of(b\" \\t\\r\\n\").repeat(0..).discard()\n\n}\n\n\n", "file_path": "day-23/src/main.rs", "rank": 95, "score": 49815.058873954076 }, { "content": "fn space<'a>() -> Parser<'a, u8, ()> {\n\n one_of(b\" \\t\\r\\n\").repeat(0..).discard()\n\n}\n\n\n", "file_path": "day-22/src/main.rs", "rank": 96, "score": 49815.058873954076 }, { "content": "fn space<'a>() -> Parser<'a, u8, ()> {\n\n one_of(b\" \\t\\r\\n\").repeat(0..).discard()\n\n}\n\n\n", "file_path": "day-19/src/main.rs", "rank": 97, "score": 49815.058873954076 }, { "content": "fn space<'a>() -> Parser<'a, u8, ()> {\n\n one_of(b\" \\t\\r\\n\").repeat(0..).discard()\n\n}\n\n\n", "file_path": "day-16/src/main.rs", "rank": 98, "score": 49815.058873954076 } ]
Rust
ruma-events/tests/pdu.rs
ignatenkobrain/ruma
1c47963befcf241f1dbd0e9ad12ab3dfd7ef54cc
#![cfg(not(feature = "unstable-pre-spec"))] use std::{ collections::BTreeMap, time::{Duration, SystemTime}, }; use ruma_events::{ pdu::{EventHash, Pdu, RoomV1Pdu, RoomV3Pdu}, EventType, }; use ruma_identifiers::{event_id, room_id, server_name, server_signing_key_id, user_id}; use serde_json::{from_value as from_json_value, json, to_value as to_json_value}; #[test] fn serialize_pdu_as_v1() { let mut signatures = BTreeMap::new(); let mut inner_signature = BTreeMap::new(); inner_signature.insert( server_signing_key_id!("ed25519:key_version"), "86BytesOfSignatureOfTheRedactedEvent".into(), ); signatures.insert(server_name!("example.com"), inner_signature); let mut unsigned = BTreeMap::new(); unsigned.insert("somekey".into(), json!({"a": 456})); let v1_pdu = RoomV1Pdu { room_id: room_id!("!n8f893n9:example.com"), event_id: event_id!("$somejoinevent:matrix.org"), sender: user_id!("@sender:example.com"), origin: "matrix.org".into(), origin_server_ts: SystemTime::UNIX_EPOCH + Duration::from_millis(1_592_050_773_658), kind: EventType::RoomPowerLevels, content: json!({"testing": 123}), state_key: Some("state".into()), prev_events: vec![( event_id!("$previousevent:matrix.org"), EventHash { sha256: "123567".into() }, )], depth: 2_u32.into(), auth_events: vec![( event_id!("$someauthevent:matrix.org"), EventHash { sha256: "21389CFEDABC".into() }, )], redacts: Some(event_id!("$9654:matrix.org")), unsigned, hashes: EventHash { sha256: "1233543bABACDEF".into() }, signatures, }; let pdu = Pdu::RoomV1Pdu(v1_pdu); let json = json!({ "room_id": "!n8f893n9:example.com", "event_id": "$somejoinevent:matrix.org", "sender": "@sender:example.com", "origin": "matrix.org", "origin_server_ts": 1_592_050_773_658usize, "type": "m.room.power_levels", "content": { "testing": 123 }, "state_key": "state", "prev_events": [ [ "$previousevent:matrix.org", {"sha256": "123567"} ] ], "depth": 2, "auth_events": [ ["$someauthevent:matrix.org", {"sha256": "21389CFEDABC"}] ], "redacts": "$9654:matrix.org", 
"unsigned": { "somekey": { "a": 456 } }, "hashes": { "sha256": "1233543bABACDEF" }, "signatures": { "example.com": { "ed25519:key_version":"86BytesOfSignatureOfTheRedactedEvent" } } }); assert_eq!(to_json_value(&pdu).unwrap(), json); } #[test] fn serialize_pdu_as_v3() { let mut signatures = BTreeMap::new(); let mut inner_signature = BTreeMap::new(); inner_signature.insert( server_signing_key_id!("ed25519:key_version"), "86BytesOfSignatureOfTheRedactedEvent".into(), ); signatures.insert(server_name!("example.com"), inner_signature); let mut unsigned = BTreeMap::new(); unsigned.insert("somekey".into(), json!({"a": 456})); let v3_pdu = RoomV3Pdu { room_id: room_id!("!n8f893n9:example.com"), sender: user_id!("@sender:example.com"), origin: "matrix.org".into(), origin_server_ts: SystemTime::UNIX_EPOCH + Duration::from_millis(1_592_050_773_658), kind: EventType::RoomPowerLevels, content: json!({"testing": 123}), state_key: Some("state".into()), prev_events: vec![event_id!("$previousevent:matrix.org")], depth: 2_u32.into(), auth_events: vec![event_id!("$someauthevent:matrix.org")], redacts: Some(event_id!("$9654:matrix.org")), unsigned, hashes: EventHash { sha256: "1233543bABACDEF".into() }, signatures, }; let pdu_stub = Pdu::RoomV3Pdu(v3_pdu); let json = json!({ "room_id": "!n8f893n9:example.com", "sender": "@sender:example.com", "origin": "matrix.org", "origin_server_ts": 1_592_050_773_658usize, "type": "m.room.power_levels", "content": { "testing": 123 }, "state_key": "state", "prev_events": [ "$previousevent:matrix.org" ], "depth": 2, "auth_events": ["$someauthevent:matrix.org" ], "redacts": "$9654:matrix.org", "unsigned": { "somekey": { "a": 456 } }, "hashes": { "sha256": "1233543bABACDEF" }, "signatures": { "example.com": { "ed25519:key_version":"86BytesOfSignatureOfTheRedactedEvent" } } }); assert_eq!(to_json_value(&pdu_stub).unwrap(), json); } #[test] fn deserialize_pdu_as_v1() { let json = json!({ "room_id": "!n8f893n9:example.com", "event_id": 
"$somejoinevent:matrix.org", "auth_events": [ [ "$abc123:matrix.org", { "sha256": "Base64EncodedSha256HashesShouldBe43BytesLong" } ] ], "content": { "key": "value" }, "depth": 12, "event_id": "$a4ecee13e2accdadf56c1025:example.com", "hashes": { "sha256": "ThisHashCoversAllFieldsInCaseThisIsRedacted" }, "origin": "matrix.org", "origin_server_ts": 1_234_567_890, "prev_events": [ [ "$abc123:matrix.org", { "sha256": "Base64EncodedSha256HashesShouldBe43BytesLong" } ] ], "redacts": "$def456:matrix.org", "room_id": "!abc123:matrix.org", "sender": "@someone:matrix.org", "signatures": { "example.com": { "ed25519:key_version": "86BytesOfSignatureOfTheRedactedEvent" } }, "state_key": "my_key", "type": "m.room.message", "unsigned": { "key": "value" } }); let parsed = from_json_value::<Pdu>(json).unwrap(); match parsed { Pdu::RoomV1Pdu(v1_pdu) => { assert_eq!(v1_pdu.auth_events.first().unwrap().0, event_id!("$abc123:matrix.org")); assert_eq!( v1_pdu.auth_events.first().unwrap().1.sha256, "Base64EncodedSha256HashesShouldBe43BytesLong" ); } Pdu::RoomV3Pdu(_) => panic!("Matched V3 PDU"), } } #[cfg(not(feature = "unstable-pre-spec"))] #[test] fn deserialize_pdu_as_v3() { let json = json!({ "room_id": "!n8f893n9:example.com", "auth_events": [ "$abc123:matrix.org" ], "content": { "key": "value" }, "depth": 12, "event_id": "$a4ecee13e2accdadf56c1025:example.com", "hashes": { "sha256": "ThisHashCoversAllFieldsInCaseThisIsRedacted" }, "origin": "matrix.org", "origin_server_ts": 1_234_567_890, "prev_events": [ "$abc123:matrix.org" ], "redacts": "$def456:matrix.org", "room_id": "!abc123:matrix.org", "sender": "@someone:matrix.org", "signatures": { "example.com": { "ed25519:key_version": "86BytesOfSignatureOfTheRedactedEvent" } }, "state_key": "my_key", "type": "m.room.message", "unsigned": { "key": "value" } }); let parsed = from_json_value::<Pdu>(json).unwrap(); match parsed { Pdu::RoomV1Pdu(_) => panic!("Matched V1 PDU"), Pdu::RoomV3Pdu(v3_pdu) => { 
assert_eq!(v3_pdu.auth_events.first().unwrap(), &event_id!("$abc123:matrix.org")); } } }
#![cfg(not(feature = "unstable-pre-spec"))] use std::{ collections::BTreeMap, time::{Duration, SystemTime}, }; use ruma_events::{ pdu::{EventHash, Pdu, RoomV1Pdu, RoomV3Pdu}, EventType, }; use ruma_identifiers::{event_id, room_id, server_name, server_signing_key_id, user_id}; use serde_json::{from_value as from_json_value, json, to_value as to_json_value}; #[test] fn serialize_pdu_as_v1() { let mut signatures = BTreeMap::new(); let mut inner_signature = BTreeMap::new(); inner_signature.insert( server_signing_key_id!("ed25519:key_version"), "86BytesOfSignatureOfTheRedactedEvent".into(), ); signatures.insert(server_name!("example.com"), inner_signature); let mut unsigned = BTreeMap::new(); unsigned.insert("somekey".into(), json!({"a": 456})); let v1_pdu = RoomV1Pdu { room_id: room_id!("!n8f893n9:example.com"), event_id: event_id!("$somejoinevent:matrix.org"), sender: user_id!("@sender:example.com"), origin: "matrix.org".into(), origin_server_ts: SystemTime::UNIX_EPOCH + Duration::from_millis(1_592_050_773_658), kind: EventType::RoomPowerLevels, content: json!({"testing": 123}), state_key: Some("state".into()), prev_events: vec![( event_id!("$previousevent:matrix.org"), EventHash { sha256: "123567".into() }, )], depth:
rg")), unsigned, hashes: EventHash { sha256: "1233543bABACDEF".into() }, signatures, }; let pdu_stub = Pdu::RoomV3Pdu(v3_pdu); let json = json!({ "room_id": "!n8f893n9:example.com", "sender": "@sender:example.com", "origin": "matrix.org", "origin_server_ts": 1_592_050_773_658usize, "type": "m.room.power_levels", "content": { "testing": 123 }, "state_key": "state", "prev_events": [ "$previousevent:matrix.org" ], "depth": 2, "auth_events": ["$someauthevent:matrix.org" ], "redacts": "$9654:matrix.org", "unsigned": { "somekey": { "a": 456 } }, "hashes": { "sha256": "1233543bABACDEF" }, "signatures": { "example.com": { "ed25519:key_version":"86BytesOfSignatureOfTheRedactedEvent" } } }); assert_eq!(to_json_value(&pdu_stub).unwrap(), json); } #[test] fn deserialize_pdu_as_v1() { let json = json!({ "room_id": "!n8f893n9:example.com", "event_id": "$somejoinevent:matrix.org", "auth_events": [ [ "$abc123:matrix.org", { "sha256": "Base64EncodedSha256HashesShouldBe43BytesLong" } ] ], "content": { "key": "value" }, "depth": 12, "event_id": "$a4ecee13e2accdadf56c1025:example.com", "hashes": { "sha256": "ThisHashCoversAllFieldsInCaseThisIsRedacted" }, "origin": "matrix.org", "origin_server_ts": 1_234_567_890, "prev_events": [ [ "$abc123:matrix.org", { "sha256": "Base64EncodedSha256HashesShouldBe43BytesLong" } ] ], "redacts": "$def456:matrix.org", "room_id": "!abc123:matrix.org", "sender": "@someone:matrix.org", "signatures": { "example.com": { "ed25519:key_version": "86BytesOfSignatureOfTheRedactedEvent" } }, "state_key": "my_key", "type": "m.room.message", "unsigned": { "key": "value" } }); let parsed = from_json_value::<Pdu>(json).unwrap(); match parsed { Pdu::RoomV1Pdu(v1_pdu) => { assert_eq!(v1_pdu.auth_events.first().unwrap().0, event_id!("$abc123:matrix.org")); assert_eq!( v1_pdu.auth_events.first().unwrap().1.sha256, "Base64EncodedSha256HashesShouldBe43BytesLong" ); } Pdu::RoomV3Pdu(_) => panic!("Matched V3 PDU"), } } #[cfg(not(feature = "unstable-pre-spec"))] #[test] fn 
deserialize_pdu_as_v3() { let json = json!({ "room_id": "!n8f893n9:example.com", "auth_events": [ "$abc123:matrix.org" ], "content": { "key": "value" }, "depth": 12, "event_id": "$a4ecee13e2accdadf56c1025:example.com", "hashes": { "sha256": "ThisHashCoversAllFieldsInCaseThisIsRedacted" }, "origin": "matrix.org", "origin_server_ts": 1_234_567_890, "prev_events": [ "$abc123:matrix.org" ], "redacts": "$def456:matrix.org", "room_id": "!abc123:matrix.org", "sender": "@someone:matrix.org", "signatures": { "example.com": { "ed25519:key_version": "86BytesOfSignatureOfTheRedactedEvent" } }, "state_key": "my_key", "type": "m.room.message", "unsigned": { "key": "value" } }); let parsed = from_json_value::<Pdu>(json).unwrap(); match parsed { Pdu::RoomV1Pdu(_) => panic!("Matched V1 PDU"), Pdu::RoomV3Pdu(v3_pdu) => { assert_eq!(v3_pdu.auth_events.first().unwrap(), &event_id!("$abc123:matrix.org")); } } }
2_u32.into(), auth_events: vec![( event_id!("$someauthevent:matrix.org"), EventHash { sha256: "21389CFEDABC".into() }, )], redacts: Some(event_id!("$9654:matrix.org")), unsigned, hashes: EventHash { sha256: "1233543bABACDEF".into() }, signatures, }; let pdu = Pdu::RoomV1Pdu(v1_pdu); let json = json!({ "room_id": "!n8f893n9:example.com", "event_id": "$somejoinevent:matrix.org", "sender": "@sender:example.com", "origin": "matrix.org", "origin_server_ts": 1_592_050_773_658usize, "type": "m.room.power_levels", "content": { "testing": 123 }, "state_key": "state", "prev_events": [ [ "$previousevent:matrix.org", {"sha256": "123567"} ] ], "depth": 2, "auth_events": [ ["$someauthevent:matrix.org", {"sha256": "21389CFEDABC"}] ], "redacts": "$9654:matrix.org", "unsigned": { "somekey": { "a": 456 } }, "hashes": { "sha256": "1233543bABACDEF" }, "signatures": { "example.com": { "ed25519:key_version":"86BytesOfSignatureOfTheRedactedEvent" } } }); assert_eq!(to_json_value(&pdu).unwrap(), json); } #[test] fn serialize_pdu_as_v3() { let mut signatures = BTreeMap::new(); let mut inner_signature = BTreeMap::new(); inner_signature.insert( server_signing_key_id!("ed25519:key_version"), "86BytesOfSignatureOfTheRedactedEvent".into(), ); signatures.insert(server_name!("example.com"), inner_signature); let mut unsigned = BTreeMap::new(); unsigned.insert("somekey".into(), json!({"a": 456})); let v3_pdu = RoomV3Pdu { room_id: room_id!("!n8f893n9:example.com"), sender: user_id!("@sender:example.com"), origin: "matrix.org".into(), origin_server_ts: SystemTime::UNIX_EPOCH + Duration::from_millis(1_592_050_773_658), kind: EventType::RoomPowerLevels, content: json!({"testing": 123}), state_key: Some("state".into()), prev_events: vec![event_id!("$previousevent:matrix.org")], depth: 2_u32.into(), auth_events: vec![event_id!("$someauthevent:matrix.org")], redacts: Some(event_id!("$9654:matrix.o
random
[ { "content": "fn aliases_event_with_prev_content() -> JsonValue {\n\n json!({\n\n \"content\": {\n\n \"aliases\": [ \"#somewhere:localhost\" ]\n\n },\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"origin_server_ts\": 1,\n\n \"prev_content\": {\n\n \"aliases\": [ \"#inner:localhost\" ]\n\n },\n\n \"room_id\": \"!roomid:room.com\",\n\n \"sender\": \"@carl:example.com\",\n\n \"state_key\": \"\",\n\n \"type\": \"m.room.aliases\"\n\n })\n\n}\n\n\n", "file_path": "ruma-events/tests/state_event.rs", "rank": 1, "score": 200858.17620149627 }, { "content": "/// Creates a *content hash* for an event.\n\n///\n\n/// Returns the hash as a Base64-encoded string, using the standard character set, without padding.\n\n///\n\n/// The content hash of an event covers the complete event including the unredacted contents. It is\n\n/// used during federation and is described in the Matrix server-server specification.\n\n///\n\n/// # Parameters\n\n///\n\n/// object: A JSON object to generate a content hash for.\n\npub fn content_hash(object: &CanonicalJsonObject) -> String {\n\n let json = canonical_json_with_fields_to_remove(object, CONTENT_HASH_FIELDS_TO_REMOVE);\n\n let hash = digest(&SHA256, json.as_bytes());\n\n\n\n encode_config(&hash, STANDARD_NO_PAD)\n\n}\n\n\n", "file_path": "ruma-signatures/src/functions.rs", "rank": 2, "score": 192379.72274481543 }, { "content": "fn has_prev_content_field(kind: &EventKind, var: &EventKindVariation) -> bool {\n\n matches!(kind, EventKind::State)\n\n && matches!(var, EventKindVariation::Full | EventKindVariation::Sync)\n\n}\n\n\n", "file_path": "ruma-events-macros/src/event_enum.rs", "rank": 6, "score": 181801.981291459 }, { "content": "fn full_unsigned() -> RedactedUnsigned {\n\n let mut unsigned = RedactedUnsigned::default();\n\n unsigned.redacted_because = Some(Box::new(RedactionEvent {\n\n content: RedactionEventContent { reason: Some(\"redacted because\".into()) },\n\n room_id: room_id!(\"!roomid:room.com\"),\n\n redacts: 
event_id!(\"$h29iv0s8:example.com\"),\n\n event_id: event_id!(\"$h29iv0s8:example.com\"),\n\n origin_server_ts: UNIX_EPOCH + Duration::from_millis(1),\n\n sender: user_id!(\"@carl:example.com\"),\n\n unsigned: Unsigned::default(),\n\n }));\n\n\n\n unsigned\n\n}\n\n\n", "file_path": "ruma-events/tests/redacted.rs", "rank": 7, "score": 177627.82419998298 }, { "content": "fn sync_unsigned() -> RedactedSyncUnsigned {\n\n let mut unsigned = RedactedSyncUnsigned::default();\n\n // The presence of `redacted_because` triggers the event enum to return early\n\n // with `RedactedContent` instead of failing to deserialize according\n\n // to the event type string.\n\n unsigned.redacted_because = Some(Box::new(SyncRedactionEvent {\n\n content: RedactionEventContent { reason: Some(\"redacted because\".into()) },\n\n redacts: event_id!(\"$h29iv0s8:example.com\"),\n\n event_id: event_id!(\"$h29iv0s8:example.com\"),\n\n origin_server_ts: UNIX_EPOCH + Duration::from_millis(1),\n\n sender: user_id!(\"@carl:example.com\"),\n\n unsigned: Unsigned::default(),\n\n }));\n\n\n\n unsigned\n\n}\n\n\n", "file_path": "ruma-events/tests/redacted.rs", "rank": 8, "score": 174828.02393712223 }, { "content": "#[test]\n\nfn ui() {\n\n let t = trybuild::TestCases::new();\n\n t.pass(\"tests/ui/01-content-sanity-check.rs\");\n\n t.compile_fail(\"tests/ui/02-no-event-type.rs\");\n\n t.compile_fail(\"tests/ui/03-invalid-event-type.rs\");\n\n}\n", "file_path": "ruma-events/tests/event_content.rs", "rank": 9, "score": 174590.8834782323 }, { "content": "#[test]\n\nfn ui() {\n\n let t = trybuild::TestCases::new();\n\n t.pass(\"tests/ui/07-enum-sanity-check.rs\");\n\n t.compile_fail(\"tests/ui/08-enum-invalid-path.rs\");\n\n}\n", "file_path": "ruma-events/tests/event_content_enum.rs", "rank": 10, "score": 171298.46379425906 }, { "content": "fn main() {}\n", "file_path": "ruma-events/tests/ui/06-no-content-field.rs", "rank": 11, "score": 171292.68160458223 }, { "content": "/// Uses a set of public keys to 
verify a signed JSON object.\n\n///\n\n/// # Parameters\n\n///\n\n/// * public_key_map: A map from entity identifiers to a map from key identifiers to public keys.\n\n/// Generally, entity identifiers are server names—the host/IP/port of a homeserver (e.g.\n\n/// \"example.com\") for which a signature must be verified. Key identifiers for each server (e.g.\n\n/// \"ed25519:1\") then map to their respective public keys.\n\n/// * object: The JSON object that was signed.\n\n///\n\n/// # Errors\n\n///\n\n/// Returns an error if verification fails.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// use std::collections::BTreeMap;\n\n///\n\n/// const PUBLIC_KEY: &str = \"XGX0JRS2Af3be3knz2fBiRbApjm2Dh61gXDJA8kcJNI\";\n\n///\n\n/// // Deserialize the signed JSON.\n\n/// let object = serde_json::from_str(\n\n/// r#\"{\n\n/// \"signatures\": {\n\n/// \"domain\": {\n\n/// \"ed25519:1\": \"K8280/U9SSy9IVtjBuVeLr+HpOB4BQFWbg+UZaADMtTdGYI7Geitb76LTrr5QV/7Xg4ahLwYGYZzuHGZKM5ZAQ\"\n\n/// }\n\n/// }\n\n/// }\"#\n\n/// ).unwrap();\n\n///\n\n/// // Create the `PublicKeyMap` that will inform `verify_json` which signatures to verify.\n\n/// let mut public_key_set = BTreeMap::new();\n\n/// public_key_set.insert(\"ed25519:1\".into(), PUBLIC_KEY.to_string());\n\n/// let mut public_key_map = BTreeMap::new();\n\n/// public_key_map.insert(\"domain\".into(), public_key_set);\n\n///\n\n/// // Verify at least one signature for each entity in `public_key_map`.\n\n/// assert!(ruma_signatures::verify_json(&public_key_map, &object).is_ok());\n\n/// ```\n\npub fn verify_json(\n\n public_key_map: &PublicKeyMap,\n\n object: &CanonicalJsonObject,\n\n) -> Result<(), Error> {\n\n let signature_map = match object.get(\"signatures\") {\n\n Some(CanonicalJsonValue::Object(signatures)) => signatures.clone(),\n\n Some(_) => return Err(Error::new(\"field `signatures` must be a JSON object\")),\n\n None => return Err(Error::new(\"JSON object must contain a `signatures` field.\")),\n\n };\n\n\n\n for 
(entity_id, public_keys) in public_key_map {\n\n let signature_set = match signature_map.get(entity_id) {\n\n Some(CanonicalJsonValue::Object(set)) => set,\n\n Some(_) => return Err(Error::new(\"signature sets must be JSON objects\")),\n\n None => {\n\n return Err(Error::new(format!(\"no signatures found for entity `{}`\", entity_id)))\n\n }\n\n };\n\n\n\n let mut maybe_signature = None;\n", "file_path": "ruma-signatures/src/functions.rs", "rank": 12, "score": 169455.4353290829 }, { "content": "/// Uses a public key to verify a signed JSON object.\n\n///\n\n/// # Parameters\n\n///\n\n/// * verifier: A `Verifier` appropriate for the digital signature algorithm that was used.\n\n/// * public_key: The raw bytes of the public key used to sign the JSON.\n\n/// * signature: The raw bytes of the signature.\n\n/// * object: The JSON object that was signed.\n\n///\n\n/// # Errors\n\n///\n\n/// Returns an error if verification fails.\n\nfn verify_json_with<V>(\n\n verifier: &V,\n\n public_key: &[u8],\n\n signature: &[u8],\n\n object: &CanonicalJsonObject,\n\n) -> Result<(), Error>\n\nwhere\n\n V: Verifier,\n\n{\n\n verifier.verify_json(public_key, signature, canonical_json(object).as_bytes())\n\n}\n\n\n", "file_path": "ruma-signatures/src/functions.rs", "rank": 13, "score": 169449.44740042472 }, { "content": "fn redaction() -> JsonValue {\n\n json!({\n\n \"content\": {\n\n \"reason\": \"being a turd\"\n\n },\n\n \"redacts\": \"$nomore:example.com\",\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"sender\": \"@carl:example.com\",\n\n \"origin_server_ts\": 1,\n\n \"room_id\": \"!roomid:room.com\",\n\n \"type\": \"m.room.redaction\"\n\n })\n\n}\n\n\n", "file_path": "ruma-events/tests/redaction.rs", "rank": 14, "score": 168934.21719219696 }, { "content": "fn main() {}\n", "file_path": "ruma-events/tests/ui/09-enum-invalid-kind.rs", "rank": 15, "score": 168222.56054802466 }, { "content": "#[test]\n\nfn deserialize_aliases_content() {\n\n let json_data = json!({\n\n 
\"aliases\": [ \"#somewhere:localhost\" ]\n\n });\n\n\n\n assert_matches!(\n\n from_json_value::<Raw<AnyStateEventContent>>(json_data)\n\n .unwrap()\n\n .deserialize_content(\"m.room.aliases\")\n\n .unwrap(),\n\n AnyStateEventContent::RoomAliases(content)\n\n if content.aliases == vec![room_alias_id!(\"#somewhere:localhost\")]\n\n );\n\n}\n\n\n", "file_path": "ruma-events/tests/state_event.rs", "rank": 16, "score": 168151.9611242526 }, { "content": "#[test]\n\nfn content_type_override() {\n\n let res = Response { stuff: \"magic\".into() };\n\n let mut http_res = http::Response::<Vec<u8>>::try_from(res).unwrap();\n\n\n\n // Test that we correctly replaced the default content type,\n\n // not adding another content-type header.\n\n assert_eq!(\n\n match http_res.headers_mut().entry(CONTENT_TYPE) {\n\n Entry::Occupied(occ) => occ.iter().count(),\n\n _ => 0,\n\n },\n\n 1\n\n );\n\n assert_eq!(http_res.headers().get(\"content-type\").unwrap(), \"magic\");\n\n}\n", "file_path": "ruma-api/tests/header_override.rs", "rank": 17, "score": 168151.9611242526 }, { "content": "fn main() {}\n", "file_path": "ruma-events/tests/ui/01-content-sanity-check.rs", "rank": 18, "score": 168146.17893457576 }, { "content": "#[test]\n\nfn request_serde() -> Result<(), Box<dyn std::error::Error + 'static>> {\n\n let req = Request {\n\n hello: \"hi\".to_owned(),\n\n world: \"test\".to_owned(),\n\n q1: \"query_param_special_chars %/&@!\".to_owned(),\n\n q2: 55,\n\n bar: \"barVal\".to_owned(),\n\n baz: user_id!(\"@bazme:ruma.io\"),\n\n };\n\n\n\n let http_req = req.clone().try_into_http_request(\"https://homeserver.tld\", None)?;\n\n let req2 = Request::try_from_http_request(http_req)?;\n\n\n\n assert_eq!(req.hello, req2.hello);\n\n assert_eq!(req.world, req2.world);\n\n assert_eq!(req.q1, req2.q1);\n\n assert_eq!(req.q2, req2.q2);\n\n assert_eq!(req.bar, req2.bar);\n\n assert_eq!(req.baz, req2.baz);\n\n\n\n Ok(())\n\n}\n", "file_path": "ruma-api/tests/conversions.rs", "rank": 19, "score": 
166139.03505973064 }, { "content": "fn aliases_event() -> JsonValue {\n\n json!({\n\n \"content\": {\n\n \"aliases\": [\"#somewhere:localhost\"]\n\n },\n\n \"event_id\": \"$152037280074GZeOm:localhost\",\n\n \"origin_server_ts\": 1,\n\n \"sender\": \"@example:localhost\",\n\n \"state_key\": \"\",\n\n \"room_id\": \"!room:room.com\",\n\n \"type\": \"m.room.aliases\",\n\n \"unsigned\": {\n\n \"age\": 1\n\n }\n\n })\n\n}\n\n\n", "file_path": "ruma-events/tests/enums.rs", "rank": 20, "score": 165785.48509632895 }, { "content": "fn message_event() -> JsonValue {\n\n json!({\n\n \"content\": {\n\n \"body\": \"baba\",\n\n \"format\": \"org.matrix.custom.html\",\n\n \"formatted_body\": \"<strong>baba</strong>\",\n\n \"msgtype\": \"m.text\"\n\n },\n\n \"event_id\": \"$152037280074GZeOm:localhost\",\n\n \"origin_server_ts\": 1,\n\n \"sender\": \"@example:localhost\",\n\n \"room_id\": \"!room:room.com\",\n\n \"type\": \"m.room.message\",\n\n \"unsigned\": {\n\n \"age\": 1\n\n }\n\n })\n\n}\n\n\n", "file_path": "ruma-events/tests/enums.rs", "rank": 21, "score": 165785.48509632895 }, { "content": "#[test]\n\nfn serialize_stripped_state_event_any_content() {\n\n let event = StrippedStateEvent {\n\n content: AnyStateEventContent::RoomTopic(TopicEventContent {\n\n topic: \"Testing room\".into(),\n\n }),\n\n state_key: \"\".into(),\n\n sender: user_id!(\"@example:localhost\"),\n\n };\n\n\n\n let json_data = json!({\n\n \"content\": {\n\n \"topic\": \"Testing room\"\n\n },\n\n \"type\": \"m.room.topic\",\n\n \"state_key\": \"\",\n\n \"sender\": \"@example:localhost\"\n\n });\n\n\n\n assert_eq!(to_json_value(&event).unwrap(), json_data);\n\n}\n\n\n", "file_path": "ruma-events/tests/stripped.rs", "rank": 22, "score": 165141.8854018265 }, { "content": "#[test]\n\nfn redacted_aliases_event_serialize_with_content() {\n\n let redacted = RedactedSyncStateEvent {\n\n content: RedactedAliasesEventContent { aliases: Some(vec![]) },\n\n event_id: event_id!(\"$h29iv0s8:example.com\"),\n\n 
state_key: \"\".to_string(),\n\n origin_server_ts: UNIX_EPOCH + Duration::from_millis(1),\n\n sender: user_id!(\"@carl:example.com\"),\n\n unsigned: RedactedSyncUnsigned::default(),\n\n };\n\n\n\n let expected = json!({\n\n \"content\": {\n\n \"aliases\": []\n\n },\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"state_key\": \"\",\n\n \"origin_server_ts\": 1,\n\n \"sender\": \"@carl:example.com\",\n\n \"type\": \"m.room.aliases\"\n\n });\n\n\n\n let actual = to_json_value(&redacted).unwrap();\n\n assert_eq!(actual, expected);\n\n}\n\n\n", "file_path": "ruma-events/tests/redacted.rs", "rank": 23, "score": 165141.8854018265 }, { "content": "#[test]\n\nfn redacted_aliases_event_serialize_no_content() {\n\n let redacted = RedactedSyncStateEvent {\n\n content: RedactedAliasesEventContent { aliases: None },\n\n event_id: event_id!(\"$h29iv0s8:example.com\"),\n\n state_key: \"\".into(),\n\n origin_server_ts: UNIX_EPOCH + Duration::from_millis(1),\n\n sender: user_id!(\"@carl:example.com\"),\n\n unsigned: RedactedSyncUnsigned::default(),\n\n };\n\n\n\n let expected = json!({\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"state_key\": \"\",\n\n \"origin_server_ts\": 1,\n\n \"sender\": \"@carl:example.com\",\n\n \"type\": \"m.room.aliases\"\n\n });\n\n\n\n let actual = to_json_value(&redacted).unwrap();\n\n assert_eq!(actual, expected);\n\n}\n\n\n", "file_path": "ruma-events/tests/redacted.rs", "rank": 24, "score": 165141.8854018265 }, { "content": "#[test]\n\nfn deserialize_aliases_with_prev_content() {\n\n let json_data = aliases_event_with_prev_content();\n\n\n\n assert_matches!(\n\n from_json_value::<Raw<StateEvent<AnyStateEventContent>>>(json_data)\n\n .unwrap()\n\n .deserialize()\n\n .unwrap(),\n\n StateEvent {\n\n content: AnyStateEventContent::RoomAliases(content),\n\n event_id,\n\n origin_server_ts,\n\n prev_content: Some(AnyStateEventContent::RoomAliases(prev_content)),\n\n room_id,\n\n sender,\n\n state_key,\n\n unsigned,\n\n } if content.aliases == 
vec![room_alias_id!(\"#somewhere:localhost\")]\n\n && event_id == event_id!(\"$h29iv0s8:example.com\")\n\n && origin_server_ts == UNIX_EPOCH + Duration::from_millis(1)\n\n && prev_content.aliases == vec![room_alias_id!(\"#inner:localhost\")]\n\n && room_id == room_id!(\"!roomid:room.com\")\n\n && sender == user_id!(\"@carl:example.com\")\n\n && state_key.is_empty()\n\n && unsigned.is_empty()\n\n );\n\n}\n\n\n", "file_path": "ruma-events/tests/state_event.rs", "rank": 25, "score": 165141.8854018265 }, { "content": "#[test]\n\nfn serialize_aliases_with_prev_content() {\n\n let aliases_event = StateEvent {\n\n content: AnyStateEventContent::RoomAliases(AliasesEventContent::new(vec![room_alias_id!(\n\n \"#somewhere:localhost\"\n\n )])),\n\n event_id: event_id!(\"$h29iv0s8:example.com\"),\n\n origin_server_ts: UNIX_EPOCH + Duration::from_millis(1),\n\n prev_content: Some(AnyStateEventContent::RoomAliases(AliasesEventContent::new(vec![\n\n room_alias_id!(\"#inner:localhost\"),\n\n ]))),\n\n room_id: room_id!(\"!roomid:room.com\"),\n\n sender: user_id!(\"@carl:example.com\"),\n\n state_key: \"\".into(),\n\n unsigned: Unsigned::default(),\n\n };\n\n\n\n let actual = to_json_value(&aliases_event).unwrap();\n\n let expected = aliases_event_with_prev_content();\n\n\n\n assert_eq!(actual, expected);\n\n}\n\n\n", "file_path": "ruma-events/tests/state_event.rs", "rank": 26, "score": 165141.8854018265 }, { "content": "/// Signs an arbitrary JSON object and adds the signature to an object under the key `signatures`.\n\n///\n\n/// If `signatures` is already present, the new signature will be appended to the existing ones.\n\n///\n\n/// # Parameters\n\n///\n\n/// * entity_id: The identifier of the entity creating the signature. Generally this means a\n\n/// homeserver, e.g. 
\"example.com\".\n\n/// * key_pair: A cryptographic key pair used to sign the JSON.\n\n/// * object: A JSON object to sign according and append a signature to.\n\n///\n\n/// # Errors\n\n///\n\n/// Returns an error if:\n\n///\n\n/// * `object` contains a field called `signatures` that is not a JSON object.\n\n///\n\n/// # Examples\n\n///\n\n/// A homeserver signs JSON with a key pair:\n\n///\n\n/// ```rust\n\n/// const PKCS8: &str = \"\\\n\n/// MFMCAQEwBQYDK2VwBCIEINjozvdfbsGEt6DD+7Uf4PiJ/YvTNXV2mIPc/\\\n\n/// tA0T+6toSMDIQDdM+tpNzNWQM9NFpfgr4B9S7LHszOrVRp9NfKmeXS3aQ\\\n\n/// \";\n\n///\n\n/// let document = base64::decode_config(&PKCS8, base64::STANDARD_NO_PAD).unwrap();\n\n///\n\n/// // Create an Ed25519 key pair.\n\n/// let key_pair = ruma_signatures::Ed25519KeyPair::new(\n\n/// &document,\n\n/// \"1\".into(), // The \"version\" of the key.\n\n/// ).unwrap();\n\n///\n\n/// // Deserialize some JSON.\n\n/// let mut value = serde_json::from_str(\"{}\").unwrap();\n\n///\n\n/// // Sign the JSON with the key pair.\n\n/// assert!(ruma_signatures::sign_json(\"domain\", &key_pair, &mut value).is_ok());\n\n/// ```\n\n///\n\n/// This will modify the JSON from an empty object to a structure like this:\n\n///\n\n/// ```json\n\n/// {\n\n/// \"signatures\": {\n\n/// \"domain\": {\n\n/// \"ed25519:1\": \"K8280/U9SSy9IVtjBuVeLr+HpOB4BQFWbg+UZaADMtTdGYI7Geitb76LTrr5QV/7Xg4ahLwYGYZzuHGZKM5ZAQ\"\n\n/// }\n\n/// }\n\n/// }\n\n/// ```\n\npub fn sign_json<K>(\n\n entity_id: &str,\n\n key_pair: &K,\n\n object: &mut CanonicalJsonObject,\n\n) -> Result<(), Error>\n\nwhere\n\n K: KeyPair,\n\n{\n\n let mut signature_map;\n\n let maybe_unsigned;\n\n\n\n // FIXME: Once MSRV >= 1.45.0, use remove_key and don't allocate new `String`s below.\n\n signature_map = match object.remove(\"signatures\") {\n\n Some(CanonicalJsonValue::Object(signatures)) => signatures,\n\n Some(_) => return Err(Error::new(\"field `signatures` must be a JSON object\")),\n\n None => BTreeMap::new(),\n\n };\n\n\n\n 
maybe_unsigned = object.remove(\"unsigned\");\n\n\n", "file_path": "ruma-signatures/src/functions.rs", "rank": 27, "score": 164215.21258520332 }, { "content": "fn custom_state_event() -> JsonValue {\n\n json!({\n\n \"content\": {\n\n \"m.relates_to\": {\n\n \"event_id\": \"$MDitXXXXXX\",\n\n \"key\": \"👍\",\n\n \"rel_type\": \"m.annotation\"\n\n }\n\n },\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"origin_server_ts\": 10,\n\n \"room_id\": \"!room:room.com\",\n\n \"sender\": \"@carl:example.com\",\n\n \"state_key\": \"\",\n\n \"type\": \"m.reaction\",\n\n \"unsigned\": {\n\n \"age\": 85\n\n }\n\n })\n\n}\n\n\n", "file_path": "ruma-events/tests/custom.rs", "rank": 28, "score": 162773.27661211474 }, { "content": "fn message_event_sync() -> JsonValue {\n\n json!({\n\n \"content\": {\n\n \"body\": \"baba\",\n\n \"format\": \"org.matrix.custom.html\",\n\n \"formatted_body\": \"<strong>baba</strong>\",\n\n \"msgtype\": \"m.text\"\n\n },\n\n \"event_id\": \"$152037280074GZeOm:localhost\",\n\n \"origin_server_ts\": 1,\n\n \"sender\": \"@example:localhost\",\n\n \"type\": \"m.room.message\",\n\n \"unsigned\": {\n\n \"age\": 1\n\n }\n\n })\n\n}\n\n\n", "file_path": "ruma-events/tests/enums.rs", "rank": 29, "score": 162773.27661211474 }, { "content": "fn aliases_event_sync() -> JsonValue {\n\n json!({\n\n \"content\": {\n\n \"aliases\": [\"#somewhere:localhost\"]\n\n },\n\n \"event_id\": \"$152037280074GZeOm:localhost\",\n\n \"origin_server_ts\": 1,\n\n \"sender\": \"@example:localhost\",\n\n \"state_key\": \"\",\n\n \"type\": \"m.room.aliases\",\n\n \"unsigned\": {\n\n \"age\": 1\n\n }\n\n })\n\n}\n\n\n", "file_path": "ruma-events/tests/enums.rs", "rank": 30, "score": 162773.27661211474 }, { "content": "#[test]\n\nfn deserialize_message_call_answer_content() {\n\n let json_data = json!({\n\n \"answer\": {\n\n \"type\": \"answer\",\n\n \"sdp\": \"Hello\"\n\n },\n\n \"call_id\": \"foofoo\",\n\n \"version\": 1\n\n });\n\n\n\n assert_matches!(\n\n 
from_json_value::<Raw<AnyMessageEventContent>>(json_data)\n\n .unwrap()\n\n .deserialize_content(\"m.call.answer\")\n\n .unwrap(),\n\n AnyMessageEventContent::CallAnswer(AnswerEventContent {\n\n answer: SessionDescription {\n\n session_type: SessionDescriptionType::Answer,\n\n sdp,\n\n ..\n\n },\n\n call_id,\n\n version,\n\n ..\n\n }) if sdp == \"Hello\" && call_id == \"foofoo\" && version == UInt::new(1).unwrap()\n\n );\n\n}\n\n\n", "file_path": "ruma-events/tests/message_event.rs", "rank": 31, "score": 162259.55204433316 }, { "content": "#[test]\n\nfn serialize_aliases_without_prev_content() {\n\n let aliases_event = StateEvent {\n\n content: AnyStateEventContent::RoomAliases(AliasesEventContent::new(vec![room_alias_id!(\n\n \"#somewhere:localhost\"\n\n )])),\n\n event_id: event_id!(\"$h29iv0s8:example.com\"),\n\n origin_server_ts: UNIX_EPOCH + Duration::from_millis(1),\n\n prev_content: None,\n\n room_id: room_id!(\"!roomid:room.com\"),\n\n sender: user_id!(\"@carl:example.com\"),\n\n state_key: \"\".into(),\n\n unsigned: Unsigned::default(),\n\n };\n\n\n\n let actual = to_json_value(&aliases_event).unwrap();\n\n let expected = json!({\n\n \"content\": {\n\n \"aliases\": [ \"#somewhere:localhost\" ]\n\n },\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"origin_server_ts\": 1,\n\n \"room_id\": \"!roomid:room.com\",\n\n \"sender\": \"@carl:example.com\",\n\n \"state_key\": \"\",\n\n \"type\": \"m.room.aliases\",\n\n });\n\n\n\n assert_eq!(actual, expected);\n\n}\n\n\n", "file_path": "ruma-events/tests/state_event.rs", "rank": 32, "score": 162259.55204433316 }, { "content": "#[test]\n\nfn deserialize_avatar_without_prev_content() {\n\n let json_data = json!({\n\n \"content\": {\n\n \"info\": {\n\n \"h\": 423,\n\n \"mimetype\": \"image/png\",\n\n \"size\": 84242,\n\n \"thumbnail_info\": {\n\n \"h\": 334,\n\n \"mimetype\": \"image/png\",\n\n \"size\": 82595,\n\n \"w\": 800\n\n },\n\n \"thumbnail_url\": \"mxc://matrix.org\",\n\n \"w\": 1011\n\n },\n\n \"url\": 
\"http://www.matrix.org\"\n\n },\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"origin_server_ts\": 1,\n", "file_path": "ruma-events/tests/state_event.rs", "rank": 33, "score": 162259.55204433316 }, { "content": "/// Converts an event into the [canonical] string form.\n\n///\n\n/// [canonical]: https://matrix.org/docs/spec/appendices#canonical-json\n\n///\n\n/// # Parameters\n\n///\n\n/// * object: The JSON object to convert.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// let input =\n\n/// r#\"{\n\n/// \"本\": 2,\n\n/// \"日\": 1\n\n/// }\"#;\n\n///\n\n/// let object = serde_json::from_str(input).unwrap();\n\n/// let canonical = ruma_signatures::canonical_json(&object);\n\n///\n\n/// assert_eq!(canonical, r#\"{\"日\":1,\"本\":2}\"#);\n\n/// ```\n\npub fn canonical_json(object: &CanonicalJsonObject) -> String {\n\n canonical_json_with_fields_to_remove(object, CANONICAL_JSON_FIELDS_TO_REMOVE)\n\n}\n\n\n", "file_path": "ruma-signatures/src/functions.rs", "rank": 34, "score": 162000.9662281657 }, { "content": "/// Internal implementation detail of the canonical JSON algorithm. 
Allows customization of the\n\n/// fields that will be removed before serializing.\n\nfn canonical_json_with_fields_to_remove(object: &CanonicalJsonObject, fields: &[&str]) -> String {\n\n let mut owned_object = object.clone();\n\n\n\n for field in fields {\n\n owned_object.remove(*field);\n\n }\n\n\n\n to_canonical_json_string(&owned_object).expect(\"JSON object serialization to succeed\")\n\n}\n\n\n", "file_path": "ruma-signatures/src/functions.rs", "rank": 35, "score": 153642.15078448193 }, { "content": "pub fn serde_json_eq<T>(de: T, se: serde_json::Value)\n\nwhere\n\n T: Clone + Debug + PartialEq + Serialize + DeserializeOwned,\n\n{\n\n assert_eq!(se, serde_json::to_value(de.clone()).unwrap());\n\n assert_eq!(de, serde_json::from_value(se).unwrap());\n\n}\n", "file_path": "ruma-serde/src/test.rs", "rank": 36, "score": 150972.7973364436 }, { "content": "#[cfg(feature = \"criterion\")]\n\nfn deserialize_any_event(c: &mut Criterion) {\n\n let json_data = power_levels();\n\n\n\n c.bench_function(\"deserialize to `AnyEvent`\", |b| {\n\n b.iter(|| {\n\n let _ = serde_json::from_value::<Raw<AnyEvent>>(json_data.clone())\n\n .unwrap()\n\n .deserialize()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "ruma-events/benches/event_deserialize.rs", "rank": 37, "score": 143472.21168469047 }, { "content": "fn is_non_stripped_room_event(kind: &EventKind, var: &EventKindVariation) -> bool {\n\n matches!(kind, EventKind::Message | EventKind::State)\n\n && matches!(\n\n var,\n\n EventKindVariation::Full\n\n | EventKindVariation::Sync\n\n | EventKindVariation::Redacted\n\n | EventKindVariation::RedactedSync\n\n )\n\n}\n\n\n", "file_path": "ruma-events-macros/src/event_enum.rs", "rank": 38, "score": 141684.3694849368 }, { "content": "#[cfg(feature = \"criterion\")]\n\nfn deserialize_specific_event(c: &mut Criterion) {\n\n let json_data = power_levels();\n\n\n\n c.bench_function(\"deserialize to `StateEvent<PowerLevelsEventContent>`\", |b| {\n\n b.iter(|| {\n\n let _ = 
serde_json::from_value::<Raw<StateEvent<PowerLevelsEventContent>>>(\n\n json_data.clone(),\n\n )\n\n .unwrap()\n\n .deserialize()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n\n#[cfg(feature = \"criterion\")]\n\ncriterion_group!(\n\n benches,\n\n deserialize_any_event,\n\n deserialize_any_room_event,\n\n deserialize_any_state_event,\n\n deserialize_specific_event\n\n);\n\n\n\n#[cfg(feature = \"criterion\")]\n\ncriterion_main!(benches);\n\n\n", "file_path": "ruma-events/benches/event_deserialize.rs", "rank": 39, "score": 141544.9250923335 }, { "content": "#[cfg(feature = \"criterion\")]\n\nfn deserialize_any_state_event(c: &mut Criterion) {\n\n let json_data = power_levels();\n\n\n\n c.bench_function(\"deserialize to `AnyStateEvent`\", |b| {\n\n b.iter(|| {\n\n let _ = serde_json::from_value::<Raw<AnyStateEvent>>(json_data.clone())\n\n .unwrap()\n\n .deserialize()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "ruma-events/benches/event_deserialize.rs", "rank": 40, "score": 141544.9250923335 }, { "content": "#[cfg(feature = \"criterion\")]\n\nfn deserialize_any_room_event(c: &mut Criterion) {\n\n let json_data = power_levels();\n\n\n\n c.bench_function(\"deserialize to `AnyRoomEvent`\", |b| {\n\n b.iter(|| {\n\n let _ = serde_json::from_value::<Raw<AnyRoomEvent>>(json_data.clone())\n\n .unwrap()\n\n .deserialize()\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "ruma-events/benches/event_deserialize.rs", "rank": 41, "score": 141544.9250923335 }, { "content": "fn marker_traits(kind: &EventKind, ruma_events: &TokenStream) -> TokenStream {\n\n let ident = kind.to_content_enum();\n\n match kind {\n\n EventKind::State => quote! {\n\n #[automatically_derived]\n\n impl #ruma_events::RoomEventContent for #ident {}\n\n #[automatically_derived]\n\n impl #ruma_events::StateEventContent for #ident {}\n\n },\n\n EventKind::Message => quote! 
{\n\n #[automatically_derived]\n\n impl #ruma_events::RoomEventContent for #ident {}\n\n #[automatically_derived]\n\n impl #ruma_events::MessageEventContent for #ident {}\n\n },\n\n EventKind::Ephemeral => quote! {\n\n #[automatically_derived]\n\n impl #ruma_events::EphemeralRoomEventContent for #ident {}\n\n },\n\n EventKind::Basic => quote! {\n\n #[automatically_derived]\n\n impl #ruma_events::BasicEventContent for #ident {}\n\n },\n\n _ => TokenStream::new(),\n\n }\n\n}\n\n\n", "file_path": "ruma-events-macros/src/event_enum.rs", "rank": 42, "score": 136405.70086935844 }, { "content": "fn default_ver() -> Vec<RoomVersionId> {\n\n vec![RoomVersionId::Version1]\n\n}\n\n\n", "file_path": "ruma-federation-api/src/membership/create_join_event_template/v1.rs", "rank": 43, "score": 135656.17641662277 }, { "content": "fn generate_event_idents(kind: &EventKind, var: &EventKindVariation) -> Option<(Ident, Ident)> {\n\n Some((kind.to_event_ident(var)?, kind.to_event_enum_ident(var)?))\n\n}\n\n\n", "file_path": "ruma-events-macros/src/event_enum.rs", "rank": 44, "score": 135343.88972854996 }, { "content": "fn strip_lifetimes(field_type: &mut Type) -> bool {\n\n match field_type {\n\n // T<'a> -> IncomingT\n\n // The IncomingT has to be declared by the user of this derive macro.\n\n Type::Path(TypePath { path, .. 
}) => {\n\n let mut has_lifetimes = false;\n\n let mut is_lifetime_generic = false;\n\n\n\n for seg in &mut path.segments {\n\n // strip generic lifetimes\n\n match &mut seg.arguments {\n\n PathArguments::AngleBracketed(AngleBracketedGenericArguments {\n\n args, ..\n\n }) => {\n\n *args = args\n\n .clone()\n\n .into_iter()\n\n .map(|mut ty| {\n\n if let GenericArgument::Type(ty) = &mut ty {\n\n if strip_lifetimes(ty) {\n", "file_path": "ruma-serde-macros/src/outgoing.rs", "rank": 45, "score": 134396.07643723275 }, { "content": "fn allowed_content_keys_for(event_type: &str, version: &RoomVersionId) -> &'static [&'static str] {\n\n match event_type {\n\n \"m.room.member\" => &[\"membership\"],\n\n \"m.room.create\" => &[\"creator\"],\n\n \"m.room.join_rules\" => &[\"join_rule\"],\n\n \"m.room.power_levels\" => &[\n\n \"ban\",\n\n \"events\",\n\n \"events_default\",\n\n \"kick\",\n\n \"redact\",\n\n \"state_default\",\n\n \"users\",\n\n \"users_default\",\n\n ],\n\n \"m.room.aliases\" => match version {\n\n RoomVersionId::Version1\n\n | RoomVersionId::Version2\n\n | RoomVersionId::Version3\n\n | RoomVersionId::Version4\n", "file_path": "ruma-signatures/src/functions.rs", "rank": 46, "score": 134016.3458180539 }, { "content": "type EventKindFn = fn(&EventKind, &EventKindVariation) -> bool;\n\n\n\n/// This const is used to generate the accessor methods for the `Any*Event` enums.\n\n///\n\n/// DO NOT alter the field names unless the structs in `ruma_events::event_kinds` have changed.\n\nconst EVENT_FIELDS: &[(&str, EventKindFn)] = &[\n\n (\"origin_server_ts\", is_non_stripped_room_event),\n\n (\"room_id\", |kind, var| {\n\n matches!(kind, EventKind::Message | EventKind::State)\n\n && matches!(var, EventKindVariation::Full | EventKindVariation::Redacted)\n\n }),\n\n (\"event_id\", is_non_stripped_room_event),\n\n (\"sender\", |kind, &var| {\n\n matches!(kind, EventKind::Message | EventKind::State | EventKind::ToDevice)\n\n && var != EventKindVariation::Initial\n\n }),\n\n 
(\"state_key\", |kind, _| matches!(kind, EventKind::State)),\n\n (\"unsigned\", is_non_stripped_room_event),\n\n];\n\n\n", "file_path": "ruma-events-macros/src/event_enum.rs", "rank": 47, "score": 132289.84937352833 }, { "content": "fn inner_enum_idents(kind: &EventKind, var: &EventKindVariation) -> (Option<Ident>, Option<Ident>) {\n\n match var {\n\n EventKindVariation::Full => {\n\n (kind.to_event_enum_ident(var), kind.to_event_enum_ident(&EventKindVariation::Redacted))\n\n }\n\n EventKindVariation::Sync => (\n\n kind.to_event_enum_ident(var),\n\n kind.to_event_enum_ident(&EventKindVariation::RedactedSync),\n\n ),\n\n EventKindVariation::Stripped => (\n\n kind.to_event_enum_ident(var),\n\n kind.to_event_enum_ident(&EventKindVariation::RedactedStripped),\n\n ),\n\n EventKindVariation::Initial => (kind.to_event_enum_ident(var), None),\n\n _ => (None, None),\n\n }\n\n}\n\n\n", "file_path": "ruma-events-macros/src/event_enum.rs", "rank": 48, "score": 132015.90033991676 }, { "content": "fn generate_event_content_impl(\n\n ident: &Ident,\n\n event_type: &LitStr,\n\n ruma_events: &TokenStream,\n\n) -> TokenStream {\n\n let serde = quote! { #ruma_events::exports::serde };\n\n let serde_json = quote! { #ruma_events::exports::serde_json };\n\n\n\n quote! 
{\n\n #[automatically_derived]\n\n impl #ruma_events::EventContent for #ident {\n\n fn event_type(&self) -> &str {\n\n #event_type\n\n }\n\n\n\n fn from_parts(\n\n ev_type: &str,\n\n content: Box<#serde_json::value::RawValue>\n\n ) -> Result<Self, #serde_json::Error> {\n\n if ev_type != #event_type {\n", "file_path": "ruma-events-macros/src/event_content.rs", "rank": 49, "score": 130146.75400366464 }, { "content": "/// Fallible conversion from a `serde_json::Map` to a `CanonicalJsonObject`.\n\npub fn try_from_json_map(\n\n json: JsonObject<String, JsonValue>,\n\n) -> Result<CanonicalJsonObject, Error> {\n\n json.into_iter().map(|(k, v)| Ok((k, v.try_into()?))).collect()\n\n}\n\n\n", "file_path": "ruma-serde/src/canonical_json.rs", "rank": 50, "score": 129546.99047651731 }, { "content": "#[test]\n\nfn ui() {\n\n let t = trybuild::TestCases::new();\n\n // rustc overflows when compiling this see:\n\n // https://github.com/rust-lang/rust/issues/55779\n\n // there is a workaround in the file.\n\n t.pass(\"tests/ui/04-event-sanity-check.rs\");\n\n t.compile_fail(\"tests/ui/05-named-fields.rs\");\n\n t.compile_fail(\"tests/ui/06-no-content-field.rs\");\n\n}\n", "file_path": "ruma-events/tests/event.rs", "rank": 51, "score": 128077.01890178457 }, { "content": "#[test]\n\nfn serialization() {\n\n let ev = ToDeviceEvent {\n\n sender: user_id!(\"@example:example.org\"),\n\n content: AnyToDeviceEventContent::RoomKey(RoomKeyEventContent {\n\n algorithm: EventEncryptionAlgorithm::MegolmV1AesSha2,\n\n room_id: room_id!(\"!testroomid:example.org\"),\n\n session_id: \"SessId\".into(),\n\n session_key: \"SessKey\".into(),\n\n }),\n\n };\n\n\n\n assert_eq!(\n\n to_json_value(ev).unwrap(),\n\n json!({\n\n \"type\": \"m.room_key\",\n\n \"sender\": \"@example:example.org\",\n\n \"content\": {\n\n \"algorithm\": \"m.megolm.v1.aes-sha2\",\n\n \"room_id\": \"!testroomid:example.org\",\n\n \"session_id\": \"SessId\",\n\n \"session_key\": \"SessKey\",\n\n },\n\n })\n\n );\n\n}\n", 
"file_path": "ruma-events/tests/to_device.rs", "rank": 52, "score": 128077.01890178457 }, { "content": "/// Create an `EventContent` implementation for a struct.\n\npub fn expand_event_content(\n\n input: &DeriveInput,\n\n emit_redacted: bool,\n\n ruma_events: &TokenStream,\n\n) -> syn::Result<TokenStream> {\n\n let ruma_identifiers = quote! { #ruma_events::exports::ruma_identifiers };\n\n let serde = quote! { #ruma_events::exports::serde };\n\n let serde_json = quote! { #ruma_events::exports::serde_json };\n\n\n\n let ident = &input.ident;\n\n\n\n let content_attr = input\n\n .attrs\n\n .iter()\n\n .filter(|attr| attr.path.is_ident(\"ruma_event\"))\n\n .map(|attr| attr.parse_args::<MetaAttrs>())\n\n .collect::<syn::Result<Vec<_>>>()?;\n\n\n\n let event_type = content_attr.iter().find_map(|a| a.get_event_type()).ok_or_else(|| {\n\n let msg = \"no event type attribute found, \\\n", "file_path": "ruma-events-macros/src/event_content.rs", "rank": 53, "score": 127693.20322087861 }, { "content": "/// Create a `StateEventContent` implementation for a struct\n\npub fn expand_state_event_content(\n\n input: &DeriveInput,\n\n ruma_events: &TokenStream,\n\n) -> syn::Result<TokenStream> {\n\n let ident = input.ident.clone();\n\n let room_ev_content = expand_room_event_content(input, ruma_events)?;\n\n\n\n let redacted_marker_trait = if needs_redacted_from_input(input) {\n\n let ident = format_ident!(\"Redacted{}\", input.ident);\n\n quote! {\n\n #[automatically_derived]\n\n impl #ruma_events::RedactedStateEventContent for #ident {}\n\n }\n\n } else {\n\n TokenStream::new()\n\n };\n\n\n\n Ok(quote! 
{\n\n #room_ev_content\n\n\n\n #[automatically_derived]\n\n impl #ruma_events::StateEventContent for #ident {}\n\n\n\n #redacted_marker_trait\n\n })\n\n}\n\n\n", "file_path": "ruma-events-macros/src/event_content.rs", "rank": 54, "score": 126021.88634819789 }, { "content": "/// Create a `RoomEventContent` implementation for a struct.\n\npub fn expand_room_event_content(\n\n input: &DeriveInput,\n\n ruma_events: &TokenStream,\n\n) -> syn::Result<TokenStream> {\n\n let ident = input.ident.clone();\n\n let event_content_impl = expand_event_content(input, true, ruma_events)?;\n\n\n\n Ok(quote! {\n\n #event_content_impl\n\n\n\n #[automatically_derived]\n\n impl #ruma_events::RoomEventContent for #ident {}\n\n })\n\n}\n\n\n", "file_path": "ruma-events-macros/src/event_content.rs", "rank": 55, "score": 126021.88634819789 }, { "content": "/// Create a `MessageEventContent` implementation for a struct\n\npub fn expand_message_event_content(\n\n input: &DeriveInput,\n\n ruma_events: &TokenStream,\n\n) -> syn::Result<TokenStream> {\n\n let ident = input.ident.clone();\n\n let room_ev_content = expand_room_event_content(input, ruma_events)?;\n\n\n\n let redacted_marker_trait = if needs_redacted_from_input(input) {\n\n let ident = format_ident!(\"Redacted{}\", &ident);\n\n quote! {\n\n #[automatically_derived]\n\n impl #ruma_events::RedactedMessageEventContent for #ident {}\n\n }\n\n } else {\n\n TokenStream::new()\n\n };\n\n\n\n Ok(quote! 
{\n\n #room_ev_content\n\n\n\n #[automatically_derived]\n\n impl #ruma_events::MessageEventContent for #ident {}\n\n\n\n #redacted_marker_trait\n\n })\n\n}\n\n\n", "file_path": "ruma-events-macros/src/event_content.rs", "rank": 56, "score": 126021.88634819789 }, { "content": "/// Create a `BasicEventContent` implementation for a struct\n\npub fn expand_basic_event_content(\n\n input: &DeriveInput,\n\n ruma_events: &TokenStream,\n\n) -> syn::Result<TokenStream> {\n\n let ident = input.ident.clone();\n\n let event_content_impl = expand_event_content(input, false, ruma_events)?;\n\n\n\n Ok(quote! {\n\n #event_content_impl\n\n\n\n #[automatically_derived]\n\n impl #ruma_events::BasicEventContent for #ident {}\n\n })\n\n}\n\n\n", "file_path": "ruma-events-macros/src/event_content.rs", "rank": 57, "score": 126021.88634819789 }, { "content": "#[test]\n\nfn ui() {\n\n let t = trybuild::TestCases::new();\n\n t.pass(\"tests/ui/01-valid-id-macros.rs\");\n\n t.compile_fail(\"tests/ui/02-invalid-id-macros.rs\");\n\n}\n", "file_path": "ruma-identifiers/tests/id-macros.rs", "rank": 58, "score": 125786.78240967002 }, { "content": "#[test]\n\nfn display() {\n\n assert_eq!(MyEnum::First.to_string(), \"first\");\n\n assert_eq!(MyEnum::Second.to_string(), \"second\");\n\n assert_eq!(MyEnum::Third.to_string(), \"m.third\");\n\n assert_eq!(MyEnum::HelloWorld.to_string(), \"hello_world\");\n\n assert_eq!(MyEnum::_Custom(\"HelloWorld\".into()).to_string(), \"HelloWorld\");\n\n}\n\n\n", "file_path": "ruma-serde/tests/enum_derive.rs", "rank": 59, "score": 125786.78240967002 }, { "content": "#[test]\n\nfn from_string() {\n\n assert_eq!(MyEnum::from(\"first\"), MyEnum::First);\n\n assert_eq!(MyEnum::from(\"second\"), MyEnum::Second);\n\n assert_eq!(MyEnum::from(\"m.third\"), MyEnum::Third);\n\n assert_eq!(MyEnum::from(\"hello_world\"), MyEnum::HelloWorld);\n\n assert_eq!(MyEnum::from(\"HelloWorld\"), MyEnum::_Custom(\"HelloWorld\".into()));\n\n}\n\n\n", "file_path": 
"ruma-serde/tests/enum_derive.rs", "rank": 60, "score": 125786.78240967002 }, { "content": "#[test]\n\nfn deserialize() {\n\n assert_eq!(from_json_value::<MyEnum>(json!(\"first\")).unwrap(), MyEnum::First);\n\n assert_eq!(from_json_value::<MyEnum>(json!(\"hello_world\")).unwrap(), MyEnum::HelloWorld);\n\n assert_eq!(\n\n from_json_value::<MyEnum>(json!(\"\\\\\\n\\\\\")).unwrap(),\n\n MyEnum::_Custom(\"\\\\\\n\\\\\".into())\n\n );\n\n}\n", "file_path": "ruma-serde/tests/enum_derive.rs", "rank": 61, "score": 125786.78240967002 }, { "content": "#[test]\n\nfn serialize() {\n\n assert_eq!(to_json_value(MyEnum::First).unwrap(), json!(\"first\"));\n\n assert_eq!(to_json_value(MyEnum::HelloWorld).unwrap(), json!(\"hello_world\"));\n\n assert_eq!(to_json_value(MyEnum::_Custom(\"\\\\\\n\\\\\".into())).unwrap(), json!(\"\\\\\\n\\\\\"));\n\n}\n\n\n", "file_path": "ruma-serde/tests/enum_derive.rs", "rank": 62, "score": 125786.78240967002 }, { "content": "#[test]\n\nfn deserialize_redaction() {\n\n let json_data = redaction();\n\n\n\n assert_matches!(\n\n from_json_value::<Raw<AnyMessageEvent>>(json_data)\n\n .unwrap()\n\n .deserialize()\n\n .unwrap(),\n\n AnyMessageEvent::RoomRedaction(RedactionEvent {\n\n content: RedactionEventContent { reason: Some(reas) },\n\n redacts,\n\n event_id,\n\n origin_server_ts,\n\n room_id,\n\n sender,\n\n unsigned,\n\n }) if reas == \"being a turd\"\n\n && event_id == event_id!(\"$h29iv0s8:example.com\")\n\n && redacts == event_id!(\"$nomore:example.com\")\n\n && origin_server_ts == UNIX_EPOCH + Duration::from_millis(1)\n\n && room_id == room_id!(\"!roomid:room.com\")\n\n && sender == user_id!(\"@carl:example.com\")\n\n && unsigned.is_empty()\n\n );\n\n}\n", "file_path": "ruma-events/tests/redaction.rs", "rank": 63, "score": 125786.78240967002 }, { "content": "#[test]\n\nfn serialize_redaction() {\n\n let aliases_event = RedactionEvent {\n\n content: RedactionEventContent { reason: Some(\"being a turd\".into()) },\n\n redacts: 
event_id!(\"$nomore:example.com\"),\n\n event_id: event_id!(\"$h29iv0s8:example.com\"),\n\n origin_server_ts: UNIX_EPOCH + Duration::from_millis(1),\n\n room_id: room_id!(\"!roomid:room.com\"),\n\n sender: user_id!(\"@carl:example.com\"),\n\n unsigned: Unsigned::default(),\n\n };\n\n\n\n let actual = to_json_value(&aliases_event).unwrap();\n\n let expected = redaction();\n\n\n\n assert_eq!(actual, expected);\n\n}\n\n\n", "file_path": "ruma-events/tests/redaction.rs", "rank": 64, "score": 125786.78240967002 }, { "content": "#[test]\n\nfn ui() {\n\n let t = trybuild::TestCases::new();\n\n t.pass(\"tests/ui/01-api-sanity-check.rs\");\n\n t.compile_fail(\"tests/ui/02-invalid-path.rs\");\n\n t.pass(\"tests/ui/03-move-value.rs\");\n\n t.compile_fail(\"tests/ui/04-attributes.rs\");\n\n}\n", "file_path": "ruma-api/tests/ruma_api.rs", "rank": 65, "score": 125786.78240967002 }, { "content": "#[test]\n\nfn ui() {\n\n let t = trybuild::TestCases::new();\n\n t.pass(\"tests/ui/07-enum-sanity-check.rs\");\n\n t.compile_fail(\"tests/ui/08-enum-invalid-path.rs\");\n\n t.compile_fail(\"tests/ui/09-enum-invalid-kind.rs\");\n\n}\n\n\n", "file_path": "ruma-events/tests/event_enums.rs", "rank": 66, "score": 125786.78240967002 }, { "content": "fn main() {}\n", "file_path": "ruma-api/tests/ui/04-attributes.rs", "rank": 67, "score": 125781.00021999318 }, { "content": "fn split_for_impl_lifetime_less(generics: &mut Generics) -> (ImplGenerics, TypeGenerics) {\n\n generics.params = generics\n\n .params\n\n .clone()\n\n .into_iter()\n\n .filter(|param| !matches!(param, GenericParam::Lifetime(_)))\n\n .collect();\n\n\n\n let (impl_gen, ty_gen, _) = generics.split_for_impl();\n\n (impl_gen, ty_gen)\n\n}\n\n\n", "file_path": "ruma-serde-macros/src/outgoing.rs", "rank": 68, "score": 124889.08056581125 }, { "content": "/// Create a `EphemeralRoomEventContent` implementation for a struct\n\npub fn expand_ephemeral_room_event_content(\n\n input: &DeriveInput,\n\n ruma_events: &TokenStream,\n\n) -> 
syn::Result<TokenStream> {\n\n let ident = input.ident.clone();\n\n let event_content_impl = expand_event_content(input, false, ruma_events)?;\n\n\n\n Ok(quote! {\n\n #event_content_impl\n\n\n\n #[automatically_derived]\n\n impl #ruma_events::EphemeralRoomEventContent for #ident {}\n\n })\n\n}\n\n\n", "file_path": "ruma-events-macros/src/event_content.rs", "rank": 69, "score": 124407.69504163263 }, { "content": "/// Redacts an event using the rules specified in the Matrix client-server specification.\n\n///\n\n/// This is part of the process of signing an event.\n\n///\n\n/// Redaction is also suggested when a verifying an event with `verify_event` returns\n\n/// `Verified::Signatures`. See the documentation for `Verified` for details.\n\n///\n\n/// Returns a new JSON object with all applicable fields redacted.\n\n///\n\n/// # Parameters\n\n///\n\n/// * object: A JSON object to redact.\n\n///\n\n/// # Errors\n\n///\n\n/// Returns an error if:\n\n///\n\n/// * `object` contains a field called `content` that is not a JSON object.\n\n/// * `object` contains a field called `hashes` that is not a JSON object.\n\n/// * `object` contains a field called `signatures` that is not a JSON object.\n\n/// * `object` is missing the `type` field or the field is not a JSON string.\n\npub fn redact(\n\n object: &CanonicalJsonObject,\n\n version: &RoomVersionId,\n\n) -> Result<CanonicalJsonObject, Error> {\n\n let mut event = object.clone();\n\n\n\n let event_type_value = match event.get(\"type\") {\n\n Some(event_type_value) => event_type_value,\n\n None => return Err(Error::new(\"field `type` in JSON value must be present\")),\n\n };\n\n\n\n let allowed_content_keys = match event_type_value {\n\n CanonicalJsonValue::String(event_type) => allowed_content_keys_for(event_type, version),\n\n _ => return Err(Error::new(\"field `type` in JSON value must be a JSON string\")),\n\n };\n\n\n\n if let Some(content_value) = event.get_mut(\"content\") {\n\n let content = match content_value 
{\n\n CanonicalJsonValue::Object(map) => map,\n\n _ => return Err(Error::new(\"field `content` in JSON value must be a JSON object\")),\n", "file_path": "ruma-signatures/src/functions.rs", "rank": 70, "score": 123848.73095295943 }, { "content": "#[test]\n\nfn redacted_aliases_deserialize() {\n\n let unsigned = sync_unsigned();\n\n\n\n let redacted = json!({\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"origin_server_ts\": 1,\n\n \"sender\": \"@carl:example.com\",\n\n \"state_key\": \"hello\",\n\n \"unsigned\": unsigned,\n\n \"type\": \"m.room.aliases\"\n\n });\n\n\n\n let actual = to_json_value(&redacted).unwrap();\n\n\n\n assert_matches!(\n\n from_json_value::<Raw<AnySyncRoomEvent>>(actual)\n\n .unwrap()\n\n .deserialize()\n\n .unwrap(),\n\n AnySyncRoomEvent::RedactedState(AnyRedactedSyncStateEvent::RoomAliases(\n\n RedactedSyncStateEvent {\n\n content: RedactedAliasesEventContent { aliases },\n\n event_id,\n\n ..\n\n },\n\n )) if event_id == event_id!(\"$h29iv0s8:example.com\")\n\n && aliases.is_none()\n\n )\n\n}\n\n\n", "file_path": "ruma-events/tests/redacted.rs", "rank": 71, "score": 123600.34728425478 }, { "content": "#[test]\n\nfn deserialize_reader() {\n\n let result = vec![(\"first\".to_owned(), 23), (\"last\".to_owned(), 42)];\n\n\n\n assert_eq!(urlencoded::from_reader(b\"first=23&last=42\" as &[_]), Ok(result));\n\n}\n\n\n", "file_path": "ruma-serde/tests/url_deserialize.rs", "rank": 72, "score": 123600.34728425478 }, { "content": "#[test]\n\nfn alias_event_deserialization() {\n\n let json_data = aliases_event();\n\n\n\n assert_matches!(\n\n from_json_value::<AnyEvent>(json_data),\n\n Ok(AnyEvent::State(\n\n AnyStateEvent::RoomAliases(StateEvent {\n\n content: AliasesEventContent {\n\n aliases,\n\n ..\n\n },\n\n ..\n\n })\n\n ))\n\n if aliases == vec![ room_alias_id!(\"#somewhere:localhost\") ]\n\n );\n\n}\n\n\n", "file_path": "ruma-events/tests/enums.rs", "rank": 73, "score": 123600.34728425478 }, { "content": "#[test]\n\nfn deserialize_bytes() 
{\n\n let result = vec![(\"first\".to_owned(), 23), (\"last\".to_owned(), 42)];\n\n\n\n assert_eq!(urlencoded::from_bytes(b\"first=23&last=42\"), Ok(result));\n\n}\n\n\n", "file_path": "ruma-serde/tests/url_deserialize.rs", "rank": 74, "score": 123600.34728425478 }, { "content": "#[test]\n\nfn serialize_map() {\n\n let mut s = std::collections::BTreeMap::new();\n\n s.insert(\"hello\", \"world\");\n\n s.insert(\"seri\", \"alize\");\n\n s.insert(\"matrix\", \"ruma\");\n\n\n\n let encoded = urlencoded::to_string(s).unwrap();\n\n assert_eq!(\"hello=world&matrix=ruma&seri=alize\", encoded);\n\n}\n\n\n", "file_path": "ruma-serde/tests/url_serialize.rs", "rank": 75, "score": 123600.34728425478 }, { "content": "#[test]\n\nfn message_event_deserialization() {\n\n let json_data = message_event();\n\n\n\n assert_matches!(\n\n from_json_value::<AnyEvent>(json_data),\n\n Ok(AnyEvent::Message(\n\n AnyMessageEvent::RoomMessage(MessageEvent {\n\n content: MessageEventContent::Text(TextMessageEventContent {\n\n body,\n\n formatted: Some(formatted),\n\n relates_to: None,\n\n ..\n\n }),\n\n ..\n\n })\n\n ))\n\n if body == \"baba\" && formatted.body == \"<strong>baba</strong>\"\n\n );\n\n}\n\n\n", "file_path": "ruma-events/tests/enums.rs", "rank": 76, "score": 123600.34728425478 }, { "content": "#[test]\n\nfn deserialize_struct() {\n\n let de = Params { a: 10, b: \"Hello\", c: None };\n\n assert_eq!(urlencoded::from_str(\"a=10&b=Hello\"), Ok(de));\n\n assert_eq!(urlencoded::from_str(\"b=Hello&a=10\"), Ok(de));\n\n}\n\n\n", "file_path": "ruma-serde/tests/url_deserialize.rs", "rank": 77, "score": 123600.34728425478 }, { "content": "#[test]\n\nfn deserialize_newstruct() {\n\n let de = NewStruct { list: vec![\"hello\", \"world\"] };\n\n assert_eq!(urlencoded::from_str(\"list=hello&list=world\"), Ok(de));\n\n}\n\n\n", "file_path": "ruma-serde/tests/url_deserialize.rs", "rank": 78, "score": 123600.34728425478 }, { "content": "#[test]\n\nfn deserialize_str() {\n\n let result = 
vec![(\"first\".to_owned(), 23), (\"last\".to_owned(), 42)];\n\n\n\n assert_eq!(urlencoded::from_str(\"first=23&last=42\"), Ok(result));\n\n}\n\n\n", "file_path": "ruma-serde/tests/url_deserialize.rs", "rank": 79, "score": 123600.34728425478 }, { "content": "#[test]\n\nfn as_ref_str() {\n\n assert_eq!(MyEnum::First.as_ref(), \"first\");\n\n assert_eq!(MyEnum::Second.as_ref(), \"second\");\n\n assert_eq!(MyEnum::Third.as_ref(), \"m.third\");\n\n assert_eq!(MyEnum::HelloWorld.as_ref(), \"hello_world\");\n\n assert_eq!(MyEnum::_Custom(\"HelloWorld\".into()).as_ref(), \"HelloWorld\");\n\n}\n\n\n", "file_path": "ruma-serde/tests/enum_derive.rs", "rank": 80, "score": 123600.34728425478 }, { "content": "#[test]\n\nfn deserialize_option() {\n\n let result = vec![(\"first\".to_owned(), Some(23)), (\"last\".to_owned(), Some(42))];\n\n assert_eq!(urlencoded::from_str(\"first=23&last=42\"), Ok(result));\n\n}\n\n\n", "file_path": "ruma-serde/tests/url_deserialize.rs", "rank": 81, "score": 123600.34728425478 }, { "content": "#[test]\n\nfn deserialize_unit() {\n\n assert_eq!(urlencoded::from_str(\"\"), Ok(()));\n\n assert_eq!(urlencoded::from_str(\"&\"), Ok(()));\n\n assert_eq!(urlencoded::from_str(\"&&\"), Ok(()));\n\n assert!(urlencoded::from_str::<()>(\"first=23\").is_err());\n\n}\n\n\n", "file_path": "ruma-serde/tests/url_deserialize.rs", "rank": 82, "score": 123600.34728425478 }, { "content": "#[test]\n\nfn deserialize_numlist() {\n\n let de = NumList { list: vec![1, 2, 3, 4] };\n\n assert_eq!(urlencoded::from_str(\"list=1&list=2&list=3&list=4\"), Ok(de));\n\n}\n\n\n", "file_path": "ruma-serde/tests/url_deserialize.rs", "rank": 83, "score": 123600.34728425478 }, { "content": "#[test]\n\nfn redacted_deserialize_any_room() {\n\n let unsigned = full_unsigned();\n\n\n\n let redacted = json!({\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"room_id\": \"!roomid:room.com\",\n\n \"origin_server_ts\": 1,\n\n \"sender\": \"@carl:example.com\",\n\n \"unsigned\": unsigned,\n\n 
\"type\": \"m.room.message\"\n\n });\n\n\n\n let actual = to_json_value(&redacted).unwrap();\n\n\n\n assert_matches!(\n\n from_json_value::<Raw<AnyRoomEvent>>(actual)\n\n .unwrap()\n\n .deserialize()\n\n .unwrap(),\n\n AnyRoomEvent::RedactedMessage(AnyRedactedMessageEvent::RoomMessage(RedactedMessageEvent {\n\n content: RedactedMessageEventContent,\n\n event_id, room_id, ..\n\n })) if event_id == event_id!(\"$h29iv0s8:example.com\")\n\n && room_id == room_id!(\"!roomid:room.com\")\n\n )\n\n}\n\n\n", "file_path": "ruma-events/tests/redacted.rs", "rank": 84, "score": 123600.34728425478 }, { "content": "fn main() {}\n", "file_path": "ruma-api/tests/ui/03-move-value.rs", "rank": 85, "score": 123594.56509457793 }, { "content": "fn main() {}\n", "file_path": "ruma-events/tests/ui/05-named-fields.rs", "rank": 86, "score": 123594.56509457793 }, { "content": "fn main() {}\n", "file_path": "ruma-api/tests/ui/02-invalid-path.rs", "rank": 87, "score": 123594.56509457793 }, { "content": "fn main() {}\n", "file_path": "ruma-events/tests/ui/02-no-event-type.rs", "rank": 88, "score": 123594.56509457793 }, { "content": "/// Uses a set of public keys to verify a signed event.\n\n///\n\n/// Some room versions may require signatures from multiple homeservers, so this function takes a\n\n/// map from servers to sets of public keys. For each homeserver present in the map, this function\n\n/// will require a valid signature. All known public keys for a homeserver should be provided. The\n\n/// first one found on the given event will be used.\n\n///\n\n/// If the `Ok` variant is returned by this function, it will contain a `Verified` value which\n\n/// distinguishes an event with valid signatures and a matching content hash with an event with\n\n/// only valid signatures. 
See the documentation for `Verified` for details.\n\n///\n\n/// # Parameters\n\n///\n\n/// * public_key_map: A map from entity identifiers to a map from key identifiers to public keys.\n\n/// Generally, entity identifiers are server names—the host/IP/port of a homeserver (e.g.\n\n/// \"example.com\") for which a signature must be verified. Key identifiers for each server (e.g.\n\n/// \"ed25519:1\") then map to their respective public keys.\n\n/// * object: The JSON object of the event that was signed.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// # use std::collections::BTreeMap;\n\n/// # use ruma_identifiers::RoomVersionId;\n\n/// # use ruma_signatures::verify_event;\n\n/// #\n\n/// const PUBLIC_KEY: &str = \"XGX0JRS2Af3be3knz2fBiRbApjm2Dh61gXDJA8kcJNI\";\n\n///\n\n/// // Deserialize an event from JSON.\n\n/// let object = serde_json::from_str(\n\n/// r#\"{\n\n/// \"auth_events\": [],\n\n/// \"content\": {},\n\n/// \"depth\": 3,\n\n/// \"hashes\": {\n\n/// \"sha256\": \"5jM4wQpv6lnBo7CLIghJuHdW+s2CMBJPUOGOC89ncos\"\n\n/// },\n\n/// \"origin\": \"domain\",\n\n/// \"origin_server_ts\": 1000000,\n\n/// \"prev_events\": [],\n\n/// \"room_id\": \"!x:domain\",\n\n/// \"sender\": \"@a:domain\",\n\n/// \"signatures\": {\n\n/// \"domain\": {\n\n/// \"ed25519:1\": \"KxwGjPSDEtvnFgU00fwFz+l6d2pJM6XBIaMEn81SXPTRl16AqLAYqfIReFGZlHi5KLjAWbOoMszkwsQma+lYAg\"\n\n/// }\n\n/// },\n\n/// \"type\": \"X\",\n\n/// \"unsigned\": {\n\n/// \"age_ts\": 1000000\n\n/// }\n\n/// }\"#\n\n/// ).unwrap();\n\n///\n\n/// // Create the `PublicKeyMap` that will inform `verify_json` which signatures to verify.\n\n/// let mut public_key_set = BTreeMap::new();\n\n/// public_key_set.insert(\"ed25519:1\".into(), PUBLIC_KEY.to_string());\n\n/// let mut public_key_map = BTreeMap::new();\n\n/// public_key_map.insert(\"domain\".into(), public_key_set);\n\n///\n\n/// // Verify at least one signature for each entity in `public_key_map`.\n\n/// assert!(verify_event(&public_key_map, &object, 
&RoomVersionId::Version6).is_ok());\n\n/// ```\n\npub fn verify_event(\n\n public_key_map: &PublicKeyMap,\n\n object: &CanonicalJsonObject,\n\n version: &RoomVersionId,\n\n) -> Result<Verified, Error> {\n\n let redacted = redact(object, version)?;\n\n\n\n let hash = match object.get(\"hashes\") {\n\n Some(hashes_value) => match hashes_value {\n\n CanonicalJsonValue::Object(hashes) => match hashes.get(\"sha256\") {\n\n Some(hash_value) => match hash_value {\n\n CanonicalJsonValue::String(hash) => hash,\n\n _ => return Err(Error::new(\"sha256 hash must be a JSON string\")),\n\n },\n\n None => return Err(Error::new(\"field `hashes` must be a JSON object\")),\n\n },\n\n _ => return Err(Error::new(\"event missing sha256 hash\")),\n\n },\n\n None => return Err(Error::new(\"field `hashes` must be present\")),\n\n };\n", "file_path": "ruma-signatures/src/functions.rs", "rank": 89, "score": 121666.46435412695 }, { "content": "/// Creates a *reference hash* for an event.\n\n///\n\n/// Returns the hash as a Base64-encoded string, using the standard character set, without padding.\n\n///\n\n/// The reference hash of an event covers the essential fields of an event, including content\n\n/// hashes. 
It is used to generate event identifiers and is described in the Matrix server-server\n\n/// specification.\n\n///\n\n/// # Parameters\n\n///\n\n/// object: A JSON object to generate a reference hash for.\n\n///\n\n/// # Errors\n\n///\n\n/// Returns an error if redaction fails.\n\npub fn reference_hash(\n\n value: &CanonicalJsonObject,\n\n version: &RoomVersionId,\n\n) -> Result<String, Error> {\n\n let redacted_value = redact(value, version)?;\n\n\n\n let json =\n\n canonical_json_with_fields_to_remove(&redacted_value, REFERENCE_HASH_FIELDS_TO_REMOVE);\n\n\n\n let hash = digest(&SHA256, json.as_bytes());\n\n\n\n Ok(encode_config(\n\n &hash,\n\n match version {\n\n RoomVersionId::Version1 | RoomVersionId::Version2 | RoomVersionId::Version3 => {\n\n STANDARD_NO_PAD\n\n }\n\n // Room versions higher than version 3 are url safe base64 encoded\n\n _ => URL_SAFE_NO_PAD,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "ruma-signatures/src/functions.rs", "rank": 90, "score": 121645.87218712873 }, { "content": "#[test]\n\nfn redacted_state_event_deserialize() {\n\n let unsigned = sync_unsigned();\n\n\n\n let redacted = json!({\n\n \"content\": {\n\n \"creator\": \"@carl:example.com\",\n\n },\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"origin_server_ts\": 1,\n\n \"sender\": \"@carl:example.com\",\n\n \"state_key\": \"hello there\",\n\n \"unsigned\": unsigned,\n\n \"type\": \"m.room.create\"\n\n });\n\n\n\n assert_matches!(\n\n from_json_value::<Raw<AnySyncRoomEvent>>(redacted)\n\n .unwrap()\n\n .deserialize()\n\n .unwrap(),\n", "file_path": "ruma-events/tests/redacted.rs", "rank": 91, "score": 121510.8129538037 }, { "content": "#[test]\n\nfn redacted_message_event_serialize() {\n\n let redacted = RedactedSyncMessageEvent {\n\n content: RedactedMessageEventContent,\n\n event_id: event_id!(\"$h29iv0s8:example.com\"),\n\n origin_server_ts: UNIX_EPOCH + Duration::from_millis(1),\n\n sender: user_id!(\"@carl:example.com\"),\n\n unsigned: RedactedSyncUnsigned::default(),\n\n 
};\n\n\n\n let expected = json!({\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"origin_server_ts\": 1,\n\n \"sender\": \"@carl:example.com\",\n\n \"type\": \"m.room.message\"\n\n });\n\n\n\n let actual = to_json_value(&redacted).unwrap();\n\n assert_eq!(actual, expected);\n\n}\n\n\n", "file_path": "ruma-events/tests/redacted.rs", "rank": 92, "score": 121510.8129538037 }, { "content": "#[test]\n\nfn redacted_custom_event_serialize() {\n\n let unsigned = sync_unsigned();\n\n\n\n let redacted = json!({\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"origin_server_ts\": 1,\n\n \"sender\": \"@carl:example.com\",\n\n \"state_key\": \"hello there\",\n\n \"unsigned\": unsigned,\n\n \"type\": \"m.made.up\"\n\n });\n\n\n\n assert_matches!(\n\n from_json_value::<Raw<AnySyncRoomEvent>>(redacted.clone())\n\n .unwrap()\n\n .deserialize()\n\n .unwrap(),\n\n AnySyncRoomEvent::RedactedState(AnyRedactedSyncStateEvent::Custom(RedactedSyncStateEvent {\n\n content: RedactedCustomEventContent {\n\n event_type,\n", "file_path": "ruma-events/tests/redacted.rs", "rank": 93, "score": 121510.8129538037 }, { "content": "#[test]\n\nfn redacted_deserialize_any_room_sync() {\n\n let mut unsigned = RedactedUnsigned::default();\n\n // The presence of `redacted_because` triggers the event enum (AnySyncRoomEvent in this case)\n\n // to return early with `RedactedContent` instead of failing to deserialize according\n\n // to the event type string.\n\n unsigned.redacted_because = Some(Box::new(RedactionEvent {\n\n content: RedactionEventContent { reason: Some(\"redacted because\".into()) },\n\n redacts: event_id!(\"$h29iv0s8:example.com\"),\n\n event_id: event_id!(\"$h29iv0s8:example.com\"),\n\n origin_server_ts: UNIX_EPOCH + Duration::from_millis(1),\n\n room_id: room_id!(\"!roomid:room.com\"),\n\n sender: user_id!(\"@carl:example.com\"),\n\n unsigned: Unsigned::default(),\n\n }));\n\n\n\n let redacted = json!({\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"origin_server_ts\": 1,\n\n 
\"sender\": \"@carl:example.com\",\n\n \"unsigned\": unsigned,\n", "file_path": "ruma-events/tests/redacted.rs", "rank": 94, "score": 121510.8129538037 }, { "content": "#[test]\n\nfn redact_method_properly_redacts() {\n\n let ev = json!({\n\n \"type\": \"m.room.message\",\n\n \"event_id\": \"$143273582443PhrSn:example.com\",\n\n \"origin_server_ts\": 1,\n\n \"room_id\": \"!roomid:room.com\",\n\n \"sender\": \"@user:example.com\",\n\n \"content\": {\n\n \"body\": \"test\",\n\n \"msgtype\": \"m.audio\",\n\n \"url\": \"http://example.com/audio.mp3\",\n\n }\n\n });\n\n\n\n let redaction = RedactionEvent {\n\n content: RedactionEventContent { reason: Some(\"redacted because\".into()) },\n\n redacts: event_id!(\"$143273582443PhrSn:example.com\"),\n\n event_id: event_id!(\"$h29iv0s8:example.com\"),\n\n origin_server_ts: UNIX_EPOCH + Duration::from_millis(1),\n\n room_id: room_id!(\"!roomid:room.com\"),\n", "file_path": "ruma-events/tests/redacted.rs", "rank": 95, "score": 121510.8129538037 }, { "content": "#[test]\n\nfn deserialize_stripped_state_events() {\n\n let name_event = json!({\n\n \"type\": \"m.room.name\",\n\n \"state_key\": \"\",\n\n \"sender\": \"@example:localhost\",\n\n \"content\": { \"name\": \"Ruma\" }\n\n });\n\n\n\n let join_rules_event = json!({\n\n \"type\": \"m.room.join_rules\",\n\n \"state_key\": \"\",\n\n \"sender\": \"@example:localhost\",\n\n \"content\": { \"join_rule\": \"public\" }\n\n });\n\n\n\n let avatar_event = json!({\n\n \"type\": \"m.room.avatar\",\n\n \"state_key\": \"\",\n\n \"sender\": \"@example:localhost\",\n\n \"content\": {\n", "file_path": "ruma-events/tests/stripped.rs", "rank": 96, "score": 121510.8129538037 }, { "content": "#[test]\n\nfn empty_response_http_repr() {\n\n let res = Response {};\n\n let http_res = http::Response::<Vec<u8>>::try_from(res).unwrap();\n\n\n\n assert_eq!(http_res.body(), b\"{}\");\n\n}\n", "file_path": "ruma-api/tests/no_fields.rs", "rank": 97, "score": 121510.8129538037 }, { "content": 
"#[test]\n\nfn empty_request_http_repr() {\n\n let req = Request {};\n\n let http_req = req.try_into_http_request(\"https://homeserver.tld\", None).unwrap();\n\n\n\n assert!(http_req.body().is_empty());\n\n}\n\n\n", "file_path": "ruma-api/tests/no_fields.rs", "rank": 98, "score": 121510.8129538037 }, { "content": "#[test]\n\nfn redacted_custom_event_deserialize() {\n\n let unsigned = sync_unsigned();\n\n\n\n let redacted = RedactedSyncStateEvent {\n\n content: RedactedCustomEventContent { event_type: \"m.made.up\".into() },\n\n event_id: event_id!(\"$h29iv0s8:example.com\"),\n\n sender: user_id!(\"@carl:example.com\"),\n\n state_key: \"hello there\".into(),\n\n origin_server_ts: UNIX_EPOCH + Duration::from_millis(1),\n\n unsigned: unsigned.clone(),\n\n };\n\n\n\n let expected = json!({\n\n \"event_id\": \"$h29iv0s8:example.com\",\n\n \"origin_server_ts\": 1,\n\n \"sender\": \"@carl:example.com\",\n\n \"state_key\": \"hello there\",\n\n \"unsigned\": unsigned,\n\n \"type\": \"m.made.up\"\n\n });\n\n\n\n let actual = to_json_value(&redacted).unwrap();\n\n assert_eq!(actual, expected);\n\n}\n\n\n", "file_path": "ruma-events/tests/redacted.rs", "rank": 99, "score": 121510.8129538037 } ]
Rust
src/graphics/shader.rs
mooman219/glpaly
7731c705614ee2de8bc8685dc28361caa050a0af
use crate::graphics::{ graphics, resource, std140::Std140Struct, Buffer, DrawMode, Texture, Uniform, VertexDescriptor, }; use crate::{App, Context}; use alloc::format; use core::iter::IntoIterator; use core::marker::PhantomData; pub trait ShaderDescriptor<const TEXTURES: usize> { const VERTEX_SHADER: &'static str; const FRAGMENT_SHADER: &'static str; const TEXTURE_NAMES: [&'static str; TEXTURES]; const VERTEX_UNIFORM_NAME: &'static str; type VertexUniformType: Std140Struct; type VertexDescriptor: VertexDescriptor + Copy; } pub struct Shader<T: ShaderDescriptor<TEXTURES>, const TEXTURES: usize> { _unsend: core::marker::PhantomData<*const ()>, program: resource::Program, vertex_uniform_location: u32, texture_locations: [resource::UniformLocation; TEXTURES], phantom: PhantomData<T>, } impl<T: ShaderDescriptor<TEXTURES>, const TEXTURES: usize> Shader<T, TEXTURES> { pub fn new(_ctx: &Context<impl App>) -> Shader<T, TEXTURES> { let gl = graphics().gl(); let program = gl.shader_program(T::VERTEX_SHADER, T::FRAGMENT_SHADER); let vertex_uniform_location = gl.get_uniform_block_index(program, T::VERTEX_UNIFORM_NAME).expect( &format!("Failed to find uniform block named '{}' in vertex shader.", T::VERTEX_UNIFORM_NAME), ); gl.uniform_block_binding(program, vertex_uniform_location, 0); let texture_locations = T::TEXTURE_NAMES.map(|name| { gl.get_uniform_location(program, name) .expect(&format!("Failed to find texture named '{}' in fragment shader.", name)) }); Shader { _unsend: core::marker::PhantomData, program, vertex_uniform_location, texture_locations, phantom: PhantomData, } } fn bind(&self, uniform: &Uniform<T::VertexUniformType>, textures: [&Texture; TEXTURES]) { let gl = graphics().gl(); gl.use_program(Some(self.program)); uniform.bind(0); for i in 0..TEXTURES { textures[i].bind(i as u32); gl.uniform_1_i32(Some(&self.texture_locations[i]), i as i32); } } pub fn draw<'a, Item, Iter>( &self, mode: DrawMode, uniform: &Uniform<T::VertexUniformType>, textures: [&Texture; 
TEXTURES], buffers: Iter, ) where Item: AsRef<Buffer<T::VertexDescriptor>> + 'a, Iter: IntoIterator<Item = &'a Item>, { let instancing = T::VertexDescriptor::INSTANCING; if instancing.is_instanced() { self.draw_instanced(mode, uniform, textures, buffers, instancing.count); } else { self.draw_non_instanced(mode, uniform, textures, buffers); } } fn draw_instanced<'a, Item, Iter>( &self, mode: DrawMode, uniform: &Uniform<T::VertexUniformType>, textures: [&Texture; TEXTURES], buffers: Iter, count: i32, ) where Item: AsRef<Buffer<T::VertexDescriptor>> + 'a, Iter: IntoIterator<Item = &'a Item>, { self.bind(uniform, textures); for buffer in buffers { let buffer = buffer.as_ref(); if buffer.len() > 0 { buffer.bind(); graphics().gl().draw_arrays_instanced(mode, 0, count, buffer.len() as i32); } } } fn draw_non_instanced<'a, Item, Iter>( &self, mode: DrawMode, uniform: &Uniform<T::VertexUniformType>, textures: [&Texture; TEXTURES], buffers: Iter, ) where Item: AsRef<Buffer<T::VertexDescriptor>> + 'a, Iter: IntoIterator<Item = &'a Item>, { self.bind(uniform, textures); for buffer in buffers { let buffer = buffer.as_ref(); if buffer.len() > 0 { buffer.bind(); graphics().gl().draw_arrays(mode, 0, buffer.len() as i32); } } } } impl<T: ShaderDescriptor<TEXTURES>, const TEXTURES: usize> Drop for Shader<T, TEXTURES> { fn drop(&mut self) { let gl = graphics().gl(); gl.delete_program(self.program); } }
use crate::graphics::{ graphics, resource, std140::Std140Struct, Buffer, DrawMode, Texture, Uniform, VertexDescriptor, }; use crate::{App, Context}; use alloc::format; use core::iter::IntoIterator; use core::marker::PhantomData; pub trait ShaderDescriptor<const TEXTURES: usize> { const VERTEX_SHADER: &'static str; const FRAGMENT_SHADER: &'static str; const TEXTURE_NAMES: [&'static str; TEXTURES]; const VERTEX_UNIFORM_NAME: &'static str; type VertexUniformType: Std140Struct; type VertexDescriptor: VertexDescriptor + Copy; } pub struct Shader<T: ShaderDescriptor<TEXTURES>, const TEXTURES: usize> { _unsend: core::marker::PhantomData<*const ()>, program: resource::Program, vertex_uniform_location: u32, texture_locations: [resource::UniformLocation; TEXTURES], phantom: PhantomData<T>, } impl<T: ShaderDescriptor<TEXTURES>, const TEXTURES: usize> Shader<T, TEXTURES> { pub fn new(_ctx: &Context<impl App>) -> Shader<T, TEXTURES> { let gl = graphics().gl(); let program = gl.shader_program(T::VERTEX_SHADER, T::FRAGMENT_SHADER); let vertex_uniform_location = gl.get_uniform_block_index(program, T::VERTEX_UNIFORM_NAME).expect( &format!("Failed to find uniform block named '{}' in vertex shader.", T::VERTEX_UNIFORM_NAME), ); gl.uniform_block_binding(program, vertex_uniform_location, 0); let texture_locations = T::TEXTURE_NAMES.map(|name| { gl.get_uniform_location(program, name) .expect(&format!("Failed to find texture named '{}' in fragment shader.", name)) }); Shader { _unsend: core::marker::PhantomData, program, vertex_uniform_location, texture_locations, phantom: PhantomData, } } fn bind(&self, uniform: &Uniform<T::VertexUniformType>, textures: [&Texture; TEXTURES]) { let gl = graphics().gl(); gl.use_program(Some(self.program)); uniform.bind(0); for i in 0..TEXTURES { textures[i].bind(i as u32); gl.uniform_1_i32(Some(&self.texture_locations[i]), i as i32); } } pub fn draw<'a, Item, Iter>( &self, mode: DrawMode, uniform: &Uniform<T::VertexUniformType>, textures: [&Texture; 
TEXTURES], buffers: Iter, ) where Item: AsRef<Buffer<T::VertexDescriptor>> + 'a, Iter: IntoIterator<Item = &'a Item>, {
fn draw_instanced<'a, Item, Iter>( &self, mode: DrawMode, uniform: &Uniform<T::VertexUniformType>, textures: [&Texture; TEXTURES], buffers: Iter, count: i32, ) where Item: AsRef<Buffer<T::VertexDescriptor>> + 'a, Iter: IntoIterator<Item = &'a Item>, { self.bind(uniform, textures); for buffer in buffers { let buffer = buffer.as_ref(); if buffer.len() > 0 { buffer.bind(); graphics().gl().draw_arrays_instanced(mode, 0, count, buffer.len() as i32); } } } fn draw_non_instanced<'a, Item, Iter>( &self, mode: DrawMode, uniform: &Uniform<T::VertexUniformType>, textures: [&Texture; TEXTURES], buffers: Iter, ) where Item: AsRef<Buffer<T::VertexDescriptor>> + 'a, Iter: IntoIterator<Item = &'a Item>, { self.bind(uniform, textures); for buffer in buffers { let buffer = buffer.as_ref(); if buffer.len() > 0 { buffer.bind(); graphics().gl().draw_arrays(mode, 0, buffer.len() as i32); } } } } impl<T: ShaderDescriptor<TEXTURES>, const TEXTURES: usize> Drop for Shader<T, TEXTURES> { fn drop(&mut self) { let gl = graphics().gl(); gl.delete_program(self.program); } }
let instancing = T::VertexDescriptor::INSTANCING; if instancing.is_instanced() { self.draw_instanced(mode, uniform, textures, buffers, instancing.count); } else { self.draw_non_instanced(mode, uniform, textures, buffers); } }
function_block-function_prefix_line
[ { "content": "/// Type that holds all of your application state and handles events.\n\npub trait App: 'static + Sized {\n\n /// Function to create the app from a context.\n\n /// # Arguments\n\n ///\n\n /// * `ctx` - The engine context. This can be used to call various API functions.\n\n fn new(_ctx: &mut Context<Self>) -> Self;\n\n\n\n /// This event is useful as a place to put your code that should be run after all state-changing\n\n /// events have been handled and you want to do stuff (updating state, performing calculations,\n\n /// etc) that happens as the \"main body\" of your event loop.\n\n /// # Arguments\n\n ///\n\n /// * `ctx` - The engine context. This can be used to call various API functions.\n\n /// * `delta` - The time passed since the last update in seconds.\n\n fn on_update(&mut self, _ctx: &mut Context<Self>, _delta: f32) {}\n\n\n\n /// The window has requested it close.\n\n fn on_close_requested(&mut self, _ctx: &mut Context<Self>) {}\n\n\n\n /// Received a character. This includes control characters.\n", "file_path": "src/lib.rs", "rank": 1, "score": 198006.75574742866 }, { "content": "/// A trait to describe vertices that will be consumed by a shader. The INSTANCING field describes\n\n/// if vertices with this VertexDescriptor will be drawn instanced or non instanced. 
The ATTRIBUTES\n\n/// field describes the fields contained in your vertex struct.\n\n///\n\n/// # Example\n\n/// ```\n\n/// // This is an example for how to implement VertexDescriptor for a simple type.\n\n/// use storm::cgmath::*;\n\n/// use storm::graphics::*;\n\n///\n\n/// #[repr(C)]\n\n/// #[derive(Copy, Clone)]\n\n/// struct Demo {\n\n/// pos: Vector3<f32>,\n\n/// size: Vector2<u16>,\n\n/// }\n\n///\n\n/// impl VertexDescriptor for Demo {\n\n/// // Don't apply any instancing to this vertex type.\n\n/// const INSTANCING: VertexInstancing = VertexInstancing::none();\n\n/// // These are the attributes that describe the fields contained in this vertex.\n\n/// const ATTRIBUTES: &'static [VertexAttribute] = &[\n\n/// // This value represents the three f32s in pos's Vector3<f32>. When invoked in the\n\n/// // shader, the values will be read as f32s.\n\n/// VertexAttribute::new(3, VertexInputType::F32, VertexOutputType::F32),\n\n/// // This value represents the two u16s in size's Vector3<u16>. When invoked in the\n\n/// // shader, the values will be read as f32s.\n\n/// VertexAttribute::new(2, VertexInputType::U16, VertexOutputType::F32),\n\n/// ];\n\n/// }\n\n/// ```\n\npub trait VertexDescriptor {\n\n const INSTANCING: VertexInstancing;\n\n const ATTRIBUTES: &'static [VertexAttribute];\n\n}\n\n\n\n/// Describes how instancing will apply to verticies of this type.\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct VertexInstancing {\n\n /// The rate at which generic vertex attributes advance when rendering multiple instances of\n\n /// primitives in a single draw call. If divisor is zero, the attribute at slot index advances\n\n /// once per vertex. 
If divisor is non-zero, the attribute advances once per divisor instances\n\n /// of the set(s) of vertices being rendered.\n\n pub divisor: u32,\n\n /// The number of verticies required to render a single instance.\n\n pub count: i32,\n\n}\n\n\n\nimpl VertexInstancing {\n\n /// No instancing will be applied when this vertex is drawn.\n\n pub const fn none() -> VertexInstancing {\n", "file_path": "src/graphics/vertex_descriptor.rs", "rank": 2, "score": 195139.65438424784 }, { "content": "struct TextureApp {\n\n sprite_shader: SpriteShader,\n\n texture_atlas: Texture,\n\n sprite_buffer: Buffer<Sprite>,\n\n transform: OrthographicCamera,\n\n transform_uniform: Uniform<std140::mat4>,\n\n sound: SoundControl,\n\n sprites: [Sprite; 3],\n\n clicking: bool,\n\n}\n\n\n\nimpl App for TextureApp {\n\n fn new(ctx: &mut Context<Self>) -> Self {\n\n ctx.wait_periodic(Some(Duration::from_secs_f32(1.0 / 144.0)));\n\n\n\n let sprite_shader = SpriteShader::new(ctx);\n\n let texture_atlas = Texture::from_png(ctx, TEXTURE_A, TextureFiltering::none());\n\n let mut sprite_buffer = Buffer::new(ctx);\n\n\n\n let mut transform = OrthographicCamera::new(ctx.window_logical_size());\n", "file_path": "examples/texture.rs", "rank": 3, "score": 138398.38684735756 }, { "content": "/// Initializes the context. Graphics, audio, assets, and you app, are initialized by this function.\n\npub fn start<A: App>(desc: WindowSettings) -> ! 
{\n\n if unsafe { INITIALIZED.swap(true, Ordering::Relaxed) } {\n\n panic!(\"Start has already been called.\");\n\n }\n\n\n\n init_logger();\n\n\n\n let event_loop = winit::event_loop::EventLoop::new();\n\n OpenGLState::init(&desc, &event_loop);\n\n AudioState::init();\n\n let assets = AssetState::init();\n\n let mut ctx = Context {\n\n assets,\n\n stop: false,\n\n control_flow: Some(ControlFlow::Poll),\n\n last_update: Instant::now(),\n\n wait_next: Instant::now(),\n\n wait_periodic: None,\n\n };\n\n let mut input = EventConverter::new();\n", "file_path": "src/engine.rs", "rank": 4, "score": 129862.38601575412 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct CharCacheValue {\n\n uv: TextureSection,\n\n size: Vector2<f32>,\n\n}\n\n\n\n/// Holds the state required to cache and draw text to the screen.\n\npub struct TextShaderPass {\n\n uniform: Uniform<std140::mat4>,\n\n atlas: TextureAtlas,\n\n buffer: Buffer<TextSprite>,\n\n\n\n sprites: Vec<TextSprite>,\n\n layout: Layout<TextUserData>,\n\n cache: HashMap<GlyphRasterConfig, CharCacheValue>,\n\n dirty: bool,\n\n}\n\n\n\nimpl TextShaderPass {\n\n pub fn new(ctx: &Context<impl App>, ortho: Matrix4<f32>) -> TextShaderPass {\n\n let max = ctx.max_texture_size().min(4096) as u32;\n", "file_path": "src/graphics/shaders/text/shader.rs", "rank": 5, "score": 120795.71831889974 }, { "content": "/// A trait to describe size and layout of color components.\n\n///\n\n/// # Example\n\n/// ```\n\n/// // This is an example for how to implement ColorDescriptor for a simple type.\n\n/// use storm::color::*;\n\n///\n\n/// #[repr(C)]\n\n/// #[derive(Copy, Clone)]\n\n/// pub struct BGRA8 {\n\n/// pub b: u8,\n\n/// pub r: u8,\n\n/// pub g: u8,\n\n/// pub a: u8,\n\n/// }\n\n///\n\n/// // This allows for bytes to represent single channel resources.\n\n/// impl ColorDescriptor for BGRA8 {\n\n/// fn component_type() -> ColorComponentType {\n\n/// ColorComponentType::U8\n\n/// }\n\n/// fn layout() -> ColorLayoutFormat {\n\n/// 
ColorLayoutFormat::BGRA\n\n/// }\n\n/// }\n\n/// ```\n\npub trait ColorDescriptor: Sized + Copy + Default {\n\n /// Gets the component type of the color.\n\n fn component_type() -> ColorComponentType;\n\n\n\n /// Gets the layout of the color.\n\n fn layout() -> ColorLayoutFormat;\n\n}\n\n\n\n/// Represents the type of each color component.\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\n#[repr(u32)]\n\npub enum ColorComponentType {\n\n U8 = PixelType::UnsignedByte as u32,\n\n F32 = PixelType::Float as u32,\n\n}\n\n\n\nimpl ColorComponentType {\n\n pub(crate) fn pixel_type(&self) -> PixelType {\n\n unsafe { core::mem::transmute(*self) }\n\n }\n", "file_path": "src/color/mod.rs", "rank": 6, "score": 116166.84438779748 }, { "content": "#[proc_macro_attribute]\n\npub fn uniform(args: TokenStream, input: TokenStream) -> TokenStream {\n\n assert!(args.is_empty(), \"#[uniform] does not take arguments.\");\n\n let input = parse_macro_input!(input as DeriveInput);\n\n parse_uniform(&input).unwrap_or_else(compile_error).into()\n\n}\n\n\n", "file_path": "storm_macro/src/lib.rs", "rank": 7, "score": 110285.91193978593 }, { "content": "/// Extra functions on floating point values.\n\npub trait Float {\n\n /// Computes the sine of a number. Input in degrees, output in radians.\n\n ///\n\n /// * Average error of 0.00060 radians.\n\n /// * Largest error of 0.00229 radians.\n\n /// * Speedup of 30x over f32.sin();\n\n fn sin_deg_fast(self) -> Self;\n\n\n\n /// Computes the sine of a number. Input in radians, output in radians.\n\n ///\n\n /// * Average error of 0.00060 radians.\n\n /// * Largest error of 0.00229 radians.\n\n /// * Speedup of 30x over f32.sin();\n\n fn sin_rad_fast(self) -> Self;\n\n\n\n /// Computes the cosine of a number. 
Input in degrees, output in radians.\n\n ///\n\n /// * Average error of 0.00060 radians.\n\n /// * Largest error of 0.00229 radians.\n\n /// * Speedup of 30x over f32.cos();\n", "file_path": "src/math/num/mod.rs", "rank": 8, "score": 106204.25947241051 }, { "content": "/// Returns an instant corresponding to “now”.\n\npub fn now() -> Instant {\n\n Instant::now()\n\n}\n", "file_path": "src/time/mod.rs", "rank": 9, "score": 98755.88559694671 }, { "content": "/// Creates a new SPSC Queue, returning a Producer and Consumer handle\n\n///\n\n/// Capacity specifies the size of the bounded queue to create. Actual memory usage\n\n/// will be `capacity.next_power_of_two() * size_of::<T>()`, since ringbuffers with\n\n/// power of two sizes are more efficient to operate on (can use a bitwise AND to index\n\n/// into the ring instead of a more expensive modulo operator).\n\n///\n\n/// Of course, a SPSC queue is really only useful if you plan to use it in a multi-threaded\n\n/// environment. The Producer and Consumer can both be sent to a thread, providing a fast, bounded\n\n/// one-way communication channel between those threads.\n\n///\n\n/// # Panics\n\n///\n\n/// If the requested queue size is larger than available memory (e.g.\n\n/// `capacity.next_power_of_two() * size_of::<T>() > available memory` ), this function will abort\n\n/// with an OOM panic.\n\npub fn make<T>(capacity: usize) -> (Producer<T>, Consumer<T>) {\n\n let ptr = unsafe { allocate_buffer(capacity) };\n\n\n\n let arc = Arc::new(Buffer {\n\n buffer: ptr,\n\n capacity,\n\n allocated_size: capacity.next_power_of_two(),\n\n _padding1: [0; CACHELINE - 3],\n\n\n\n head: AtomicUsize::new(0),\n\n shadow_tail: Cell::new(0),\n\n _padding2: [0; CACHELINE - 2],\n\n\n\n tail: AtomicUsize::new(0),\n\n shadow_head: Cell::new(0),\n\n _padding3: [0; CACHELINE - 2],\n\n });\n\n\n\n (\n\n Producer {\n", "file_path": "src/sync/spsc.rs", "rank": 10, "score": 97558.06541041676 }, { "content": "struct ParticlesApp {\n\n 
is_dragging: bool,\n\n sprite_shader: SpriteShader,\n\n particle_buffer: Buffer<Sprite>,\n\n default_texture: Texture,\n\n transform: OrthographicCamera,\n\n transform_uniform: Uniform<std140::mat4>,\n\n sprites: Vec<Sprite>,\n\n particles: Vec<Particle>,\n\n}\n\n\n\nimpl App for ParticlesApp {\n\n fn new(ctx: &mut Context<Self>) -> Self {\n\n ctx.wait_periodic(Some(Duration::from_secs_f32(1.0 / 144.0)));\n\n let is_dragging = false;\n\n\n\n let sprite_shader = SpriteShader::new(ctx);\n\n let mut particle_buffer = Buffer::new(ctx);\n\n let default_texture = ctx.default_texture();\n\n\n", "file_path": "examples/particles.rs", "rank": 11, "score": 94296.41615904073 }, { "content": "struct PongApp {\n\n text_shader: TextShader,\n\n sprite_shader: SpriteShader,\n\n default_texture: Texture,\n\n background: Buffer<Sprite>,\n\n paddles: Buffer<Sprite>,\n\n ball: Buffer<Sprite>,\n\n transform_uniform: Uniform<std140::mat4>,\n\n boop: Sound,\n\n text_layer: TextShaderPass,\n\n up: bool,\n\n down: bool,\n\n paddle_speed: [f32; 2],\n\n paddle_sprites: [Sprite; 2],\n\n ball_speed: Vector3<f32>,\n\n ball_sprites: [Sprite; 1],\n\n}\n\n\n\nimpl App for PongApp {\n\n fn new(ctx: &mut Context<Self>) -> Self {\n", "file_path": "examples/pong.rs", "rank": 12, "score": 94296.41615904073 }, { "content": "struct TextApp {\n\n is_dragging: bool,\n\n transform: OrthographicCamera,\n\n text_shader: TextShader,\n\n text_layer: TextShaderPass,\n\n fonts: [Font; 1],\n\n layout_settings: LayoutSettings,\n\n message: String,\n\n}\n\n\n\nimpl App for TextApp {\n\n fn new(ctx: &mut Context<Self>) -> Self {\n\n ctx.wait_periodic(Some(Duration::from_secs_f32(1.0 / 144.0)));\n\n let is_dragging = false;\n\n let mut transform = OrthographicCamera::new(ctx.window_logical_size());\n\n let text_shader = TextShader::new(ctx);\n\n\n\n // Create a Layers to draw on.\n\n let mut text_layer = TextShaderPass::new(ctx, transform.matrix());\n\n\n", "file_path": "examples/text.rs", "rank": 13, "score": 
94296.41615904073 }, { "content": "/// Run with: cargo run --example texture --release\n\nfn main() {\n\n start::<TextureApp>(WindowSettings {\n\n title: String::from(\"Storm: Texture\"),\n\n display_mode: DisplayMode::Windowed {\n\n width: 1280,\n\n height: 1024,\n\n resizable: true,\n\n },\n\n vsync: Vsync::Disabled,\n\n });\n\n}\n\n\n", "file_path": "examples/texture.rs", "rank": 14, "score": 93740.84578116266 }, { "content": "#[inline]\n\npub fn as_milliseconds(duration: &Duration) -> u64 {\n\n let mut secs = duration.as_secs();\n\n let mut nanos = duration.subsec_nanos() as u64;\n\n secs *= MILLIS_PER_SEC;\n\n nanos /= NANOS_PER_MILLI;\n\n secs + nanos\n\n}\n\n\n\n/// Converts a duration into a number of microseconds, rounding down.\n", "file_path": "src/time/convert.rs", "rank": 15, "score": 87662.7080169018 }, { "content": "#[inline]\n\npub fn as_microseconds(duration: &Duration) -> u64 {\n\n let mut secs = duration.as_secs();\n\n let mut nanos = duration.subsec_nanos() as u64;\n\n secs *= MICROS_PER_SEC;\n\n nanos /= NANOS_PER_MICRO;\n\n secs + nanos\n\n}\n\n\n\n/// Converts a duration into a number of nanoseconds, rounding down.\n", "file_path": "src/time/convert.rs", "rank": 16, "score": 87662.7080169018 }, { "content": "#[inline]\n\npub fn as_minutes(duration: &Duration) -> u64 {\n\n let secs = duration.as_secs();\n\n secs / SECS_PER_MINUTE\n\n}\n\n\n\n/// Converts a duration into a number of seconds, rounding down.\n", "file_path": "src/time/convert.rs", "rank": 17, "score": 87662.7080169018 }, { "content": "#[inline]\n\npub fn as_nanoseconds(duration: &Duration) -> u64 {\n\n let mut secs = duration.as_secs();\n\n let nanos = duration.subsec_nanos() as u64;\n\n secs *= NANOS_PER_SEC;\n\n secs + nanos\n\n}\n", "file_path": "src/time/convert.rs", "rank": 18, "score": 87662.7080169018 }, { "content": "#[inline]\n\npub fn as_seconds(duration: &Duration) -> u64 {\n\n duration.as_secs()\n\n}\n\n\n\n/// Converts a duration into a number of milliseconds, 
rounding down.\n", "file_path": "src/time/convert.rs", "rank": 19, "score": 87662.7080169018 }, { "content": "#[inline]\n\npub fn as_hours(duration: &Duration) -> u64 {\n\n let secs = duration.as_secs();\n\n secs / SECS_PER_HOUR\n\n}\n\n\n\n/// Converts a duration into a number of minutes, rounding down.\n", "file_path": "src/time/convert.rs", "rank": 20, "score": 87662.7080169018 }, { "content": "#[inline]\n\npub fn as_days(duration: &Duration) -> u64 {\n\n let secs = duration.as_secs();\n\n secs / SECS_PER_DAY\n\n}\n\n\n\n/// Converts a duration into a number of hours, rounding down.\n", "file_path": "src/time/convert.rs", "rank": 21, "score": 87662.7080169018 }, { "content": "/// Computes the sine of a number. Input in radians, output in radians.\n\n///\n\n/// * Average error of 0.00060 radians.\n\n/// * Largest error of 0.00229 radians.\n\n/// * Speedup of 30x over f32.sin();\n\npub fn sin_rad(rad: f32) -> f32 {\n\n unsafe { *SIN.get_unchecked((((rad * RAD_INDEX) as i32) & SIN_MASK) as usize) }\n\n}\n\n\n", "file_path": "src/math/num/trigonometry.rs", "rank": 22, "score": 83487.4219882862 }, { "content": "/// Computes the sine of a number. Input in degrees, output in radians.\n\n///\n\n/// * Average error of 0.00060 radians.\n\n/// * Largest error of 0.00229 radians.\n\n/// * Speedup of 30x over f32.sin();\n\npub fn sin_deg(deg: f32) -> f32 {\n\n unsafe { *SIN.get_unchecked((((deg * DEG_INDEX) as i32) & SIN_MASK) as usize) }\n\n}\n\n\n", "file_path": "src/math/num/trigonometry.rs", "rank": 23, "score": 83487.4219882862 }, { "content": "/// Computes the cosine of a number. 
Input in radians, output in radians.\n\n///\n\n/// * Average error of 0.00060 radians.\n\n/// * Largest error of 0.00229 radians.\n\n/// * Speedup of 30x over f32.cos();\n\npub fn cos_rad(rad: f32) -> f32 {\n\n unsafe { *COS.get_unchecked((((rad * RAD_INDEX) as i32) & SIN_MASK) as usize) }\n\n}\n\n\n", "file_path": "src/math/num/trigonometry.rs", "rank": 24, "score": 83487.4219882862 }, { "content": "/// Computes the cosine of a number. Input in degrees, output in radians.\n\n///\n\n/// * Average error of 0.00060 radians.\n\n/// * Largest error of 0.00229 radians.\n\n/// * Speedup of 30x over f32.cos();\n\npub fn cos_deg(deg: f32) -> f32 {\n\n unsafe { *COS.get_unchecked((((deg * DEG_INDEX) as i32) & SIN_MASK) as usize) }\n\n}\n\n\n\n// Tokenization is skipped for long lines\n\n#[rustfmt::skip] #[allow(clippy::all)] static SIN: [f32; SIZE] = [0f32, 0.004601926f32, 0.0076698293f32, 0.010737659f32, 0.0138053885f32, 0.016872989f32, 0.019940428f32, 0.023007682f32, 0.026074719f32, 0.02914151f32, 0.03220803f32, 0.035274237f32, 0.03834012f32, 0.041405644f32, 0.044470772f32, 0.047535487f32, 0.05059975f32, 0.05366354f32, 0.05672682f32, 0.059789572f32, 0.06285176f32, 0.06591336f32, 0.06897433f32, 0.07203466f32, 0.075094305f32, 0.078153245f32, 0.08121145f32, 0.08426889f32, 0.087325536f32, 0.09038136f32, 0.09343634f32, 0.096490435f32, 0.099543616f32, 0.10259587f32, 0.105647154f32, 0.108697444f32, 0.11174671f32, 0.11479493f32, 0.11784206f32, 0.12088809f32, 0.12393298f32, 0.1269767f32, 0.13001922f32, 0.13306053f32, 0.13610058f32, 0.13913935f32, 0.1421768f32, 0.14521293f32, 0.14824769f32, 0.15128104f32, 0.15431298f32, 0.15734346f32, 0.16037247f32, 0.16339995f32, 0.1664259f32, 0.1694503f32, 0.17247309f32, 0.17549427f32, 0.17851378f32, 0.18153162f32, 0.18454774f32, 0.18756212f32, 0.19057477f32, 0.19358559f32, 0.19659461f32, 0.19960175f32, 0.20260705f32, 0.20561042f32, 0.20861185f32, 0.21161133f32, 0.21460882f32, 0.21760428f32, 0.2205977f32, 0.22358905f32, 0.22657827f32, 
0.22956537f32, 0.23255032f32, 0.23553306f32, 0.2385136f32, 0.24149188f32, 0.2444679f32, 0.24744162f32, 0.250413f32, 0.25338206f32, 0.2563487f32, 0.25931293f32, 0.2622747f32, 0.26523405f32, 0.26819086f32, 0.27114516f32, 0.27409694f32, 0.2770461f32, 0.27999264f32, 0.28293657f32, 0.28587785f32, 0.2888164f32, 0.29175228f32, 0.2946854f32, 0.29761574f32, 0.30054325f32, 0.30346796f32, 0.3063898f32, 0.30930877f32, 0.3122248f32, 0.31513795f32, 0.3180481f32, 0.32095525f32, 0.32385936f32, 0.32676047f32, 0.32965845f32, 0.3325534f32, 0.33544517f32, 0.33833376f32, 0.34121922f32, 0.34410143f32, 0.34698042f32, 0.34985614f32, 0.35272858f32, 0.35559767f32, 0.3584634f32, 0.3613258f32, 0.3641848f32, 0.36704037f32, 0.36989245f32, 0.37274107f32, 0.37558618f32, 0.37842774f32, 0.3812658f32, 0.3841002f32, 0.38693103f32, 0.38975817f32, 0.39258167f32, 0.3954015f32, 0.39821756f32, 0.4010299f32, 0.4038385f32, 0.40664324f32, 0.40944415f32, 0.41224125f32, 0.41503444f32, 0.4178237f32, 0.4206091f32, 0.42339048f32, 0.4261679f32, 0.4289413f32, 0.43171066f32, 0.434476f32, 0.43723717f32, 0.43999428f32, 0.44274724f32, 0.44549602f32, 0.4482406f32, 0.450981f32, 0.45371714f32, 0.45644897f32, 0.45917657f32, 0.46189982f32, 0.46461868f32, 0.46733323f32, 0.47004336f32, 0.47274905f32, 0.47545028f32, 0.47814706f32, 0.4808393f32, 0.4835271f32, 0.4862103f32, 0.48888892f32, 0.49156293f32, 0.49423233f32, 0.4968971f32, 0.4995571f32, 0.50221246f32, 0.50486314f32, 0.507509f32, 0.51015013f32, 0.51278645f32, 0.5154179f32, 0.5180445f32, 0.52066624f32, 0.5232831f32, 0.52589506f32, 0.52850205f32, 0.531104f32, 0.533701f32, 0.53629297f32, 0.53887993f32, 0.54146177f32, 0.54403853f32, 0.5466102f32, 0.5491767f32, 0.55173796f32, 0.5542941f32, 0.55684507f32, 0.5593907f32, 0.56193113f32, 0.5644663f32, 0.5669961f32, 0.56952053f32, 0.5720396f32, 0.5745534f32, 0.5770617f32, 0.5795646f32, 0.582062f32, 0.58455396f32, 0.58704036f32, 0.5895213f32, 0.5919967f32, 0.5944665f32, 0.59693074f32, 0.5993893f32, 0.6018423f32, 0.60428953f32, 
0.6067311f32, 0.60916704f32, 0.6115972f32, 0.6140216f32, 0.6164402f32, 0.61885303f32, 0.62126f32, 0.6236611f32, 0.6260564f32, 0.6284458f32, 0.6308293f32, 0.6332068f32, 0.63557833f32, 0.6379439f32, 0.6403035f32, 0.64265704f32, 0.6450046f32, 0.647346f32, 0.6496813f32, 0.65201056f32, 0.6543336f32, 0.65665054f32, 0.6589613f32, 0.66126585f32, 0.6635642f32, 0.66585624f32, 0.6681421f32, 0.67042154f32, 0.6726948f32, 0.6749617f32, 0.6772222f32, 0.6794763f32, 0.68172413f32, 0.68396544f32, 0.6862003f32, 0.68842876f32, 0.6906507f32, 0.6928662f32, 0.69507515f32, 0.69727755f32, 0.6994734f32, 0.7016626f32, 0.70384526f32, 0.70602125f32, 0.7081907f32, 0.7103534f32, 0.7125094f32, 0.7146587f32, 0.7168013f32, 0.71893716f32, 0.72106624f32, 0.7231885f32, 0.725304f32, 0.72741264f32, 0.7295144f32, 0.7316094f32, 0.7336975f32, 0.7357786f32, 0.7378528f32, 0.73992014f32, 0.74198043f32, 0.74403375f32, 0.7460801f32, 0.7481194f32, 0.75015163f32, 0.7521769f32, 0.754195f32, 0.75620604f32, 0.7582099f32, 0.7602067f32, 0.7621963f32, 0.76417875f32, 0.766154f32, 0.7681221f32, 0.7700829f32, 0.7720364f32, 0.7739827f32, 0.7759217f32, 0.7778534f32, 0.7797778f32, 0.7816948f32, 0.78360456f32, 0.78550684f32, 0.78740174f32, 0.78928924f32, 0.79116935f32, 0.793042f32, 0.79490715f32, 0.79676485f32, 0.798615f32, 0.80045766f32, 0.8022928f32, 0.8041204f32, 0.8059404f32, 0.80775285f32, 0.8095577f32, 0.8113549f32, 0.81314445f32, 0.8149263f32, 0.8167006f32, 0.81846714f32, 0.820226f32, 0.82197714f32, 0.8237205f32, 0.82545614f32, 0.827184f32, 0.82890415f32, 0.8306164f32, 0.83232087f32, 0.8340175f32, 0.8357063f32, 0.8373872f32, 0.83906025f32, 0.84072536f32, 0.8423826f32, 0.8440319f32, 0.84567326f32, 0.84730667f32, 0.8489321f32, 0.8505495f32, 0.8521589f32, 0.8537603f32, 0.8553537f32, 0.856939f32, 0.8585163f32, 0.8600854f32, 0.86164653f32, 0.8631994f32, 0.86474425f32, 0.866281f32, 0.8678095f32, 0.86932987f32, 0.8708421f32, 0.87234604f32, 0.8738419f32, 0.87532943f32, 0.87680876f32, 0.8782798f32, 0.8797426f32, 0.88119715f32, 
0.8826434f32, 0.88408124f32, 0.88551086f32, 0.88693213f32, 0.88834506f32, 0.8897496f32, 0.89114577f32, 0.89253354f32, 0.893913f32, 0.89528394f32, 0.8966465f32, 0.8980006f32, 0.8993463f32, 0.90068346f32, 0.90201217f32, 0.90333235f32, 0.9046441f32, 0.90594727f32, 0.907242f32, 0.90852815f32, 0.9098057f32, 0.91107476f32, 0.9123352f32, 0.9135871f32, 0.9148303f32, 0.916065f32, 0.91729105f32, 0.9185084f32, 0.9197172f32, 0.9209172f32, 0.92210865f32, 0.92329144f32, 0.9244655f32, 0.9256308f32, 0.9267875f32, 0.9279354f32, 0.9290746f32, 0.93020505f32, 0.93132675f32, 0.9324396f32, 0.9335438f32, 0.93463916f32, 0.9357257f32, 0.93680346f32, 0.93787235f32, 0.9389325f32, 0.9399838f32, 0.9410262f32, 0.94205976f32, 0.9430845f32, 0.94410026f32, 0.9451072f32, 0.94610524f32, 0.9470944f32, 0.94807464f32, 0.9490459f32, 0.9500082f32, 0.95096165f32, 0.95190614f32, 0.95284164f32, 0.9537682f32, 0.95468575f32, 0.95559436f32, 0.9564939f32, 0.9573845f32, 0.9582661f32, 0.95913863f32, 0.9600022f32, 0.9608567f32, 0.9617021f32, 0.9625385f32, 0.9633658f32, 0.96418405f32, 0.96499324f32, 0.9657934f32, 0.9665844f32, 0.9673663f32, 0.9681391f32, 0.9689028f32, 0.9696574f32, 0.97040284f32, 0.9711392f32, 0.97186637f32, 0.97258437f32, 0.97329324f32, 0.97399294f32, 0.9746835f32, 0.97536486f32, 0.9760371f32, 0.97670007f32, 0.97735393f32, 0.9779985f32, 0.97863394f32, 0.97926015f32, 0.9798771f32, 0.9804849f32, 0.9810834f32, 0.9816727f32, 0.9822527f32, 0.98282355f32, 0.9833851f32, 0.98393744f32, 0.98448044f32, 0.98501426f32, 0.9855387f32, 0.98605394f32, 0.9865599f32, 0.98705655f32, 0.98754394f32, 0.988022f32, 0.9884908f32, 0.98895025f32, 0.98940045f32, 0.9898413f32, 0.9902728f32, 0.990695f32, 0.99110794f32, 0.99151146f32, 0.9919057f32, 0.9922906f32, 0.9926661f32, 0.99303234f32, 0.99338925f32, 0.99373674f32, 0.9940749f32, 0.99440366f32, 0.99472314f32, 0.9950332f32, 0.9953339f32, 0.99562526f32, 0.99590725f32, 0.9961798f32, 0.99644303f32, 0.9966969f32, 0.9969414f32, 0.99717647f32, 0.99740213f32, 0.99761844f32, 
0.9978254f32, 0.99802285f32, 0.998211f32, 0.9983897f32, 0.99855906f32, 0.99871904f32, 0.99886954f32, 0.9990107f32, 0.9991424f32, 0.9992648f32, 0.99937767f32, 0.9994812f32, 0.9995753f32, 0.99966f32, 0.9997353f32, 0.99980116f32, 0.99985766f32, 0.9999047f32, 0.99994236f32, 0.9999706f32, 0.9999894f32, 0.9999988f32, 1f32, 0.9999894f32, 0.9999706f32, 0.99994236f32, 0.9999047f32, 0.99985766f32, 0.99980116f32, 0.9997353f32, 0.99966f32, 0.9995753f32, 0.9994812f32, 0.99937767f32, 0.9992647f32, 0.9991424f32, 0.9990107f32, 0.99886954f32, 0.99871904f32, 0.99855906f32, 0.9983897f32, 0.998211f32, 0.99802285f32, 0.9978253f32, 0.99761844f32, 0.99740213f32, 0.9971764f32, 0.9969413f32, 0.9966969f32, 0.99644303f32, 0.9961798f32, 0.99590725f32, 0.99562526f32, 0.9953339f32, 0.9950332f32, 0.9947231f32, 0.99440366f32, 0.9940749f32, 0.99373674f32, 0.9933892f32, 0.99303234f32, 0.9926661f32, 0.9922906f32, 0.9919057f32, 0.99151146f32, 0.9911079f32, 0.990695f32, 0.9902728f32, 0.9898413f32, 0.9894004f32, 0.98895025f32, 0.9884908f32, 0.988022f32, 0.98754394f32, 0.98705655f32, 0.9865599f32, 0.98605394f32, 0.9855387f32, 0.9850142f32, 0.98448044f32, 0.9839374f32, 0.9833851f32, 0.98282355f32, 0.9822527f32, 0.9816727f32, 0.9810834f32, 0.98048484f32, 0.9798771f32, 0.97926015f32, 0.97863394f32, 0.9779985f32, 0.9773539f32, 0.97670007f32, 0.9760371f32, 0.97536486f32, 0.97468346f32, 0.97399294f32, 0.97329324f32, 0.97258437f32, 0.9718663f32, 0.97113913f32, 0.97040284f32, 0.96965736f32, 0.96890277f32, 0.9681391f32, 0.9673663f32, 0.9665844f32, 0.9657933f32, 0.96499324f32, 0.96418405f32, 0.9633658f32, 0.9625384f32, 0.96170205f32, 0.9608566f32, 0.9600021f32, 0.95913863f32, 0.9582661f32, 0.95738447f32, 0.9564939f32, 0.9555943f32, 0.95468575f32, 0.9537682f32, 0.95284164f32, 0.9519061f32, 0.95096165f32, 0.9500082f32, 0.9490459f32, 0.9480746f32, 0.9470944f32, 0.94610524f32, 0.94510716f32, 0.94410026f32, 0.9430844f32, 0.9420597f32, 0.94102615f32, 0.9399837f32, 0.9389325f32, 0.93787235f32, 0.9368034f32, 
0.9357256f32, 0.93463916f32, 0.93354374f32, 0.9324396f32, 0.9313267f32, 0.930205f32, 0.9290746f32, 0.92793536f32, 0.92678744f32, 0.9256308f32, 0.9244654f32, 0.9232914f32, 0.92210865f32, 0.9209172f32, 0.91971713f32, 0.9185084f32, 0.917291f32, 0.916065f32, 0.9148303f32, 0.91358703f32, 0.91233516f32, 0.9110747f32, 0.90980566f32, 0.90852815f32, 0.907242f32, 0.9059473f32, 0.9046441f32, 0.90333235f32, 0.90201217f32, 0.9006834f32, 0.89934623f32, 0.8980006f32, 0.89664644f32, 0.8952839f32, 0.8939129f32, 0.89253354f32, 0.8911457f32, 0.8897495f32, 0.888345f32, 0.8869321f32, 0.8855108f32, 0.8840812f32, 0.8826433f32, 0.88119704f32, 0.8797425f32, 0.87827975f32, 0.87680864f32, 0.8753293f32, 0.87384176f32, 0.8723461f32, 0.8708421f32, 0.86932987f32, 0.86780953f32, 0.866281f32, 0.86474425f32, 0.8631994f32, 0.8616465f32, 0.86008537f32, 0.8585162f32, 0.85693896f32, 0.85535365f32, 0.8537603f32, 0.85215884f32, 0.85054946f32, 0.848932f32, 0.8473066f32, 0.8456732f32, 0.8440319f32, 0.84238255f32, 0.8407253f32, 0.8390602f32, 0.83738714f32, 0.83570623f32, 0.8340174f32, 0.83232075f32, 0.8306163f32, 0.82890403f32, 0.8271841f32, 0.8254562f32, 0.8237205f32, 0.82197714f32, 0.820226f32, 0.81846714f32, 0.8167006f32, 0.8149263f32, 0.8131444f32, 0.8113548f32, 0.8095576f32, 0.8077528f32, 0.80594033f32, 0.80412036f32, 0.80229276f32, 0.8004576f32, 0.7986149f32, 0.79676473f32, 0.79490703f32, 0.7930419f32, 0.7911692f32, 0.7892892f32, 0.7874017f32, 0.7855067f32, 0.78360444f32, 0.7816947f32, 0.7797777f32, 0.7778534f32, 0.7759217f32, 0.7739827f32, 0.77203643f32, 0.77008283f32, 0.768122f32, 0.766154f32, 0.76417875f32, 0.7621963f32, 0.76020664f32, 0.7582099f32, 0.756206f32, 0.7541949f32, 0.7521768f32, 0.7501516f32, 0.74811935f32, 0.74608004f32, 0.7440337f32, 0.7419804f32, 0.73992f32, 0.73785275f32, 0.7357785f32, 0.73369735f32, 0.7316093f32, 0.72951436f32, 0.7274125f32, 0.7253038f32, 0.72318834f32, 0.72106624f32, 0.71893716f32, 0.7168013f32, 0.7146587f32, 0.7125094f32, 0.7103534f32, 0.7081906f32, 0.70602125f32, 
0.7038452f32, 0.7016626f32, 0.6994733f32, 0.6972775f32, 0.6950751f32, 0.69286615f32, 0.69065064f32, 0.6884287f32, 0.68620026f32, 0.6839653f32, 0.681724f32, 0.67947626f32, 0.6772221f32, 0.67496157f32, 0.6726947f32, 0.6704214f32, 0.6681419f32, 0.6658561f32, 0.663564f32, 0.6612657f32, 0.65896136f32, 0.6566506f32, 0.65433365f32, 0.65201056f32, 0.6496813f32, 0.64734596f32, 0.6450045f32, 0.64265704f32, 0.6403035f32, 0.63794386f32, 0.6355783f32, 0.6332067f32, 0.63082916f32, 0.6284457f32, 0.6260563f32, 0.62366104f32, 0.6212599f32, 0.6188529f32, 0.61644006f32, 0.6140215f32, 0.61159706f32, 0.6091669f32, 0.606731f32, 0.6042894f32, 0.6018421f32, 0.59938914f32, 0.59693056f32, 0.59446657f32, 0.5919967f32, 0.58952135f32, 0.5870404f32, 0.58455396f32, 0.582062f32, 0.5795646f32, 0.57706165f32, 0.5745533f32, 0.5720396f32, 0.5695205f32, 0.56699604f32, 0.5644662f32, 0.5619311f32, 0.55939066f32, 0.55684495f32, 0.55429405f32, 0.5517379f32, 0.5491766f32, 0.54661006f32, 0.5440384f32, 0.54146165f32, 0.5388798f32, 0.53629285f32, 0.5337009f32, 0.53110385f32, 0.52850187f32, 0.5258949f32, 0.5232831f32, 0.5206663f32, 0.51804453f32, 0.5154179f32, 0.5127864f32, 0.5101501f32, 0.507509f32, 0.5048631f32, 0.50221246f32, 0.49955708f32, 0.496897f32, 0.49423227f32, 0.49156287f32, 0.48888883f32, 0.4862102f32, 0.483527f32, 0.48083925f32, 0.47814697f32, 0.4754502f32, 0.47274894f32, 0.4700432f32, 0.46733308f32, 0.46461856f32, 0.46189964f32, 0.4591764f32, 0.45644882f32, 0.45371696f32, 0.4509808f32, 0.44824064f32, 0.44549605f32, 0.44274724f32, 0.43999428f32, 0.43723717f32, 0.43447596f32, 0.43171066f32, 0.42894128f32, 0.42616788f32, 0.42339045f32, 0.42060903f32, 0.41782367f32, 0.41503435f32, 0.41224116f32, 0.40944406f32, 0.40664312f32, 0.40383837f32, 0.4010298f32, 0.39821744f32, 0.39540136f32, 0.39258155f32, 0.38975805f32, 0.38693085f32, 0.38410005f32, 0.3812656f32, 0.3784276f32, 0.375586f32, 0.3727411f32, 0.36989248f32, 0.36704037f32, 0.3641848f32, 0.3613258f32, 0.3584634f32, 0.35559765f32, 0.35272855f32, 
0.3498561f32, 0.34698036f32, 0.34410137f32, 0.34121916f32, 0.3383337f32, 0.33544508f32, 0.3325533f32, 0.3296584f32, 0.32676035f32, 0.32385927f32, 0.32095513f32, 0.31804797f32, 0.3151378f32, 0.3122247f32, 0.30930862f32, 0.30638966f32, 0.30346778f32, 0.30054307f32, 0.29761553f32, 0.29468518f32, 0.2917523f32, 0.28881642f32, 0.28587785f32, 0.28293657f32, 0.27999264f32, 0.27704608f32, 0.2740969f32, 0.27114514f32, 0.26819083f32, 0.265234f32, 0.26227465f32, 0.25931287f32, 0.2563486f32, 0.25338197f32, 0.2504129f32, 0.24744153f32, 0.24446781f32, 0.24149178f32, 0.23851348f32, 0.23553294f32, 0.23255017f32, 0.22956522f32, 0.22657812f32, 0.22358887f32, 0.22059752f32, 0.2176041f32, 0.21460862f32, 0.21161114f32, 0.20861188f32, 0.20561044f32, 0.20260705f32, 0.19960177f32, 0.1965946f32, 0.19358557f32, 0.19057474f32, 0.1875621f32, 0.18454769f32, 0.18153156f32, 0.17851372f32, 0.1754942f32, 0.17247301f32, 0.16945021f32, 0.16642581f32, 0.16339985f32, 0.16037235f32, 0.15734334f32, 0.15431285f32, 0.15128091f32, 0.14824754f32, 0.14521277f32, 0.14217664f32, 0.13913918f32, 0.1361004f32, 0.13306034f32, 0.13001902f32, 0.12697673f32, 0.123933f32, 0.12088811f32, 0.11784207f32, 0.114794925f32, 0.111746706f32, 0.10869743f32, 0.10564713f32, 0.102595836f32, 0.09954358f32, 0.09649038f32, 0.09343628f32, 0.090381294f32, 0.08732546f32, 0.0842688f32, 0.08121135f32, 0.07815314f32, 0.075094186f32, 0.07203453f32, 0.0689742f32, 0.065913215f32, 0.06285161f32, 0.059789415f32, 0.056726657f32, 0.053663366f32, 0.050599568f32, 0.047535293f32, 0.04447057f32, 0.04140567f32, 0.03834014f32, 0.035274252f32, 0.03220803f32, 0.029141504f32, 0.026074704f32, 0.02300766f32, 0.019940397f32, 0.016872948f32, 0.01380534f32, 0.010737603f32, 0.007669763f32, 0.0046018516f32, 0.0015338971f32, -0.00000008742278f32, -0.0046020267f32, -0.007669938f32, -0.010737777f32, -0.013805515f32, -0.016873123f32, -0.019940572f32, -0.023007834f32, -0.026074879f32, -0.02914168f32, -0.032208204f32, -0.035274427f32, -0.038340315f32, -0.041405845f32, 
-0.044470746f32, -0.047535468f32, -0.050599743f32, -0.05366354f32, -0.056726832f32, -0.05978959f32, -0.06285179f32, -0.06591339f32, -0.068974376f32, -0.07203471f32, -0.075094365f32, -0.07815331f32, -0.08121153f32, -0.08426897f32, -0.08732563f32, -0.090381466f32, -0.09343645f32, -0.096490555f32, -0.09954375f32, -0.10259601f32, -0.1056473f32, -0.1086976f32, -0.11174688f32, -0.1147951f32, -0.11784224f32, -0.12088828f32, -0.12393317f32, -0.1269769f32, -0.1300192f32, -0.13306051f32, -0.13610058f32, -0.13913935f32, -0.14217682f32, -0.14521295f32, -0.1482477f32, -0.15128107f32, -0.15431303f32, -0.1573435f32, -0.16037253f32, -0.16340002f32, -0.16642599f32, -0.16945039f32, -0.17247318f32, -0.17549436f32, -0.17851388f32, -0.18153173f32, -0.18454787f32, -0.18756227f32, -0.1905749f32, -0.19358575f32, -0.19659476f32, -0.19960193f32, -0.20260723f32, -0.2056106f32, -0.20861205f32, -0.2116113f32, -0.2146088f32, -0.21760426f32, -0.2205977f32, -0.22358905f32, -0.22657828f32, -0.2295654f32, -0.23255034f32, -0.2355331f32, -0.23851365f32, -0.24149194f32, -0.24446797f32, -0.2474417f32, -0.2504131f32, -0.25338215f32, -0.2563488f32, -0.25931302f32, -0.26227483f32, -0.26523417f32, -0.26819098f32, -0.2711453f32, -0.27409706f32, -0.27704623f32, -0.27999282f32, -0.28293675f32, -0.28587803f32, -0.2888166f32, -0.29175246f32, -0.29468536f32, -0.2976157f32, -0.30054325f32, -0.30346796f32, -0.3063898f32, -0.3093088f32, -0.31222484f32, -0.31513798f32, -0.31804812f32, -0.32095528f32, -0.32385942f32, -0.32676053f32, -0.32965854f32, -0.33255345f32, -0.33544526f32, -0.33833387f32, -0.3412193f32, -0.34410155f32, -0.34698054f32, -0.34985626f32, -0.3527287f32, -0.35559782f32, -0.3584636f32, -0.36132598f32, -0.36418498f32, -0.36704054f32, -0.36989263f32, -0.37274128f32, -0.37558615f32, -0.37842774f32, -0.38126576f32, -0.3841002f32, -0.38693103f32, -0.3897582f32, -0.3925817f32, -0.39540154f32, -0.39821762f32, -0.40102994f32, -0.40383852f32, -0.4066433f32, -0.40944424f32, -0.4122413f32, -0.41503453f32, 
-0.41782382f32, -0.42060918f32, -0.4233906f32, -0.42616802f32, -0.42894143f32, -0.4317108f32, -0.4344761f32, -0.43723732f32, -0.43999445f32, -0.4427474f32, -0.4454962f32, -0.44824082f32, -0.45098096f32, -0.4537171f32, -0.45644897f32, -0.45917657f32, -0.46189982f32, -0.4646187f32, -0.46733323f32, -0.47004336f32, -0.47274908f32, -0.47545034f32, -0.47814712f32, -0.4808394f32, -0.48352715f32, -0.48621035f32, -0.48888898f32, -0.49156302f32, -0.49423242f32, -0.49689716f32, -0.49955723f32, -0.5022126f32, -0.50486326f32, -0.5075091f32, -0.51015025f32, -0.51278657f32, -0.51541805f32, -0.51804465f32, -0.5206664f32, -0.5232833f32, -0.525895f32, -0.528502f32, -0.531104f32, -0.533701f32, -0.53629297f32, -0.53887993f32, -0.5414618f32, -0.5440386f32, -0.54661024f32, -0.5491767f32, -0.551738f32, -0.55429417f32, -0.5568451f32, -0.5593908f32, -0.5619312f32, -0.56446636f32, -0.56699616f32, -0.56952065f32, -0.5720397f32, -0.5745535f32, -0.57706183f32, -0.5795647f32, -0.5820621f32, -0.5845541f32, -0.58704054f32, -0.58952147f32, -0.59199685f32, -0.5944667f32, -0.5969307f32, -0.5993893f32, -0.6018423f32, -0.60428953f32, -0.6067312f32, -0.60916704f32, -0.6115972f32, -0.6140216f32, -0.61644024f32, -0.61885303f32, -0.62126005f32, -0.62366116f32, -0.6260565f32, -0.62844586f32, -0.63082933f32, -0.63320684f32, -0.6355784f32, -0.63794404f32, -0.6403036f32, -0.64265716f32, -0.6450047f32, -0.6473461f32, -0.64968145f32, -0.6520107f32, -0.6543338f32, -0.6566507f32, -0.6589615f32, -0.66126585f32, -0.66356415f32, -0.66585624f32, -0.6681421f32, -0.6704216f32, -0.6726948f32, -0.6749617f32, -0.6772222f32, -0.6794764f32, -0.68172413f32, -0.68396544f32, -0.6862004f32, -0.6884288f32, -0.69065076f32, -0.69286627f32, -0.6950752f32, -0.6972776f32, -0.69947344f32, -0.7016627f32, -0.7038453f32, -0.70602137f32, -0.70819074f32, -0.7103535f32, -0.7125095f32, -0.71465886f32, -0.7168014f32, -0.7189373f32, -0.72106636f32, -0.72318846f32, -0.72530395f32, -0.72741264f32, -0.7295145f32, -0.7316094f32, -0.7336975f32, 
-0.7357786f32, -0.7378529f32, -0.73992014f32, -0.74198043f32, -0.7440338f32, -0.74608016f32, -0.7481195f32, -0.7501517f32, -0.75217694f32, -0.75419503f32, -0.7562061f32, -0.7582098f32, -0.76020664f32, -0.76219624f32, -0.7641787f32, -0.76615393f32, -0.768122f32, -0.77008283f32, -0.7720364f32, -0.7739827f32, -0.7759217f32, -0.77785337f32, -0.77977777f32, -0.7816948f32, -0.7836045f32, -0.78550684f32, -0.7874018f32, -0.7892893f32, -0.79116935f32, -0.793042f32, -0.79490715f32, -0.79676485f32, -0.79861504f32, -0.8004577f32, -0.8022929f32, -0.8041204f32, -0.80594045f32, -0.8077529f32, -0.80955774f32, -0.81135494f32, -0.8131445f32, -0.81492645f32, -0.8167007f32, -0.81846726f32, -0.8202261f32, -0.82197726f32, -0.82372063f32, -0.82545626f32, -0.82718414f32, -0.8289043f32, -0.83061653f32, -0.832321f32, -0.83401763f32, -0.8357064f32, -0.8373874f32, -0.83906037f32, -0.84072554f32, -0.8423828f32, -0.84403205f32, -0.84567344f32, -0.8473068f32, -0.8489322f32, -0.85054964f32, -0.85215884f32, -0.85376024f32, -0.8553536f32, -0.85693896f32, -0.85851616f32, -0.86008537f32, -0.8616464f32, -0.8631994f32, -0.86474425f32, -0.8662809f32, -0.8678095f32, -0.86932987f32, -0.87084204f32, -0.87234604f32, -0.8738418f32, -0.87532943f32, -0.87680876f32, -0.8782798f32, -0.8797426f32, -0.88119715f32, -0.88264334f32, -0.8840813f32, -0.88551086f32, -0.88693213f32, -0.88834506f32, -0.88974965f32, -0.8911458f32, -0.8925336f32, -0.89391303f32, -0.895284f32, -0.89664656f32, -0.89800066f32, -0.8993463f32, -0.9006835f32, -0.9020122f32, -0.9033325f32, -0.9046442f32, -0.9059474f32, -0.90724206f32, -0.9085282f32, -0.9098058f32, -0.9110748f32, -0.9123353f32, -0.91358715f32, -0.91483045f32, -0.9160651f32, -0.9172911f32, -0.9185085f32, -0.91971725f32, -0.92091733f32, -0.92210877f32, -0.92329156f32, -0.9244656f32, -0.925631f32, -0.9267876f32, -0.92793536f32, -0.9290745f32, -0.930205f32, -0.9313267f32, -0.9324396f32, -0.93354374f32, -0.9346391f32, -0.9357257f32, -0.9368034f32, -0.93787235f32, -0.9389325f32, 
-0.9399837f32, -0.94102615f32, -0.94205976f32, -0.9430844f32, -0.94410026f32, -0.9451072f32, -0.94610524f32, -0.9470944f32, -0.9480746f32, -0.9490459f32, -0.9500083f32, -0.9509617f32, -0.95190614f32, -0.9528417f32, -0.9537682f32, -0.9546858f32, -0.95559436f32, -0.956494f32, -0.9573845f32, -0.95826614f32, -0.9591387f32, -0.9600022f32, -0.9608567f32, -0.9617021f32, -0.96253854f32, -0.96336585f32, -0.9641841f32, -0.9649933f32, -0.96579343f32, -0.96658444f32, -0.96736634f32, -0.9681392f32, -0.9689029f32, -0.9696574f32, -0.9704029f32, -0.97113925f32, -0.9718664f32, -0.9725844f32, -0.9732933f32, -0.97399306f32, -0.9746836f32, -0.975365f32, -0.97603714f32, -0.9767002f32, -0.977354f32, -0.9779985f32, -0.9786339f32, -0.9792601f32, -0.9798771f32, -0.98048484f32, -0.9810834f32, -0.9816727f32, -0.9822527f32, -0.98282355f32, -0.9833851f32, -0.9839374f32, -0.98448044f32, -0.98501426f32, -0.9855387f32, -0.98605394f32, -0.9865599f32, -0.98705655f32, -0.98754394f32, -0.988022f32, -0.9884908f32, -0.98895025f32, -0.98940045f32, -0.9898413f32, -0.9902728f32, -0.99069506f32, -0.99110794f32, -0.99151146f32, -0.9919057f32, -0.9922906f32, -0.9926662f32, -0.9930324f32, -0.99338925f32, -0.99373674f32, -0.9940749f32, -0.9944037f32, -0.99472314f32, -0.9950332f32, -0.9953339f32, -0.99562526f32, -0.99590725f32, -0.9961799f32, -0.9964431f32, -0.9966969f32, -0.9969414f32, -0.99717647f32, -0.99740213f32, -0.99761844f32, -0.9978254f32, -0.9980229f32, -0.998211f32, -0.9983898f32, -0.9985591f32, -0.99871904f32, -0.99886954f32, -0.9990107f32, -0.9991424f32, -0.9992647f32, -0.99937767f32, -0.9994812f32, -0.9995753f32, -0.99966f32, -0.9997353f32, -0.99980116f32, -0.99985766f32, -0.9999047f32, -0.99994236f32, -0.9999706f32, -0.9999894f32, -0.9999988f32, -1f32, -0.9999894f32, -0.9999706f32, -0.99994236f32, -0.9999047f32, -0.99985766f32, -0.99980116f32, -0.9997353f32, -0.99966f32, -0.9995753f32, -0.9994812f32, -0.99937767f32, -0.9992647f32, -0.9991424f32, -0.9990107f32, -0.99886954f32, -0.998719f32, 
-0.99855906f32, -0.9983897f32, -0.99821097f32, -0.99802285f32, -0.9978253f32, -0.99761844f32, -0.99740213f32, -0.9971764f32, -0.9969413f32, -0.9966969f32, -0.99644303f32, -0.9961798f32, -0.9959072f32, -0.99562526f32, -0.9953339f32, -0.99503314f32, -0.9947231f32, -0.99440366f32, -0.9940748f32, -0.9937367f32, -0.9933892f32, -0.99303234f32, -0.9926661f32, -0.99229056f32, -0.9919056f32, -0.99151146f32, -0.99110794f32, -0.99069506f32, -0.9902728f32, -0.9898413f32, -0.98940045f32, -0.98895025f32, -0.9884908f32, -0.988022f32, -0.98754394f32, -0.98705655f32, -0.9865599f32, -0.98605394f32, -0.9855387f32, -0.9850142f32, -0.98448044f32, -0.9839374f32, -0.9833851f32, -0.98282355f32, -0.9822527f32, -0.98167264f32, -0.9810834f32, -0.98048484f32, -0.97987705f32, -0.9792601f32, -0.9786339f32, -0.9779985f32, -0.9773539f32, -0.97670007f32, -0.976037f32, -0.97536486f32, -0.97468346f32, -0.97399294f32, -0.9732932f32, -0.9725843f32, -0.9718663f32, -0.97113913f32, -0.9704028f32, -0.9696573f32, -0.96890277f32, -0.96813905f32, -0.9673662f32, -0.9665843f32, -0.9657933f32, -0.9649932f32, -0.964184f32, -0.96336573f32, -0.96253836f32, -0.961702f32, -0.96085656f32, -0.96000206f32, -0.9591385f32, -0.95826596f32, -0.9573844f32, -0.9564938f32, -0.95559436f32, -0.9546858f32, -0.9537682f32, -0.95284164f32, -0.95190614f32, -0.9509617f32, -0.9500083f32, -0.9490459f32, -0.9480746f32, -0.9470944f32, -0.94610524f32, -0.9451072f32, -0.94410026f32, -0.9430844f32, -0.94205976f32, -0.94102615f32, -0.9399837f32, -0.9389325f32, -0.93787235f32, -0.9368034f32, -0.9357257f32, -0.9346391f32, -0.93354374f32, -0.93243957f32, -0.9313267f32, -0.930205f32, -0.9290745f32, -0.92793536f32, -0.92678744f32, -0.92563075f32, -0.9244654f32, -0.9232913f32, -0.9221086f32, -0.92091715f32, -0.9197171f32, -0.9185083f32, -0.9172909f32, -0.91606486f32, -0.9148302f32, -0.913587f32, -0.9123351f32, -0.91107464f32, -0.9098056f32, -0.90852803f32, -0.9072419f32, -0.90594715f32, -0.90464395f32, -0.90333223f32, -0.902012f32, -0.9006833f32, 
-0.8993461f32, -0.8980004f32, -0.8966463f32, -0.89528376f32, -0.8939128f32, -0.8925336f32, -0.8911458f32, -0.88974965f32, -0.88834506f32, -0.88693213f32, -0.88551086f32, -0.8840813f32, -0.88264334f32, -0.8811971f32, -0.8797426f32, -0.8782798f32, -0.8768087f32, -0.8753294f32, -0.8738418f32, -0.87234604f32, -0.87084204f32, -0.86932987f32, -0.8678095f32, -0.8662809f32, -0.86474425f32, -0.86319935f32, -0.8616464f32, -0.86008537f32, -0.85851616f32, -0.8569389f32, -0.8553536f32, -0.85376024f32, -0.85215884f32, -0.8505494f32, -0.84893197f32, -0.84730655f32, -0.84567314f32, -0.8440318f32, -0.8423825f32, -0.84072524f32, -0.8390601f32, -0.8373871f32, -0.8357062f32, -0.8340174f32, -0.83232075f32, -0.83061624f32, -0.828904f32, -0.8271839f32, -0.825456f32, -0.82372034f32, -0.82197696f32, -0.82022583f32, -0.81846696f32, -0.8167004f32, -0.81492615f32, -0.8131442f32, -0.81135464f32, -0.80955744f32, -0.8077526f32, -0.80594015f32, -0.8041201f32, -0.8022928f32, -0.8004577f32, -0.79861504f32, -0.79676485f32, -0.79490715f32, -0.793042f32, -0.79116935f32, -0.78928924f32, -0.78740174f32, -0.78550684f32, -0.7836045f32, -0.7816948f32, -0.77977777f32, -0.77785337f32, -0.7759217f32, -0.77398264f32, -0.7720364f32, -0.7700828f32, -0.76812196f32, -0.76615393f32, -0.7641787f32, -0.76219624f32, -0.76020664f32, -0.7582098f32, -0.7562059f32, -0.7541949f32, -0.75217676f32, -0.7501515f32, -0.7481193f32, -0.74608f32, -0.74403363f32, -0.7419803f32, -0.73991996f32, -0.7378527f32, -0.73577845f32, -0.7336973f32, -0.7316092f32, -0.7295143f32, -0.72741246f32, -0.72530377f32, -0.7231883f32, -0.721066f32, -0.7189369f32, -0.71680105f32, -0.7146585f32, -0.71250916f32, -0.71035314f32, -0.7081904f32, -0.706021f32, -0.703845f32, -0.70166236f32, -0.6994731f32, -0.69727725f32, -0.69507486f32, -0.6928659f32, -0.69065076f32, -0.6884288f32, -0.6862004f32, -0.68396544f32, -0.68172413f32, -0.6794763f32, -0.6772222f32, -0.6749617f32, -0.6726948f32, -0.67042154f32, -0.668142f32, -0.66585624f32, -0.66356415f32, 
-0.6612658f32, -0.65896124f32, -0.6566505f32, -0.6543336f32, -0.6520105f32, -0.64968127f32, -0.6473459f32, -0.64500445f32, -0.642657f32, -0.6403034f32, -0.6379438f32, -0.6355782f32, -0.63320667f32, -0.6308291f32, -0.6284456f32, -0.62605625f32, -0.623661f32, -0.6212598f32, -0.61885285f32, -0.61644f32, -0.6140214f32, -0.611597f32, -0.60916686f32, -0.60673094f32, -0.60428935f32, -0.60184205f32, -0.5993891f32, -0.5969305f32, -0.59446627f32, -0.5919965f32, -0.5895211f32, -0.5870401f32, -0.5845537f32, -0.5820617f32, -0.5795643f32, -0.5770614f32, -0.5745531f32, -0.57203937f32, -0.56952024f32, -0.56699574f32, -0.56446594f32, -0.5619308f32, -0.5593908f32, -0.55684507f32, -0.55429417f32, -0.551738f32, -0.5491767f32, -0.5466102f32, -0.54403853f32, -0.54146177f32, -0.53887993f32, -0.53629297f32, -0.533701f32, -0.53110397f32, -0.528502f32, -0.525895f32, -0.52328306f32, -0.5206662f32, -0.5180445f32, -0.5154178f32, -0.5127863f32, -0.51015f32, -0.50750893f32, -0.504863f32, -0.5022124f32, -0.49955702f32, -0.49689692f32, -0.49423218f32, -0.49156278f32, -0.48888877f32, -0.48621014f32, -0.48352692f32, -0.48083916f32, -0.47814688f32, -0.4754501f32, -0.47274885f32, -0.47004315f32, -0.46733302f32, -0.46461847f32, -0.46189958f32, -0.45917633f32, -0.45644876f32, -0.45371687f32, -0.45098075f32, -0.44824037f32, -0.44549575f32, -0.44274697f32, -0.43999398f32, -0.43723688f32, -0.43447566f32, -0.43171036f32, -0.42894098f32, -0.42616758f32, -0.42339015f32, -0.42060873f32, -0.41782337f32, -0.41503406f32, -0.41224086f32, -0.4094442f32, -0.40664327f32, -0.40383852f32, -0.40102994f32, -0.3982176f32, -0.3954015f32, -0.3925817f32, -0.38975817f32, -0.386931f32, -0.3841002f32, -0.38126576f32, -0.3784277f32, -0.37558615f32, -0.372741f32, -0.3698924f32, -0.36704028f32, -0.36418474f32, -0.36132574f32, -0.35846335f32, -0.35559756f32, -0.35272846f32, -0.34985602f32, -0.3469803f32, -0.3441013f32, -0.34121907f32, -0.33833364f32, -0.335445f32, -0.3325532f32, -0.3296583f32, -0.3267603f32, -0.32385918f32, 
-0.32095504f32, -0.31804788f32, -0.3151377f32, -0.3122246f32, -0.30930853f32, -0.30638957f32, -0.3034677f32, -0.30054298f32, -0.29761544f32, -0.2946851f32, -0.29175198f32, -0.28881612f32, -0.28587756f32, -0.28293628f32, -0.27999234f32, -0.27704576f32, -0.27409658f32, -0.27114484f32, -0.2681905f32, -0.26523367f32, -0.26227435f32, -0.25931254f32, -0.2563483f32, -0.25338164f32, -0.25041306f32, -0.24744168f32, -0.24446794f32, -0.24149193f32, -0.23851362f32, -0.23553309f32, -0.23255032f32, -0.22956537f32, -0.22657827f32, -0.22358902f32, -0.22059767f32, -0.21760425f32, -0.21460877f32, -0.21161129f32, -0.2086118f32, -0.20561035f32, -0.20260696f32, -0.19960168f32, -0.1965945f32, -0.19358549f32, -0.19057465f32, -0.18756202f32, -0.18454762f32, -0.18153147f32, -0.17851363f32, -0.1754941f32, -0.17247292f32, -0.16945012f32, -0.16642573f32, -0.16339977f32, -0.16037227f32, -0.15734325f32, -0.15431276f32, -0.15128082f32, -0.14824745f32, -0.1452127f32, -0.14217655f32, -0.13913909f32, -0.1361003f32, -0.13306026f32, -0.13001895f32, -0.12697642f32, -0.123932675f32, -0.12088778f32, -0.11784175f32, -0.114794604f32, -0.11174638f32, -0.1086971f32, -0.105646804f32, -0.10259551f32, -0.09954325f32, -0.096490055f32, -0.09343595f32, -0.09038097f32, -0.08732513f32, -0.08426895f32, -0.0812115f32, -0.07815329f32, -0.07509434f32, -0.07203468f32, -0.068974346f32, -0.065913364f32, -0.062851764f32, -0.059789564f32, -0.056726806f32, -0.053663515f32, -0.050599717f32, -0.047535446f32, -0.044470724f32, -0.041405585f32, -0.038340054f32, -0.035274163f32, -0.032207943f32, -0.029141417f32, -0.026074616f32, -0.023007572f32, -0.019940311f32, -0.01687286f32, -0.0138052525f32, -0.010737515f32, -0.0076696756f32, -0.0046017645f32, -0.0015338097f32, ];\n\n#[rustfmt::skip] #[allow(clippy::all)] static COS: [f32; SIZE] = [1f32, 0.9999894f32, 0.9999706f32, 0.99994236f32, 0.9999047f32, 0.99985766f32, 0.99980116f32, 0.9997353f32, 0.99966f32, 0.9995753f32, 0.9994812f32, 0.99937767f32, 0.9992648f32, 0.9991424f32, 
0.9990107f32, 0.99886954f32, 0.99871904f32, 0.99855906f32, 0.9983897f32, 0.998211f32, 0.99802285f32, 0.9978253f32, 0.99761844f32, 0.99740213f32, 0.9971764f32, 0.9969413f32, 0.9966969f32, 0.99644303f32, 0.9961798f32, 0.99590725f32, 0.99562526f32, 0.9953339f32, 0.9950332f32, 0.99472314f32, 0.99440366f32, 0.9940749f32, 0.99373674f32, 0.9933892f32, 0.99303234f32, 0.9926661f32, 0.9922906f32, 0.9919057f32, 0.99151146f32, 0.99110794f32, 0.990695f32, 0.9902728f32, 0.9898413f32, 0.98940045f32, 0.98895025f32, 0.9884908f32, 0.988022f32, 0.98754394f32, 0.98705655f32, 0.9865599f32, 0.98605394f32, 0.9855387f32, 0.98501426f32, 0.98448044f32, 0.98393744f32, 0.9833851f32, 0.98282355f32, 0.9822527f32, 0.9816727f32, 0.9810834f32, 0.98048484f32, 0.9798771f32, 0.97926015f32, 0.97863394f32, 0.9779985f32, 0.9773539f32, 0.97670007f32, 0.9760371f32, 0.97536486f32, 0.9746835f32, 0.97399294f32, 0.97329324f32, 0.97258437f32, 0.9718663f32, 0.97113913f32, 0.97040284f32, 0.96965736f32, 0.9689028f32, 0.9681391f32, 0.9673663f32, 0.9665844f32, 0.9657934f32, 0.96499324f32, 0.96418405f32, 0.9633658f32, 0.9625385f32, 0.96170205f32, 0.9608566f32, 0.9600021f32, 0.95913863f32, 0.9582661f32, 0.9573845f32, 0.9564939f32, 0.9555943f32, 0.95468575f32, 0.9537682f32, 0.95284164f32, 0.95190614f32, 0.95096165f32, 0.9500082f32, 0.9490459f32, 0.9480746f32, 0.9470944f32, 0.94610524f32, 0.94510716f32, 0.94410026f32, 0.9430844f32, 0.94205976f32, 0.94102615f32, 0.9399837f32, 0.9389325f32, 0.93787235f32, 0.93680346f32, 0.9357257f32, 0.9346391f32, 0.9335438f32, 0.9324396f32, 0.9313267f32, 0.930205f32, 0.9290746f32, 0.92793536f32, 0.9267875f32, 0.9256308f32, 0.9244655f32, 0.9232914f32, 0.92210865f32, 0.9209172f32, 0.91971713f32, 0.9185084f32, 0.917291f32, 0.916065f32, 0.9148303f32, 0.91358703f32, 0.91233516f32, 0.91107476f32, 0.9098057f32, 0.9085281f32, 0.907242f32, 0.90594727f32, 0.9046441f32, 0.90333235f32, 0.9020121f32, 0.9006834f32, 0.89934623f32, 0.8980006f32, 0.89664644f32, 0.89528394f32, 0.893913f32, 0.89253354f32, 
0.89114577f32, 0.8897496f32, 0.888345f32, 0.88693213f32, 0.88551086f32, 0.88408124f32, 0.88264334f32, 0.8811971f32, 0.87974256f32, 0.8782798f32, 0.8768087f32, 0.8753294f32, 0.8738418f32, 0.87234604f32, 0.87084204f32, 0.86932987f32, 0.8678095f32, 0.866281f32, 0.86474425f32, 0.8631994f32, 0.8616465f32, 0.86008537f32, 0.8585162f32, 0.85693896f32, 0.85535365f32, 0.8537603f32, 0.8521589f32, 0.85054946f32, 0.848932f32, 0.8473066f32, 0.84567326f32, 0.8440319f32, 0.8423826f32, 0.84072536f32, 0.83906025f32, 0.8373872f32, 0.83570623f32, 0.8340175f32, 0.83232087f32, 0.8306164f32, 0.8289041f32, 0.827184f32, 0.82545614f32, 0.8237205f32, 0.82197714f32, 0.82022595f32, 0.81846714f32, 0.8167006f32, 0.8149263f32, 0.8131444f32, 0.8113548f32, 0.8095577f32, 0.80775285f32, 0.8059404f32, 0.80412036f32, 0.80229276f32, 0.80045766f32, 0.798615f32, 0.7967648f32, 0.79490715f32, 0.79304194f32, 0.79116935f32, 0.78928924f32, 0.78740174f32, 0.7855068f32, 0.7836045f32, 0.7816948f32, 0.77977777f32, 0.77785337f32, 0.7759217f32, 0.77398264f32, 0.7720364f32, 0.77008283f32, 0.768122f32, 0.766154f32, 0.76417875f32, 0.7621963f32, 0.76020664f32, 0.7582099f32, 0.756206f32, 0.754195f32, 0.7521768f32, 0.75015163f32, 0.74811935f32, 0.74608004f32, 0.7440337f32, 0.74198043f32, 0.7399201f32, 0.7378528f32, 0.73577857f32, 0.7336974f32, 0.73160934f32, 0.7295144f32, 0.72741264f32, 0.72530395f32, 0.72318846f32, 0.7210662f32, 0.7189371f32, 0.7168012f32, 0.7146587f32, 0.7125094f32, 0.7103533f32, 0.7081906f32, 0.70602125f32, 0.7038452f32, 0.70166254f32, 0.6994733f32, 0.6972775f32, 0.6950751f32, 0.69286615f32, 0.6906507f32, 0.6884287f32, 0.68620026f32, 0.68396544f32, 0.6817241f32, 0.6794763f32, 0.67722213f32, 0.6749616f32, 0.67269474f32, 0.67042154f32, 0.668142f32, 0.66585624f32, 0.66356415f32, 0.66126585f32, 0.6589613f32, 0.65665054f32, 0.6543336f32, 0.65201056f32, 0.6496813f32, 0.64734596f32, 0.6450045f32, 0.642657f32, 0.64030343f32, 0.63794386f32, 0.63557833f32, 0.6332067f32, 0.6308292f32, 0.62844574f32, 0.6260564f32, 
0.6236611f32, 0.6212599f32, 0.618853f32, 0.6164402f32, 0.61402154f32, 0.6115971f32, 0.609167f32, 0.6067311f32, 0.6042895f32, 0.6018422f32, 0.5993893f32, 0.5969307f32, 0.5944665f32, 0.59199667f32, 0.5895213f32, 0.58704036f32, 0.58455396f32, 0.582062f32, 0.5795646f32, 0.57706165f32, 0.5745533f32, 0.5720396f32, 0.5695205f32, 0.56699604f32, 0.56446624f32, 0.56193113f32, 0.55939066f32, 0.556845f32, 0.5542941f32, 0.55173796f32, 0.54917663f32, 0.5466102f32, 0.54403853f32, 0.54146177f32, 0.53887993f32, 0.53629297f32, 0.533701f32, 0.53110397f32, 0.528502f32, 0.525895f32, 0.52328306f32, 0.52066624f32, 0.5180445f32, 0.5154178f32, 0.5127863f32, 0.51015f32, 0.50750893f32, 0.50486314f32, 0.50221246f32, 0.4995571f32, 0.49689704f32, 0.4942323f32, 0.4915629f32, 0.48888886f32, 0.48621026f32, 0.48352703f32, 0.48083928f32, 0.478147f32, 0.47545022f32, 0.47274897f32, 0.47004324f32, 0.46733323f32, 0.4646187f32, 0.4618998f32, 0.45917654f32, 0.45644897f32, 0.4537171f32, 0.45098096f32, 0.44824058f32, 0.44549596f32, 0.44274718f32, 0.43999422f32, 0.4372371f32, 0.4344759f32, 0.43171057f32, 0.4289413f32, 0.4261679f32, 0.42339048f32, 0.42060906f32, 0.4178237f32, 0.4150344f32, 0.4122412f32, 0.40944412f32, 0.40664318f32, 0.4038384f32, 0.40102983f32, 0.3982175f32, 0.3954014f32, 0.39258158f32, 0.3897582f32, 0.386931f32, 0.3841002f32, 0.38126576f32, 0.37842774f32, 0.37558615f32, 0.37274104f32, 0.36989242f32, 0.3670403f32, 0.36418474f32, 0.36132574f32, 0.35846335f32, 0.3555976f32, 0.35272858f32, 0.34985614f32, 0.34698042f32, 0.34410143f32, 0.3412192f32, 0.33833376f32, 0.33544514f32, 0.33255333f32, 0.32965842f32, 0.3267604f32, 0.3238593f32, 0.32095516f32, 0.318048f32, 0.31513783f32, 0.31222484f32, 0.30930877f32, 0.3063898f32, 0.30346793f32, 0.30054322f32, 0.29761568f32, 0.29468533f32, 0.29175222f32, 0.28881636f32, 0.2858778f32, 0.2829365f32, 0.27999258f32, 0.277046f32, 0.27409682f32, 0.27114516f32, 0.26819086f32, 0.26523402f32, 0.2622747f32, 0.2593129f32, 0.25634867f32, 0.253382f32, 0.25041297f32, 
0.24744157f32, 0.24446784f32, 0.24149182f32, 0.23851351f32, 0.23553298f32, 0.23255022f32, 0.22956538f32, 0.22657827f32, 0.22358903f32, 0.22059768f32, 0.21760426f32, 0.21460879f32, 0.21161129f32, 0.20861182f32, 0.20561036f32, 0.20260698f32, 0.1996017f32, 0.19659452f32, 0.1935855f32, 0.19057466f32, 0.18756214f32, 0.18454774f32, 0.18153161f32, 0.17851377f32, 0.17549424f32, 0.17247306f32, 0.16945025f32, 0.16642585f32, 0.16339989f32, 0.16037239f32, 0.15734339f32, 0.1543129f32, 0.15128095f32, 0.14824758f32, 0.14521293f32, 0.1421768f32, 0.13913934f32, 0.13610056f32, 0.1330605f32, 0.13001919f32, 0.12697665f32, 0.12393293f32, 0.12088803f32, 0.117841996f32, 0.11479485f32, 0.11174663f32, 0.108697355f32, 0.10564717f32, 0.10259588f32, 0.099543616f32, 0.09649042f32, 0.093436316f32, 0.09038134f32, 0.0873255f32, 0.084268846f32, 0.081211396f32, 0.07815318f32, 0.07509423f32, 0.072034575f32, 0.06897424f32, 0.06591326f32, 0.06285177f32, 0.059789576f32, 0.05672682f32, 0.053663526f32, 0.05059973f32, 0.047535457f32, 0.044470735f32, 0.041405596f32, 0.038340066f32, 0.035274178f32, 0.032207955f32, 0.029141428f32, 0.02607463f32, 0.023007585f32, 0.019940441f32, 0.016872993f32, 0.013805384f32, 0.010737646f32, 0.007669807f32, 0.0046018953f32, 0.0015339408f32, -0.00000004371139f32, -0.004601983f32, -0.007669894f32, -0.010737733f32, -0.013805471f32, -0.01687308f32, -0.019940529f32, -0.02300767f32, -0.026074717f32, -0.029141515f32, -0.03220804f32, -0.035274263f32, -0.038340155f32, -0.04140568f32, -0.044470824f32, -0.047535542f32, -0.050599817f32, -0.053663615f32, -0.056726906f32, -0.059789665f32, -0.06285186f32, -0.06591334f32, -0.06897433f32, -0.072034664f32, -0.07509432f32, -0.07815327f32, -0.081211485f32, -0.084268935f32, -0.08732559f32, -0.09038142f32, -0.093436405f32, -0.09649051f32, -0.099543706f32, -0.10259596f32, -0.10564726f32, -0.10869744f32, -0.11174671f32, -0.11479494f32, -0.117842086f32, -0.120888114f32, -0.12393301f32, -0.12697674f32, -0.13001928f32, -0.13306059f32, -0.13610065f32, 
-0.13913943f32, -0.1421769f32, -0.14521302f32, -0.14824767f32, -0.15128103f32, -0.15431298f32, -0.15734348f32, -0.16037248f32, -0.16339998f32, -0.16642594f32, -0.16945034f32, -0.17247315f32, -0.17549431f32, -0.17851384f32, -0.1815317f32, -0.18454783f32, -0.18756223f32, -0.19057475f32, -0.19358559f32, -0.19659461f32, -0.19960177f32, -0.20260707f32, -0.20561045f32, -0.20861189f32, -0.21161138f32, -0.21460888f32, -0.21760434f32, -0.22059777f32, -0.2235891f32, -0.22657835f32, -0.22956547f32, -0.23255031f32, -0.23553306f32, -0.2385136f32, -0.2414919f32, -0.24446793f32, -0.24744165f32, -0.25041306f32, -0.2533821f32, -0.25634873f32, -0.259313f32, -0.26227477f32, -0.2652341f32, -0.26819095f32, -0.27114525f32, -0.2740969f32, -0.27704608f32, -0.27999267f32, -0.2829366f32, -0.28587785f32, -0.28881645f32, -0.2917523f32, -0.29468542f32, -0.29761577f32, -0.3005433f32, -0.30346802f32, -0.3063899f32, -0.30930886f32, -0.31222492f32, -0.31513792f32, -0.3180481f32, -0.32095525f32, -0.3238594f32, -0.32676047f32, -0.3296585f32, -0.33255342f32, -0.3354452f32, -0.33833385f32, -0.34121928f32, -0.34410152f32, -0.3469805f32, -0.34985623f32, -0.35272866f32, -0.35559767f32, -0.35846344f32, -0.36132583f32, -0.36418483f32, -0.36704037f32, -0.36989248f32, -0.3727411f32, -0.37558624f32, -0.37842783f32, -0.38126585f32, -0.3841003f32, -0.3869311f32, -0.38975826f32, -0.39258167f32, -0.39540148f32, -0.3982176f32, -0.4010299f32, -0.4038385f32, -0.40664324f32, -0.40944418f32, -0.41224128f32, -0.41503447f32, -0.41782367f32, -0.42060903f32, -0.42339045f32, -0.42616788f32, -0.42894128f32, -0.43171066f32, -0.43447596f32, -0.43723717f32, -0.4399943f32, -0.44274727f32, -0.44549605f32, -0.44824067f32, -0.45098105f32, -0.45371717f32, -0.45644906f32, -0.45917663f32, -0.46189988f32, -0.46461877f32, -0.46733332f32, -0.47004345f32, -0.47274914f32, -0.4754504f32, -0.47814718f32, -0.48083946f32, -0.4835272f32, -0.48621044f32, -0.48888886f32, -0.49156287f32, -0.49423227f32, -0.496897f32, -0.4995571f32, 
-0.50221246f32, -0.5048631f32, -0.507509f32, -0.51015013f32, -0.51278645f32, -0.5154179f32, -0.51804453f32, -0.5206663f32, -0.5232831f32, -0.52589506f32, -0.52850205f32, -0.5311041f32, -0.53370106f32, -0.5362931f32, -0.53888f32, -0.5414619f32, -0.54403865f32, -0.5466103f32, -0.54917675f32, -0.55173814f32, -0.5542942f32, -0.5568452f32, -0.55939084f32, -0.5619311f32, -0.5644662f32, -0.56699604f32, -0.5695205f32, -0.5720396f32, -0.5745534f32, -0.57706165f32, -0.5795646f32, -0.582062f32, -0.58455396f32, -0.5870404f32, -0.58952135f32, -0.5919967f32, -0.59446657f32, -0.59693074f32, -0.5993894f32, -0.60184234f32, -0.6042896f32, -0.60673124f32, -0.6091671f32, -0.61159724f32, -0.61402166f32, -0.6164403f32, -0.6188531f32, -0.6212601f32, -0.6236612f32, -0.62605655f32, -0.62844574f32, -0.6308292f32, -0.6332067f32, -0.6355783f32, -0.63794386f32, -0.6403035f32, -0.64265704f32, -0.6450046f32, -0.64734596f32, -0.6496813f32, -0.65201056f32, -0.65433365f32, -0.6566506f32, -0.65896136f32, -0.6612659f32, -0.6635642f32, -0.6658563f32, -0.6681421f32, -0.67042166f32, -0.67269486f32, -0.67496175f32, -0.67722225f32, -0.67947644f32, -0.6817242f32, -0.6839655f32, -0.68620044f32, -0.6884289f32, -0.6906508f32, -0.69286615f32, -0.6950751f32, -0.6972775f32, -0.6994733f32, -0.7016626f32, -0.70384526f32, -0.70602125f32, -0.7081906f32, -0.7103534f32, -0.7125094f32, -0.71465874f32, -0.7168013f32, -0.71893716f32, -0.72106624f32, -0.7231885f32, -0.725304f32, -0.7274127f32, -0.7295145f32, -0.73160946f32, -0.73369753f32, -0.7357787f32, -0.73785293f32, -0.7399202f32, -0.7419805f32, -0.7440339f32, -0.74608016f32, -0.7481195f32, -0.75015175f32, -0.7521768f32, -0.754195f32, -0.756206f32, -0.7582099f32, -0.7602067f32, -0.7621963f32, -0.76417875f32, -0.766154f32, -0.768122f32, -0.77008283f32, -0.77203643f32, -0.7739827f32, -0.77592176f32, -0.7778534f32, -0.7797778f32, -0.7816949f32, -0.78360456f32, -0.7855069f32, -0.7874018f32, -0.78928936f32, -0.7911694f32, -0.79304206f32, -0.7949072f32, -0.7967649f32, 
-0.7986151f32, -0.8004578f32, -0.8022929f32, -0.80412036f32, -0.8059404f32, -0.8077528f32, -0.8095576f32, -0.8113548f32, -0.8131444f32, -0.8149263f32, -0.8167006f32, -0.81846714f32, -0.820226f32, -0.82197714f32, -0.8237205f32, -0.8254562f32, -0.8271841f32, -0.82890415f32, -0.8306165f32, -0.8323209f32, -0.8340176f32, -0.83570635f32, -0.83738726f32, -0.8390603f32, -0.8407254f32, -0.84238267f32, -0.844032f32, -0.8456733f32, -0.8473067f32, -0.84893215f32, -0.8505496f32, -0.8521589f32, -0.8537603f32, -0.85535365f32, -0.85693896f32, -0.8585162f32, -0.86008537f32, -0.8616465f32, -0.8631994f32, -0.86474425f32, -0.866281f32, -0.86780953f32, -0.86932987f32, -0.8708421f32, -0.8723461f32, -0.8738419f32, -0.87532943f32, -0.87680876f32, -0.87827986f32, -0.8797426f32, -0.88119715f32, -0.8826434f32, -0.8840813f32, -0.8855109f32, -0.8869322f32, -0.8883451f32, -0.88974965f32, -0.8911458f32, -0.89253366f32, -0.8939129f32, -0.89528394f32, -0.89664644f32, -0.8980006f32, -0.89934623f32, -0.9006834f32, -0.90201217f32, -0.90333235f32, -0.90464413f32, -0.9059473f32, -0.907242f32, -0.90852815f32, -0.9098057f32, -0.91107476f32, -0.9123352f32, -0.9135871f32, -0.9148303f32, -0.91606504f32, -0.91729105f32, -0.91850847f32, -0.9197172f32, -0.9209173f32, -0.9221087f32, -0.9232915f32, -0.92446554f32, -0.9256309f32, -0.92678756f32, -0.92793536f32, -0.9290746f32, -0.930205f32, -0.9313267f32, -0.9324396f32, -0.9335438f32, -0.93463916f32, -0.9357257f32, -0.93680346f32, -0.9378724f32, -0.9389325f32, -0.9399838f32, -0.9410262f32, -0.94205976f32, -0.9430845f32, -0.94410026f32, -0.9451072f32, -0.94610524f32, -0.9470944f32, -0.94807464f32, -0.9490459f32, -0.9500083f32, -0.9509617f32, -0.9519062f32, -0.9528417f32, -0.95376825f32, -0.9546858f32, -0.9555944f32, -0.9564939f32, -0.95738447f32, -0.9582661f32, -0.95913863f32, -0.9600021f32, -0.9608566f32, -0.9617021f32, -0.9625385f32, -0.9633658f32, -0.96418405f32, -0.96499324f32, -0.9657934f32, -0.9665844f32, -0.96736634f32, -0.9681391f32, -0.9689028f32, 
-0.9696574f32, -0.97040284f32, -0.9711392f32, -0.97186637f32, -0.9725844f32, -0.9732933f32, -0.973993f32, -0.9746835f32, -0.9753649f32, -0.97603714f32, -0.9767001f32, -0.97735393f32, -0.9779985f32, -0.97863394f32, -0.97926015f32, -0.9798771f32, -0.98048484f32, -0.9810834f32, -0.9816727f32, -0.9822528f32, -0.98282355f32, -0.98338515f32, -0.98393744f32, -0.98448044f32, -0.98501426f32, -0.9855387f32, -0.986054f32, -0.9865599f32, -0.9870566f32, -0.98754394f32, -0.988022f32, -0.9884908f32, -0.9889503f32, -0.98940045f32, -0.9898413f32, -0.9902728f32, -0.99069506f32, -0.99110794f32, -0.9915115f32, -0.9919057f32, -0.9922906f32, -0.9926661f32, -0.99303234f32, -0.9933892f32, -0.99373674f32, -0.9940749f32, -0.99440366f32, -0.99472314f32, -0.9950332f32, -0.9953339f32, -0.99562526f32, -0.99590725f32, -0.9961798f32, -0.99644303f32, -0.9966969f32, -0.9969414f32, -0.99717647f32, -0.99740213f32, -0.99761844f32, -0.9978254f32, -0.99802285f32, -0.998211f32, -0.9983897f32, -0.99855906f32, -0.99871904f32, -0.99886954f32, -0.9990107f32, -0.9991424f32, -0.9992647f32, -0.99937767f32, -0.9994812f32, -0.9995753f32, -0.99966f32, -0.9997353f32, -0.99980116f32, -0.99985766f32, -0.9999047f32, -0.99994236f32, -0.9999706f32, -0.9999894f32, -0.9999988f32, -1f32, -0.9999894f32, -0.9999706f32, -0.99994236f32, -0.9999047f32, -0.99985766f32, -0.99980116f32, -0.9997353f32, -0.99966f32, -0.9995753f32, -0.9994812f32, -0.99937767f32, -0.9992647f32, -0.9991424f32, -0.9990107f32, -0.99886954f32, -0.99871904f32, -0.99855906f32, -0.9983897f32, -0.998211f32, -0.99802285f32, -0.9978253f32, -0.99761844f32, -0.99740213f32, -0.9971764f32, -0.9969413f32, -0.9966969f32, -0.99644303f32, -0.9961798f32, -0.99590725f32, -0.99562526f32, -0.9953339f32, -0.9950332f32, -0.9947231f32, -0.99440366f32, -0.9940749f32, -0.9937367f32, -0.9933892f32, -0.99303234f32, -0.9926661f32, -0.99229056f32, -0.9919057f32, -0.99151146f32, -0.99110794f32, -0.990695f32, -0.9902728f32, -0.9898413f32, -0.98940045f32, -0.98895025f32, 
-0.98849076f32, -0.988022f32, -0.98754394f32, -0.98705655f32, -0.9865599f32, -0.98605394f32, -0.9855387f32, -0.9850142f32, -0.98448044f32, -0.9839374f32, -0.9833851f32, -0.98282355f32, -0.9822527f32, -0.98167264f32, -0.98108333f32, -0.98048484f32, -0.97987705f32, -0.9792601f32, -0.9786339f32, -0.9779985f32, -0.97735393f32, -0.97670007f32, -0.9760371f32, -0.97536486f32, -0.9746835f32, -0.97399294f32, -0.97329324f32, -0.97258437f32, -0.9718663f32, -0.97113913f32, -0.97040284f32, -0.96965736f32, -0.96890277f32, -0.9681391f32, -0.9673663f32, -0.9665843f32, -0.9657933f32, -0.96499324f32, -0.96418405f32, -0.96336573f32, -0.9625384f32, -0.96170205f32, -0.96085656f32, -0.9600021f32, -0.9591386f32, -0.958266f32, -0.95738447f32, -0.95649385f32, -0.95559436f32, -0.95468575f32, -0.9537682f32, -0.95284164f32, -0.95190614f32, -0.95096165f32, -0.9500082f32, -0.9490459f32, -0.9480746f32, -0.9470943f32, -0.9461052f32, -0.94510716f32, -0.9441002f32, -0.9430844f32, -0.9420597f32, -0.94102615f32, -0.9399837f32, -0.9389324f32, -0.93787235f32, -0.9368034f32, -0.9357256f32, -0.9346391f32, -0.9335437f32, -0.93243957f32, -0.9313266f32, -0.9302049f32, -0.9290745f32, -0.9279353f32, -0.9267875f32, -0.9256308f32, -0.9244655f32, -0.9232914f32, -0.92210865f32, -0.9209172f32, -0.91971713f32, -0.91850835f32, -0.917291f32, -0.9160649f32, -0.91483027f32, -0.91358703f32, -0.91233516f32, -0.9110747f32, -0.90980566f32, -0.9085281f32, -0.90724194f32, -0.90594727f32, -0.904644f32, -0.9033323f32, -0.90201205f32, -0.90068334f32, -0.8993462f32, -0.8980005f32, -0.8966464f32, -0.8952838f32, -0.89391285f32, -0.89253354f32, -0.89114577f32, -0.8897496f32, -0.888345f32, -0.88693213f32, -0.88551086f32, -0.88408124f32, -0.88264334f32, -0.8811971f32, -0.87974256f32, -0.87827975f32, -0.8768087f32, -0.8753294f32, -0.8738418f32, -0.872346f32, -0.870842f32, -0.8693298f32, -0.8678094f32, -0.8662809f32, -0.8647442f32, -0.86319935f32, -0.86164635f32, -0.8600853f32, -0.85851616f32, -0.8569389f32, -0.85535353f32, 
-0.8537602f32, -0.8521588f32, -0.85054946f32, -0.848932f32, -0.8473066f32, -0.84567326f32, -0.8440319f32, -0.8423826f32, -0.84072536f32, -0.8390602f32, -0.83738714f32, -0.83570623f32, -0.83401746f32, -0.8323208f32, -0.83061635f32, -0.82890403f32, -0.82718396f32, -0.8254561f32, -0.82372046f32, -0.821977f32, -0.8202259f32, -0.818467f32, -0.81670046f32, -0.8149262f32, -0.8131443f32, -0.81135476f32, -0.8095575f32, -0.80775267f32, -0.8059403f32, -0.80412024f32, -0.8022928f32, -0.80045766f32, -0.798615f32, -0.7967648f32, -0.7949071f32, -0.79304194f32, -0.7911693f32, -0.78928924f32, -0.78740174f32, -0.7855068f32, -0.78360444f32, -0.78169477f32, -0.7797777f32, -0.7778533f32, -0.77592164f32, -0.7739826f32, -0.7720363f32, -0.7700828f32, -0.76812196f32, -0.7661539f32, -0.76417863f32, -0.7621962f32, -0.7602066f32, -0.75820976f32, -0.75620586f32, -0.75419486f32, -0.7521767f32, -0.75015163f32, -0.74811935f32, -0.74608004f32, -0.74403375f32, -0.7419804f32, -0.7399201f32, -0.7378528f32, -0.73577857f32, -0.7336974f32, -0.73160934f32, -0.72951436f32, -0.7274126f32, -0.7253039f32, -0.7231884f32, -0.7210661f32, -0.71893704f32, -0.71680117f32, -0.7146586f32, -0.7125093f32, -0.71035326f32, -0.7081905f32, -0.70602113f32, -0.7038451f32, -0.7016625f32, -0.6994732f32, -0.69727737f32, -0.695075f32, -0.692866f32, -0.6906507f32, -0.68842876f32, -0.6862003f32, -0.6839654f32, -0.6817241f32, -0.6794763f32, -0.67722213f32, -0.6749616f32, -0.67269474f32, -0.6704215f32, -0.66814196f32, -0.6658562f32, -0.6635641f32, -0.66126573f32, -0.6589612f32, -0.6566504f32, -0.65433353f32, -0.6520106f32, -0.6496814f32, -0.647346f32, -0.6450046f32, -0.6426571f32, -0.6403035f32, -0.6379439f32, -0.63557833f32, -0.6332068f32, -0.6308292f32, -0.62844574f32, -0.6260564f32, -0.6236611f32, -0.62126f32, -0.618853f32, -0.6164401f32, -0.61402154f32, -0.6115971f32, -0.609167f32, -0.60673106f32, -0.6042895f32, -0.60184216f32, -0.5993892f32, -0.5969306f32, -0.5944664f32, -0.5919966f32, -0.5895212f32, -0.5870403f32, 
-0.58455384f32, -0.5820619f32, -0.57956445f32, -0.57706153f32, -0.5745532f32, -0.5720395f32, -0.56952035f32, -0.56699586f32, -0.56446606f32, -0.56193095f32, -0.55939054f32, -0.55684483f32, -0.55429393f32, -0.5517378f32, -0.54917645f32, -0.54660994f32, -0.5440383f32, -0.5414615f32, -0.53887963f32, -0.53629273f32, -0.5337007f32, -0.53110373f32, -0.5285017f32, -0.5258947f32, -0.5232832f32, -0.52066636f32, -0.5180446f32, -0.51541793f32, -0.51278645f32, -0.51015013f32, -0.50750905f32, -0.50486314f32, -0.5022125f32, -0.49955714f32, -0.49689707f32, -0.49423233f32, -0.49156293f32, -0.4888889f32, -0.4862103f32, -0.48352706f32, -0.4808393f32, -0.47814703f32, -0.47545025f32, -0.472749f32, -0.47004327f32, -0.46733314f32, -0.46461862f32, -0.46189973f32, -0.45917648f32, -0.45644888f32, -0.45371702f32, -0.45098087f32, -0.4482405f32, -0.4454959f32, -0.4427471f32, -0.43999413f32, -0.43723702f32, -0.4344758f32, -0.4317105f32, -0.42894113f32, -0.42616773f32, -0.4233903f32, -0.42060888f32, -0.41782352f32, -0.4150342f32, -0.412241f32, -0.4094439f32, -0.40664297f32, -0.40383822f32, -0.40102965f32, -0.3982173f32, -0.3954012f32, -0.3925814f32, -0.3897579f32, -0.3869307f32, -0.3840999f32, -0.38126546f32, -0.37842745f32, -0.37558585f32, -0.37274116f32, -0.36989254f32, -0.36704043f32, -0.3641849f32, -0.3613259f32, -0.3584635f32, -0.35559773f32, -0.3527286f32, -0.34985617f32, -0.34698045f32, -0.34410146f32, -0.34121922f32, -0.3383338f32, -0.33544517f32, -0.33255336f32, -0.32965845f32, -0.32676044f32, -0.32385933f32, -0.3209552f32, -0.31804803f32, -0.31513786f32, -0.31222475f32, -0.30930868f32, -0.30638972f32, -0.30346787f32, -0.30054316f32, -0.29761562f32, -0.29468527f32, -0.29175213f32, -0.28881627f32, -0.2858777f32, -0.28293642f32, -0.2799925f32, -0.2770459f32, -0.27409673f32, -0.271145f32, -0.26819068f32, -0.26523384f32, -0.2622745f32, -0.2593127f32, -0.25634846f32, -0.25338182f32, -0.25041276f32, -0.24744137f32, -0.24446765f32, -0.24149162f32, -0.23851332f32, -0.23553278f32, 
-0.23255001f32, -0.22956507f32, -0.22657795f32, -0.2235887f32, -0.22059736f32, -0.21760394f32, -0.21460846f32, -0.21161097f32, -0.20861195f32, -0.20561051f32, -0.20260713f32, -0.19960184f32, -0.19659467f32, -0.19358565f32, -0.19057481f32, -0.18756217f32, -0.18454777f32, -0.18153164f32, -0.1785138f32, -0.17549427f32, -0.17247309f32, -0.16945028f32, -0.16642588f32, -0.16339992f32, -0.16037242f32, -0.15734342f32, -0.15431292f32, -0.15128098f32, -0.14824761f32, -0.14521284f32, -0.14217672f32, -0.13913925f32, -0.13610047f32, -0.13306041f32, -0.1300191f32, -0.12697656f32, -0.12393284f32, -0.12088794f32, -0.11784191f32, -0.11479477f32, -0.11174654f32, -0.108697265f32, -0.10564697f32, -0.10259567f32, -0.099543415f32, -0.09649022f32, -0.093436114f32, -0.09038113f32, -0.0873253f32, -0.08426864f32, -0.08121119f32, -0.07815298f32, -0.07509403f32, -0.07203437f32, -0.06897403f32, -0.06591305f32, -0.06285145f32, -0.05978925f32, -0.056726493f32, -0.0536632f32, -0.050599404f32, -0.04753513f32, -0.04447041f32, -0.041405745f32, -0.03834022f32, -0.035274327f32, -0.032208104f32, -0.029141579f32, -0.02607478f32, -0.023007736f32, -0.019940473f32, -0.016873024f32, -0.0138054155f32, -0.010737678f32, -0.0076698386f32, -0.0046019275f32, -0.0015339726f32, 0.000000011924881f32, 0.004601951f32, 0.0076698624f32, 0.010737701f32, 0.01380544f32, 0.016873049f32, 0.019940497f32, 0.023007758f32, 0.026074804f32, 0.029141603f32, 0.03220813f32, 0.03527435f32, 0.03834024f32, 0.04140577f32, 0.04447091f32, 0.04753563f32, 0.050599903f32, 0.0536637f32, 0.056726992f32, 0.05978975f32, 0.06285195f32, 0.06591355f32, 0.06897453f32, 0.072034866f32, 0.07509453f32, 0.078153476f32, 0.081211686f32, 0.084269136f32, 0.0873258f32, 0.09038163f32, 0.09343661f32, 0.09649072f32, 0.099543914f32, 0.10259617f32, 0.10564747f32, 0.108697765f32, 0.11174704f32, 0.11479526f32, 0.117842406f32, 0.12088844f32, 0.12393334f32, 0.12697707f32, 0.13001913f32, 0.13306044f32, 0.1361005f32, 0.13913928f32, 0.14217675f32, 0.14521287f32, 
0.14824763f32, 0.151281f32, 0.15431295f32, 0.15734343f32, 0.16037245f32, 0.16339995f32, 0.16642591f32, 0.16945031f32, 0.1724731f32, 0.17549428f32, 0.17851381f32, 0.18153165f32, 0.1845478f32, 0.1875622f32, 0.19057482f32, 0.19358568f32, 0.19659469f32, 0.19960186f32, 0.20260715f32, 0.20561053f32, 0.20861198f32, 0.21161146f32, 0.21460895f32, 0.21760443f32, 0.22059785f32, 0.2235892f32, 0.22657844f32, 0.22956555f32, 0.2325505f32, 0.23553327f32, 0.23851381f32, 0.24149211f32, 0.24446814f32, 0.24744186f32, 0.25041324f32, 0.2533823f32, 0.25634894f32, 0.2593132f32, 0.26227498f32, 0.26523432f32, 0.26819116f32, 0.27114546f32, 0.27409723f32, 0.2770464f32, 0.27999297f32, 0.2829369f32, 0.28587818f32, 0.28881675f32, 0.29175264f32, 0.29468527f32, 0.29761562f32, 0.30054316f32, 0.30346787f32, 0.30638975f32, 0.3093087f32, 0.31222478f32, 0.3151379f32, 0.31804806f32, 0.32095522f32, 0.32385936f32, 0.32676044f32, 0.32965848f32, 0.3325534f32, 0.33544517f32, 0.33833382f32, 0.34121925f32, 0.3441015f32, 0.34698048f32, 0.3498562f32, 0.35272864f32, 0.35559773f32, 0.35846353f32, 0.36132592f32, 0.3641849f32, 0.36704046f32, 0.36989257f32, 0.3727412f32, 0.37558633f32, 0.3784279f32, 0.38126594f32, 0.38410035f32, 0.38693118f32, 0.38975835f32, 0.39258185f32, 0.3954017f32, 0.39821777f32, 0.4010301f32, 0.40383866f32, 0.40664345f32, 0.4094444f32, 0.41224146f32, 0.41503468f32, 0.41782397f32, 0.42060933f32, 0.42339075f32, 0.42616817f32, 0.42894158f32, 0.43171096f32, 0.43447626f32, 0.43723747f32, 0.43999457f32, 0.44274756f32, 0.44549635f32, 0.44824094f32, 0.4509809f32, 0.45371705f32, 0.4564489f32, 0.45917648f32, 0.46189973f32, 0.46461865f32, 0.46733317f32, 0.4700433f32, 0.47274902f32, 0.47545028f32, 0.47814706f32, 0.48083934f32, 0.4835271f32, 0.4862103f32, 0.48888892f32, 0.49156296f32, 0.49423236f32, 0.4968971f32, 0.49955717f32, 0.5022125f32, 0.5048632f32, 0.50750905f32, 0.5101502f32, 0.5127865f32, 0.515418f32, 0.5180446f32, 0.52066636f32, 0.52328324f32, 0.5258952f32, 0.52850217f32, 0.53110415f32, 
0.5337012f32, 0.53629315f32, 0.53888005f32, 0.54146194f32, 0.5440387f32, 0.54661036f32, 0.5491769f32, 0.5517382f32, 0.55429435f32, 0.55684525f32, 0.5593909f32, 0.5619314f32, 0.5644665f32, 0.5669963f32, 0.5695208f32, 0.5720399f32, 0.5745536f32, 0.57706195f32, 0.5795648f32, 0.58206224f32, 0.58455426f32, 0.58704066f32, 0.58952165f32, 0.591997f32, 0.5944668f32, 0.5969306f32, 0.59938926f32, 0.6018422f32, 0.6042895f32, 0.6067311f32, 0.609167f32, 0.6115971f32, 0.61402154f32, 0.6164402f32, 0.618853f32, 0.62126f32, 0.6236611f32, 0.62605643f32, 0.6284458f32, 0.6308293f32, 0.6332068f32, 0.6355784f32, 0.637944f32, 0.64030355f32, 0.6426571f32, 0.64500463f32, 0.647346f32, 0.6496814f32, 0.6520106f32, 0.6543337f32, 0.65665066f32, 0.6589614f32, 0.66126597f32, 0.66356426f32, 0.66585636f32, 0.6681422f32, 0.6704217f32, 0.6726949f32, 0.6749618f32, 0.6772223f32, 0.6794765f32, 0.68172425f32, 0.68396556f32, 0.6862005f32, 0.68842894f32, 0.6906509f32, 0.6928664f32, 0.69507533f32, 0.6972777f32, 0.69947356f32, 0.70166284f32, 0.70384544f32, 0.7060215f32, 0.70819086f32, 0.7103536f32, 0.71250963f32, 0.7146589f32, 0.7168015f32, 0.7189374f32, 0.7210665f32, 0.72318846f32, 0.72530395f32, 0.7274126f32, 0.7295144f32, 0.73160934f32, 0.7336974f32, 0.73577857f32, 0.7378528f32, 0.7399201f32, 0.74198043f32, 0.74403375f32, 0.7460801f32, 0.7481194f32, 0.7501517f32, 0.7521769f32, 0.75419503f32, 0.75620604f32, 0.75820994f32, 0.76020676f32, 0.76219636f32, 0.7641788f32, 0.76615405f32, 0.7681221f32, 0.7700829f32, 0.7720365f32, 0.77398276f32, 0.77592176f32, 0.7778535f32, 0.7797779f32, 0.78169495f32, 0.7836046f32, 0.78550696f32, 0.78740185f32, 0.78928936f32, 0.79116946f32, 0.7930421f32, 0.7949073f32, 0.79676497f32, 0.79861516f32, 0.80045784f32, 0.80229294f32, 0.80412054f32, 0.80594057f32, 0.80775297f32, 0.8095578f32, 0.81135505f32, 0.8131446f32, 0.8149265f32, 0.81670076f32, 0.8184673f32, 0.8202262f32, 0.8219773f32, 0.8237207f32, 0.8254564f32, 0.82718426f32, 0.8289041f32, 0.83061635f32, 0.8323208f32, 0.83401746f32, 
0.83570623f32, 0.8373872f32, 0.83906025f32, 0.84072536f32, 0.8423826f32, 0.8440319f32, 0.84567326f32, 0.84730667f32, 0.8489321f32, 0.8505495f32, 0.8521589f32, 0.8537603f32, 0.8553537f32, 0.856939f32, 0.8585163f32, 0.8600854f32, 0.86164653f32, 0.8631995f32, 0.8647443f32, 0.86628103f32, 0.86780953f32, 0.8693299f32, 0.87084216f32, 0.87234616f32, 0.87384194f32, 0.8753295f32, 0.8768088f32, 0.87827986f32, 0.8797427f32, 0.8811972f32, 0.88264346f32, 0.88408136f32, 0.885511f32, 0.88693225f32, 0.8883451f32, 0.8897497f32, 0.8911459f32, 0.89253366f32, 0.8939131f32, 0.89528406f32, 0.8966466f32, 0.8980007f32, 0.89934635f32, 0.9006836f32, 0.9020123f32, 0.90333253f32, 0.90464425f32, 0.90594745f32, 0.9072421f32, 0.90852827f32, 0.9098059f32, 0.9110749f32, 0.91233516f32, 0.91358703f32, 0.91483027f32, 0.916065f32, 0.917291f32, 0.9185084f32, 0.91971713f32, 0.9209172f32, 0.92210865f32, 0.92329144f32, 0.9244655f32, 0.92563087f32, 0.9267875f32, 0.9279354f32, 0.9290746f32, 0.93020505f32, 0.93132675f32, 0.9324397f32, 0.9335438f32, 0.93463916f32, 0.93572575f32, 0.93680346f32, 0.9378724f32, 0.93893254f32, 0.9399838f32, 0.9410262f32, 0.9420598f32, 0.9430845f32, 0.9441003f32, 0.9451073f32, 0.9461053f32, 0.94709444f32, 0.94807464f32, 0.94904596f32, 0.95000833f32, 0.95096177f32, 0.9519062f32, 0.9528417f32, 0.95376825f32, 0.9546858f32, 0.9555944f32, 0.956494f32, 0.9573846f32, 0.95826614f32, 0.9591387f32, 0.96000224f32, 0.96085674f32, 0.96170217f32, 0.96253854f32, 0.9633659f32, 0.96418417f32, 0.96499336f32, 0.96579343f32, 0.9665845f32, 0.9673664f32, 0.9681391f32, 0.96890277f32, 0.96965736f32, 0.97040284f32, 0.97113913f32, 0.9718663f32, 0.97258437f32, 0.97329324f32, 0.97399294f32, 0.9746835f32, 0.97536486f32, 0.9760371f32, 0.97670007f32, 0.97735393f32, 0.97799855f32, 0.97863394f32, 0.97926015f32, 0.9798771f32, 0.9804849f32, 0.9810834f32, 0.9816727f32, 0.9822528f32, 0.98282355f32, 0.98338515f32, 0.98393744f32, 0.9844805f32, 0.98501426f32, 0.9855388f32, 0.986054f32, 0.9865599f32, 0.9870566f32, 
0.987544f32, 0.988022f32, 0.9884908f32, 0.9889503f32, 0.98940045f32, 0.98984134f32, 0.9902728f32, 0.99069506f32, 0.99110794f32, 0.9915115f32, 0.99190575f32, 0.9922906f32, 0.9926662f32, 0.9930324f32, 0.99338925f32, 0.99373674f32, 0.99407494f32, 0.9944037f32, 0.99472314f32, 0.99503326f32, 0.99533397f32, 0.9956253f32, 0.99590725f32, 0.9961799f32, 0.99644303f32, 0.9966969f32, 0.9969413f32, 0.9971764f32, 0.99740213f32, 0.99761844f32, 0.9978253f32, 0.99802285f32, 0.998211f32, 0.9983897f32, 0.99855906f32, 0.99871904f32, 0.99886954f32, 0.9990107f32, 0.9991424f32, 0.9992648f32, 0.99937767f32, 0.9994812f32, 0.9995753f32, 0.99966f32, 0.9997353f32, 0.99980116f32, 0.99985766f32, 0.9999047f32, 0.99994236f32, 0.9999706f32, 0.9999894f32, 0.9999988f32, ];\n", "file_path": "src/math/num/trigonometry.rs", "rank": 25, "score": 83487.4219882862 }, { "content": "/// Interpret a slice of bytes as a PNG and decodes it into an RGBA image.\n\npub fn read_png(bytes: &[u8]) -> Image<RGBA8> {\n\n let decoder = Decoder::new(bytes);\n\n let (info, mut reader) = decoder.read_info().expect(\"Unable to read PNG info.\");\n\n let mut input = vec![0; info.buffer_size()];\n\n reader.next_frame(&mut input).expect(\"Unable to read PNG payload.\");\n\n\n\n match info.color_type {\n\n ColorType::RGB => {\n\n let mut output = Vec::with_capacity((input.len() / 3) * 4);\n\n for rgb in input.chunks_exact(3) {\n\n output.push(RGBA8::new(rgb[0], rgb[1], rgb[2], 255));\n\n }\n\n Image::from_vec(output, info.width, info.height)\n\n }\n\n ColorType::RGBA => {\n\n let mut output = Vec::with_capacity(input.len());\n\n for rgba in input.chunks_exact(4) {\n\n output.push(RGBA8::new(rgba[0], rgba[1], rgba[2], rgba[3]));\n\n }\n\n Image::from_vec(output, info.width, info.height)\n", "file_path": "src/image/png.rs", "rank": 26, "score": 81121.06622050489 }, { "content": "/// Computes the four quadrant arctangent of self (y) and other (x) in radians.\n\n///\n\n/// * Average error of 0.00231 radians.\n\n/// * Largest error 
of 0.00488 radians.\n\n/// * Speedup of 20.67x over f32.atan2(y);\n\npub fn atan2(y: f32, x: f32) -> f32 {\n\n const CONST: f32 = 0.28087f32; // Trial and error\n\n if x == 0f32 {\n\n if y > 0f32 {\n\n return PI_2;\n\n }\n\n if y == 0f32 {\n\n return 0f32;\n\n }\n\n return PI_NEG_2;\n\n }\n\n let z: f32 = y / x;\n\n let atan: f32;\n\n if z.abs() < 1f32 {\n\n atan = z / (z * z * CONST + 1f32);\n\n if x < 0f32 {\n\n if y < 0f32 {\n\n return atan - PI;\n\n }\n\n return atan + PI;\n", "file_path": "src/math/num/trigonometry.rs", "rank": 27, "score": 81116.20941290041 }, { "content": "/// Linearly interpolates between a and b by t.\n\npub fn lerp(a: f32, b: f32, t: f32) -> f32 {\n\n a + t * (b - a)\n\n}\n", "file_path": "src/math/mod.rs", "rank": 28, "score": 77853.53265427066 }, { "content": "/// Creates an orthographic matrix from screen bounds with a fixed aspect ratio and with 0,0 in the\n\n/// center.\n\npub fn ortho_from_bounds(bounds: &Vector2<f32>) -> Matrix4<f32> {\n\n let w = bounds.x / 2.0;\n\n let h = bounds.y / 2.0;\n\n ortho(-w.floor(), w.ceil(), -h.floor(), h.ceil(), -1.0, 1.0)\n\n}\n\n\n\npub struct OrthographicParams {\n\n /// The translation of the layer.\n\n pub translation: Vector3<f32>,\n\n /// The zoom level of the layer. This is 1.0 by default, meaning 1 pixel takes up 1x1 pixels on\n\n /// screen.\n\n pub scale: f32,\n\n /// Rotation is measured in turns from [0, 1). Values outside of the range are wrapped into the\n\n /// range. 
For example, 1.75 is wrapped into 0.75, -0.4 is wrapped into 0.6.\n\n pub rotation: f32,\n\n}\n\n\n\n/// Simple camera for orthographic projections.\n\npub struct OrthographicCamera {\n\n params: OrthographicParams,\n", "file_path": "src/math/orthographic.rs", "rank": 29, "score": 77233.46455580184 }, { "content": "/// Fast Vector2<f32> normalization.\n\npub fn fast_normalize2(vector: Vector2<f32>) -> Vector2<f32> {\n\n vector * (vector.x * vector.x + vector.y * vector.y).inv_sqrt()\n\n}\n\n\n", "file_path": "src/math/mod.rs", "rank": 30, "score": 77233.46455580184 }, { "content": "/// Interpret a slice of bytes as a FLAC file and decodes it into a sound.\n\npub fn read_flac(bytes: &[u8]) -> Result<Sound, SoundError> {\n\n let mut reader = FlacReader::new(bytes).map_err(map)?;\n\n let mut buffer = if let Some(samples) = reader.streaminfo().samples {\n\n Vec::with_capacity(samples as usize)\n\n } else {\n\n Vec::new()\n\n };\n\n let scale = (1 << reader.streaminfo().bits_per_sample) / 2;\n\n let scale = 1.0 / scale as f32;\n\n match reader.streaminfo().channels {\n\n 1 => {\n\n for sample in reader.samples() {\n\n let x = sample.unwrap() as f32 * scale;\n\n buffer.push([x, x]);\n\n }\n\n }\n\n 2 => {\n\n let mut iter = reader.samples();\n\n while let Some(sample) = iter.next() {\n\n let x = sample.unwrap() as f32 * scale;\n\n let y = iter.next().unwrap().unwrap() as f32 * scale;\n\n buffer.push([x, y]);\n\n }\n\n }\n\n _ => return Err(SoundError::UnsupportedChannelCount),\n\n }\n\n Sound::new(reader.streaminfo().sample_rate, buffer)\n\n}\n\n\n", "file_path": "src/audio/loaders/flac.rs", "rank": 31, "score": 73859.31717229569 }, { "content": "use crate::{\n\n graphics::{\n\n shaders::sprite::Sprite, std140, Buffer, DrawMode, Shader, ShaderDescriptor, Texture, Uniform,\n\n },\n\n App, Context,\n\n};\n\n\n\nimpl ShaderDescriptor<1> for SpriteShader {\n\n const VERTEX_SHADER: &'static str = include_str!(\"vertex.glsl\");\n\n const FRAGMENT_SHADER: &'static str = 
include_str!(\"fragment.glsl\");\n\n const TEXTURE_NAMES: [&'static str; 1] = [\"tex\"];\n\n const VERTEX_UNIFORM_NAME: &'static str = \"vertex\";\n\n type VertexUniformType = std140::mat4;\n\n type VertexDescriptor = Sprite;\n\n}\n\n\n\n/// Shader object for sprites. This holds no mutable state, so it's recommended to reuse this as\n\n/// much as possible.\n\npub struct SpriteShader {\n\n shader: Shader<SpriteShader, 1>,\n", "file_path": "src/graphics/shaders/sprite/shader.rs", "rank": 32, "score": 73492.87367209286 }, { "content": " const TEXTURE_NAMES: [&'static str; 1] = [\"tex\"];\n\n const VERTEX_UNIFORM_NAME: &'static str = \"vertex\";\n\n type VertexUniformType = std140::mat4;\n\n type VertexDescriptor = TextSprite;\n\n}\n\n\n\n/// Shader object for sprites. This holds no mutable state, so it's recommended to reuse this as\n\n/// much as possible.\n\npub struct TextShader {\n\n shader: Shader<TextShader, 1>,\n\n}\n\n\n\nimpl TextShader {\n\n /// Creates a new text shader.\n\n pub fn new(ctx: &Context<impl App>) -> TextShader {\n\n TextShader {\n\n shader: Shader::new(ctx),\n\n }\n\n }\n\n\n\n /// Draws to the screen.\n\n pub fn draw(&self, uniform: &Uniform<std140::mat4>, atlas: &Texture, buffer: &Buffer<TextSprite>) {\n\n self.shader.draw(DrawMode::TriangleStrip, uniform, [atlas], &[buffer]);\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n", "file_path": "src/graphics/shaders/text/shader.rs", "rank": 33, "score": 73486.24929107362 }, { "content": "use crate::color::R8;\n\nuse crate::graphics::{\n\n shaders::text::{Text, TextSprite, TextUserData},\n\n std140,\n\n texture_atlas::TextureAtlas,\n\n Buffer, DrawMode, Shader, ShaderDescriptor, Texture, TextureFiltering, TextureSection, Uniform,\n\n};\n\nuse crate::image::Image;\n\nuse crate::{App, Context};\n\nuse alloc::vec::Vec;\n\nuse cgmath::*;\n\nuse fontdue::{\n\n layout::{CoordinateSystem, GlyphRasterConfig, Layout, LayoutSettings},\n\n Font,\n\n};\n\nuse hashbrown::HashMap;\n\n\n\nimpl 
ShaderDescriptor<1> for TextShader {\n\n const VERTEX_SHADER: &'static str = include_str!(\"vertex.glsl\");\n\n const FRAGMENT_SHADER: &'static str = include_str!(\"fragment.glsl\");\n", "file_path": "src/graphics/shaders/text/shader.rs", "rank": 34, "score": 73482.99058374747 }, { "content": "}\n\n\n\nimpl SpriteShader {\n\n /// Creates a new sprite shader.\n\n pub fn new(ctx: &Context<impl App>) -> SpriteShader {\n\n SpriteShader {\n\n shader: Shader::new(ctx),\n\n }\n\n }\n\n\n\n /// Helper function to draw sprites to the screen.\n\n pub fn draw(&self, uniform: &Uniform<std140::mat4>, atlas: &Texture, buffers: &[&Buffer<Sprite>]) {\n\n self.shader.draw(DrawMode::TriangleStrip, uniform, [atlas], buffers);\n\n }\n\n}\n", "file_path": "src/graphics/shaders/sprite/shader.rs", "rank": 35, "score": 73472.53033331977 }, { "content": " TextShaderPass {\n\n uniform: Uniform::new(ctx, ortho),\n\n atlas: TextureAtlas::new::<R8, _>(ctx, max, TextureFiltering::none()),\n\n buffer: Buffer::new(ctx),\n\n\n\n sprites: Vec::new(),\n\n layout: Layout::new(CoordinateSystem::PositiveYUp),\n\n cache: HashMap::new(),\n\n dirty: false,\n\n }\n\n }\n\n\n\n /// Sets the orthographic projection used to draw this pass. 
If none is passed, this function\n\n /// does nothing.\n\n pub fn set_ortho(&mut self, ortho: Matrix4<f32>) {\n\n self.uniform.set(ortho);\n\n }\n\n\n\n /// Draws the pass to the screen.\n\n pub fn draw(&mut self, shader: &TextShader) {\n", "file_path": "src/graphics/shaders/text/shader.rs", "rank": 36, "score": 73464.2858466674 }, { "content": " if self.sprites.len() > 0 {\n\n if self.dirty {\n\n self.dirty = false;\n\n self.buffer.set(&self.sprites);\n\n }\n\n shader.draw(&self.uniform, self.atlas.get(), &self.buffer);\n\n }\n\n }\n\n\n\n /// Appends text to the instance.\n\n pub fn append(&mut self, fonts: &[Font], layout: &LayoutSettings, styles: &[Text]) {\n\n self.layout.reset(layout);\n\n for style in styles {\n\n self.layout.append(fonts, &style.into());\n\n }\n\n // log::info!(\"{:?}\", self.layout.glyphs());\n\n // log::info!(\"{:?}\", self.layout.lines());\n\n for glyph in self.layout.glyphs() {\n\n if glyph.width == 0 {\n\n continue;\n", "file_path": "src/graphics/shaders/text/shader.rs", "rank": 37, "score": 73456.4226762956 }, { "content": " }\n\n let value = match self.cache.get(&glyph.key).copied() {\n\n Some(value) => value,\n\n None => {\n\n let font = &fonts[glyph.font_index];\n\n let (metrics, bitmap) = font.rasterize_config(glyph.key);\n\n // info!(\"{:?}\", metrics); // Debug\n\n let image = Image::from_vec(bitmap, metrics.width as u32, metrics.height as u32);\n\n let uv = self.atlas.pack(&image).expect(\"Text packer is full.\");\n\n let value = CharCacheValue {\n\n uv,\n\n size: Vector2::new(metrics.width as f32, metrics.height as f32),\n\n };\n\n self.cache.insert(glyph.key, value);\n\n value\n\n }\n\n };\n\n self.sprites.push(TextSprite::new(\n\n Vector3::new(glyph.x, glyph.y, glyph.user_data.depth),\n\n value.size,\n", "file_path": "src/graphics/shaders/text/shader.rs", "rank": 38, "score": 73450.64791589139 }, { "content": " value.uv,\n\n glyph.user_data.color,\n\n ));\n\n self.dirty = true;\n\n }\n\n }\n\n\n\n /// Clears all the text, 
drawing nothing.\n\n pub fn clear_text(&mut self) {\n\n self.sprites.clear();\n\n self.dirty = true;\n\n }\n\n}\n", "file_path": "src/graphics/shaders/text/shader.rs", "rank": 39, "score": 73448.89191683146 }, { "content": "fn horizontal_sample(image: &Image<RGBA8>, new_width: u32) -> Image<RGBA8> {\n\n const SUPPORT: f32 = 3.0;\n\n let (width, height) = (image.width(), image.height());\n\n let mut out = Image::from_color(RGBA8::MAGENTA, new_width, height);\n\n let mut ws = Vec::new();\n\n\n\n let ratio = width as f32 / new_width as f32;\n\n let sratio = if ratio < 1.0 {\n\n 1.0\n\n } else {\n\n ratio\n\n };\n\n let src_support = SUPPORT * sratio;\n\n\n\n for outx in 0..new_width {\n\n // Find the point in the input image corresponding to the centre\n\n // of the current pixel in the output image.\n\n let inputx = (outx as f32 + 0.5) * ratio;\n\n\n\n // Left and right are slice bounds for the input pixels relevant\n", "file_path": "src/image/resize.rs", "rank": 40, "score": 73296.87755730061 }, { "content": "fn vertical_sample(image: &Image<RGBA8>, new_height: u32) -> Image<RGBA8> {\n\n const SUPPORT: f32 = 3.0;\n\n let (width, height) = (image.width(), image.height());\n\n let mut out = Image::from_color(RGBA8::MAGENTA, width, new_height);\n\n let mut ws = Vec::new();\n\n\n\n let ratio = height as f32 / new_height as f32;\n\n let sratio = if ratio < 1.0 {\n\n 1.0\n\n } else {\n\n ratio\n\n };\n\n let src_support = SUPPORT * sratio;\n\n\n\n for outy in 0..new_height {\n\n let inputy = (outy as f32 + 0.5) * ratio;\n\n\n\n let left = (inputy - src_support).floor() as i64;\n\n let left = clamp(left, 0, <i64 as From<_>>::from(height) - 1) as u32;\n\n\n", "file_path": "src/image/resize.rs", "rank": 41, "score": 73296.87755730061 }, { "content": "use crate::graphics::{\n\n graphics, resource, std140::Std140Struct, BufferBindingTarget, BufferBlockBindingTarget, BufferUsage,\n\n};\n\nuse crate::{App, Context};\n\nuse core::marker::PhantomData;\n\n\n\n/// Stores a uniform 
on the device.\n\npub struct Uniform<T: Std140Struct> {\n\n _unsend: core::marker::PhantomData<*const ()>,\n\n vbo: resource::Buffer,\n\n phantom: PhantomData<T>,\n\n}\n\n\n\nimpl<T: Std140Struct> Uniform<T> {\n\n /// Creates a new uniform.\n\n pub fn new(_ctx: &Context<impl App>, uniform: impl Into<T>) -> Uniform<T> {\n\n let gl = graphics().gl();\n\n\n\n let vbo = gl.create_buffer();\n\n gl.bind_buffer(BufferBindingTarget::UniformBuffer, Some(vbo));\n", "file_path": "src/graphics/uniform.rs", "rank": 42, "score": 69949.66876604478 }, { "content": "use crate::graphics::{\n\n configure_vertex, graphics, resource, BufferBindingTarget, BufferUsage, VertexDescriptor,\n\n};\n\nuse crate::{App, Context};\n\nuse core::marker::PhantomData;\n\n\n\n/// Buffers a set of elements on the device.\n\npub struct Buffer<T: VertexDescriptor + Copy> {\n\n // This type is !Send + !Sync.\n\n _unsend: core::marker::PhantomData<*const ()>,\n\n vbo: resource::Buffer,\n\n vao: resource::VertexArray,\n\n vertices: usize,\n\n phantom: PhantomData<T>,\n\n}\n\n\n\nimpl<T: VertexDescriptor + Copy> Buffer<T> {\n\n /// Creates a new buffer.\n\n pub fn new(_ctx: &Context<impl App>) -> Buffer<T> {\n\n let gl = graphics().gl();\n", "file_path": "src/graphics/buffer.rs", "rank": 43, "score": 69949.66609043504 }, { "content": " gl.buffer_data(BufferBindingTarget::UniformBuffer, &[uniform.into()], BufferUsage::StaticDraw);\n\n\n\n Uniform {\n\n _unsend: core::marker::PhantomData,\n\n vbo,\n\n phantom: PhantomData,\n\n }\n\n }\n\n\n\n /// Sets the value of the uniform.\n\n pub fn set(&mut self, uniform: impl Into<T>) {\n\n let gl = graphics().gl();\n\n gl.bind_buffer(BufferBindingTarget::UniformBuffer, Some(self.vbo));\n\n gl.buffer_data(BufferBindingTarget::UniformBuffer, &[uniform.into()], BufferUsage::StaticDraw);\n\n }\n\n\n\n pub(crate) fn bind(&self, block: u32) {\n\n let gl = graphics().gl();\n\n gl.bind_buffer_base(BufferBlockBindingTarget::UniformBuffer, block, Some(self.vbo));\n\n 
}\n\n}\n\n\n\nimpl<T: Std140Struct> Drop for Uniform<T> {\n\n fn drop(&mut self) {\n\n let gl = graphics().gl();\n\n gl.delete_buffer(self.vbo);\n\n }\n\n}\n", "file_path": "src/graphics/uniform.rs", "rank": 44, "score": 69942.61825239274 }, { "content": " }\n\n\n\n /// Clears all elements from the buffer.\n\n pub fn clear(&mut self) {\n\n self.vertices = 0;\n\n }\n\n\n\n /// Sets the elements in the buffer.\n\n pub fn set(&mut self, items: &[T]) {\n\n self.vertices = items.len();\n\n if self.vertices > 0 {\n\n let gl = graphics().gl();\n\n gl.bind_buffer(BufferBindingTarget::ArrayBuffer, Some(self.vbo));\n\n gl.buffer_data(BufferBindingTarget::ArrayBuffer, items, BufferUsage::StaticDraw);\n\n }\n\n }\n\n\n\n pub(crate) fn bind(&self) {\n\n let gl = graphics().gl();\n\n gl.bind_vertex_array(Some(self.vao));\n", "file_path": "src/graphics/buffer.rs", "rank": 45, "score": 69934.66433743526 }, { "content": "\n\n let vao = gl.create_vertex_array();\n\n gl.bind_vertex_array(Some(vao));\n\n let vbo = gl.create_buffer();\n\n gl.bind_buffer(BufferBindingTarget::ArrayBuffer, Some(vbo));\n\n configure_vertex::<T>(&T::ATTRIBUTES, gl);\n\n gl.bind_vertex_array(None);\n\n\n\n Buffer {\n\n _unsend: core::marker::PhantomData,\n\n vbo,\n\n vao,\n\n vertices: 0,\n\n phantom: PhantomData,\n\n }\n\n }\n\n\n\n /// Gets the number of elements in the buffer.\n\n pub fn len(&self) -> usize {\n\n self.vertices\n", "file_path": "src/graphics/buffer.rs", "rank": 46, "score": 69931.43517239882 }, { "content": " }\n\n}\n\n\n\nimpl<T: VertexDescriptor + Copy> Drop for Buffer<T> {\n\n fn drop(&mut self) {\n\n let gl = graphics().gl();\n\n gl.delete_buffer(self.vbo);\n\n gl.delete_vertex_array(self.vao);\n\n }\n\n}\n\n\n\nimpl<T: VertexDescriptor + Copy> AsRef<Buffer<T>> for Buffer<T> {\n\n fn as_ref(&self) -> &Buffer<T> {\n\n self\n\n }\n\n}\n", "file_path": "src/graphics/buffer.rs", "rank": 47, "score": 69929.69110758712 }, { "content": " 0,\n\n offset_x as i32,\n\n offset_y as i32,\n\n 
image.width() as i32,\n\n image.height() as i32,\n\n Z::layout().cpu_format(),\n\n Z::component_type().pixel_type(),\n\n image.as_slice(),\n\n );\n\n self.generate_mipmap();\n\n gl.bind_texture(TextureBindingTarget::Texture2D, None);\n\n }\n\n\n\n pub(crate) fn bind(&self, unit: u32) {\n\n let gl = graphics().gl();\n\n gl.active_texture(unit);\n\n gl.bind_texture(TextureBindingTarget::Texture2D, Some(self.id));\n\n }\n\n}\n\n\n\nimpl Drop for Texture {\n\n fn drop(&mut self) {\n\n if Rc::<()>::strong_count(&self.rc) == 1 {\n\n graphics().gl().delete_texture(self.id);\n\n }\n\n }\n\n}\n", "file_path": "src/graphics/texture.rs", "rank": 48, "score": 69621.1787930453 }, { "content": "use crate::color::ColorDescriptor;\n\nuse crate::graphics::{\n\n graphics, resource, TextureBindingTarget, TextureLoadTarget, TextureMagFilterValue,\n\n TextureMinFilterValue, TextureParameterTarget, TextureSection, TextureWrapValue,\n\n};\n\nuse crate::image::Image;\n\nuse crate::{App, Context};\n\nuse alloc::rc::Rc;\n\n\n\n/// Describes how a texture will be filtered. 
Different settings can improve texture rendering when\n\n/// viewing textures far away, or at steep angles.\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct TextureFiltering {\n\n min_filter: TextureMinFilterValue,\n\n mip_levels: Option<i32>,\n\n anisotropy: Option<f32>,\n\n}\n\n\n\nimpl TextureFiltering {\n\n /// Applies no filtering to the texture.\n", "file_path": "src/graphics/texture.rs", "rank": 49, "score": 69620.08581871657 }, { "content": "\n\n if let Some(requested_anisotropy) = self.filter.anisotropy {\n\n if let Some(supported_anisotropy) = graphics().max_texture_anisotropy() {\n\n gl.tex_parameter_anisotropy(\n\n TextureParameterTarget::Texture2D,\n\n supported_anisotropy.min(requested_anisotropy),\n\n );\n\n }\n\n }\n\n }\n\n\n\n /// The width of the texture.\n\n pub fn width(&self) -> u32 {\n\n self.width\n\n }\n\n\n\n /// The height of the texture.\n\n pub fn height(&self) -> u32 {\n\n self.height\n\n }\n", "file_path": "src/graphics/texture.rs", "rank": 50, "score": 69615.01638481638 }, { "content": "pub struct Texture {\n\n id: resource::Texture,\n\n filter: TextureFiltering,\n\n width: u32,\n\n height: u32,\n\n rc: Rc<()>,\n\n}\n\n\n\nimpl Clone for Texture {\n\n fn clone(&self) -> Self {\n\n Texture {\n\n id: self.id,\n\n filter: self.filter,\n\n width: self.width,\n\n height: self.height,\n\n rc: self.rc.clone(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/graphics/texture.rs", "rank": 51, "score": 69615.01208679394 }, { "content": "impl Texture {\n\n /// Interpret a slice of bytes as a PNG, decodes it into an RGBA image, then uploads it image to\n\n /// the GPU, creating a texture.\n\n pub fn from_png(ctx: &Context<impl App>, bytes: &[u8], filtering: TextureFiltering) -> Texture {\n\n Self::from_image(ctx, &Image::from_png(bytes), filtering)\n\n }\n\n\n\n /// Uploads an image to the GPU, creating a texture.\n\n pub fn from_image<T: ColorDescriptor>(\n\n ctx: &Context<impl App>,\n\n image: &Image<T>,\n\n filtering: 
TextureFiltering,\n\n ) -> Texture {\n\n let max_size = ctx.max_texture_size() as u32;\n\n if image.width() > max_size || image.height() > max_size {\n\n panic!(\n\n \"The max width or height texture may have on this device is {}. \\\n\n The given image has a (width, height) of ({}, {})\",\n\n max_size,\n\n image.width(),\n", "file_path": "src/graphics/texture.rs", "rank": 52, "score": 69614.16781062678 }, { "content": "\n\n /// Coordinates relative to the top left corner of the texture. (0, 0) is the top left of the\n\n /// texture, and (width, height) is the bottom right of the texture.\n\n pub fn subsection(&self, left: u32, right: u32, top: u32, bottom: u32) -> TextureSection {\n\n TextureSection::from_texture(&self, left, right, top, bottom)\n\n }\n\n\n\n /// Sets a subsection of the texture to the given image. (0, 0) is the top left of the texture,\n\n /// and (width, height) is the bottom right of the texture.\n\n /// # Arguments\n\n ///\n\n /// * `offset_x` - The top left texel x coordinate to offset the image by.\n\n /// * `offset_y` - The top left texel y coordinate to offset the image by.\n\n /// * `image` - The image to overwrite the texture with.\n\n pub fn set<Z: ColorDescriptor>(&self, offset_x: u32, offset_y: u32, image: &Image<Z>) {\n\n assert!(image.width() + offset_x <= self.width && image.height() + offset_y <= self.height);\n\n let gl = graphics().gl();\n\n gl.bind_texture(TextureBindingTarget::Texture2D, Some(self.id));\n\n gl.tex_sub_image_2d(\n\n TextureLoadTarget::Texture2D,\n", "file_path": "src/graphics/texture.rs", "rank": 53, "score": 69613.8739830725 }, { "content": " image.height()\n\n );\n\n }\n\n let gl = graphics().gl();\n\n let id = gl.create_texture();\n\n let texture = Texture {\n\n id,\n\n filter: filtering,\n\n width: image.width(),\n\n height: image.height(),\n\n rc: Rc::new(()),\n\n };\n\n gl.bind_texture(TextureBindingTarget::Texture2D, Some(id));\n\n gl.tex_image_2d(\n\n TextureLoadTarget::Texture2D,\n\n 0,\n\n 
image.width() as i32,\n\n image.height() as i32,\n\n 0,\n\n T::layout().gpu_format(),\n", "file_path": "src/graphics/texture.rs", "rank": 54, "score": 69612.87989681616 }, { "content": " pub fn none() -> TextureFiltering {\n\n TextureFiltering {\n\n min_filter: TextureMinFilterValue::Nearest,\n\n mip_levels: None,\n\n anisotropy: None,\n\n }\n\n }\n\n\n\n /// Generates mipmaps, using the nearest mipmap to select the texture.\n\n /// # Arguments\n\n ///\n\n /// * `mip_levels` - The number of mip map levels to generate. If the requested level isn't\n\n /// available, this falls back to the max supported level.\n\n pub fn bilinear(mip_levels: u32) -> TextureFiltering {\n\n TextureFiltering {\n\n min_filter: TextureMinFilterValue::LinearMipmapNearest,\n\n mip_levels: Some(mip_levels as i32),\n\n anisotropy: None,\n\n }\n\n }\n", "file_path": "src/graphics/texture.rs", "rank": 55, "score": 69611.30443802036 }, { "content": " /// * `mip_levels` - The number of mip map levels to generate. If the requested level isn't\n\n /// available, this falls back to the max supported level.\n\n /// * `anisotropy` - The number of anisotropy samples. This must be a power of two value. If the\n\n /// requested level isn't available, this silently falls back to the max supported level.\n\n pub fn anisotropic(mip_levels: u32, anisotropy: u32) -> TextureFiltering {\n\n assert!(anisotropy.is_power_of_two(), \"anisotropy is not a power of two.\");\n\n TextureFiltering {\n\n min_filter: TextureMinFilterValue::LinearMipmapLinear,\n\n mip_levels: Some(mip_levels as i32),\n\n anisotropy: Some(anisotropy as f32),\n\n }\n\n }\n\n\n\n /// Gets the requested mip levels. 
None if no filtering is being requested.\n\n pub fn mip_levels(&self) -> Option<i32> {\n\n self.mip_levels\n\n }\n\n}\n\n\n\n/// Represents a GPU resource for a texture.\n", "file_path": "src/graphics/texture.rs", "rank": 56, "score": 69611.1998702031 }, { "content": " T::layout().cpu_format(),\n\n T::component_type().pixel_type(),\n\n image.as_slice(),\n\n );\n\n\n\n gl.tex_parameter_wrap_s(TextureParameterTarget::Texture2D, TextureWrapValue::ClampToEdge);\n\n gl.tex_parameter_wrap_t(TextureParameterTarget::Texture2D, TextureWrapValue::ClampToEdge);\n\n gl.tex_parameter_min_filter(TextureParameterTarget::Texture2D, filtering.min_filter);\n\n gl.tex_parameter_mag_filter(TextureParameterTarget::Texture2D, TextureMagFilterValue::Nearest);\n\n texture.generate_mipmap();\n\n gl.bind_texture(TextureBindingTarget::Texture2D, None);\n\n texture\n\n }\n\n\n\n fn generate_mipmap(&self) {\n\n let gl = graphics().gl();\n\n if let Some(mip_levels) = self.filter.mip_levels {\n\n gl.tex_parameter_max_mipmaps(TextureParameterTarget::Texture2D, mip_levels);\n\n gl.generate_mipmap(TextureParameterTarget::Texture2D);\n\n }\n", "file_path": "src/graphics/texture.rs", "rank": 57, "score": 69610.99892521942 }, { "content": "\n\n /// Generates mipmaps, linearly interpolating between the mipmap to select the texture.\n\n /// # Arguments\n\n ///\n\n /// * `mip_levels` - The number of mip map levels to generate. If the requested level isn't\n\n /// available, this falls back to the max supported level.\n\n pub fn trilinear(mip_levels: u32) -> TextureFiltering {\n\n TextureFiltering {\n\n min_filter: TextureMinFilterValue::LinearMipmapLinear,\n\n mip_levels: Some(mip_levels as i32),\n\n anisotropy: None,\n\n }\n\n }\n\n\n\n /// Generates mipmaps and anisotropic mipmap levels, linearly interpolating between the mipmap\n\n /// to select the texture. If anisotropic mipmaps aren't available, this silently falls back to\n\n /// trilinear filtering. 
Use max_texture_anisotropy() to check if this feature is supported, as\n\n /// well as to get the max anisotropy.\n\n /// # Arguments\n\n ///\n", "file_path": "src/graphics/texture.rs", "rank": 59, "score": 69609.29149670839 }, { "content": "use crate::graphics::{AttributeType, OpenGL};\n\nuse log::trace;\n\n\n\n/// A trait to describe vertices that will be consumed by a shader. The INSTANCING field describes\n\n/// if vertices with this VertexDescriptor will be drawn instanced or non instanced. The ATTRIBUTES\n\n/// field describes the fields contained in your vertex struct.\n\n///\n\n/// # Example\n\n/// ```\n\n/// // This is an example for how to implement VertexDescriptor for a simple type.\n\n/// use storm::cgmath::*;\n\n/// use storm::graphics::*;\n\n///\n\n/// #[repr(C)]\n\n/// #[derive(Copy, Clone)]\n\n/// struct Demo {\n\n/// pos: Vector3<f32>,\n\n/// size: Vector2<u16>,\n\n/// }\n\n///\n", "file_path": "src/graphics/vertex_descriptor.rs", "rank": 66, "score": 66623.9794966077 }, { "content": " };\n\n /// F32 specifies the input be converted into a f32. For example, if the input is a u32 with the\n\n /// value of 10, it will be converted into 10.0.\n\n pub const F32: VertexOutputType = VertexOutputType {\n\n integer: false,\n\n normalized: false,\n\n };\n\n /// I32 specifies the input be converted into a i32. 
For example, if the input is a f32 with the\n\n /// value of 10.1, it will be converted into 10.\n\n pub const I32: VertexOutputType = VertexOutputType {\n\n integer: true,\n\n normalized: false,\n\n };\n\n}\n\n\n\n/// The input format a vertice will be converted from.\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct VertexInputType {\n\n size: i32,\n\n format: AttributeType,\n", "file_path": "src/graphics/vertex_descriptor.rs", "rank": 67, "score": 66621.76867693395 }, { "content": " pub const U16: VertexInputType = VertexInputType {\n\n size: 2,\n\n format: AttributeType::UnsignedShort,\n\n };\n\n /// I32 specifies the input is an i32.\n\n pub const I32: VertexInputType = VertexInputType {\n\n size: 4,\n\n format: AttributeType::Int,\n\n };\n\n /// U32 specifies the input is an u32.\n\n pub const U32: VertexInputType = VertexInputType {\n\n size: 4,\n\n format: AttributeType::UnsignedInt,\n\n };\n\n /// F16 specifies the input is a f16 (A f32 with half percision).\n\n pub const F16: VertexInputType = VertexInputType {\n\n size: 2,\n\n format: AttributeType::HalfFloat,\n\n };\n\n /// F32 specifies the input is an f32.\n", "file_path": "src/graphics/vertex_descriptor.rs", "rank": 68, "score": 66621.7392361388 }, { "content": " }\n\n}\n\n\n\n/// Describes an individual vertex attribute. 
These usually correspond to fields in a struct.\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct VertexAttribute {\n\n /// Specifies the number of components per generic vertex attribute\n\n pub count: i32,\n\n /// Specifies the data type of each component in the array.\n\n pub input: VertexInputType,\n\n /// Specifies the output conversion in the shader.\n\n pub output: VertexOutputType,\n\n}\n\n\n\nimpl VertexAttribute {\n\n /// Helper function to create a new vertex attribute.\n\n pub const fn new(count: i32, input: VertexInputType, output: VertexOutputType) -> VertexAttribute {\n\n VertexAttribute {\n\n count,\n\n input,\n", "file_path": "src/graphics/vertex_descriptor.rs", "rank": 69, "score": 66621.23806911934 }, { "content": " };\n\n pub const UnsignedInt10F_11f_11f_rev: VertexInputType = VertexInputType {\n\n size: 4,\n\n format: AttributeType::UnsignedInt10f_11f_11f_Rev,\n\n };\n\n}\n\n\n\npub(crate) fn configure_vertex<T: VertexDescriptor + Copy>(attributes: &[VertexAttribute], gl: &OpenGL) {\n\n let stride = core::mem::size_of::<T>() as i32;\n\n let mut index = 0;\n\n let mut size = 0;\n\n let divisor = T::INSTANCING.divisor;\n\n for attribute in attributes {\n\n gl.enable_vertex_attrib_array(index);\n\n gl.vertex_attrib_divisor(index, divisor);\n\n if attribute.output.integer {\n\n gl.vertex_attrib_pointer_i32(index, attribute.count, attribute.input.format, stride, size);\n\n } else {\n\n gl.vertex_attrib_pointer_f32(\n\n index,\n", "file_path": "src/graphics/vertex_descriptor.rs", "rank": 70, "score": 66621.0611852127 }, { "content": "/// impl VertexDescriptor for Demo {\n\n/// // Don't apply any instancing to this vertex type.\n\n/// const INSTANCING: VertexInstancing = VertexInstancing::none();\n\n/// // These are the attributes that describe the fields contained in this vertex.\n\n/// const ATTRIBUTES: &'static [VertexAttribute] = &[\n\n/// // This value represents the three f32s in pos's Vector3<f32>. 
When invoked in the\n\n/// // shader, the values will be read as f32s.\n\n/// VertexAttribute::new(3, VertexInputType::F32, VertexOutputType::F32),\n\n/// // This value represents the two u16s in size's Vector3<u16>. When invoked in the\n\n/// // shader, the values will be read as f32s.\n\n/// VertexAttribute::new(2, VertexInputType::U16, VertexOutputType::F32),\n\n/// ];\n\n/// }\n\n/// ```\n", "file_path": "src/graphics/vertex_descriptor.rs", "rank": 71, "score": 66615.38007218565 }, { "content": " pub const F32: VertexInputType = VertexInputType {\n\n size: 4,\n\n format: AttributeType::Float,\n\n };\n\n /// F64 specifies the input is an f64.\n\n pub const F64: VertexInputType = VertexInputType {\n\n size: 8,\n\n format: AttributeType::Double,\n\n };\n\n pub const Fixed: VertexInputType = VertexInputType {\n\n size: 4,\n\n format: AttributeType::Fixed,\n\n };\n\n pub const Int2_10_10_10_rev: VertexInputType = VertexInputType {\n\n size: 4,\n\n format: AttributeType::Int2_10_10_10_Rev,\n\n };\n\n pub const UnsignedInt2_10_10_10_Rev: VertexInputType = VertexInputType {\n\n size: 4,\n\n format: AttributeType::UnsignedInt2_10_10_10_Rev,\n", "file_path": "src/graphics/vertex_descriptor.rs", "rank": 72, "score": 66615.14228144239 }, { "content": " output,\n\n }\n\n }\n\n}\n\n\n\n/// The output format a vertice will be converted into.\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct VertexOutputType {\n\n integer: bool,\n\n normalized: bool,\n\n}\n\n\n\n#[allow(non_upper_case_globals)]\n\nimpl VertexOutputType {\n\n /// F32 specifies the input be converted into a f32, normalizing in the process. Signed values\n\n /// are normalized into [-1, 1], and unsigned values are normalized into [0, 1]. 
For example, if\n\n /// the input is a u16 with the value of u16::MAX / 2, it will be converted into 0.5.\n\n pub const NormalizedF32: VertexOutputType = VertexOutputType {\n\n integer: false,\n\n normalized: true,\n", "file_path": "src/graphics/vertex_descriptor.rs", "rank": 73, "score": 66614.4291734814 }, { "content": "}\n\n\n\n#[allow(non_upper_case_globals)]\n\nimpl VertexInputType {\n\n /// I8 specifies the input is an i8.\n\n pub const I8: VertexInputType = VertexInputType {\n\n size: 1,\n\n format: AttributeType::Byte,\n\n };\n\n /// U8 specifies the input is an u8.\n\n pub const U8: VertexInputType = VertexInputType {\n\n size: 1,\n\n format: AttributeType::UnsignedByte,\n\n };\n\n /// I16 specifies the input is an i16.\n\n pub const I16: VertexInputType = VertexInputType {\n\n size: 2,\n\n format: AttributeType::Short,\n\n };\n\n /// U16 specifies the input is an u16.\n", "file_path": "src/graphics/vertex_descriptor.rs", "rank": 74, "score": 66614.26466382321 }, { "content": " VertexInstancing {\n\n divisor: 0,\n\n count: 0,\n\n }\n\n }\n\n\n\n /// Instancing will be applied when this vertex is drawn.\n\n /// # Arguments\n\n ///\n\n /// * `count` - Specifies the number of instances to be rendered per vertex.\n\n pub const fn instanced(count: i32) -> VertexInstancing {\n\n VertexInstancing {\n\n divisor: 1,\n\n count,\n\n }\n\n }\n\n\n\n /// Gets if this is instanced or not.\n\n pub const fn is_instanced(&self) -> bool {\n\n self.divisor != 0\n", "file_path": "src/graphics/vertex_descriptor.rs", "rank": 75, "score": 66612.42923699079 }, { "content": " attribute.count,\n\n attribute.input.format,\n\n attribute.output.normalized,\n\n stride,\n\n size,\n\n );\n\n }\n\n size += attribute.count * attribute.input.size;\n\n index += 1;\n\n }\n\n trace!(\"Configured vertex {}: Size {}, Stride: {}\", core::any::type_name::<T>(), size, stride);\n\n}\n", "file_path": "src/graphics/vertex_descriptor.rs", "rank": 76, "score": 66607.30667090837 }, { "content": "use 
crate::color::ColorDescriptor;\n\nuse crate::graphics::{Texture, TextureFiltering, TextureSection};\n\nuse crate::image::{Image, Packer};\n\nuse crate::{App, Context};\n\n\n\n/// Simple image atlas that adds padding to reduce mip map artifacts. Extra padding is added to\n\n/// packed images based on the number of mip levels. More mip levels means more space dedicated to\n\n/// padding.\n\npub struct TextureAtlas {\n\n atlas: Texture,\n\n packer: Packer,\n\n padding: Option<u32>,\n\n}\n\n\n\nimpl TextureAtlas {\n\n /// Creates a new atlas.\n\n /// # Arguments\n\n ///\n\n /// * `size` - The width and height of the atlas. This must be a power of two value.\n\n /// * `filtering` - The filtering to apply. If any filtering is used, this atlas will add\n", "file_path": "src/graphics/texture_atlas.rs", "rank": 77, "score": 66412.00763124015 }, { "content": "use crate::graphics::Texture;\n\nuse cgmath::*;\n\n\n\nconst MAX_INTEGER: u16 = u16::MAX;\n\nconst MAX_FLOAT: f32 = u16::MAX as f32 + 1.0;\n\n\n\n/// Token to reference a texture with. Has basic configuration settings.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\n#[repr(transparent)]\n\npub struct TextureSection(pub Vector4<u16>);\n\n\n\nimpl Default for TextureSection {\n\n fn default() -> TextureSection {\n\n TextureSection::full()\n\n }\n\n}\n\n\n\nimpl TextureSection {\n\n /// Coordinates relative to the top left corner of the texture. 
(0, 0) is the top left of the\n\n /// texture, and (width, height) is the bottom right of the texture.\n", "file_path": "src/graphics/texture_section.rs", "rank": 78, "score": 66409.41357674758 }, { "content": " /// padding to each texture packed to offset mip map artifacts.\n\n pub fn new<T: ColorDescriptor, A: App>(\n\n ctx: &Context<A>,\n\n size: u32,\n\n filtering: TextureFiltering,\n\n ) -> TextureAtlas {\n\n assert!(size.is_power_of_two(), \"size is not a power of two.\");\n\n\n\n let atlas = Texture::from_image(ctx, &Image::from_color(T::default(), size, size), filtering);\n\n let packer = Packer::new(size, size);\n\n let padding = if let Some(mip_levels) = filtering.mip_levels() {\n\n Some(2u32.pow(mip_levels as u32))\n\n } else {\n\n None\n\n };\n\n\n\n TextureAtlas {\n\n atlas,\n\n packer,\n\n padding,\n", "file_path": "src/graphics/texture_atlas.rs", "rank": 79, "score": 66408.63063922909 }, { "content": " pub fn from_texture(texture: &Texture, left: u32, right: u32, top: u32, bottom: u32) -> TextureSection {\n\n let inv_width = MAX_FLOAT / (texture.width() as f32);\n\n let inv_height = MAX_FLOAT / (texture.height() as f32);\n\n let left = (left as f32) * inv_width;\n\n let right = (right as f32) * inv_width;\n\n let top = (top as f32) * inv_height;\n\n let bottom = (bottom as f32) * inv_height;\n\n TextureSection(Vector4::new(\n\n left as u16 + 1, // Left\n\n right as u16 - 1, // Right\n\n top as u16 + 1, // Top\n\n bottom as u16 - 1, // Bottom\n\n ))\n\n }\n\n\n\n /// Creates a texture section that encompases the whole texture.\n\n pub fn full() -> TextureSection {\n\n TextureSection(Vector4::new(0, MAX_INTEGER, 0, MAX_INTEGER))\n\n }\n\n\n", "file_path": "src/graphics/texture_section.rs", "rank": 80, "score": 66405.42472829408 }, { "content": " /// Mirrors the texture along the Y axis. 
Creates a new texture.\n\n pub fn mirror_y(&self) -> TextureSection {\n\n TextureSection(Vector4::new(self.0.y, self.0.x, self.0.z, self.0.w))\n\n }\n\n\n\n /// Mirrors the texture along the X axis. Creates a new texture.\n\n pub fn mirror_x(&self) -> TextureSection {\n\n TextureSection(Vector4::new(self.0.x, self.0.y, self.0.w, self.0.z))\n\n }\n\n}\n", "file_path": "src/graphics/texture_section.rs", "rank": 81, "score": 66402.87227526093 }, { "content": " if let Some(rect) = rect {\n\n self.atlas.set(rect.x, rect.y, image);\n\n return Some(self.atlas.subsection(rect.x, rect.x + rect.w, rect.y, rect.y + rect.h));\n\n }\n\n }\n\n None\n\n }\n\n\n\n /// Gets a reference to the underlying texture.\n\n pub fn get(&self) -> &Texture {\n\n &self.atlas\n\n }\n\n}\n", "file_path": "src/graphics/texture_atlas.rs", "rank": 82, "score": 66401.1407947297 }, { "content": " }\n\n }\n\n\n\n /// Packs an image into the texture atlas, returning a texture section for where the image was\n\n /// added. Returns None if the image could not be fit in the atlas.\n\n pub fn pack<T: ColorDescriptor>(&mut self, image: &Image<T>) -> Option<TextureSection> {\n\n if let Some(padding) = self.padding {\n\n let image = image.pad(padding);\n\n let rect = self.packer.pack(image.width(), image.height());\n\n if let Some(rect) = rect {\n\n self.atlas.set(rect.x, rect.y, &image);\n\n return Some(self.atlas.subsection(\n\n rect.x + padding,\n\n rect.x + rect.w - padding,\n\n rect.y + padding,\n\n rect.y + rect.h - padding,\n\n ));\n\n }\n\n } else {\n\n let rect = self.packer.pack(image.width(), image.height());\n", "file_path": "src/graphics/texture_atlas.rs", "rank": 83, "score": 66399.74573217612 }, { "content": "/// Bundled sample shaders for basic sprite rendering.\n\npub mod sprite;\n\n/// Bundled sample shaders for basic text rendering.\n\npub mod text;\n", "file_path": "src/graphics/shaders/mod.rs", "rank": 84, "score": 66367.25399438688 }, { "content": "/// Enumeration for window display 
options.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum DisplayMode {\n\n /// Normal windowed mode.\n\n Windowed {\n\n /// The height of the window.\n\n width: i32,\n\n /// The height of the window.\n\n height: i32,\n\n /// If the window is resizable.\n\n resizable: bool,\n\n },\n\n /// For \"fake\" fullscreen that takes the size of the desktop.\n\n WindowedFullscreen,\n\n /// For \"real\" fullscreen with a videomode change.\n\n Fullscreen,\n\n}\n\n\n\n/// Enumeration for all possible vsync settings.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum Vsync {\n\n /// Vsync will be disabled.\n\n Disabled,\n\n /// Vsync will be enabled.\n\n Enabled,\n\n}\n", "file_path": "src/graphics/window/display_mode.rs", "rank": 85, "score": 63803.33546057544 }, { "content": "use crate::color::RGBA8;\n\nuse crate::graphics::{\n\n TextureSection, VertexAttribute, VertexDescriptor, VertexInputType, VertexInstancing, VertexOutputType,\n\n};\n\nuse cgmath::{Vector2, Vector3};\n\nuse fontdue::layout::TextStyle;\n\n\n\n/// Configuration settings for text.\n\npub struct Text<'a> {\n\n /// The text to layout.\n\n pub text: &'a str,\n\n /// The scale of the text in pixel units. The units of the scale are pixels per Em unit.\n\n pub px: f32,\n\n /// The font to layout the text in.\n\n pub font_index: usize,\n\n /// The text color,\n\n pub color: RGBA8,\n\n /// The depth value used for rendering the text.\n\n pub depth: f32,\n\n}\n", "file_path": "src/graphics/shaders/text/data.rs", "rank": 86, "score": 63462.602746863595 }, { "content": "use crate::color::RGBA8;\n\nuse crate::graphics::{\n\n TextureSection, VertexAttribute, VertexDescriptor, VertexInputType, VertexInstancing, VertexOutputType,\n\n};\n\nuse crate::math::AABB2D;\n\nuse cgmath::*;\n\n\n\n/// Configuration settings for a sprite.\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct Sprite {\n\n /// Position of the sprite. 
The X and Y coordinates represent the bottom left corner of the\n\n /// sprite. The Z coordinate represents sprite depth. Units are measured in pixels.\n\n pub pos: Vector3<f32>,\n\n /// Units are measured in pixels.\n\n pub size: Vector2<u16>,\n\n /// Texture to apply to the sprite. The default is a plain white texture.\n\n pub texture: TextureSection,\n\n /// Color multiplier to apply to the sprite. The default is white.\n\n pub color: RGBA8,\n", "file_path": "src/graphics/shaders/sprite/data.rs", "rank": 87, "score": 63460.76017053515 }, { "content": "/// Holds configuration settings for a glyph of text.\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct TextSprite {\n\n /// Position of the sprite. The X and Y coordinates represent the bottom left corner of the\n\n /// sprite. The Z coordinate represents sprite depth. Units are measured in pixels.\n\n pub pos: Vector3<f32>,\n\n /// Units are measured in pixels.\n\n pub size: Vector2<u16>,\n\n /// Texture to apply to the sprite. The default is a plain white texture.\n\n pub texture: TextureSection,\n\n /// Color multiplier to apply to the sprite. 
The default is white.\n\n pub color: RGBA8,\n\n}\n\n\n\nimpl VertexDescriptor for TextSprite {\n\n const INSTANCING: VertexInstancing = VertexInstancing::instanced(4);\n\n const ATTRIBUTES: &'static [VertexAttribute] = &[\n\n // Position, Size, UV, Color::RGBA8\n\n VertexAttribute::new(3, VertexInputType::F32, VertexOutputType::F32),\n", "file_path": "src/graphics/shaders/text/data.rs", "rank": 88, "score": 63456.68213348536 }, { "content": " VertexAttribute::new(2, VertexInputType::U16, VertexOutputType::F32),\n\n VertexAttribute::new(4, VertexInputType::U16, VertexOutputType::NormalizedF32),\n\n VertexAttribute::new(4, VertexInputType::U8, VertexOutputType::NormalizedF32),\n\n ];\n\n}\n\n\n\nimpl TextSprite {\n\n /// Helper function to create a new text sprite.\n\n pub fn new(pos: Vector3<f32>, size: Vector2<f32>, texture: TextureSection, color: RGBA8) -> TextSprite {\n\n TextSprite {\n\n pos,\n\n size: {\n\n let x = (size.x as u32) & 0xFFFF;\n\n let y = (size.y as u32) & 0xFFFF;\n\n Vector2::new(x as u16, y as u16)\n\n },\n\n texture,\n\n color,\n\n }\n\n }\n\n}\n", "file_path": "src/graphics/shaders/text/data.rs", "rank": 89, "score": 63456.572367791116 }, { "content": " /// Rotation of the sprite. 
Units are 1/65536th of a turn.\n\n pub rotation: u16,\n\n}\n\n\n\nimpl VertexDescriptor for Sprite {\n\n const INSTANCING: VertexInstancing = VertexInstancing::instanced(4);\n\n const ATTRIBUTES: &'static [VertexAttribute] = &[\n\n // Pos, Size, Texture, Color::RGBA8, Rotation\n\n VertexAttribute::new(3, VertexInputType::F32, VertexOutputType::F32),\n\n VertexAttribute::new(2, VertexInputType::U16, VertexOutputType::F32),\n\n VertexAttribute::new(4, VertexInputType::U16, VertexOutputType::NormalizedF32),\n\n VertexAttribute::new(4, VertexInputType::U8, VertexOutputType::NormalizedF32),\n\n VertexAttribute::new(1, VertexInputType::U16, VertexOutputType::NormalizedF32),\n\n ];\n\n}\n\n\n\nimpl Default for Sprite {\n\n fn default() -> Sprite {\n\n Sprite {\n\n pos: Vector3::new(0.0, 0.0, 0.0),\n", "file_path": "src/graphics/shaders/sprite/data.rs", "rank": 90, "score": 63454.02922689695 }, { "content": "mod data;\n\nmod shader;\n\n\n\npub use self::data::Sprite;\n\npub use self::shader::SpriteShader;\n", "file_path": "src/graphics/shaders/sprite/mod.rs", "rank": 91, "score": 63452.553301973974 }, { "content": "mod data;\n\nmod shader;\n\n\n\npub(crate) use self::data::{TextSprite, TextUserData};\n\n\n\npub use self::data::Text;\n\npub use self::shader::{TextShader, TextShaderPass};\n", "file_path": "src/graphics/shaders/text/mod.rs", "rank": 92, "score": 63452.53019210533 }, { "content": " Sprite {\n\n pos,\n\n size: {\n\n let x = (size.x as u32) & 0xFFFF;\n\n let y = (size.y as u32) & 0xFFFF;\n\n Vector2::new(x as u16, y as u16)\n\n },\n\n texture,\n\n color,\n\n rotation: (rotation.fract() * 65536.0) as u16,\n\n }\n\n }\n\n\n\n /// Creates a new sprite. 
This does not perform conversions and represents exactly the members\n\n /// of the sprite type.\n\n pub fn new_raw(\n\n pos: Vector3<f32>,\n\n size: Vector2<u16>,\n\n texture: TextureSection,\n\n color: RGBA8,\n", "file_path": "src/graphics/shaders/sprite/data.rs", "rank": 93, "score": 63448.98618223715 }, { "content": " size: Vector2::new(100, 100),\n\n texture: TextureSection::default(),\n\n color: RGBA8::WHITE,\n\n rotation: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl Sprite {\n\n /// Creates aa new sprite. This converts the rotation and size from floats automatically. Size\n\n /// is measured in pixels, and is limited to 65535. Rotation is measured in turns from [0, 1).\n\n /// Values outside of the range are wrapped into the range. For example, 1.75 is wrapped into\n\n /// 0.75, -0.4 is wrapped into 0.6.\n\n pub fn new(\n\n pos: Vector3<f32>,\n\n size: Vector2<f32>,\n\n texture: TextureSection,\n\n color: RGBA8,\n\n rotation: f32,\n\n ) -> Sprite {\n", "file_path": "src/graphics/shaders/sprite/data.rs", "rank": 94, "score": 63445.32013603273 }, { "content": " rotation: u16,\n\n ) -> Sprite {\n\n Sprite {\n\n pos,\n\n size,\n\n texture,\n\n color,\n\n rotation,\n\n }\n\n }\n\n}\n\n\n\nimpl From<Sprite> for AABB2D {\n\n fn from(sprite: Sprite) -> Self {\n\n AABB2D::from_pos_size(sprite.pos.truncate(), sprite.size.cast().unwrap())\n\n }\n\n}\n", "file_path": "src/graphics/shaders/sprite/data.rs", "rank": 95, "score": 63445.191672156136 }, { "content": "\n\nimpl<'a> Into<TextStyle<'a, TextUserData>> for &Text<'a> {\n\n fn into(self) -> TextStyle<'a, TextUserData> {\n\n TextStyle {\n\n text: self.text,\n\n px: self.px,\n\n font_index: self.font_index,\n\n user_data: TextUserData {\n\n depth: self.depth,\n\n color: self.color,\n\n },\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub(crate) struct TextUserData {\n\n pub color: RGBA8,\n\n pub depth: f32,\n\n}\n", "file_path": "src/graphics/shaders/text/data.rs", "rank": 96, "score": 63445.03194894911 }, { 
"content": "fn parse_uniform(input: &DeriveInput) -> Result<TokenStream2, String> {\n\n let data = match &input.data {\n\n Data::Struct(data) => data,\n\n _ => return Err(\"Cannot represent an enum or union with #[uniform], only a struct.\".to_string()),\n\n };\n\n\n\n if input.attrs.iter().any(|attr| attr.path.is_ident(\"repr\")) {\n\n return Err(\"Cannot parse another #[repr] attribute on a struct marked with #[uniform]\".to_string());\n\n }\n\n\n\n let mod_path = quote!(std140);\n\n let struct_name = &input.ident;\n\n let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();\n\n\n\n let asserts = data.fields.iter().map(|field| {\n\n let ty = &field.ty;\n\n let span = field.span();\n\n\n\n quote_spanned!(span=> assert_std140_element::<#ty> { marker: core::marker::PhantomData };)\n\n });\n", "file_path": "storm_macro/src/lib.rs", "rank": 97, "score": 61864.214214763335 }, { "content": "pub fn make(sound: &Sound, volume: f32, smooth: f32, paused: bool) -> (SoundControl, SoundInstance) {\n\n let control = SoundControl::new(volume, smooth, paused);\n\n let instance = SoundInstance::new(sound, &control);\n\n (control, instance)\n\n}\n\n\n", "file_path": "src/audio/instance.rs", "rank": 98, "score": 61444.49737203485 }, { "content": "trait Perceptual {\n\n fn perceptual(&self) -> Self;\n\n}\n\n\n\nimpl Perceptual for f32 {\n\n fn perceptual(&self) -> Self {\n\n self * self\n\n }\n\n}\n", "file_path": "src/audio/instance.rs", "rank": 99, "score": 59771.95746630241 } ]
Rust
src/find.rs
jqnatividad/csvlens
5d9f82e4e17145f0df324dfff39b29a257aab6bd
use crate::csv; use anyhow::Result; use regex::Regex; use std::cmp::min; use std::sync::{Arc, Mutex, MutexGuard}; use std::thread::{self}; use std::time::Instant; pub struct Finder { internal: Arc<Mutex<FinderInternalState>>, cursor: Option<usize>, row_hint: usize, target: Regex, } #[derive(Clone, Debug)] pub struct FoundRecord { pub row_index: usize, column_indices: Vec<usize>, } impl FoundRecord { pub fn row_index(&self) -> usize { self.row_index } pub fn column_indices(&self) -> &Vec<usize> { &self.column_indices } pub fn first_column(&self) -> usize { *self.column_indices.first().unwrap() } } impl Finder { pub fn new(config: Arc<csv::CsvConfig>, target: Regex) -> Result<Self> { let internal = FinderInternalState::init( config, target.clone() ); let finder = Finder { internal, cursor: None, row_hint: 0, target, }; Ok(finder) } pub fn count(&self) -> usize { (self.internal.lock().unwrap()).count } pub fn done(&self) -> bool { (self.internal.lock().unwrap()).done } pub fn cursor(&self) -> Option<usize> { self.cursor } pub fn cursor_row_index(&self) -> Option<usize> { let m_guard = self.internal.lock().unwrap(); self.get_found_record_at_cursor(m_guard) .map(|x| x.row_index()) } pub fn target(&self) -> Regex { self.target.clone() } pub fn reset_cursor(&mut self) { self.cursor = None; } pub fn set_row_hint(&mut self, row_hint: usize) { self.row_hint = row_hint; } pub fn next(&mut self) -> Option<FoundRecord> { let m_guard = self.internal.lock().unwrap(); let count = m_guard.count; if let Some(n) = self.cursor { if n + 1 < count { self.cursor = Some(n + 1); } } else if count > 0 { self.cursor = Some(m_guard.next_from(self.row_hint)); } self.get_found_record_at_cursor(m_guard) } pub fn prev(&mut self) -> Option<FoundRecord> { let m_guard = self.internal.lock().unwrap(); if let Some(n) = self.cursor { self.cursor = Some(n.saturating_sub(1)); } else { let count = m_guard.count; if count > 0 { self.cursor = Some(m_guard.prev_from(self.row_hint)); } } 
self.get_found_record_at_cursor(m_guard) } pub fn current(&self) -> Option<FoundRecord> { let m_guard = self.internal.lock().unwrap(); self.get_found_record_at_cursor(m_guard) } fn get_found_record_at_cursor( &self, m_guard: MutexGuard<FinderInternalState>, ) -> Option<FoundRecord> { if let Some(n) = self.cursor { let res = m_guard.founds.get(n); res.cloned() } else { None } } fn terminate(&self) { let mut m_guard = self.internal.lock().unwrap(); m_guard.terminate(); } pub fn elapsed(&self) -> Option<u128> { let m_guard = self.internal.lock().unwrap(); m_guard.elapsed() } pub fn get_subset_found(&self, offset: usize, num_rows: usize) -> Vec<u64> { let m_guard = self.internal.lock().unwrap(); let founds = &m_guard.founds; let start = min(offset, founds.len().saturating_sub(1)); let end = start.saturating_add(num_rows); let end = min(end, founds.len()); let indices: Vec<u64> = founds[start..end] .iter() .map(|x| x.row_index() as u64) .collect(); indices } } impl Drop for Finder { fn drop(&mut self) { self.terminate(); } } struct FinderInternalState { count: usize, founds: Vec<FoundRecord>, done: bool, should_terminate: bool, elapsed: Option<u128>, } impl FinderInternalState { pub fn init(config: Arc<csv::CsvConfig>, target: Regex) -> Arc<Mutex<FinderInternalState>> { let internal = FinderInternalState { count: 0, founds: vec![], done: false, should_terminate: false, elapsed: None, }; let m_state = Arc::new(Mutex::new(internal)); let _m = m_state.clone(); let _filename = config.filename().to_owned(); let _handle = thread::spawn(move || { let mut bg_reader = config.new_reader().unwrap(); let records = bg_reader.records(); let start = Instant::now(); for (row_index, r) in records.enumerate() { let mut column_indices = vec![]; if let Ok(valid_record) = r { for (column_index, field) in valid_record.iter().enumerate() { if target.is_match(field) { column_indices.push(column_index); } } } if !column_indices.is_empty() { let found = FoundRecord { row_index, column_indices, 
}; let mut m = _m.lock().unwrap(); (*m).found_one(found); } let m = _m.lock().unwrap(); if m.should_terminate { break; } } let mut m = _m.lock().unwrap(); (*m).done = true; (*m).elapsed = Some(start.elapsed().as_micros()); }); m_state } fn found_one(&mut self, found: FoundRecord) { self.founds.push(found); self.count += 1; } fn next_from(&self, row_hint: usize) -> usize { let mut index = self.founds.partition_point(|r| r.row_index() < row_hint); if index >= self.founds.len() { index -= 1; } index } fn prev_from(&self, row_hint: usize) -> usize { let next = self.next_from(row_hint); if next > 0 { next - 1 } else { next } } fn terminate(&mut self) { self.should_terminate = true; } fn elapsed(&self) -> Option<u128> { self.elapsed } }
use crate::csv; use anyhow::Result; use regex::Regex; use std::cmp::min; use std::sync::{Arc, Mutex, MutexGuard}; use std::thread::{self}; use std::time::Instant; pub struct Finder { internal: Arc<Mutex<FinderInternalState>>, cursor: Option<usize>, row_hint: usize, target: Regex, } #[derive(Clone, Debug)] pub struct FoundRecord { pub row_index: usize, column_indices: Vec<usize>, } impl FoundRecord { pub fn row_index(&self) -> usize { self.row_index } pub fn column_indices(&self) -> &Vec<usize> { &self.column_indices } pub fn first_column(&self) -> usize { *self.column_indices.first().unwrap() } } impl Finder { pub fn new(config: Arc<csv::CsvConfig>, target: Regex) -> Result<Self> { let internal = FinderInternalState::init( config, target.clone() ); let finder = Finder { internal, cursor: None, row_hint: 0, target, }; Ok(finder) } pub fn count(&self) -> usize { (self.internal.lock().unwrap()).count } pub fn done(&self) -> bool { (self.internal.lock().unwrap()).done } pub fn cursor(&self) -> Option<usize> { self.cursor } pub fn cursor_row_index(&self) -> Option<usize> { let m_guard = self.internal.lock().unwrap(); self.get_found_record_at_cursor(m_guard) .map(|x| x.row_index()) } pub fn target(&self) -> Regex { self.target.clone() } pub fn reset_cursor(&mut self) { self.cursor = None; } pub fn set_row_hint(&mut self, row_hint: usize) { self.row_hint = row_hint; } pub fn next(&mut self) -> Option<FoundRecord> {
pub fn prev(&mut self) -> Option<FoundRecord> { let m_guard = self.internal.lock().unwrap(); if let Some(n) = self.cursor { self.cursor = Some(n.saturating_sub(1)); } else { let count = m_guard.count; if count > 0 { self.cursor = Some(m_guard.prev_from(self.row_hint)); } } self.get_found_record_at_cursor(m_guard) } pub fn current(&self) -> Option<FoundRecord> { let m_guard = self.internal.lock().unwrap(); self.get_found_record_at_cursor(m_guard) } fn get_found_record_at_cursor( &self, m_guard: MutexGuard<FinderInternalState>, ) -> Option<FoundRecord> { if let Some(n) = self.cursor { let res = m_guard.founds.get(n); res.cloned() } else { None } } fn terminate(&self) { let mut m_guard = self.internal.lock().unwrap(); m_guard.terminate(); } pub fn elapsed(&self) -> Option<u128> { let m_guard = self.internal.lock().unwrap(); m_guard.elapsed() } pub fn get_subset_found(&self, offset: usize, num_rows: usize) -> Vec<u64> { let m_guard = self.internal.lock().unwrap(); let founds = &m_guard.founds; let start = min(offset, founds.len().saturating_sub(1)); let end = start.saturating_add(num_rows); let end = min(end, founds.len()); let indices: Vec<u64> = founds[start..end] .iter() .map(|x| x.row_index() as u64) .collect(); indices } } impl Drop for Finder { fn drop(&mut self) { self.terminate(); } } struct FinderInternalState { count: usize, founds: Vec<FoundRecord>, done: bool, should_terminate: bool, elapsed: Option<u128>, } impl FinderInternalState { pub fn init(config: Arc<csv::CsvConfig>, target: Regex) -> Arc<Mutex<FinderInternalState>> { let internal = FinderInternalState { count: 0, founds: vec![], done: false, should_terminate: false, elapsed: None, }; let m_state = Arc::new(Mutex::new(internal)); let _m = m_state.clone(); let _filename = config.filename().to_owned(); let _handle = thread::spawn(move || { let mut bg_reader = config.new_reader().unwrap(); let records = bg_reader.records(); let start = Instant::now(); for (row_index, r) in records.enumerate() { let mut 
column_indices = vec![]; if let Ok(valid_record) = r { for (column_index, field) in valid_record.iter().enumerate() { if target.is_match(field) { column_indices.push(column_index); } } } if !column_indices.is_empty() { let found = FoundRecord { row_index, column_indices, }; let mut m = _m.lock().unwrap(); (*m).found_one(found); } let m = _m.lock().unwrap(); if m.should_terminate { break; } } let mut m = _m.lock().unwrap(); (*m).done = true; (*m).elapsed = Some(start.elapsed().as_micros()); }); m_state } fn found_one(&mut self, found: FoundRecord) { self.founds.push(found); self.count += 1; } fn next_from(&self, row_hint: usize) -> usize { let mut index = self.founds.partition_point(|r| r.row_index() < row_hint); if index >= self.founds.len() { index -= 1; } index } fn prev_from(&self, row_hint: usize) -> usize { let next = self.next_from(row_hint); if next > 0 { next - 1 } else { next } } fn terminate(&mut self) { self.should_terminate = true; } fn elapsed(&self) -> Option<u128> { self.elapsed } }
let m_guard = self.internal.lock().unwrap(); let count = m_guard.count; if let Some(n) = self.cursor { if n + 1 < count { self.cursor = Some(n + 1); } } else if count > 0 { self.cursor = Some(m_guard.next_from(self.row_hint)); } self.get_found_record_at_cursor(m_guard) }
function_block-function_prefix_line
[ { "content": "struct ReaderInternalState {\n\n total_line_number: Option<usize>,\n\n total_line_number_approx: Option<usize>,\n\n pos_table: Vec<Position>,\n\n done: bool,\n\n}\n\n\n\nimpl ReaderInternalState {\n\n fn init_internal(config: Arc<CsvConfig>) -> (Arc<Mutex<ReaderInternalState>>, JoinHandle<()>) {\n\n let internal = ReaderInternalState {\n\n total_line_number: None,\n\n total_line_number_approx: None,\n\n pos_table: vec![],\n\n done: false,\n\n };\n\n\n\n let m_state = Arc::new(Mutex::new(internal));\n\n\n\n let _m = m_state.clone();\n\n let handle = thread::spawn(move || {\n", "file_path": "src/csv.rs", "rank": 1, "score": 50586.31380385593 }, { "content": "fn main() {\n\n if let Err(e) = run_csvlens() {\n\n println!(\"{}\", e);\n\n std::process::exit(1);\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 2, "score": 33825.87497681685 }, { "content": "#[derive(Parser, Debug)]\n\nstruct Args {\n\n /// CSV filename\n\n filename: Option<String>,\n\n\n\n /// Delimiter character (comma by default)\n\n #[clap(short, long)]\n\n delimiter: Option<String>,\n\n\n\n /// Show stats for debugging\n\n #[clap(long)]\n\n debug: bool,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 3, "score": 33705.85580803654 }, { "content": "struct BordersState {\n\n x_row_separator: u16,\n\n y_first_record: u16,\n\n}\n\n\n\npub struct DebugStats {\n\n rows_view_elapsed: Option<f64>,\n\n finder_elapsed: Option<f64>,\n\n}\n\n\n\nimpl DebugStats {\n\n pub fn new() -> Self {\n\n DebugStats {\n\n rows_view_elapsed: None,\n\n finder_elapsed: None,\n\n }\n\n }\n\n\n\n pub fn rows_view_elapsed(&mut self, elapsed: Option<u128>) {\n\n self.rows_view_elapsed = elapsed.map(|e| e as f64 / 1000.0);\n", "file_path": "src/ui.rs", "rank": 4, "score": 32121.23844897581 }, { "content": "struct RowsFilter {\n\n indices: Vec<u64>,\n\n total: usize,\n\n}\n\n\n\nimpl RowsFilter {\n\n fn new(finder: &find::Finder, rows_from: u64, num_rows: u64) -> RowsFilter {\n\n let total = finder.count();\n\n let 
indices = finder.get_subset_found(rows_from as usize, num_rows as usize);\n\n RowsFilter { indices, total }\n\n }\n\n}\n\n\n\npub struct RowsView {\n\n reader: CsvLensReader,\n\n headers: Vec<String>,\n\n rows: Vec<Row>,\n\n num_rows: u64,\n\n rows_from: u64,\n\n filter: Option<RowsFilter>,\n", "file_path": "src/view.rs", "rank": 5, "score": 32121.23844897581 }, { "content": "struct SeekableFile {\n\n filename: Option<String>,\n\n inner_file: Option<NamedTempFile>,\n\n}\n\n\n\nimpl SeekableFile {\n\n fn new(maybe_filename: &Option<String>) -> Result<SeekableFile> {\n\n let mut inner_file = NamedTempFile::new()?;\n\n let inner_file_res;\n\n\n\n if let Some(filename) = maybe_filename {\n\n let err = format!(\"Failed to open file: {}\", filename);\n\n let mut f = File::open(filename).context(err)?;\n\n // If not seekable, it most likely is due to process substitution using\n\n // pipe - write out to a temp file to make it seekable\n\n if f.seek(SeekFrom::Start(0)).is_err() {\n\n let mut buffer: Vec<u8> = vec![];\n\n // TODO: could have read by chunks, yolo for now\n\n f.read_to_end(&mut buffer)?;\n\n inner_file.write_all(&buffer)?;\n", "file_path": "src/main.rs", "rank": 6, "score": 32121.23844897581 }, { "content": "fn scroll_to_found_record(\n\n found_record: find::FoundRecord,\n\n rows_view: &mut view::RowsView,\n\n csv_table_state: &mut CsvTableState,\n\n) {\n\n let (new_rows_offset, new_cols_offset) =\n\n get_offsets_to_make_visible(found_record, rows_view, csv_table_state);\n\n\n\n if let Some(rows_offset) = new_rows_offset {\n\n rows_view.set_rows_from(rows_offset).unwrap();\n\n csv_table_state.set_rows_offset(rows_offset);\n\n }\n\n\n\n if let Some(cols_offset) = new_cols_offset {\n\n csv_table_state.set_cols_offset(cols_offset);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 7, "score": 30864.312017484004 }, { "content": "#[derive(Debug, PartialEq)]\n\nstruct GetRowsStats {\n\n num_seek: u64,\n\n num_parsed_record: u64,\n\n}\n\n\n\nimpl GetRowsStats 
{\n\n fn new() -> GetRowsStats {\n\n GetRowsStats {\n\n num_seek: 0,\n\n num_parsed_record: 0,\n\n }\n\n }\n\n\n\n fn log_seek(&mut self) {\n\n self.num_seek += 1;\n\n }\n\n\n\n fn log_parsed_record(&mut self) {\n\n self.num_parsed_record += 1\n\n }\n\n}\n\n\n", "file_path": "src/csv.rs", "rank": 8, "score": 30749.91437278421 }, { "content": "fn get_offsets_to_make_visible(\n\n found_record: find::FoundRecord,\n\n rows_view: &view::RowsView,\n\n csv_table_state: &CsvTableState,\n\n) -> (Option<u64>, Option<u64>) {\n\n // TODO: row_index() should probably be u64\n\n let new_rows_offset = if rows_view.in_view(found_record.row_index() as u64) {\n\n None\n\n } else {\n\n Some(found_record.row_index() as u64)\n\n };\n\n\n\n let cols_offset = csv_table_state.cols_offset;\n\n let last_rendered_col = cols_offset.saturating_add(csv_table_state.num_cols_rendered);\n\n let column_index = found_record.first_column() as u64;\n\n let new_cols_offset = if column_index >= cols_offset && column_index < last_rendered_col {\n\n None\n\n } else {\n\n Some(column_index)\n\n };\n\n\n\n (new_rows_offset, new_cols_offset)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 29656.128946574354 }, { "content": "fn run_csvlens() -> Result<()> {\n\n let args = Args::parse();\n\n\n\n let show_stats = args.debug;\n\n let delimiter = parse_delimiter(&args)?;\n\n\n\n let file = SeekableFile::new(&args.filename)?;\n\n let filename = file.filename();\n\n\n\n // Some lines are reserved for plotting headers (3 lines for headers + 2 lines for status bar)\n\n let num_rows_not_visible = 5;\n\n\n\n // Number of rows that are visible in the current frame\n\n let num_rows = 50 - num_rows_not_visible;\n\n\n\n let mut config = csv::CsvConfig::new(filename);\n\n if let Some(d) = delimiter {\n\n config.delimiter = d;\n\n }\n\n let shared_config = Arc::new(config);\n", "file_path": "src/main.rs", "rank": 10, "score": 28273.293589700996 }, { "content": "fn parse_delimiter(args: &Args) -> 
Result<Option<u8>> {\n\n if let Some(s) = &args.delimiter {\n\n let mut chars = s.chars();\n\n let c = chars.next().context(\"Delimiter should not be empty\")?;\n\n if !c.is_ascii() {\n\n bail!(\n\n \"Delimiter should be within the ASCII range: {} is too fancy\",\n\n c\n\n );\n\n }\n\n if chars.next().is_some() {\n\n bail!(\"Delimiter should be exactly one character, got {}\", s);\n\n }\n\n Ok(Some(c.try_into()?))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 11, "score": 20061.38956622258 }, { "content": "fn string_record_to_vec(record: &csv::StringRecord) -> Vec<String> {\n\n let mut string_vec = Vec::new();\n\n for field in record.iter() {\n\n string_vec.push(String::from(field));\n\n }\n\n string_vec\n\n}\n\n\n\npub struct CsvConfig {\n\n path: String,\n\n pub delimiter: u8,\n\n}\n\n\n\nimpl CsvConfig {\n\n pub fn new(path: &str) -> CsvConfig {\n\n CsvConfig {\n\n path: path.to_string(),\n\n delimiter: b',',\n\n }\n\n }\n", "file_path": "src/csv.rs", "rank": 12, "score": 18595.639569422434 }, { "content": "#[derive(Debug, PartialEq)]\n\npub struct Row {\n\n pub record_num: usize,\n\n pub fields: Vec<String>,\n\n}\n\n\n\nimpl CsvLensReader {\n\n pub fn new(config: Arc<CsvConfig>) -> Result<Self> {\n\n let mut reader = config.new_reader()?;\n\n let headers_record = reader.headers().unwrap();\n\n let headers = string_record_to_vec(headers_record);\n\n\n\n let (m_internal, _handle) = ReaderInternalState::init_internal(config);\n\n\n\n let reader = Self {\n\n reader,\n\n headers,\n\n internal: m_internal,\n\n };\n\n Ok(reader)\n", "file_path": "src/csv.rs", "rank": 17, "score": 16.53708707276089 }, { "content": " pub user_error: Option<String>,\n\n pub debug: String,\n\n}\n\n\n\nimpl CsvTableState {\n\n pub fn new(filename: Option<String>, total_cols: usize) -> Self {\n\n Self {\n\n rows_offset: 0,\n\n cols_offset: 0,\n\n num_cols_rendered: 0,\n\n more_cols_to_show: true,\n\n filename,\n\n total_line_number: None,\n\n 
total_cols,\n\n debug_stats: DebugStats::new(),\n\n buffer_content: BufferState::Disabled,\n\n finder_state: FinderState::FinderInactive,\n\n borders_state: None,\n\n col_ending_pos_x: 0,\n\n selected: None,\n", "file_path": "src/ui.rs", "rank": 18, "score": 15.683510349884658 }, { "content": "pub enum FinderState {\n\n FinderInactive,\n\n FinderActive(FinderActiveState),\n\n}\n\n\n\nimpl FinderState {\n\n pub fn from_finder(finder: &find::Finder, rows_view: &view::RowsView) -> FinderState {\n\n let active_state = FinderActiveState::new(finder, rows_view);\n\n FinderState::FinderActive(active_state)\n\n }\n\n}\n\n\n\npub struct FinderActiveState {\n\n find_complete: bool,\n\n total_found: u64,\n\n cursor_index: Option<u64>,\n\n target: Regex,\n\n found_record: Option<find::FoundRecord>,\n\n selected_offset: Option<u64>,\n\n is_filter: bool,\n", "file_path": "src/ui.rs", "rank": 19, "score": 14.548119978039066 }, { "content": "}\n\n\n\nimpl FinderActiveState {\n\n pub fn new(finder: &find::Finder, rows_view: &view::RowsView) -> Self {\n\n FinderActiveState {\n\n find_complete: finder.done(),\n\n total_found: finder.count() as u64,\n\n cursor_index: finder.cursor().map(|x| x as u64),\n\n target: finder.target(),\n\n found_record: finder.current(),\n\n selected_offset: rows_view.selected_offset(),\n\n is_filter: rows_view.is_filter(),\n\n }\n\n }\n\n\n\n fn status_line(&self) -> String {\n\n let plus_marker;\n\n let line;\n\n if self.total_found == 0 {\n\n if self.find_complete {\n", "file_path": "src/ui.rs", "rank": 20, "score": 12.193942892590012 }, { "content": " rx: mpsc::Receiver<Event<Key>>,\n\n input_handle: thread::JoinHandle<()>,\n\n ignore_exit_key: Arc<AtomicBool>,\n\n tick_handle: thread::JoinHandle<()>,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Config {\n\n pub exit_key: Key,\n\n pub tick_rate: Duration,\n\n}\n\n\n\nimpl Default for Config {\n\n fn default() -> Config {\n\n Config {\n\n exit_key: Key::Char('q'),\n\n tick_rate: 
Duration::from_millis(250),\n\n }\n\n }\n\n}\n", "file_path": "src/util/events.rs", "rank": 22, "score": 11.287994851042573 }, { "content": " }\n\n }\n\n}\n\n\n\npub struct CsvTableState {\n\n // TODO: types appropriate?\n\n pub rows_offset: u64,\n\n pub cols_offset: u64,\n\n pub num_cols_rendered: u64,\n\n pub more_cols_to_show: bool,\n\n filename: Option<String>,\n\n total_line_number: Option<usize>,\n\n total_cols: usize,\n\n pub debug_stats: DebugStats,\n\n buffer_content: BufferState,\n\n pub finder_state: FinderState,\n\n borders_state: Option<BordersState>,\n\n // TODO: should probably be with BordersState\n\n col_ending_pos_x: u16,\n\n pub selected: Option<u64>,\n", "file_path": "src/ui.rs", "rank": 23, "score": 10.90968733920777 }, { "content": " user_error: None,\n\n debug: \"\".into(),\n\n }\n\n }\n\n\n\n pub fn set_rows_offset(&mut self, offset: u64) {\n\n self.rows_offset = offset;\n\n }\n\n\n\n pub fn set_cols_offset(&mut self, offset: u64) {\n\n self.cols_offset = offset;\n\n }\n\n\n\n fn set_more_cols_to_show(&mut self, value: bool) {\n\n self.more_cols_to_show = value;\n\n }\n\n\n\n pub fn has_more_cols_to_show(&self) -> bool {\n\n self.more_cols_to_show\n\n }\n", "file_path": "src/ui.rs", "rank": 24, "score": 9.57202168024698 }, { "content": " (m_state, handle)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use core::time;\n\n\n\n use super::*;\n\n\n\n impl Row {\n\n pub fn new(record_num: usize, fields: Vec<&str>) -> Row {\n\n Row {\n\n record_num,\n\n fields: fields.iter().map(|x| x.to_string()).collect(),\n\n }\n\n }\n\n }\n\n\n\n impl CsvLensReader {\n", "file_path": "src/csv.rs", "rank": 25, "score": 9.567277203698831 }, { "content": " let re = Regex::new(s.as_str());\n\n if let Ok(target) = re {\n\n finder = Some(find::Finder::new(shared_config.clone(), target).unwrap());\n\n match control {\n\n Control::Find(_) => {\n\n // will scroll to first result below once ready\n\n first_found_scrolled = false;\n\n 
rows_view.reset_filter().unwrap();\n\n }\n\n Control::Filter(_) => {\n\n rows_view.set_rows_from(0).unwrap();\n\n rows_view.set_filter(finder.as_ref().unwrap()).unwrap();\n\n }\n\n _ => {}\n\n }\n\n } else {\n\n finder = None;\n\n // TODO: how to show multi-line error\n\n user_error = Some(format!(\"Invalid regex: {}\", s));\n\n }\n", "file_path": "src/main.rs", "rank": 26, "score": 9.378891557651484 }, { "content": "use crate::csv::Row;\n\nuse crate::find;\n\nuse crate::input::InputMode;\n\nuse crate::view;\n\nuse regex::Regex;\n\nuse tui::buffer::Buffer;\n\nuse tui::layout::Rect;\n\nuse tui::style::{Color, Modifier, Style};\n\nuse tui::symbols::line;\n\nuse tui::text::{Span, Spans};\n\nuse tui::widgets::Widget;\n\nuse tui::widgets::{Block, Borders, StatefulWidget};\n\n\n\nuse std::cmp::min;\n\n\n\n#[derive(Debug)]\n\npub struct CsvTable<'a> {\n\n header: Vec<String>,\n\n rows: &'a [Row],\n\n}\n", "file_path": "src/ui.rs", "rank": 28, "score": 9.169366911960747 }, { "content": "mod csv;\n\nmod find;\n\nmod input;\n\nmod ui;\n\n#[allow(dead_code)]\n\nmod util;\n\nmod view;\n\nuse crate::input::{Control, InputHandler};\n\nuse crate::ui::{CsvTable, CsvTableState, FinderState};\n\n\n\nextern crate csv as sushi_csv;\n\n\n\nuse anyhow::{bail, Context, Result};\n\nuse clap::Parser;\n\nuse regex::Regex;\n\nuse std::convert::TryInto;\n\nuse std::fs::File;\n\nuse std::io::{self, Read, Seek, SeekFrom, Write};\n\nuse std::sync::Arc;\n\nuse std::usize;\n\nuse tempfile::NamedTempFile;\n\nuse termion::{raw::IntoRawMode, screen::AlternateScreen};\n\nuse tui::backend::TermionBackend;\n\nuse tui::Terminal;\n\n\n", "file_path": "src/main.rs", "rank": 29, "score": 8.290735471061996 }, { "content": "\n\nimpl Events {\n\n pub fn new() -> Events {\n\n Events::with_config(Config::default())\n\n }\n\n\n\n pub fn with_config(config: Config) -> Events {\n\n let (tx, rx) = mpsc::channel();\n\n let ignore_exit_key = Arc::new(AtomicBool::new(true));\n\n let input_handle = {\n\n let tx = 
tx.clone();\n\n let ignore_exit_key = ignore_exit_key.clone();\n\n thread::spawn(move || {\n\n let tty = File::open(\"/dev/tty\").unwrap();\n\n for key in tty.keys().flatten() {\n\n if let Err(err) = tx.send(Event::Input(key)) {\n\n eprintln!(\"{}\", err);\n\n return;\n\n }\n\n if !ignore_exit_key.load(Ordering::Relaxed) && key == config.exit_key {\n", "file_path": "src/util/events.rs", "rank": 30, "score": 8.233133337340114 }, { "content": " // no more records\n\n break;\n\n }\n\n }\n\n\n\n if next_pos.is_none() {\n\n // If here, the last block had been scanned, and we should be\n\n // done. If next_wanted is not None, that means an out of bound\n\n // index was provided - that could happen for small input - and\n\n // we should ignore it and stop here regardless\n\n break;\n\n }\n\n }\n\n\n\n Ok((res, stats))\n\n }\n\n\n\n pub fn get_total_line_numbers(&self) -> Option<usize> {\n\n let res = (*self.internal.lock().unwrap()).total_line_number;\n\n res\n", "file_path": "src/csv.rs", "rank": 31, "score": 8.188252651285898 }, { "content": "\n\n pub fn new_reader(&self) -> Result<Reader<File>> {\n\n let reader = ReaderBuilder::new()\n\n .flexible(true)\n\n .delimiter(self.delimiter)\n\n .from_path(self.path.as_str())?;\n\n Ok(reader)\n\n }\n\n\n\n pub fn filename(&self) -> &str {\n\n self.path.as_str()\n\n }\n\n}\n\n\n\npub struct CsvLensReader {\n\n reader: Reader<File>,\n\n pub headers: Vec<String>,\n\n internal: Arc<Mutex<ReaderInternalState>>,\n\n}\n\n\n", "file_path": "src/csv.rs", "rank": 33, "score": 7.792197974021511 }, { "content": "use std::fs::File;\n\nuse std::sync::mpsc;\n\nuse std::sync::{\n\n atomic::{AtomicBool, Ordering},\n\n Arc,\n\n};\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse termion::event::Key;\n\nuse termion::input::TermRead;\n\n\n\npub enum Event<I> {\n\n Input(I),\n\n Tick,\n\n}\n\n\n\n/// A small event handler that wrap termion input and tick events. 
Each event\n\n/// type is handled in its own thread and returned to a common `Receiver`\n\npub struct Events {\n", "file_path": "src/util/events.rs", "rank": 34, "score": 7.740553274381056 }, { "content": " }\n\n\n\n pub fn get_total_line_numbers_approx(&self) -> Option<usize> {\n\n let res = (*self.internal.lock().unwrap()).total_line_number_approx;\n\n res\n\n }\n\n\n\n pub fn get_pos_table(&self) -> Vec<Position> {\n\n let res = (*self.internal.lock().unwrap()).pos_table.clone();\n\n res\n\n }\n\n}\n\n\n", "file_path": "src/csv.rs", "rank": 35, "score": 7.721140319946466 }, { "content": "\n\n // Finder\n\n if let FinderState::FinderActive(s) = &state.finder_state {\n\n content += format!(\" {}\", s.status_line()).as_str();\n\n }\n\n\n\n if let Some(stats_line) = &state.debug_stats.status_line() {\n\n content += format!(\" {}\", stats_line).as_str();\n\n }\n\n\n\n if !state.debug.is_empty() {\n\n content += format!(\" (debug: {})\", state.debug).as_str();\n\n }\n\n }\n\n let span = Span::styled(content, style);\n\n buf.set_span(area.x, area.bottom().saturating_sub(1), &span, area.width);\n\n }\n\n}\n\n\n\nimpl<'a> StatefulWidget for CsvTable<'a> {\n", "file_path": "src/ui.rs", "rank": 36, "score": 7.681837229324535 }, { "content": " selected: Option<u64>,\n\n elapsed: Option<u128>,\n\n}\n\n\n\nimpl RowsView {\n\n pub fn new(mut reader: CsvLensReader, num_rows: u64) -> Result<RowsView> {\n\n let rows_from = 0;\n\n let rows = reader.get_rows(rows_from, num_rows)?;\n\n let headers = reader.headers.clone();\n\n let view = Self {\n\n reader,\n\n headers,\n\n rows,\n\n num_rows,\n\n rows_from,\n\n filter: None,\n\n selected: Some(0),\n\n elapsed: None,\n\n };\n\n Ok(view)\n", "file_path": "src/view.rs", "rank": 37, "score": 7.66002283213083 }, { "content": " }\n\n\n\n pub fn elapsed(&self) -> Option<u128> {\n\n self.elapsed\n\n }\n\n\n\n pub fn get_total_line_numbers(&self) -> Option<usize> {\n\n self.reader.get_total_line_numbers()\n\n }\n\n\n\n pub fn 
get_total_line_numbers_approx(&self) -> Option<usize> {\n\n self.reader.get_total_line_numbers_approx()\n\n }\n\n\n\n pub fn in_view(&self, row_index: u64) -> bool {\n\n let last_row = self.rows_from().saturating_add(self.num_rows());\n\n if row_index >= self.rows_from() && row_index < last_row {\n\n return true;\n\n }\n\n false\n", "file_path": "src/view.rs", "rank": 38, "score": 7.080920227254635 }, { "content": " Ok(())\n\n }\n\n\n\n pub fn set_filter(&mut self, finder: &find::Finder) -> Result<()> {\n\n let filter = RowsFilter::new(finder, self.rows_from, self.num_rows);\n\n if let Some(cur_filter) = &self.filter {\n\n if cur_filter.indices == filter.indices {\n\n return Ok(());\n\n }\n\n }\n\n self.filter = Some(filter);\n\n self.do_get_rows()\n\n }\n\n\n\n pub fn is_filter(&self) -> bool {\n\n self.filter.is_some()\n\n }\n\n\n\n pub fn reset_filter(&mut self) -> Result<()> {\n\n if !self.is_filter() {\n", "file_path": "src/view.rs", "rank": 39, "score": 6.797831927187111 }, { "content": " let mut r = CsvLensReader::new(config).unwrap();\n\n r.wait_internal();\n\n let rows = r.get_rows(1234, 2).unwrap();\n\n let expected = vec![\n\n Row::new(1235, vec![\"A1235\", \"B1235\"]),\n\n Row::new(1236, vec![\"A1236\", \"B1236\"]),\n\n ];\n\n assert_eq!(rows, expected);\n\n }\n\n\n\n #[test]\n\n fn test_simple_get_rows_out_of_bound() {\n\n let config = Arc::new(CsvConfig::new(\"tests/data/simple.csv\"));\n\n let mut r = CsvLensReader::new(config).unwrap();\n\n r.wait_internal();\n\n let indices = vec![5000];\n\n let (rows, _stats) = r.get_rows_impl(&indices).unwrap();\n\n assert_eq!(rows, vec![]);\n\n }\n\n\n", "file_path": "src/csv.rs", "rank": 40, "score": 6.691125744413904 }, { "content": " #[test]\n\n fn test_simple_get_rows_impl_3() {\n\n let config = Arc::new(CsvConfig::new(\"tests/data/simple.csv\"));\n\n let mut r = CsvLensReader::new(config).unwrap();\n\n r.wait_internal();\n\n let indices = vec![2];\n\n let (rows, stats) = 
r.get_rows_impl(&indices).unwrap();\n\n let expected = vec![Row::new(3, vec![\"A3\", \"B3\"])];\n\n assert_eq!(rows, expected);\n\n let expected = GetRowsStats {\n\n num_seek: 0,\n\n num_parsed_record: 4, // 3 + 1 (including header)\n\n };\n\n assert_eq!(stats, expected);\n\n }\n\n\n\n #[test]\n\n fn test_small() {\n\n let config = Arc::new(CsvConfig::new(\"tests/data/small.csv\"));\n\n let mut r = CsvLensReader::new(config).unwrap();\n", "file_path": "src/csv.rs", "rank": 41, "score": 6.611166874172048 }, { "content": " };\n\n assert_eq!(stats, expected);\n\n }\n\n\n\n #[test]\n\n fn test_simple_get_rows_impl_2() {\n\n let config = Arc::new(CsvConfig::new(\"tests/data/simple.csv\"));\n\n let mut r = CsvLensReader::new(config).unwrap();\n\n r.wait_internal();\n\n let indices = vec![1234];\n\n let (rows, stats) = r.get_rows_impl(&indices).unwrap();\n\n let expected = vec![Row::new(1235, vec![\"A1235\", \"B1235\"])];\n\n assert_eq!(rows, expected);\n\n let expected = GetRowsStats {\n\n num_seek: 12,\n\n num_parsed_record: 35,\n\n };\n\n assert_eq!(stats, expected);\n\n }\n\n\n", "file_path": "src/csv.rs", "rank": 42, "score": 6.59939506601677 }, { "content": "\n\nimpl<'a> CsvTable<'a> {\n\n pub fn new(header: &[String], rows: &'a [Row]) -> Self {\n\n let _header = header.to_vec();\n\n Self {\n\n header: _header,\n\n rows,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> CsvTable<'a> {\n\n fn get_column_widths(&self, area_width: u16) -> Vec<u16> {\n\n let mut column_widths = Vec::new();\n\n for s in self.header.iter() {\n\n column_widths.push(s.len() as u16);\n\n }\n\n for row in self.rows.iter() {\n\n for (i, value) in row.fields.iter().enumerate() {\n\n let v = column_widths.get_mut(i).unwrap();\n", "file_path": "src/ui.rs", "rank": 43, "score": 6.535084150393073 }, { "content": "use crate::util::events::{Event, Events};\n\nuse termion::event::Key;\n\n\n\npub enum Control {\n\n ScrollUp,\n\n ScrollDown,\n\n ScrollLeft,\n\n ScrollRight,\n\n ScrollBottom,\n\n ScrollPageUp,\n\n 
ScrollPageDown,\n\n ScrollTo(usize),\n\n ScrollToNextFound,\n\n ScrollToPrevFound,\n\n Find(String),\n\n Filter(String),\n\n Quit,\n\n BufferContent(String),\n\n BufferReset,\n\n Nothing,\n\n}\n\n\n", "file_path": "src/input.rs", "rank": 44, "score": 6.252201869387949 }, { "content": " fn wait_internal(&self) {\n\n loop {\n\n if self.internal.lock().unwrap().done {\n\n break;\n\n }\n\n thread::sleep(time::Duration::from_millis(100));\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_cities_get_rows() {\n\n let config = Arc::new(CsvConfig::new(\"tests/data/cities.csv\"));\n\n let mut r = CsvLensReader::new(config).unwrap();\n\n r.wait_internal();\n\n let rows = r.get_rows(2, 3).unwrap();\n\n let expected = vec![\n\n Row::new(\n\n 3,\n\n vec![\n", "file_path": "src/csv.rs", "rank": 45, "score": 6.010105141576566 }, { "content": "extern crate csv;\n\n\n\nuse anyhow::Result;\n\nuse csv::{Position, Reader, ReaderBuilder};\n\nuse std::cmp::max;\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\nuse std::sync::{Arc, Mutex};\n\nuse std::thread::{self, JoinHandle};\n\n\n", "file_path": "src/csv.rs", "rank": 46, "score": 5.929654632372955 }, { "content": " }\n\n\n\n pub fn get_rows(&mut self, rows_from: u64, num_rows: u64) -> Result<Vec<Row>> {\n\n let indices: Vec<u64> = (rows_from..rows_from + num_rows).collect();\n\n self.get_rows_impl(&indices).map(|x| x.0)\n\n }\n\n\n\n pub fn get_rows_for_indices(&mut self, indices: &[u64]) -> Result<Vec<Row>> {\n\n self.get_rows_impl(indices).map(|x| x.0)\n\n }\n\n\n\n fn get_rows_impl(&mut self, indices: &[u64]) -> Result<(Vec<Row>, GetRowsStats)> {\n\n // stats for debugging and testing\n\n let mut stats = GetRowsStats::new();\n\n\n\n let pos = Position::new();\n\n self.reader.seek(pos)?;\n\n\n\n let pos_table = self.get_pos_table();\n\n let mut pos_iter = pos_table.iter();\n", "file_path": "src/csv.rs", "rank": 47, "score": 5.857039537028708 }, { "content": " #[test]\n\n fn test_simple_get_rows_impl_1() {\n\n let config = 
Arc::new(CsvConfig::new(\"tests/data/simple.csv\"));\n\n let mut r = CsvLensReader::new(config).unwrap();\n\n r.wait_internal();\n\n let indices = vec![1, 3, 5, 1234, 2345, 3456, 4999];\n\n let (rows, stats) = r.get_rows_impl(&indices).unwrap();\n\n let expected = vec![\n\n Row::new(2, vec![\"A2\", \"B2\"]),\n\n Row::new(4, vec![\"A4\", \"B4\"]),\n\n Row::new(6, vec![\"A6\", \"B6\"]),\n\n Row::new(1235, vec![\"A1235\", \"B1235\"]),\n\n Row::new(2346, vec![\"A2346\", \"B2346\"]),\n\n Row::new(3457, vec![\"A3457\", \"B3457\"]),\n\n Row::new(5000, vec![\"A5000\", \"B5000\"]),\n\n ];\n\n assert_eq!(rows, expected);\n\n let expected = GetRowsStats {\n\n num_seek: 49,\n\n num_parsed_record: 505,\n", "file_path": "src/csv.rs", "rank": 48, "score": 5.765791003672276 }, { "content": " line = \"Not found\".to_owned();\n\n } else {\n\n line = \"Finding...\".to_owned();\n\n }\n\n } else {\n\n if self.find_complete {\n\n plus_marker = \"\";\n\n } else {\n\n plus_marker = \"+\";\n\n }\n\n let cursor_str;\n\n if self.is_filter {\n\n if let Some(i) = self.selected_offset {\n\n cursor_str = i.saturating_add(1).to_string();\n\n } else {\n\n cursor_str = \"-\".to_owned();\n\n }\n\n } else if let Some(i) = self.cursor_index {\n\n cursor_str = (i.saturating_add(1)).to_string();\n\n } else {\n\n cursor_str = \"-\".to_owned();\n\n }\n\n line = format!(\"{}/{}{}\", cursor_str, self.total_found, plus_marker,);\n\n }\n\n let action = if self.is_filter { \"Filter\" } else { \"Find\" };\n\n format!(\"[{} \\\"{}\\\": {}]\", action, self.target, line)\n\n }\n\n}\n\n\n", "file_path": "src/ui.rs", "rank": 49, "score": 5.463694154079689 }, { "content": " return Ok(());\n\n }\n\n self.filter = None;\n\n self.do_get_rows()\n\n }\n\n\n\n pub fn rows_from(&self) -> u64 {\n\n self.rows_from\n\n }\n\n\n\n pub fn set_rows_from(&mut self, rows_from_: u64) -> Result<()> {\n\n let rows_from = if let Some(n) = self.bottom_rows_from() {\n\n min(rows_from_, n)\n\n } else {\n\n rows_from_\n\n };\n\n if 
rows_from == self.rows_from {\n\n return Ok(());\n\n }\n\n self.rows_from = rows_from;\n", "file_path": "src/view.rs", "rank": 51, "score": 5.0227834620132015 }, { "content": " self.do_get_rows()?;\n\n Ok(())\n\n }\n\n\n\n pub fn set_selected(&mut self, selected: u64) {\n\n let selected = min(selected, (self.rows.len() as u64).saturating_sub(1));\n\n self.selected = Some(selected);\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn reset_selected(&mut self) {\n\n self.selected = None;\n\n }\n\n\n\n pub fn increase_selected(&mut self) {\n\n if let Some(i) = self.selected {\n\n self.set_selected(i.saturating_add(1));\n\n };\n\n }\n\n\n", "file_path": "src/view.rs", "rank": 52, "score": 4.928237722703933 }, { "content": " }\n\n\n\n if let Some(f) = &finder {\n\n // TODO: need to create a new finder every time?\n\n csv_table_state.finder_state = FinderState::from_finder(f, &rows_view);\n\n }\n\n\n\n csv_table_state.user_error = user_error.clone();\n\n\n\n //csv_table_state.debug = format!(\"{:?}\", rows_view.rows_from());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 53, "score": 4.864593666449651 }, { "content": " // set row_hint to 0 so that this always scrolls to first result\n\n fdr.set_row_hint(0);\n\n if let Some(found_record) = fdr.next() {\n\n scroll_to_found_record(found_record, &mut rows_view, &mut csv_table_state);\n\n }\n\n first_found_scrolled = true;\n\n }\n\n\n\n // reset cursor if out of view\n\n if let Some(cursor_row_index) = fdr.cursor_row_index() {\n\n if !rows_view.in_view(cursor_row_index as u64) {\n\n fdr.reset_cursor();\n\n }\n\n }\n\n\n\n fdr.set_row_hint(rows_view.rows_from() as usize);\n\n } else {\n\n rows_view.set_filter(fdr).unwrap();\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 54, "score": 4.758709514992467 }, { "content": "\n\n fn set_num_cols_rendered(&mut self, n: u64) {\n\n self.num_cols_rendered = n;\n\n }\n\n\n\n pub fn set_total_line_number(&mut self, n: usize) {\n\n self.total_line_number = Some(n);\n\n 
}\n\n\n\n pub fn set_buffer(&mut self, mode: InputMode, buf: &str) {\n\n self.buffer_content = BufferState::Enabled(mode, buf.to_string());\n\n }\n\n\n\n pub fn reset_buffer(&mut self) {\n\n self.buffer_content = BufferState::Disabled;\n\n }\n\n}\n", "file_path": "src/ui.rs", "rank": 55, "score": 4.620850542778192 }, { "content": "\n\n let csvlens_reader = csv::CsvLensReader::new(shared_config.clone())\n\n .context(format!(\"Failed to open file: {}\", filename))?;\n\n let mut rows_view = view::RowsView::new(csvlens_reader, num_rows)?;\n\n\n\n let headers = rows_view.headers().clone();\n\n\n\n let stdout = io::stdout().into_raw_mode().unwrap();\n\n let stdout = AlternateScreen::from(stdout);\n\n let backend = TermionBackend::new(stdout);\n\n let mut terminal = Terminal::new(backend).unwrap();\n\n\n\n let mut input_handler = InputHandler::new();\n\n let mut csv_table_state = CsvTableState::new(args.filename, headers.len());\n\n\n\n let mut finder: Option<find::Finder> = None;\n\n let mut first_found_scrolled = false;\n\n\n\n let mut user_error: Option<String> = None;\n\n\n", "file_path": "src/main.rs", "rank": 56, "score": 4.439180753772332 }, { "content": " y: u16,\n\n is_header: bool,\n\n row: &[String],\n\n row_index: Option<usize>,\n\n is_selected: bool,\n\n ) {\n\n let mut x_offset_header = x;\n\n let mut remaining_width = area.width.saturating_sub(x);\n\n let cols_offset = state.cols_offset as usize;\n\n // TODO: seems strange that these have to be set every row\n\n let mut has_more_cols_to_show = false;\n\n let mut col_ending_pos_x = 0;\n\n let mut num_cols_rendered = 0;\n\n for (col_index, (hname, &hlen)) in row.iter().zip(column_widths).enumerate() {\n\n if col_index < cols_offset {\n\n continue;\n\n }\n\n let effective_width = min(remaining_width, hlen);\n\n let mut style = Style::default();\n\n if is_header {\n", "file_path": "src/ui.rs", "rank": 57, "score": 4.333557473118581 }, { "content": " }\n\n\n\n pub fn finder_elapsed(&mut self, elapsed: 
Option<u128>) {\n\n self.finder_elapsed = elapsed.map(|e| e as f64 / 1000.0);\n\n }\n\n\n\n pub fn status_line(&self) -> Option<String> {\n\n let mut line = \"[\".to_string();\n\n if let Some(elapsed) = self.rows_view_elapsed {\n\n line += format!(\"rows:{}ms\", elapsed).as_str();\n\n }\n\n if let Some(elapsed) = self.finder_elapsed {\n\n line += format!(\" finder:{}ms\", elapsed).as_str();\n\n }\n\n line += \"]\";\n\n if line == \"[]\" {\n\n None\n\n }\n\n else {\n\n Some(line)\n", "file_path": "src/ui.rs", "rank": 58, "score": 4.301522229369172 }, { "content": " _ => Control::Nothing,\n\n }\n\n }\n\n\n\n fn is_input_buffering(&self) -> bool {\n\n matches!(self.buffer_state, BufferState::Active(_))\n\n }\n\n\n\n fn reset_buffer(&mut self) {\n\n self.buffer_state = BufferState::Inactive;\n\n self.mode = InputMode::Default;\n\n }\n\n\n\n pub fn mode(&self) -> InputMode {\n\n self.mode.clone()\n\n }\n\n}\n", "file_path": "src/input.rs", "rank": 59, "score": 4.28137379264829 }, { "content": "use crate::csv::{CsvLensReader, Row};\n\nuse crate::find;\n\nuse crate::input::Control;\n\n\n\nuse anyhow::Result;\n\nuse std::cmp::min;\n\nuse std::time::Instant;\n\n\n", "file_path": "src/view.rs", "rank": 61, "score": 3.9508958197578385 }, { "content": " csv_table_state.reset_buffer();\n\n }\n\n Control::BufferContent(buf) => {\n\n csv_table_state.set_buffer(input_handler.mode(), buf.as_str());\n\n }\n\n Control::BufferReset => {\n\n csv_table_state.reset_buffer();\n\n if finder.is_some() {\n\n finder = None;\n\n csv_table_state.finder_state = FinderState::FinderInactive;\n\n rows_view.reset_filter().unwrap();\n\n }\n\n }\n\n _ => {}\n\n }\n\n\n\n if let Some(fdr) = finder.as_mut() {\n\n if !rows_view.is_filter() {\n\n // scroll to first result once ready\n\n if !first_found_scrolled && fdr.count() > 0 {\n", "file_path": "src/main.rs", "rank": 62, "score": 3.912020546439222 }, { "content": " }\n\n\n\n fn get_total(&self) -> Option<usize> {\n\n if let Some(filter) = &self.filter 
{\n\n return Some(filter.total);\n\n } else if let Some(n) = self\n\n .reader\n\n .get_total_line_numbers()\n\n .or_else(|| self.reader.get_total_line_numbers_approx())\n\n {\n\n return Some(n);\n\n }\n\n None\n\n }\n\n\n\n fn increase_rows_from(&mut self, delta: u64) -> Result<()> {\n\n let new_rows_from = self.rows_from.saturating_add(delta);\n\n self.set_rows_from(new_rows_from)?;\n\n Ok(())\n\n }\n", "file_path": "src/view.rs", "rank": 63, "score": 3.8624179298642636 }, { "content": "\n\n // update rows and elapsed time if there are new results\n\n if show_stats {\n\n csv_table_state.debug_stats.rows_view_elapsed(rows_view.elapsed());\n\n if let Some(fdr) = &finder {\n\n csv_table_state.debug_stats.finder_elapsed(fdr.elapsed());\n\n }\n\n else {\n\n csv_table_state.debug_stats.finder_elapsed(None);\n\n }\n\n }\n\n\n\n // TODO: is this update too late?\n\n csv_table_state.set_rows_offset(rows_view.rows_from());\n\n csv_table_state.selected = rows_view.selected();\n\n\n\n if let Some(n) = rows_view.get_total_line_numbers() {\n\n csv_table_state.set_total_line_number(n);\n\n } else if let Some(n) = rows_view.get_total_line_numbers_approx() {\n\n csv_table_state.set_total_line_number(n);\n", "file_path": "src/main.rs", "rank": 64, "score": 3.759232356003884 }, { "content": " let bg_reader = config.new_reader().unwrap();\n\n let mut n = 0;\n\n let mut iter = bg_reader.into_records();\n\n loop {\n\n let next_pos = iter.reader().position().clone();\n\n if iter.next().is_none() {\n\n break;\n\n }\n\n // must not include headers position here (n > 0)\n\n if n > 0 && n % pos_table_update_every == 0 {\n\n let mut m = _m.lock().unwrap();\n\n (*m).pos_table.push(next_pos);\n\n }\n\n n += 1;\n\n }\n\n let mut m = _m.lock().unwrap();\n\n (*m).total_line_number = Some(n);\n\n (*m).done = true;\n\n });\n\n\n", "file_path": "src/csv.rs", "rank": 65, "score": 3.537143374081914 }, { "content": "\n\n fn decrease_rows_from(&mut self, delta: u64) -> Result<()> {\n\n let new_rows_from 
= self.rows_from.saturating_sub(delta);\n\n self.set_rows_from(new_rows_from)?;\n\n Ok(())\n\n }\n\n\n\n fn bottom_rows_from(&self) -> Option<u64> {\n\n // fix type conversion craziness\n\n if let Some(n) = self.get_total() {\n\n return Some(n.saturating_sub(self.num_rows as usize) as u64);\n\n }\n\n None\n\n }\n\n\n\n fn do_get_rows(&mut self) -> Result<()> {\n\n let start = Instant::now();\n\n let rows = if let Some(filter) = &self.filter {\n\n let indices = &filter.indices;\n\n self.reader.get_rows_for_indices(indices)?\n", "file_path": "src/view.rs", "rank": 66, "score": 3.3996069558106097 }, { "content": " style = style.add_modifier(Modifier::BOLD);\n\n }\n\n if is_selected {\n\n style = style\n\n .fg(Color::Rgb(255, 200, 0))\n\n .add_modifier(Modifier::BOLD);\n\n }\n\n match &state.finder_state {\n\n // TODO: seems like doing a bit too much of heavy lifting of\n\n // checking for matches (finder's work)\n\n FinderState::FinderActive(active) if active.target.is_match(hname) => {\n\n let mut highlight_style = style.fg(Color::Rgb(200, 0, 0));\n\n if let Some(hl) = &active.found_record {\n\n if let Some(row_index) = row_index {\n\n // TODO: vec::contains slow or does it even matter?\n\n if row_index == hl.row_index()\n\n && hl.column_indices().contains(&col_index)\n\n {\n\n highlight_style = highlight_style.bg(Color::LightYellow);\n\n }\n", "file_path": "src/ui.rs", "rank": 67, "score": 3.366098662145853 }, { "content": " let rows = r.get_rows(0, 50).unwrap();\n\n let expected = vec![\n\n Row::new(1, vec![\"c1\", \" v1\"]),\n\n Row::new(2, vec![\"c2\", \" v2\"]),\n\n ];\n\n assert_eq!(rows, expected);\n\n }\n\n\n\n #[test]\n\n fn test_small_delimiter() {\n\n let mut config = CsvConfig::new(\"tests/data/small.bsv\");\n\n config.delimiter = b'|';\n\n let config = Arc::new(config);\n\n let mut r = CsvLensReader::new(config).unwrap();\n\n let rows = r.get_rows(0, 50).unwrap();\n\n let expected = vec![\n\n Row::new(1, vec![\"c1\", \" v1\"]),\n\n Row::new(2, 
vec![\"c2\", \" v2\"]),\n\n ];\n\n assert_eq!(rows, expected);\n", "file_path": "src/csv.rs", "rank": 68, "score": 3.3620912592061645 }, { "content": " pub fn decrease_selected(&mut self) {\n\n if let Some(i) = self.selected {\n\n self.set_selected(i.saturating_sub(1));\n\n }\n\n }\n\n\n\n pub fn select_top(&mut self) {\n\n self.set_selected(0);\n\n }\n\n\n\n pub fn select_bottom(&mut self) {\n\n self.set_selected((self.rows.len() as u64).saturating_sub(1))\n\n }\n\n\n\n pub fn selected(&self) -> Option<u64> {\n\n self.selected\n\n }\n\n\n\n pub fn selected_offset(&self) -> Option<u64> {\n\n self.selected.map(|x| x.saturating_add(self.rows_from))\n", "file_path": "src/view.rs", "rank": 69, "score": 3.315867618064057 }, { "content": " let goto_line = match &self.buffer_state {\n\n BufferState::Active(buf) => buf.parse::<usize>().ok(),\n\n _ => None,\n\n };\n\n let res = if let Some(n) = goto_line {\n\n Control::ScrollTo(n)\n\n } else {\n\n Control::BufferReset\n\n };\n\n self.reset_buffer();\n\n res\n\n }\n\n Key::Char('\\n') => {\n\n let control;\n\n if cur_buffer.is_empty() {\n\n control = Control::BufferReset;\n\n } else if self.mode == InputMode::Find {\n\n control = Control::Find(cur_buffer.to_string());\n\n } else if self.mode == InputMode::Filter {\n\n control = Control::Filter(cur_buffer.to_string());\n", "file_path": "src/input.rs", "rank": 71, "score": 3.237946850345385 }, { "content": " }\n\n\n\n pub fn headers(&self) -> &Vec<String> {\n\n &self.headers\n\n }\n\n\n\n pub fn rows(&self) -> &Vec<Row> {\n\n &self.rows\n\n }\n\n\n\n pub fn num_rows(&self) -> u64 {\n\n self.num_rows\n\n }\n\n\n\n pub fn set_num_rows(&mut self, num_rows: u64) -> Result<()> {\n\n if num_rows == self.num_rows {\n\n return Ok(());\n\n }\n\n self.num_rows = num_rows;\n\n self.do_get_rows()?;\n", "file_path": "src/view.rs", "rank": 72, "score": 3.2252883572474187 }, { "content": " }\n\n\n\n #[test]\n\n fn test_irregular() {\n\n let config = 
Arc::new(CsvConfig::new(\"tests/data/irregular.csv\"));\n\n let mut r = CsvLensReader::new(config).unwrap();\n\n let rows = r.get_rows(0, 50).unwrap();\n\n let expected = vec![Row::new(1, vec![\"c1\"]), Row::new(2, vec![\"c2\", \" v2\"])];\n\n assert_eq!(rows, expected);\n\n }\n\n}\n", "file_path": "src/csv.rs", "rank": 73, "score": 3.1238037882424203 }, { "content": "pub mod events;\n", "file_path": "src/util/mod.rs", "rank": 74, "score": 3.062694435091993 }, { "content": " vec![\n\n \"43\",\n\n \"37\",\n\n \"48\",\n\n \"N\",\n\n \"89\",\n\n \"46\",\n\n \"11\",\n\n \"W\",\n\n \"Wisconsin Dells\",\n\n \"WI\",\n\n ],\n\n ),\n\n ];\n\n assert_eq!(rows, expected);\n\n }\n\n\n\n #[test]\n\n fn test_simple_get_rows() {\n\n let config = Arc::new(CsvConfig::new(\"tests/data/simple.csv\"));\n", "file_path": "src/csv.rs", "rank": 75, "score": 3.045312783294971 }, { "content": "\n\n pub fn next(&self) -> Result<Event<Key>, mpsc::RecvError> {\n\n self.rx.recv()\n\n }\n\n\n\n pub fn disable_exit_key(&mut self) {\n\n self.ignore_exit_key.store(true, Ordering::Relaxed);\n\n }\n\n\n\n pub fn enable_exit_key(&mut self) {\n\n self.ignore_exit_key.store(false, Ordering::Relaxed);\n\n }\n\n}\n", "file_path": "src/util/events.rs", "rank": 76, "score": 3.027464970132462 }, { "content": "# csvlens\n\n\n\n`csvlens` is a CSV file viewer in the command line. 
It is similar to `less` but\n\nmade for CSV.\n\n\n\n![Demo](.github/demo.gif)\n\n\n\n## Usage\n\n\n\nRun `csvlens` by providing the CSV filename:\n\n\n\n```\n\ncsvlens <filename>\n\n```\n\n\n\nPipe CSV data directly to `csvlens`:\n\n\n\n```\n\n<your commands producing some csv data> | csvlens\n\n```\n\n\n\n### Supported interactions\n\n* Scroll: `hjkl`, `← ↓ ↑→ `, `Page Up`, `Page Down`\n\n* Jump to line `n`: `nG`\n\n* Search: `/<thing>`\n\n * Go to next result: `n`\n\n * Go to previous result: `N`\n\n* Filter: `&<thing>` (or `//<thing>`)\n\n\n\n### Optional parameters\n\n* `-d <delimiter>`: Custom delimiter to use when parsing the CSV\n\n (e.g. `csvlens file.csv -d \\t`)\n\n\n\n## Installation\n\n\n\n`csvlens` is available on [crates.io](https://crates.io/crates/csvlens), so you\n\ncan install it using:\n\n```\n\ncargo install csvlens\n\n```\n\n\n\nOr, build and install from source:\n\n```\n\ncargo install --path $(pwd)\n\n```\n\n\n\n`csvlens` is also availble on pkgsrc. If you're using NetBSD you can install it using:\n\n```\n\npkgin install csvlens\n\n```\n", "file_path": "README.md", "rank": 77, "score": 2.635394139010758 }, { "content": " pub fn new() -> InputHandler {\n\n InputHandler {\n\n events: Events::new(),\n\n mode: InputMode::Default,\n\n buffer_state: BufferState::Inactive,\n\n }\n\n }\n\n\n\n pub fn next(&mut self) -> Control {\n\n if let Event::Input(key) = self.events.next().unwrap() {\n\n if self.is_input_buffering() {\n\n return self.handler_buffering(key);\n\n } else {\n\n return self.handler_default(key);\n\n }\n\n }\n\n // tick event, no need to distinguish it for now\n\n Control::Nothing\n\n }\n\n\n", "file_path": "src/input.rs", "rank": 78, "score": 2.471466843565027 }, { "content": " }\n\n }\n\n // highlight parts that match\n\n let mut matches = active.target.find_iter(hname);\n\n let non_matches = active.target.split(hname);\n\n let mut spans = vec![];\n\n for part in non_matches {\n\n let span = Span::styled(part, style);\n\n let 
cur_match = if let Some(m) = matches.next() {\n\n m.as_str()\n\n } else {\n\n \"\"\n\n };\n\n let p_span = Span::styled(cur_match, highlight_style);\n\n spans.push(span);\n\n spans.push(p_span.clone());\n\n }\n\n spans.pop();\n\n self.set_spans(buf, &spans, x_offset_header, y, effective_width);\n\n }\n", "file_path": "src/ui.rs", "rank": 79, "score": 2.433431978497908 }, { "content": " // TODO: make constant?\n\n let suffix = \"…\";\n\n let suffix_len = suffix.chars().count();\n\n\n\n // Reserve some space before the next column (same number used in get_column_widths)\n\n let mut remaining_width = width.saturating_sub(4);\n\n\n\n // Pack as many spans as possible until hitting width limit\n\n let mut cur_spans = vec![];\n\n for span in spans {\n\n if span.content.len() <= remaining_width.into() {\n\n cur_spans.push(span.clone());\n\n remaining_width = remaining_width.saturating_sub(span.content.len() as u16);\n\n } else {\n\n let truncated_content =\n\n &span.content[..remaining_width.saturating_sub(suffix_len as u16) as usize];\n\n let truncated_span = Span::styled(truncated_content, span.style);\n\n cur_spans.push(truncated_span);\n\n cur_spans.push(Span::raw(suffix));\n\n // TODO: handle breaking into multiple lines, for now don't care about remaining_width\n", "file_path": "src/ui.rs", "rank": 80, "score": 2.370779006654508 }, { "content": " return;\n\n }\n\n }\n\n })\n\n };\n\n let tick_handle = {\n\n thread::spawn(move || loop {\n\n if tx.send(Event::Tick).is_err() {\n\n break;\n\n }\n\n thread::sleep(config.tick_rate);\n\n })\n\n };\n\n Events {\n\n rx,\n\n ignore_exit_key,\n\n input_handle,\n\n tick_handle,\n\n }\n\n }\n", "file_path": "src/util/events.rs", "rank": 81, "score": 2.2740073774824583 }, { "content": " let rows_from = total.saturating_sub(self.num_rows as usize) as u64;\n\n self.set_rows_from(rows_from)?;\n\n }\n\n if self.selected.is_some() {\n\n self.select_bottom()\n\n }\n\n }\n\n Control::ScrollTo(n) => {\n\n let mut rows_from = 
n.saturating_sub(1) as u64;\n\n if let Some(n) = self.bottom_rows_from() {\n\n rows_from = min(rows_from, n);\n\n }\n\n self.set_rows_from(rows_from)?;\n\n if self.selected.is_some() {\n\n self.select_top()\n\n }\n\n }\n\n _ => {}\n\n }\n\n Ok(())\n", "file_path": "src/view.rs", "rank": 82, "score": 2.0445282124104 }, { "content": " }\n\n\n\n pub fn handle_control(&mut self, control: &Control) -> Result<()> {\n\n match control {\n\n Control::ScrollDown => {\n\n if let Some(i) = self.selected {\n\n if i == self.num_rows - 1 {\n\n self.increase_rows_from(1)?;\n\n } else {\n\n self.increase_selected();\n\n }\n\n } else {\n\n self.increase_rows_from(1)?;\n\n }\n\n }\n\n Control::ScrollPageDown => {\n\n self.increase_rows_from(self.num_rows)?;\n\n if self.selected.is_some() {\n\n self.select_top()\n\n }\n", "file_path": "src/view.rs", "rank": 83, "score": 2.0309277170812745 }, { "content": " } else {\n\n \"?\".to_owned()\n\n };\n\n let current_row = if let Some(i) = state.selected {\n\n self.rows.get(i as usize)\n\n } else {\n\n self.rows.first()\n\n };\n\n let row_num = match current_row {\n\n Some(row) => row.record_num.to_string(),\n\n _ => \"-\".to_owned(),\n\n };\n\n content += format!(\n\n \" [Row {}/{}, Col {}/{}]\",\n\n row_num,\n\n total_str,\n\n state.cols_offset + 1,\n\n state.total_cols,\n\n )\n\n .as_str();\n", "file_path": "src/ui.rs", "rank": 84, "score": 1.9733289553297197 }, { "content": " }\n\n }\n\n\n\n let status_area = Rect::new(\n\n area.x,\n\n area.bottom().saturating_sub(status_height),\n\n area.width,\n\n status_height,\n\n );\n\n self.render_status(status_area, buf, state);\n\n\n\n self.render_other_borders(buf, rows_area, state);\n\n }\n\n}\n\n\n\npub enum BufferState {\n\n Disabled,\n\n Enabled(InputMode, String),\n\n}\n\n\n", "file_path": "src/ui.rs", "rank": 85, "score": 1.9116672811527051 }, { "content": " .saturating_sub(status_height),\n\n );\n\n\n\n let row_num_section_width = self.render_row_numbers(buf, state, rows_area, 
self.rows);\n\n\n\n self.render_row(\n\n buf,\n\n state,\n\n &column_widths,\n\n rows_area,\n\n row_num_section_width,\n\n y_header,\n\n true,\n\n &self.header,\n\n None,\n\n false,\n\n );\n\n\n\n let mut y_offset = y_first_record;\n\n for (i, row) in self.rows.iter().enumerate() {\n", "file_path": "src/ui.rs", "rank": 86, "score": 1.870211625610414 }, { "content": " if record_num - 1 == wanted_index {\n\n let string_record = r?;\n\n let mut fields = Vec::new();\n\n for field in string_record.iter() {\n\n fields.push(String::from(field));\n\n }\n\n let row = Row {\n\n record_num: record_num as usize,\n\n fields,\n\n };\n\n res.push(row);\n\n next_wanted = indices_iter.next();\n\n }\n\n // stop parsing if done scanning whole block between marked positions\n\n if let Some(pos) = next_pos {\n\n if record_num >= pos.record() {\n\n break;\n\n }\n\n }\n\n } else {\n", "file_path": "src/csv.rs", "rank": 87, "score": 1.6974677293784564 }, { "content": " }\n\n\n\n // note that records() excludes header by default, but here the first entry is header\n\n // because of the seek() above.\n\n let mut records = self.reader.records();\n\n\n\n // parse records and collect those that are wanted\n\n loop {\n\n // exit early if all found. 
This should be common in case of consecutive indices\n\n if next_wanted.is_none() {\n\n break;\n\n }\n\n let wanted_index = *next_wanted.unwrap();\n\n let record_num = records.reader().position().record();\n\n if let Some(r) = records.next() {\n\n stats.log_parsed_record();\n\n // no effective pre-seeking happened, this is still the header\n\n if record_num == 0 {\n\n continue;\n\n }\n", "file_path": "src/csv.rs", "rank": 88, "score": 1.621198102814554 }, { "content": " type State = CsvTableState;\n\n\n\n fn render(self, area: Rect, buf: &mut Buffer, state: &mut Self::State) {\n\n // TODO: draw relative to the provided area\n\n\n\n if area.area() == 0 {\n\n return;\n\n }\n\n\n\n let status_height = 2;\n\n let column_widths = self.get_column_widths(area.width);\n\n let (y_header, y_first_record) = self.render_header_borders(buf, area);\n\n\n\n // row area: including row numbers and row content\n\n let rows_area = Rect::new(\n\n area.x,\n\n y_first_record,\n\n area.width,\n\n area.height\n\n .saturating_sub(y_first_record)\n", "file_path": "src/ui.rs", "rank": 89, "score": 1.6130071184197763 }, { "content": " let mut indices_iter = indices.iter();\n\n\n\n let mut res = Vec::new();\n\n\n\n let mut next_pos = pos_iter.next();\n\n let mut next_wanted = indices_iter.next();\n\n loop {\n\n if next_wanted.is_none() {\n\n break;\n\n }\n\n // seek as close to the next wanted record index as possible\n\n let index = *next_wanted.unwrap();\n\n while let Some(pos) = next_pos {\n\n if pos.record() - 1 <= index {\n\n self.reader.seek(pos.clone())?;\n\n stats.log_seek();\n\n } else {\n\n break;\n\n }\n\n next_pos = pos_iter.next();\n", "file_path": "src/csv.rs", "rank": 90, "score": 1.605924925870279 }, { "content": "\n\n // clear error message without changing other states on any action\n\n if !matches!(control, Control::Nothing) {\n\n user_error = None;\n\n }\n\n\n\n rows_view.handle_control(&control)?;\n\n\n\n match &control {\n\n Control::Quit => {\n\n break;\n\n }\n\n 
Control::ScrollTo(_) => {\n\n csv_table_state.reset_buffer();\n\n }\n\n Control::ScrollLeft => {\n\n let new_cols_offset = csv_table_state.cols_offset.saturating_sub(1);\n\n csv_table_state.set_cols_offset(new_cols_offset);\n\n }\n\n Control::ScrollRight => {\n", "file_path": "src/main.rs", "rank": 91, "score": 1.605924925870279 }, { "content": " inner_file_res = Some(inner_file);\n\n } else {\n\n inner_file_res = None;\n\n }\n\n } else {\n\n // Handle input from stdin\n\n let mut stdin = std::io::stdin();\n\n let mut buffer: Vec<u8> = vec![];\n\n stdin.read_to_end(&mut buffer)?;\n\n inner_file.write_all(&buffer)?;\n\n inner_file_res = Some(inner_file);\n\n }\n\n\n\n Ok(SeekableFile {\n\n filename: maybe_filename.clone(),\n\n inner_file: inner_file_res,\n\n })\n\n }\n\n\n\n fn filename(&self) -> &str {\n\n if let Some(f) = &self.inner_file {\n\n f.path().to_str().unwrap()\n\n } else {\n\n // If data is from stdin, then inner_file must be there\n\n self.filename.as_ref().unwrap()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 92, "score": 1.5065718783543747 }, { "content": " // quick line count\n\n let total_line_number_approx;\n\n {\n\n let file = File::open(config.filename()).unwrap();\n\n let buf_reader = BufReader::new(file);\n\n // subtract 1 for headers\n\n total_line_number_approx = buf_reader.lines().count().saturating_sub(1);\n\n\n\n let mut m = _m.lock().unwrap();\n\n (*m).total_line_number_approx = Some(total_line_number_approx);\n\n }\n\n\n\n let pos_table_num_entries = 10000;\n\n let minimum_interval = 100; // handle small csv (don't keep pos every line)\n\n let pos_table_update_every = max(\n\n minimum_interval,\n\n total_line_number_approx / pos_table_num_entries,\n\n );\n\n\n\n // full csv parsing\n", "file_path": "src/csv.rs", "rank": 93, "score": 1.406113208234757 }, { "content": "\n\n fn render_other_borders(&self, buf: &mut Buffer, area: Rect, state: &CsvTableState) {\n\n // TODO: maybe should be combined with 
render_header_borders() above\n\n // Render vertical separator\n\n if state.borders_state.is_none() {\n\n return;\n\n }\n\n\n\n let borders_state = state.borders_state.as_ref().unwrap();\n\n let y_first_record = borders_state.y_first_record;\n\n let section_width = borders_state.x_row_separator;\n\n\n\n let line_number_block = Block::default()\n\n .borders(Borders::RIGHT)\n\n .border_style(Style::default().fg(Color::Rgb(64, 64, 64)));\n\n let line_number_area = Rect::new(0, y_first_record, section_width, area.height);\n\n line_number_block.render(line_number_area, buf);\n\n\n\n // Intersection with header separator\n\n buf.get_mut(section_width - 1, y_first_record - 1)\n", "file_path": "src/ui.rs", "rank": 94, "score": 1.2429292842547879 } ]
Rust
arrow/src/buffer/ops.rs
zhaox1n/arrow-rs
d2cec2ccef6adf92754883bd58a7fdd4858c02a9
#[cfg(feature = "simd")] use crate::util::bit_util; #[cfg(feature = "simd")] use packed_simd::u8x64; #[cfg(feature = "avx512")] use crate::arch::avx512::*; use crate::util::bit_util::ceil; #[cfg(any(feature = "simd", feature = "avx512"))] use std::borrow::BorrowMut; use super::{Buffer, MutableBuffer}; #[cfg(feature = "simd")] pub fn bitwise_bin_op_simd_helper<F_SIMD, F_SCALAR>( left: &Buffer, left_offset: usize, right: &Buffer, right_offset: usize, len: usize, simd_op: F_SIMD, scalar_op: F_SCALAR, ) -> Buffer where F_SIMD: Fn(u8x64, u8x64) -> u8x64, F_SCALAR: Fn(u8, u8) -> u8, { let mut result = MutableBuffer::new(len).with_bitset(len, false); let lanes = u8x64::lanes(); let mut left_chunks = left.as_slice()[left_offset..].chunks_exact(lanes); let mut right_chunks = right.as_slice()[right_offset..].chunks_exact(lanes); let mut result_chunks = result.as_slice_mut().chunks_exact_mut(lanes); result_chunks .borrow_mut() .zip(left_chunks.borrow_mut().zip(right_chunks.borrow_mut())) .for_each(|(res, (left, right))| { unsafe { bit_util::bitwise_bin_op_simd(&left, &right, res, &simd_op) }; }); result_chunks .into_remainder() .iter_mut() .zip( left_chunks .remainder() .iter() .zip(right_chunks.remainder().iter()), ) .for_each(|(res, (left, right))| { *res = scalar_op(*left, *right); }); result.into() } #[cfg(feature = "simd")] pub fn bitwise_unary_op_simd_helper<F_SIMD, F_SCALAR>( left: &Buffer, left_offset: usize, len: usize, simd_op: F_SIMD, scalar_op: F_SCALAR, ) -> Buffer where F_SIMD: Fn(u8x64) -> u8x64, F_SCALAR: Fn(u8) -> u8, { let mut result = MutableBuffer::new(len).with_bitset(len, false); let lanes = u8x64::lanes(); let mut left_chunks = left.as_slice()[left_offset..].chunks_exact(lanes); let mut result_chunks = result.as_slice_mut().chunks_exact_mut(lanes); result_chunks .borrow_mut() .zip(left_chunks.borrow_mut()) .for_each(|(res, left)| unsafe { let data_simd = u8x64::from_slice_unaligned_unchecked(left); let simd_result = simd_op(data_simd); 
simd_result.write_to_slice_unaligned_unchecked(res); }); result_chunks .into_remainder() .iter_mut() .zip(left_chunks.remainder().iter()) .for_each(|(res, left)| { *res = scalar_op(*left); }); result.into() } pub fn bitwise_bin_op_helper<F>( left: &Buffer, left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, op: F, ) -> Buffer where F: Fn(u64, u64) -> u64, { let left_chunks = left.bit_chunks(left_offset_in_bits, len_in_bits); let right_chunks = right.bit_chunks(right_offset_in_bits, len_in_bits); let chunks = left_chunks .iter() .zip(right_chunks.iter()) .map(|(left, right)| op(left, right)); let mut buffer = unsafe { MutableBuffer::from_trusted_len_iter(chunks) }; let remainder_bytes = ceil(left_chunks.remainder_len(), 8); let rem = op(left_chunks.remainder_bits(), right_chunks.remainder_bits()); let rem = &rem.to_le_bytes()[0..remainder_bytes]; buffer.extend_from_slice(rem); buffer.into() } pub fn bitwise_unary_op_helper<F>( left: &Buffer, offset_in_bits: usize, len_in_bits: usize, op: F, ) -> Buffer where F: Fn(u64) -> u64, { let mut result = MutableBuffer::new(ceil(len_in_bits, 8)).with_bitset(len_in_bits / 64 * 8, false); let left_chunks = left.bit_chunks(offset_in_bits, len_in_bits); let result_chunks = result.typed_data_mut::<u64>().iter_mut(); result_chunks .zip(left_chunks.iter()) .for_each(|(res, left)| { *res = op(left); }); let remainder_bytes = ceil(left_chunks.remainder_len(), 8); let rem = op(left_chunks.remainder_bits()); let rem = &rem.to_le_bytes()[0..remainder_bytes]; result.extend_from_slice(rem); result.into() } #[cfg(all(target_arch = "x86_64", feature = "avx512"))] pub fn buffer_bin_and( left: &Buffer, left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { if left_offset_in_bits % 8 == 0 && right_offset_in_bits % 8 == 0 && len_in_bits % 8 == 0 { let len = len_in_bits / 8; let left_offset = left_offset_in_bits / 8; let right_offset = 
right_offset_in_bits / 8; let mut result = MutableBuffer::new(len).with_bitset(len, false); let mut left_chunks = left.as_slice()[left_offset..].chunks_exact(AVX512_U8X64_LANES); let mut right_chunks = right.as_slice()[right_offset..].chunks_exact(AVX512_U8X64_LANES); let mut result_chunks = result.as_slice_mut().chunks_exact_mut(AVX512_U8X64_LANES); result_chunks .borrow_mut() .zip(left_chunks.borrow_mut().zip(right_chunks.borrow_mut())) .for_each(|(res, (left, right))| unsafe { avx512_bin_and(left, right, res); }); result_chunks .into_remainder() .iter_mut() .zip( left_chunks .remainder() .iter() .zip(right_chunks.remainder().iter()), ) .for_each(|(res, (left, right))| { *res = *left & *right; }); result.into() } else { bitwise_bin_op_helper( &left, left_offset_in_bits, right, right_offset_in_bits, len_in_bits, |a, b| a & b, ) } } #[cfg(all(feature = "simd", not(feature = "avx512")))] pub fn buffer_bin_and( left: &Buffer, left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { if left_offset_in_bits % 8 == 0 && right_offset_in_bits % 8 == 0 && len_in_bits % 8 == 0 { bitwise_bin_op_simd_helper( &left, left_offset_in_bits / 8, &right, right_offset_in_bits / 8, len_in_bits / 8, |a, b| a & b, |a, b| a & b, ) } else { bitwise_bin_op_helper( &left, left_offset_in_bits, right, right_offset_in_bits, len_in_bits, |a, b| a & b, ) } } #[cfg(all(not(any(feature = "simd", feature = "avx512"))))] pub fn buffer_bin_and( left: &Buffer, left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { bitwise_bin_op_helper( left, left_offset_in_bits, right, right_offset_in_bits, len_in_bits, |a, b| a & b, ) } #[cfg(all(target_arch = "x86_64", feature = "avx512"))] pub fn buffer_bin_or( left: &Buffer, left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { if left_offset_in_bits % 8 == 0 && right_offset_in_bits % 8 == 0 && len_in_bits % 8 
== 0 { let len = len_in_bits / 8; let left_offset = left_offset_in_bits / 8; let right_offset = right_offset_in_bits / 8; let mut result = MutableBuffer::new(len).with_bitset(len, false); let mut left_chunks = left.as_slice()[left_offset..].chunks_exact(AVX512_U8X64_LANES); let mut right_chunks = right.as_slice()[right_offset..].chunks_exact(AVX512_U8X64_LANES); let mut result_chunks = result.as_slice_mut().chunks_exact_mut(AVX512_U8X64_LANES); result_chunks .borrow_mut() .zip(left_chunks.borrow_mut().zip(right_chunks.borrow_mut())) .for_each(|(res, (left, right))| unsafe { avx512_bin_or(left, right, res); }); result_chunks .into_remainder() .iter_mut() .zip( left_chunks .remainder() .iter() .zip(right_chunks.remainder().iter()), ) .for_each(|(res, (left, right))| { *res = *left | *right; }); result.into() } else { bitwise_bin_op_helper( &left, left_offset_in_bits, right, right_offset_in_bits, len_in_bits, |a, b| a | b, ) } } #[cfg(all(feature = "simd", not(feature = "avx512")))] pub fn buffer_bin_or( left: &Buffer, left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { if left_offset_in_bits % 8 == 0 && right_offset_in_bits % 8 == 0 && len_in_bits % 8 == 0 { bitwise_bin_op_simd_helper( &left, left_offset_in_bits / 8, &right, right_offset_in_bits / 8, len_in_bits / 8, |a, b| a | b, |a, b| a | b, ) } else { bitwise_bin_op_helper( &left, left_offset_in_bits, right, right_offset_in_bits, len_in_bits, |a, b| a | b, ) } } #[cfg(all(not(any(feature = "simd", feature = "avx512"))))] pub fn buffer_bin_or( left: &Buffer, left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { bitwise_bin_op_helper( left, left_offset_in_bits, right, right_offset_in_bits, len_in_bits, |a, b| a | b, ) } pub fn buffer_unary_not( left: &Buffer, offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { #[cfg(feature = "simd")] if offset_in_bits % 8 == 0 && len_in_bits % 8 == 0 { return 
bitwise_unary_op_simd_helper( &left, offset_in_bits / 8, len_in_bits / 8, |a| !a, |a| !a, ); } #[allow(unreachable_code)] { bitwise_unary_op_helper(left, offset_in_bits, len_in_bits, |a| !a) } }
#[cfg(feature = "simd")] use crate::util::bit_util; #[cfg(feature = "simd")] use packed_simd::u8x64; #[cfg(feature = "avx512")] use crate::arch::avx512::*; use crate::util::bit_util::ceil; #[cfg(any(feature = "simd", feature = "avx512"))] use std::borrow::BorrowMut; use super::{Buffer, MutableBuffer}; #[cfg(feature = "simd")] pub fn bitwise_bin_op_simd_helper<F_SIMD, F_SCALAR>( left: &Buffer, left_offset: usize, right: &Buffer, right_offset: usize, len: usize, simd_op: F_SIMD, scalar_op: F_SCALAR, ) -> Buffer where F_SIMD: Fn(u8x64, u8x64) -> u8x64, F_SCALAR: Fn(u8, u8) -> u8, { let mut result = MutableBuffer::new(len).with_bitset(len, false); let lanes = u8x64::lanes(); let mut left_chunks = left.as_slice()[left_offset..].chunks_exact(lanes); let mut right_chunks = right.as_slice()[right_offset..].chunks_exact(lanes); let mut result_chunks = result.as_slice_mut().chunks_exact_mut(lanes); result_chunks .borrow_mut() .zip(left_chunks.borrow_mut().zip(right_chunks.borrow_mut())) .for_each(|(res, (left, right))| { unsafe { bit_util::bitwise_bin_op_simd(&left, &right, res, &simd_op) }; }); result_chunks .into_remainder() .iter_mut() .zip( left_chunks .remainder() .iter() .zip(right_chunks.remainder().iter()), ) .for_each(|(res, (left, right))| { *res = scalar_op(*left, *right); }); result.into() } #[cfg(feature = "simd")] pub fn bitwise_unary_op_simd_helper<F_SIMD, F_SCALAR>( left: &Buffer, left_offset: usize, len: usize, simd_op: F_SIMD, scalar_op: F_SCALAR, ) -> Buffer where F_SIMD: Fn(u8x64) -> u8x64, F_SCALAR: Fn(u8) -> u8, { let mut result = MutableBuffer::new(len).with_bitset(len, false); let lanes = u8x64::lanes(); let mut left_chunks = left.as_slice()[left_offset..].chunks_exact(lanes); let mut result_chunks = result.as_slice_mut().chunks_exact_mut(lanes); result_chunks .borrow_mut() .zip(left_chunks.borrow_mut()) .for_each(|(res, left)| unsafe { let data_simd = u8x64::from_slice_unaligned_unchecked(left); let simd_result = simd_op(data_simd); 
simd_result.write_to_slice_unaligned_unchecked(res); }); result_chunks .into_remainder() .iter_mut() .zip(left_chunks.remainder().iter()) .for_each(|(res, left)| { *res = scalar_op(*left); }); result.into() } pub fn bitwise_bin_op_helper<F>( left: &Buffer, left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, op: F, ) -> Buffer where F: Fn(u64, u64) -> u64, { let left_chunks = left.bit_chunks(left_offset_in_bits, len_in_bits); let right_chunks = right.bit_chunks(right_offset_in_bits, len_in_bits); let chunks = left_chunks .iter() .zip(right_chunks.iter()) .map(|(left, right)| op(left, right)); let mut buffer = unsafe { MutableBuffer::from_trusted_len_iter(chunks) }; let remainder_bytes = ceil(left_chunks.remainder_len(), 8); let rem = op(left_chunks.remainder_bits(), right_chunks.remainder_bits()); let rem = &rem.to_le_bytes()[0..remainder_bytes]; buffer.extend_from_slice(rem); buffer.into() }
#[cfg(all(target_arch = "x86_64", feature = "avx512"))] pub fn buffer_bin_and( left: &Buffer, left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { if left_offset_in_bits % 8 == 0 && right_offset_in_bits % 8 == 0 && len_in_bits % 8 == 0 { let len = len_in_bits / 8; let left_offset = left_offset_in_bits / 8; let right_offset = right_offset_in_bits / 8; let mut result = MutableBuffer::new(len).with_bitset(len, false); let mut left_chunks = left.as_slice()[left_offset..].chunks_exact(AVX512_U8X64_LANES); let mut right_chunks = right.as_slice()[right_offset..].chunks_exact(AVX512_U8X64_LANES); let mut result_chunks = result.as_slice_mut().chunks_exact_mut(AVX512_U8X64_LANES); result_chunks .borrow_mut() .zip(left_chunks.borrow_mut().zip(right_chunks.borrow_mut())) .for_each(|(res, (left, right))| unsafe { avx512_bin_and(left, right, res); }); result_chunks .into_remainder() .iter_mut() .zip( left_chunks .remainder() .iter() .zip(right_chunks.remainder().iter()), ) .for_each(|(res, (left, right))| { *res = *left & *right; }); result.into() } else { bitwise_bin_op_helper( &left, left_offset_in_bits, right, right_offset_in_bits, len_in_bits, |a, b| a & b, ) } } #[cfg(all(feature = "simd", not(feature = "avx512")))] pub fn buffer_bin_and( left: &Buffer, left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { if left_offset_in_bits % 8 == 0 && right_offset_in_bits % 8 == 0 && len_in_bits % 8 == 0 { bitwise_bin_op_simd_helper( &left, left_offset_in_bits / 8, &right, right_offset_in_bits / 8, len_in_bits / 8, |a, b| a & b, |a, b| a & b, ) } else { bitwise_bin_op_helper( &left, left_offset_in_bits, right, right_offset_in_bits, len_in_bits, |a, b| a & b, ) } } #[cfg(all(not(any(feature = "simd", feature = "avx512"))))] pub fn buffer_bin_and( left: &Buffer, left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { 
bitwise_bin_op_helper( left, left_offset_in_bits, right, right_offset_in_bits, len_in_bits, |a, b| a & b, ) } #[cfg(all(target_arch = "x86_64", feature = "avx512"))] pub fn buffer_bin_or( left: &Buffer, left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { if left_offset_in_bits % 8 == 0 && right_offset_in_bits % 8 == 0 && len_in_bits % 8 == 0 { let len = len_in_bits / 8; let left_offset = left_offset_in_bits / 8; let right_offset = right_offset_in_bits / 8; let mut result = MutableBuffer::new(len).with_bitset(len, false); let mut left_chunks = left.as_slice()[left_offset..].chunks_exact(AVX512_U8X64_LANES); let mut right_chunks = right.as_slice()[right_offset..].chunks_exact(AVX512_U8X64_LANES); let mut result_chunks = result.as_slice_mut().chunks_exact_mut(AVX512_U8X64_LANES); result_chunks .borrow_mut() .zip(left_chunks.borrow_mut().zip(right_chunks.borrow_mut())) .for_each(|(res, (left, right))| unsafe { avx512_bin_or(left, right, res); }); result_chunks .into_remainder() .iter_mut() .zip( left_chunks .remainder() .iter() .zip(right_chunks.remainder().iter()), ) .for_each(|(res, (left, right))| { *res = *left | *right; }); result.into() } else { bitwise_bin_op_helper( &left, left_offset_in_bits, right, right_offset_in_bits, len_in_bits, |a, b| a | b, ) } } #[cfg(all(feature = "simd", not(feature = "avx512")))] pub fn buffer_bin_or( left: &Buffer, left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { if left_offset_in_bits % 8 == 0 && right_offset_in_bits % 8 == 0 && len_in_bits % 8 == 0 { bitwise_bin_op_simd_helper( &left, left_offset_in_bits / 8, &right, right_offset_in_bits / 8, len_in_bits / 8, |a, b| a | b, |a, b| a | b, ) } else { bitwise_bin_op_helper( &left, left_offset_in_bits, right, right_offset_in_bits, len_in_bits, |a, b| a | b, ) } } #[cfg(all(not(any(feature = "simd", feature = "avx512"))))] pub fn buffer_bin_or( left: &Buffer, 
left_offset_in_bits: usize, right: &Buffer, right_offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { bitwise_bin_op_helper( left, left_offset_in_bits, right, right_offset_in_bits, len_in_bits, |a, b| a | b, ) } pub fn buffer_unary_not( left: &Buffer, offset_in_bits: usize, len_in_bits: usize, ) -> Buffer { #[cfg(feature = "simd")] if offset_in_bits % 8 == 0 && len_in_bits % 8 == 0 { return bitwise_unary_op_simd_helper( &left, offset_in_bits / 8, len_in_bits / 8, |a| !a, |a| !a, ); } #[allow(unreachable_code)] { bitwise_unary_op_helper(left, offset_in_bits, len_in_bits, |a| !a) } }
pub fn bitwise_unary_op_helper<F>( left: &Buffer, offset_in_bits: usize, len_in_bits: usize, op: F, ) -> Buffer where F: Fn(u64) -> u64, { let mut result = MutableBuffer::new(ceil(len_in_bits, 8)).with_bitset(len_in_bits / 64 * 8, false); let left_chunks = left.bit_chunks(offset_in_bits, len_in_bits); let result_chunks = result.typed_data_mut::<u64>().iter_mut(); result_chunks .zip(left_chunks.iter()) .for_each(|(res, left)| { *res = op(left); }); let remainder_bytes = ceil(left_chunks.remainder_len(), 8); let rem = op(left_chunks.remainder_bits()); let rem = &rem.to_le_bytes()[0..remainder_bytes]; result.extend_from_slice(rem); result.into() }
function_block-full_function
[ { "content": "fn bench_buffer_and(left: &Buffer, right: &Buffer) {\n\n criterion::black_box((left & right).unwrap());\n\n}\n\n\n", "file_path": "arrow/benches/buffer_bit_ops.rs", "rank": 2, "score": 380009.3854061781 }, { "content": "fn bench_buffer_or(left: &Buffer, right: &Buffer) {\n\n criterion::black_box((left | right).unwrap());\n\n}\n\n\n", "file_path": "arrow/benches/buffer_bit_ops.rs", "rank": 3, "score": 380009.3854061781 }, { "content": "/// Performs `OR` operation on two arrays. If either left or right value is null then the\n\n/// result is also null.\n\n/// # Error\n\n/// This function errors when the arrays have different lengths.\n\n/// # Example\n\n/// ```rust\n\n/// use arrow::array::BooleanArray;\n\n/// use arrow::error::Result;\n\n/// use arrow::compute::kernels::boolean::or;\n\n/// # fn main() -> Result<()> {\n\n/// let a = BooleanArray::from(vec![Some(false), Some(true), None]);\n\n/// let b = BooleanArray::from(vec![Some(true), Some(true), Some(false)]);\n\n/// let or_ab = or(&a, &b)?;\n\n/// assert_eq!(or_ab, BooleanArray::from(vec![Some(true), Some(true), None]));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn or(left: &BooleanArray, right: &BooleanArray) -> Result<BooleanArray> {\n\n binary_boolean_kernel(left, right, buffer_bin_or)\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/boolean.rs", "rank": 6, "score": 327703.94838828593 }, { "content": "/// Performs `AND` operation on two arrays. 
If either left or right value is null then the\n\n/// result is also null.\n\n/// # Error\n\n/// This function errors when the arrays have different lengths.\n\n/// # Example\n\n/// ```rust\n\n/// use arrow::array::BooleanArray;\n\n/// use arrow::error::Result;\n\n/// use arrow::compute::kernels::boolean::and;\n\n/// # fn main() -> Result<()> {\n\n/// let a = BooleanArray::from(vec![Some(false), Some(true), None]);\n\n/// let b = BooleanArray::from(vec![Some(true), Some(true), Some(false)]);\n\n/// let and_ab = and(&a, &b)?;\n\n/// assert_eq!(and_ab, BooleanArray::from(vec![Some(false), Some(true), None]));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn and(left: &BooleanArray, right: &BooleanArray) -> Result<BooleanArray> {\n\n binary_boolean_kernel(left, right, buffer_bin_and)\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/boolean.rs", "rank": 7, "score": 327703.94838828593 }, { "content": "/// It's unstable_sort, may not preserve the order of equal elements\n\npub fn partial_sort<T, F>(v: &mut [T], limit: usize, mut is_less: F)\n\nwhere\n\n F: FnMut(&T, &T) -> Ordering,\n\n{\n\n let (before, _mid, _after) = v.select_nth_unstable_by(limit, &mut is_less);\n\n before.sort_unstable_by(is_less);\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/sort.rs", "rank": 8, "score": 324581.27247513307 }, { "content": "/// Logical 'or' boolean values with Kleene logic\n\n///\n\n/// # Behavior\n\n///\n\n/// This function behaves as follows with nulls:\n\n///\n\n/// * `true` or `null` = `true`\n\n/// * `null` or `true` = `true`\n\n/// * `false` or `null` = `null`\n\n/// * `null` or `false` = `null`\n\n/// * `null` or `null` = `null`\n\n///\n\n/// In other words, in this context a null value really means \\\"unknown\\\",\n\n/// and an unknown value 'or' true is always true.\n\n/// For a different null behavior, see function \\\"or\\\".\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use arrow::array::BooleanArray;\n\n/// use arrow::error::Result;\n\n/// use 
arrow::compute::kernels::boolean::or_kleene;\n\n/// # fn main() -> Result<()> {\n\n/// let a = BooleanArray::from(vec![Some(true), Some(false), None]);\n\n/// let b = BooleanArray::from(vec![None, None, None]);\n\n/// let or_ab = or_kleene(&a, &b)?;\n\n/// assert_eq!(or_ab, BooleanArray::from(vec![Some(true), None, None]));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\n///\n\n/// # Fails\n\n///\n\n/// If the operands have different lengths\n\npub fn or_kleene(left: &BooleanArray, right: &BooleanArray) -> Result<BooleanArray> {\n\n if left.null_count().is_zero() && right.null_count().is_zero() {\n\n return or(left, right);\n\n }\n\n\n\n let op = |left_true, left_false, right_true, right_false| {\n\n (\n\n left_true | right_true,\n\n left_true | right_true | (left_false & right_false),\n\n )\n\n };\n\n\n\n binary_boolean_kleene_kernel(left, right, op)\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/boolean.rs", "rank": 9, "score": 322959.9495819714 }, { "content": "/// Logical 'and' boolean values with Kleene logic\n\n///\n\n/// # Behavior\n\n///\n\n/// This function behaves as follows with nulls:\n\n///\n\n/// * `true` and `null` = `null`\n\n/// * `null` and `true` = `null`\n\n/// * `false` and `null` = `false`\n\n/// * `null` and `false` = `false`\n\n/// * `null` and `null` = `null`\n\n///\n\n/// In other words, in this context a null value really means \\\"unknown\\\",\n\n/// and an unknown value 'and' false is always false.\n\n/// For a different null behavior, see function \\\"and\\\".\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// use arrow::array::BooleanArray;\n\n/// use arrow::error::Result;\n\n/// use arrow::compute::kernels::boolean::and_kleene;\n\n/// # fn main() -> Result<()> {\n\n/// let a = BooleanArray::from(vec![Some(true), Some(false), None]);\n\n/// let b = BooleanArray::from(vec![None, None, None]);\n\n/// let and_ab = and_kleene(&a, &b)?;\n\n/// assert_eq!(and_ab, BooleanArray::from(vec![None, Some(false), None]));\n\n/// # Ok(())\n\n/// # 
}\n\n/// ```\n\n///\n\n/// # Fails\n\n///\n\n/// If the operands have different lengths\n\npub fn and_kleene(left: &BooleanArray, right: &BooleanArray) -> Result<BooleanArray> {\n\n if left.null_count().is_zero() && right.null_count().is_zero() {\n\n return and(left, right);\n\n }\n\n\n\n let op = |left_true, left_false, right_true, right_false| {\n\n (\n\n left_true & right_true,\n\n left_false | right_false | (left_true & right_true),\n\n )\n\n };\n\n\n\n binary_boolean_kleene_kernel(left, right, op)\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/boolean.rs", "rank": 10, "score": 322959.9495819714 }, { "content": "fn write_body_buffers<W: Write>(mut writer: W, data: &[u8]) -> Result<usize> {\n\n let len = data.len() as u32;\n\n let pad_len = pad_to_8(len) as u32;\n\n let total_len = len + pad_len;\n\n\n\n // write body buffer\n\n writer.write_all(data)?;\n\n if pad_len > 0 {\n\n writer.write_all(&vec![0u8; pad_len as usize][..])?;\n\n }\n\n\n\n writer.flush()?;\n\n Ok(total_len as usize)\n\n}\n\n\n", "file_path": "arrow/src/ipc/writer.rs", "rank": 11, "score": 322717.89124797087 }, { "content": "#[inline]\n\npub fn set_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data`\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn set_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n", "file_path": "arrow/src/util/bit_util.rs", "rank": 12, "score": 320182.5772178295 }, { "content": "#[inline]\n\npub fn unset_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] &= UNSET_BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. 
The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn unset_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) &= UNSET_BIT_MASK[i & 7];\n\n}\n\n\n\n/// Returns the ceil of `value`/`divisor`\n", "file_path": "arrow/src/util/bit_util.rs", "rank": 13, "score": 320182.5772178295 }, { "content": "#[inline]\n\npub fn set_array_bit(bits: &mut [u8], i: usize) {\n\n bits[i / 8] |= 1 << (i % 8);\n\n}\n\n\n", "file_path": "parquet/src/util/bit_util.rs", "rank": 14, "score": 314639.6248796647 }, { "content": "#[inline]\n\npub fn unset_array_bit(bits: &mut [u8], i: usize) {\n\n bits[i / 8] &= !(1 << (i % 8));\n\n}\n\n\n\n/// Returns the minimum number of bits needed to represent the value 'x'\n", "file_path": "parquet/src/util/bit_util.rs", "rank": 15, "score": 314639.6248796647 }, { "content": "/// Evaluate `op(left, right)` for [`PrimitiveArray`]s using a specified\n\n/// comparison function.\n\npub fn no_simd_compare_op<T, F>(\n\n left: &PrimitiveArray<T>,\n\n right: &PrimitiveArray<T>,\n\n op: F,\n\n) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n F: Fn(T::Native, T::Native) -> bool,\n\n{\n\n compare_op_primitive!(left, right, op)\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 16, "score": 311353.3184801221 }, { "content": "/// returns a comparison function that compares two values at two different positions\n\n/// between the two arrays.\n\n/// The arrays' types must be equal.\n\n/// # Example\n\n/// ```\n\n/// use arrow::array::{build_compare, Int32Array};\n\n///\n\n/// # fn main() -> arrow::error::Result<()> {\n\n/// let array1 = Int32Array::from(vec![1, 2]);\n\n/// let array2 = Int32Array::from(vec![3, 4]);\n\n///\n\n/// let cmp = build_compare(&array1, &array2)?;\n\n///\n\n/// // 1 (index 0 of array1) is smaller than 4 (index 1 of array2)\n\n/// assert_eq!(std::cmp::Ordering::Less, (cmp)(0, 1));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\n// This is a factory of 
comparisons.\n\n// The lifetime 'a enforces that we cannot use the closure beyond any of the array's lifetime.\n\npub fn build_compare(left: &dyn Array, right: &dyn Array) -> Result<DynComparator> {\n\n use DataType::*;\n\n use IntervalUnit::*;\n\n use TimeUnit::*;\n\n Ok(match (left.data_type(), right.data_type()) {\n\n (a, b) if a != b => {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"Can't compare arrays of different types\".to_string(),\n\n ));\n\n }\n\n (Boolean, Boolean) => compare_boolean(left, right),\n\n (UInt8, UInt8) => compare_primitives::<UInt8Type>(left, right),\n\n (UInt16, UInt16) => compare_primitives::<UInt16Type>(left, right),\n\n (UInt32, UInt32) => compare_primitives::<UInt32Type>(left, right),\n\n (UInt64, UInt64) => compare_primitives::<UInt64Type>(left, right),\n\n (Int8, Int8) => compare_primitives::<Int8Type>(left, right),\n\n (Int16, Int16) => compare_primitives::<Int16Type>(left, right),\n\n (Int32, Int32) => compare_primitives::<Int32Type>(left, right),\n\n (Int64, Int64) => compare_primitives::<Int64Type>(left, right),\n\n (Float32, Float32) => compare_float::<Float32Type>(left, right),\n", "file_path": "arrow/src/array/ord.rs", "rank": 17, "score": 310575.478461684 }, { "content": "/// Limits the output of value to limit...\n\nfn limited_fmt(f: &mut fmt::Formatter<'_>, value: &[u8], limit: usize) -> fmt::Result {\n\n if value.len() > limit {\n\n write!(f, \"{:?}\", &value[..limit])\n\n } else {\n\n write!(f, \"{:?}\", &value)\n\n }\n\n}\n\n\n\nimpl fmt::Display for FlightData {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"FlightData {{\")?;\n\n write!(f, \" descriptor: \")?;\n\n match &self.flight_descriptor {\n\n Some(d) => write!(f, \"{}\", d)?,\n\n None => write!(f, \"None\")?,\n\n };\n\n write!(f, \", header: \")?;\n\n limited_fmt(f, &self.data_header, 8)?;\n\n write!(f, \", metadata: \")?;\n\n limited_fmt(f, &self.app_metadata, 8)?;\n", "file_path": "arrow-flight/src/lib.rs", "rank": 
18, "score": 306478.46453672525 }, { "content": "/// Evaluate `op(left, right)` for [`PrimitiveArray`] and scalar using\n\n/// a specified comparison function.\n\npub fn no_simd_compare_op_scalar<T, F>(\n\n left: &PrimitiveArray<T>,\n\n right: T::Native,\n\n op: F,\n\n) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n F: Fn(T::Native, T::Native) -> bool,\n\n{\n\n compare_op_scalar_primitive!(left, right, op)\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 19, "score": 305491.7905681997 }, { "content": "#[inline]\n\npub fn memcpy(source: &[u8], target: &mut [u8]) {\n\n assert!(target.len() >= source.len());\n\n target[..source.len()].copy_from_slice(source)\n\n}\n\n\n", "file_path": "parquet/src/util/bit_util.rs", "rank": 20, "score": 305416.48529539374 }, { "content": "#[inline]\n\npub fn log2(mut x: u64) -> i32 {\n\n if x == 1 {\n\n return 0;\n\n }\n\n x -= 1;\n\n let mut result = 0;\n\n while x > 0 {\n\n x >>= 1;\n\n result += 1;\n\n }\n\n result\n\n}\n\n\n\n/// Returns the `num_bits` least-significant bits of `v`\n", "file_path": "parquet/src/util/bit_util.rs", "rank": 21, "score": 295127.05500351964 }, { "content": "/// Perform `left == right` operation on two arrays.\n\npub fn eq<T>(left: &PrimitiveArray<T>, right: &PrimitiveArray<T>) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n{\n\n #[cfg(feature = \"simd\")]\n\n return simd_compare_op(left, right, T::eq, |a, b| a == b);\n\n #[cfg(not(feature = \"simd\"))]\n\n return compare_op!(left, right, |a, b| a == b);\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 22, "score": 292294.4234060287 }, { "content": "/// Perform `left != right` operation on two arrays.\n\npub fn neq<T>(left: &PrimitiveArray<T>, right: &PrimitiveArray<T>) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n{\n\n #[cfg(feature = \"simd\")]\n\n return simd_compare_op(left, right, T::ne, |a, b| a != b);\n\n #[cfg(not(feature = \"simd\"))]\n\n return 
compare_op!(left, right, |a, b| a != b);\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 23, "score": 292294.4234060287 }, { "content": "/// Perform `left != right` operation on an array and a scalar value.\n\npub fn neq_scalar<T>(left: &PrimitiveArray<T>, right: T::Native) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n{\n\n #[cfg(feature = \"simd\")]\n\n return simd_compare_op_scalar(left, right, T::ne, |a, b| a != b);\n\n #[cfg(not(feature = \"simd\"))]\n\n return compare_op_scalar!(left, right, |a, b| a != b);\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 24, "score": 292294.27832648286 }, { "content": "/// Perform `left == right` operation on an array and a scalar value.\n\npub fn eq_scalar<T>(left: &PrimitiveArray<T>, right: T::Native) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n{\n\n #[cfg(feature = \"simd\")]\n\n return simd_compare_op_scalar(left, right, T::eq, |a, b| a == b);\n\n #[cfg(not(feature = \"simd\"))]\n\n return compare_op_scalar!(left, right, |a, b| a == b);\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 25, "score": 292294.27832648286 }, { "content": "/// Perform `left > right` operation on two arrays. Non-null values are greater than null\n\n/// values.\n\npub fn gt<T>(left: &PrimitiveArray<T>, right: &PrimitiveArray<T>) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n{\n\n #[cfg(feature = \"simd\")]\n\n return simd_compare_op(left, right, T::gt, |a, b| a > b);\n\n #[cfg(not(feature = \"simd\"))]\n\n return compare_op!(left, right, |a, b| a > b);\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 26, "score": 292293.6009283878 }, { "content": "/// Perform `left < right` operation on two arrays. 
Null values are less than non-null\n\n/// values.\n\npub fn lt<T>(left: &PrimitiveArray<T>, right: &PrimitiveArray<T>) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n{\n\n #[cfg(feature = \"simd\")]\n\n return simd_compare_op(left, right, T::lt, |a, b| a < b);\n\n #[cfg(not(feature = \"simd\"))]\n\n return compare_op!(left, right, |a, b| a < b);\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 27, "score": 292293.6009283878 }, { "content": "/// Perform `left > right` operation on an array and a scalar value.\n\n/// Non-null values are greater than null values.\n\npub fn gt_scalar<T>(left: &PrimitiveArray<T>, right: T::Native) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n{\n\n #[cfg(feature = \"simd\")]\n\n return simd_compare_op_scalar(left, right, T::gt, |a, b| a > b);\n\n #[cfg(not(feature = \"simd\"))]\n\n return compare_op_scalar!(left, right, |a, b| a > b);\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 28, "score": 292293.4743156802 }, { "content": "/// Perform `left < right` operation on an array and a scalar value.\n\n/// Null values are less than non-null values.\n\npub fn lt_scalar<T>(left: &PrimitiveArray<T>, right: T::Native) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n{\n\n #[cfg(feature = \"simd\")]\n\n return simd_compare_op_scalar(left, right, T::lt, |a, b| a < b);\n\n #[cfg(not(feature = \"simd\"))]\n\n return compare_op_scalar!(left, right, |a, b| a < b);\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 29, "score": 292293.47431568016 }, { "content": "/// Deserialize an IPC message into a schema\n\npub fn schema_from_bytes(bytes: &[u8]) -> Result<Schema> {\n\n if let Ok(ipc) = ipc::root_as_message(bytes) {\n\n if let Some(schema) = ipc.header_as_schema().map(fb_to_schema) {\n\n Ok(schema)\n\n } else {\n\n Err(ArrowError::IoError(\n\n \"Unable to get head as schema\".to_string(),\n\n ))\n\n }\n\n } else {\n\n 
Err(ArrowError::IoError(\n\n \"Unable to get root as message\".to_string(),\n\n ))\n\n }\n\n}\n\n\n\n/// Get the Arrow data type from the flatbuffer Field table\n\npub(crate) fn get_data_type(field: ipc::Field, may_be_dictionary: bool) -> DataType {\n\n if let Some(dictionary) = field.dictionary() {\n\n if may_be_dictionary {\n", "file_path": "arrow/src/ipc/convert.rs", "rank": 30, "score": 289699.7611839711 }, { "content": "/// Perform `left >= right` operation on an array and a scalar value.\n\n/// Non-null values are greater than null values.\n\npub fn gt_eq_scalar<T>(left: &PrimitiveArray<T>, right: T::Native) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n{\n\n #[cfg(feature = \"simd\")]\n\n return simd_compare_op_scalar(left, right, T::ge, |a, b| a >= b);\n\n #[cfg(not(feature = \"simd\"))]\n\n return compare_op_scalar!(left, right, |a, b| a >= b);\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 31, "score": 288390.5100943895 }, { "content": "/// Perform `left <= right` operation on an array and a scalar value.\n\n/// Null values are less than non-null values.\n\npub fn lt_eq_scalar<T>(left: &PrimitiveArray<T>, right: T::Native) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n{\n\n #[cfg(feature = \"simd\")]\n\n return simd_compare_op_scalar(left, right, T::le, |a, b| a <= b);\n\n #[cfg(not(feature = \"simd\"))]\n\n return compare_op_scalar!(left, right, |a, b| a <= b);\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 32, "score": 288390.5100943895 }, { "content": "#[inline]\n\nfn new_all_set_buffer(len: usize) -> Buffer {\n\n let buffer = MutableBuffer::new(len);\n\n let buffer = buffer.with_bitset(len, true);\n\n\n\n buffer.into()\n\n}\n\n\n\n// disable wrapping inside literal vectors used for test data and assertions\n\n#[rustfmt::skip::macros(vec)]\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::datatypes::Int8Type;\n\n use crate::{array::Int32Array, 
array::Int64Array, datatypes::Field};\n\n\n\n /// Evaluate `KERNEL` with two vectors as inputs and assert against the expected output.\n\n /// `A_VEC` and `B_VEC` can be of type `Vec<i64>` or `Vec<Option<i64>>`.\n\n /// `EXPECTED` can be either `Vec<bool>` or `Vec<Option<bool>>`.\n\n /// The main reason for this macro is that inputs and outputs align nicely after `cargo fmt`.\n\n macro_rules! cmp_i64 {\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 33, "score": 286246.2920694351 }, { "content": "/// Performs unary `NOT` operation on an arrays. If value is null then the result is also\n\n/// null.\n\n/// # Error\n\n/// This function never errors. It returns an error for consistency.\n\n/// # Example\n\n/// ```rust\n\n/// use arrow::array::BooleanArray;\n\n/// use arrow::error::Result;\n\n/// use arrow::compute::kernels::boolean::not;\n\n/// # fn main() -> Result<()> {\n\n/// let a = BooleanArray::from(vec![Some(false), Some(true), None]);\n\n/// let not_a = not(&a)?;\n\n/// assert_eq!(not_a, BooleanArray::from(vec![Some(true), Some(false), None]));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn not(left: &BooleanArray) -> Result<BooleanArray> {\n\n let left_offset = left.offset();\n\n let len = left.len();\n\n\n\n let data = left.data_ref();\n\n let null_bit_buffer = data\n\n .null_bitmap()\n\n .as_ref()\n\n .map(|b| b.bits.bit_slice(left_offset, len));\n\n\n\n let values = buffer_unary_not(&data.buffers()[0], left_offset, len);\n\n\n\n let data = ArrayData::new(\n\n DataType::Boolean,\n\n len,\n\n None,\n\n null_bit_buffer,\n\n 0,\n\n vec![values],\n\n vec![],\n\n );\n\n Ok(BooleanArray::from(data))\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/boolean.rs", "rank": 34, "score": 285118.7671604684 }, { "content": "#[inline]\n\npub fn memcpy_value<T>(source: &T, num_bytes: usize, target: &mut [u8])\n\nwhere\n\n T: ?Sized + AsBytes,\n\n{\n\n assert!(\n\n target.len() >= num_bytes,\n\n \"Not enough space. 
Only had {} bytes but need to put {} bytes\",\n\n target.len(),\n\n num_bytes\n\n );\n\n memcpy(&source.as_bytes()[..num_bytes], target)\n\n}\n\n\n\n/// Returns the ceil of value/divisor\n", "file_path": "parquet/src/util/bit_util.rs", "rank": 35, "score": 279719.53082631814 }, { "content": "/// Helper function to perform math lambda function on values from two arrays. If either\n\n/// left or right value is null then the output value is also null, so `1 + null` is\n\n/// `null`.\n\n///\n\n/// # Errors\n\n///\n\n/// This function errors if the arrays have different lengths\n\npub fn math_op<T, F>(\n\n left: &PrimitiveArray<T>,\n\n right: &PrimitiveArray<T>,\n\n op: F,\n\n) -> Result<PrimitiveArray<T>>\n\nwhere\n\n T: ArrowNumericType,\n\n F: Fn(T::Native, T::Native) -> T::Native,\n\n{\n\n if left.len() != right.len() {\n\n return Err(ArrowError::ComputeError(\n\n \"Cannot perform math operation on arrays of different length\".to_string(),\n\n ));\n\n }\n\n\n\n let null_bit_buffer =\n\n combine_option_bitmap(left.data_ref(), right.data_ref(), left.len())?;\n\n\n\n let values = left\n\n .values()\n", "file_path": "arrow/src/compute/kernels/arithmetic.rs", "rank": 39, "score": 273934.52091481135 }, { "content": "/// Zip two arrays by some boolean mask. 
Where the mask evaluates `true` values of `truthy`\n\n/// are taken, where the mask evaluates `false` values of `falsy` are taken.\n\n///\n\n/// # Arguments\n\n/// * `mask` - Boolean values used to determine from which array to take the values.\n\n/// * `truthy` - Values of this array are taken if mask evaluates `true`\n\n/// * `falsy` - Values of this array are taken if mask evaluates `false`\n\npub fn zip(\n\n mask: &BooleanArray,\n\n truthy: &dyn Array,\n\n falsy: &dyn Array,\n\n) -> Result<ArrayRef> {\n\n if truthy.data_type() != falsy.data_type() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"arguments need to have the same data type\".into(),\n\n ));\n\n }\n\n if truthy.len() != falsy.len() || falsy.len() != mask.len() {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"all arrays should have the same length\".into(),\n\n ));\n\n }\n\n let falsy = falsy.data();\n\n let truthy = truthy.data();\n\n\n\n let mut mutable = MutableArrayData::new(vec![&*truthy, &*falsy], false, truthy.len());\n\n\n", "file_path": "arrow/src/compute/kernels/zip.rs", "rank": 40, "score": 271072.4603253255 }, { "content": "#[inline]\n\npub fn trailing_bits(v: u64, num_bits: usize) -> u64 {\n\n if num_bits == 0 {\n\n return 0;\n\n }\n\n if num_bits >= 64 {\n\n return v;\n\n }\n\n let n = 64 - num_bits;\n\n (v << n) >> n\n\n}\n\n\n", "file_path": "parquet/src/util/bit_util.rs", "rank": 41, "score": 270857.51617167494 }, { "content": "#[inline]\n\npub fn num_required_bits(x: u64) -> usize {\n\n for i in (0..64).rev() {\n\n if x & (1u64 << i) != 0 {\n\n return i + 1;\n\n }\n\n }\n\n 0\n\n}\n\n\n\nstatic BIT_MASK: [u8; 8] = [1, 2, 4, 8, 16, 32, 64, 128];\n\n\n\n/// Returns whether bit at position `i` in `data` is set or not\n", "file_path": "parquet/src/util/bit_util.rs", "rank": 42, "score": 263227.2590973511 }, { "content": "/// Applies an unary and infalible function to a primitive array.\n\n/// This is the fastest way to perform an operation on a primitive array 
when\n\n/// the benefits of a vectorized operation outweights the cost of branching nulls and non-nulls.\n\n/// # Implementation\n\n/// This will apply the function for all values, including those on null slots.\n\n/// This implies that the operation must be infalible for any value of the corresponding type\n\n/// or this function may panic.\n\n/// # Example\n\n/// ```rust\n\n/// # use arrow::array::Int32Array;\n\n/// # use arrow::datatypes::Int32Type;\n\n/// # use arrow::compute::kernels::arity::unary;\n\n/// # fn main() {\n\n/// let array = Int32Array::from(vec![Some(5), Some(7), None]);\n\n/// let c = unary::<_, _, Int32Type>(&array, |x| x * 2 + 1);\n\n/// assert_eq!(c, Int32Array::from(vec![Some(11), Some(15), None]));\n\n/// # }\n\n/// ```\n\npub fn unary<I, F, O>(array: &PrimitiveArray<I>, op: F) -> PrimitiveArray<O>\n\nwhere\n\n I: ArrowPrimitiveType,\n\n O: ArrowPrimitiveType,\n\n F: Fn(I::Native) -> O::Native,\n\n{\n\n let values = array.values().iter().map(|v| op(*v));\n\n // JUSTIFICATION\n\n // Benefit\n\n // ~60% speedup\n\n // Soundness\n\n // `values` is an iterator with a known size because arrays are sized.\n\n let buffer = unsafe { Buffer::from_trusted_len_iter(values) };\n\n\n\n let data = into_primitive_array_data::<_, O>(array, buffer);\n\n PrimitiveArray::<O>::from(data)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "arrow/src/compute/kernels/arity.rs", "rank": 43, "score": 262232.55057339923 }, { "content": "/// Helper function to create arrays\n\nfn create_buffer(size: usize) -> Buffer {\n\n let mut result = MutableBuffer::new(size).with_bitset(size, false);\n\n\n\n for i in 0..size {\n\n result.as_slice_mut()[i] = 0b01010101 << i << (i % 4);\n\n }\n\n\n\n result.into()\n\n}\n\n\n", "file_path": "arrow/benches/buffer_bit_ops.rs", "rank": 44, "score": 262062.27391579305 }, { "content": "/// Verifies that a buffer of bytes contains a `Message`\n\n/// and returns it.\n\n/// Note that verification is still experimental and may 
not\n\n/// catch every error, or be maximally performant. For the\n\n/// previous, unchecked, behavior use\n\n/// `root_as_message_unchecked`.\n\npub fn root_as_message(buf: &[u8]) -> Result<Message, flatbuffers::InvalidFlatbuffer> {\n\n flatbuffers::root::<Message>(buf)\n\n}\n\n#[inline]\n", "file_path": "arrow/src/ipc/gen/Message.rs", "rank": 45, "score": 260510.68572598282 }, { "content": "/// Verifies that a buffer of bytes contains a `Footer`\n\n/// and returns it.\n\n/// Note that verification is still experimental and may not\n\n/// catch every error, or be maximally performant. For the\n\n/// previous, unchecked, behavior use\n\n/// `root_as_footer_unchecked`.\n\npub fn root_as_footer(buf: &[u8]) -> Result<Footer, flatbuffers::InvalidFlatbuffer> {\n\n flatbuffers::root::<Footer>(buf)\n\n}\n\n#[inline]\n", "file_path": "arrow/src/ipc/gen/File.rs", "rank": 46, "score": 260510.68572598282 }, { "content": "/// Verifies that a buffer of bytes contains a `Schema`\n\n/// and returns it.\n\n/// Note that verification is still experimental and may not\n\n/// catch every error, or be maximally performant. For the\n\n/// previous, unchecked, behavior use\n\n/// `root_as_schema_unchecked`.\n\npub fn root_as_schema(buf: &[u8]) -> Result<Schema, flatbuffers::InvalidFlatbuffer> {\n\n flatbuffers::root::<Schema>(buf)\n\n}\n\n#[inline]\n", "file_path": "arrow/src/ipc/gen/Schema.rs", "rank": 47, "score": 260510.68572598282 }, { "content": "/// Verifies that a buffer of bytes contains a `Tensor`\n\n/// and returns it.\n\n/// Note that verification is still experimental and may not\n\n/// catch every error, or be maximally performant. 
For the\n\n/// previous, unchecked, behavior use\n\n/// `root_as_tensor_unchecked`.\n\npub fn root_as_tensor(buf: &[u8]) -> Result<Tensor, flatbuffers::InvalidFlatbuffer> {\n\n flatbuffers::root::<Tensor>(buf)\n\n}\n\n#[inline]\n", "file_path": "arrow/src/ipc/gen/Tensor.rs", "rank": 48, "score": 260510.68572598282 }, { "content": "#[cfg(feature = \"simd\")]\n\nfn simd_math_op<T, SIMD_OP, SCALAR_OP>(\n\n left: &PrimitiveArray<T>,\n\n right: &PrimitiveArray<T>,\n\n simd_op: SIMD_OP,\n\n scalar_op: SCALAR_OP,\n\n) -> Result<PrimitiveArray<T>>\n\nwhere\n\n T: ArrowNumericType,\n\n SIMD_OP: Fn(T::Simd, T::Simd) -> T::Simd,\n\n SCALAR_OP: Fn(T::Native, T::Native) -> T::Native,\n\n{\n\n if left.len() != right.len() {\n\n return Err(ArrowError::ComputeError(\n\n \"Cannot perform math operation on arrays of different length\".to_string(),\n\n ));\n\n }\n\n\n\n let null_bit_buffer =\n\n combine_option_bitmap(left.data_ref(), right.data_ref(), left.len())?;\n\n\n", "file_path": "arrow/src/compute/kernels/arithmetic.rs", "rank": 49, "score": 260089.54300157417 }, { "content": "#[cfg(feature = \"simd\")]\n\nfn simd_compare_op<T, SIMD_OP, SCALAR_OP>(\n\n left: &PrimitiveArray<T>,\n\n right: &PrimitiveArray<T>,\n\n simd_op: SIMD_OP,\n\n scalar_op: SCALAR_OP,\n\n) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n SIMD_OP: Fn(T::Simd, T::Simd) -> T::SimdMask,\n\n SCALAR_OP: Fn(T::Native, T::Native) -> bool,\n\n{\n\n use std::borrow::BorrowMut;\n\n\n\n let len = left.len();\n\n if len != right.len() {\n\n return Err(ArrowError::ComputeError(\n\n \"Cannot perform comparison operation on arrays of different length\"\n\n .to_string(),\n\n ));\n\n }\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 50, "score": 260089.54300157417 }, { "content": "/// Infer the fields of a JSON file by reading all items from the JSON Value Iterator.\n\n///\n\n/// The following type coercion logic is implemented:\n\n/// * `Int64` and `Float64` are converted to 
`Float64`\n\n/// * Lists and scalars are coerced to a list of a compatible scalar\n\n/// * All other cases are coerced to `Utf8` (String)\n\n///\n\n/// Note that the above coercion logic is different from what Spark has, where it would default to\n\n/// String type in case of List and Scalar values appeared in the same field.\n\n///\n\n/// The reason we diverge here is because we don't have utilities to deal with JSON data once it's\n\n/// interpreted as Strings. We should match Spark's behavior once we added more JSON parsing\n\n/// kernels in the future.\n\npub fn infer_json_schema_from_iterator<I>(value_iter: I) -> Result<Schema>\n\nwhere\n\n I: Iterator<Item = Result<Value>>,\n\n{\n\n let mut field_types: HashMap<String, InferredType> = HashMap::new();\n\n\n\n for record in value_iter {\n\n match record? {\n\n Value::Object(map) => {\n\n collect_field_types_from_object(&mut field_types, &map)?;\n\n }\n\n value => {\n\n return Err(ArrowError::JsonError(format!(\n\n \"Expected JSON record to be an object, found {:?}\",\n\n value\n\n )));\n\n }\n\n };\n\n }\n\n\n", "file_path": "arrow/src/json/reader.rs", "rank": 51, "score": 258703.8809904511 }, { "content": "/// Returns a vector of size `n`, filled with randomly generated bytes.\n\npub fn random_bytes(n: usize) -> Vec<u8> {\n\n let mut result = vec![];\n\n let mut rng = seedable_rng();\n\n for _ in 0..n {\n\n result.push(rng.gen_range(0..255));\n\n }\n\n result\n\n}\n\n\n", "file_path": "arrow/src/util/test_util.rs", "rank": 52, "score": 257892.55581181258 }, { "content": "#[cfg(feature = \"simd\")]\n\nfn simd_compare_op_scalar<T, SIMD_OP, SCALAR_OP>(\n\n left: &PrimitiveArray<T>,\n\n right: T::Native,\n\n simd_op: SIMD_OP,\n\n scalar_op: SCALAR_OP,\n\n) -> Result<BooleanArray>\n\nwhere\n\n T: ArrowNumericType,\n\n SIMD_OP: Fn(T::Simd, T::Simd) -> T::SimdMask,\n\n SCALAR_OP: Fn(T::Native, T::Native) -> bool,\n\n{\n\n use std::borrow::BorrowMut;\n\n\n\n let len = left.len();\n\n\n\n let lanes = T::lanes();\n\n 
let buffer_size = bit_util::ceil(len, 8);\n\n let mut result = MutableBuffer::new(buffer_size).with_bitset(buffer_size, false);\n\n\n\n // this is currently the case for all our datatypes and allows us to always append full bytes\n", "file_path": "arrow/src/compute/kernels/comparison.rs", "rank": 53, "score": 257173.38675783805 }, { "content": "fn bit_ops_benchmark(c: &mut Criterion) {\n\n let left = create_buffer(512 * 10);\n\n let right = create_buffer(512 * 10);\n\n\n\n c.bench_function(\"buffer_bit_ops and\", |b| {\n\n b.iter(|| bench_buffer_and(&left, &right))\n\n });\n\n\n\n c.bench_function(\"buffer_bit_ops or\", |b| {\n\n b.iter(|| bench_buffer_or(&left, &right))\n\n });\n\n}\n\n\n\ncriterion_group!(benches, bit_ops_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "arrow/benches/buffer_bit_ops.rs", "rank": 54, "score": 254562.91942288203 }, { "content": "#[cfg(feature = \"simd\")]\n\nfn simd_signed_unary_math_op<T, SIMD_OP, SCALAR_OP>(\n\n array: &PrimitiveArray<T>,\n\n simd_op: SIMD_OP,\n\n scalar_op: SCALAR_OP,\n\n) -> Result<PrimitiveArray<T>>\n\nwhere\n\n T: datatypes::ArrowSignedNumericType,\n\n SIMD_OP: Fn(T::SignedSimd) -> T::SignedSimd,\n\n SCALAR_OP: Fn(T::Native) -> T::Native,\n\n{\n\n let lanes = T::lanes();\n\n let buffer_size = array.len() * std::mem::size_of::<T::Native>();\n\n let mut result = MutableBuffer::new(buffer_size).with_bitset(buffer_size, false);\n\n\n\n let mut result_chunks = result.typed_data_mut().chunks_exact_mut(lanes);\n\n let mut array_chunks = array.values().chunks_exact(lanes);\n\n\n\n result_chunks\n\n .borrow_mut()\n\n .zip(array_chunks.borrow_mut())\n", "file_path": "arrow/src/compute/kernels/arithmetic.rs", "rank": 55, "score": 254370.39104997518 }, { "content": "#[cfg(feature = \"simd\")]\n\nfn simd_float_unary_math_op<T, SIMD_OP, SCALAR_OP>(\n\n array: &PrimitiveArray<T>,\n\n simd_op: SIMD_OP,\n\n scalar_op: SCALAR_OP,\n\n) -> Result<PrimitiveArray<T>>\n\nwhere\n\n T: datatypes::ArrowFloatNumericType,\n\n 
SIMD_OP: Fn(T::Simd) -> T::Simd,\n\n SCALAR_OP: Fn(T::Native) -> T::Native,\n\n{\n\n let lanes = T::lanes();\n\n let buffer_size = array.len() * std::mem::size_of::<T::Native>();\n\n\n\n let mut result = MutableBuffer::new(buffer_size).with_bitset(buffer_size, false);\n\n\n\n let mut result_chunks = result.typed_data_mut().chunks_exact_mut(lanes);\n\n let mut array_chunks = array.values().chunks_exact(lanes);\n\n\n\n result_chunks\n\n .borrow_mut()\n", "file_path": "arrow/src/compute/kernels/arithmetic.rs", "rank": 56, "score": 254370.39104997518 }, { "content": "pub fn random_numbers_range<T>(n: usize, low: T, high: T, result: &mut Vec<T>)\n\nwhere\n\n T: PartialOrd + SampleUniform + Copy,\n\n{\n\n let mut rng = thread_rng();\n\n for _ in 0..n {\n\n result.push(rng.gen_range(low..high));\n\n }\n\n}\n", "file_path": "parquet/src/util/test_common/rand_gen.rs", "rank": 57, "score": 251629.73192363555 }, { "content": "#[inline]\n\nfn sort_unstable_by<T, F>(array: &mut [T], limit: usize, cmp: F)\n\nwhere\n\n F: FnMut(&T, &T) -> Ordering,\n\n{\n\n if array.len() == limit {\n\n array.sort_unstable_by(cmp);\n\n } else {\n\n partial_sort(array, limit, cmp);\n\n }\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/sort.rs", "rank": 58, "score": 249308.41287518298 }, { "content": "pub fn random_bytes(n: usize) -> Vec<u8> {\n\n let mut result = vec![];\n\n let mut rng = thread_rng();\n\n for _ in 0..n {\n\n result.push(rng.gen_range(0..255));\n\n }\n\n result\n\n}\n\n\n", "file_path": "parquet/src/util/test_common/rand_gen.rs", "rank": 59, "score": 248777.80333150568 }, { "content": "#[inline]\n\npub fn get_bit(data: &[u8], i: usize) -> bool {\n\n (data[i >> 3] & BIT_MASK[i & 7]) != 0\n\n}\n\n\n\n/// Utility class for writing bit/byte streams. 
This class can write data in either\n\n/// bit packed or byte aligned fashion.\n\npub struct BitWriter {\n\n buffer: Vec<u8>,\n\n max_bytes: usize,\n\n buffered_values: u64,\n\n byte_offset: usize,\n\n bit_offset: usize,\n\n start: usize,\n\n}\n\n\n\nimpl BitWriter {\n\n pub fn new(max_bytes: usize) -> Self {\n\n Self {\n\n buffer: vec![0; max_bytes],\n\n max_bytes,\n", "file_path": "parquet/src/util/bit_util.rs", "rank": 60, "score": 248594.07747380674 }, { "content": "#[inline]\n\npub fn get_bit(data: &[u8], i: usize) -> bool {\n\n (data[i >> 3] & BIT_MASK[i & 7]) != 0\n\n}\n\n\n\n/// Returns whether bit at position `i` in `data` is set or not.\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn get_bit_raw(data: *const u8, i: usize) -> bool {\n\n (*data.add(i >> 3) & BIT_MASK[i & 7]) != 0\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 1\n", "file_path": "arrow/src/util/bit_util.rs", "rank": 61, "score": 248594.07747380674 }, { "content": "/// Read a buffer based on offset and length\n\nfn read_buffer(buf: &ipc::Buffer, a_data: &[u8]) -> Buffer {\n\n let start_offset = buf.offset() as usize;\n\n let end_offset = start_offset + buf.length() as usize;\n\n let buf_data = &a_data[start_offset..end_offset];\n\n Buffer::from(&buf_data)\n\n}\n\n\n", "file_path": "arrow/src/ipc/reader.rs", "rank": 62, "score": 243611.742911932 }, { "content": "/// Layout of Parquet file\n\n/// +---------------------------+-----+---+\n\n/// | Rest of file | B | A |\n\n/// +---------------------------+-----+---+\n\n/// where A: parquet footer, B: parquet metadata.\n\n///\n\n/// The reader first reads DEFAULT_FOOTER_SIZE bytes from the end of the file.\n\n/// If it is not enough according to the length indicated in the footer, it reads more bytes.\n\npub fn parse_metadata<R: ChunkReader>(chunk_reader: &R) -> Result<ParquetMetaData> 
{\n\n // check file is large enough to hold footer\n\n let file_size = chunk_reader.len();\n\n if file_size < (FOOTER_SIZE as u64) {\n\n return Err(general_err!(\n\n \"Invalid Parquet file. Size is smaller than footer\"\n\n ));\n\n }\n\n\n\n // read and cache up to DEFAULT_FOOTER_READ_SIZE bytes from the end and process the footer\n\n let default_end_len = min(DEFAULT_FOOTER_READ_SIZE, chunk_reader.len() as usize);\n\n let mut default_end_reader = chunk_reader\n\n .get_read(chunk_reader.len() - default_end_len as u64, default_end_len)?;\n\n let mut default_len_end_buf = vec![0; default_end_len];\n\n default_end_reader.read_exact(&mut default_len_end_buf)?;\n\n\n\n // check this is indeed a parquet file\n\n if default_len_end_buf[default_end_len - 4..] != PARQUET_MAGIC {\n\n return Err(general_err!(\"Invalid Parquet file. Corrupt footer\"));\n\n }\n", "file_path": "parquet/src/file/footer.rs", "rank": 63, "score": 239149.1777461816 }, { "content": "///! Prints a visual representation of record batches to stdout\n\npub fn print_batches(results: &[RecordBatch]) -> Result<()> {\n\n println!(\"{}\", create_table(results)?);\n\n Ok(())\n\n}\n\n\n", "file_path": "arrow/src/util/pretty.rs", "rank": 64, "score": 235017.14802420404 }, { "content": "fn mutable_iter_extend_from_slice(data: &[Vec<u32>], capacity: usize) -> Buffer {\n\n criterion::black_box({\n\n let mut result = MutableBuffer::new(capacity);\n\n\n\n data.iter().for_each(|vec| {\n\n vec.iter()\n\n .for_each(|elem| result.extend_from_slice(elem.to_byte_slice()))\n\n });\n\n\n\n result.into()\n\n })\n\n}\n\n\n", "file_path": "arrow/benches/buffer_create.rs", "rank": 65, "score": 226498.5774148762 }, { "content": "#[inline]\n\npub fn convert_to_bytes<T>(val: &T, num_bytes: usize) -> Vec<u8>\n\nwhere\n\n T: ?Sized + AsBytes,\n\n{\n\n let mut bytes: Vec<u8> = vec![0; num_bytes];\n\n memcpy_value(val.as_bytes(), num_bytes, &mut bytes);\n\n bytes\n\n}\n\n\n", "file_path": "parquet/src/util/bit_util.rs", "rank": 66, 
"score": 224268.396704766 }, { "content": "/// Get the value at the given row in an array as a String.\n\n///\n\n/// Note this function is quite inefficient and is unlikely to be\n\n/// suitable for converting large arrays or record batches.\n\npub fn array_value_to_string(column: &array::ArrayRef, row: usize) -> Result<String> {\n\n if column.is_null(row) {\n\n return Ok(\"\".to_string());\n\n }\n\n match column.data_type() {\n\n DataType::Utf8 => make_string!(array::StringArray, column, row),\n\n DataType::LargeUtf8 => make_string!(array::LargeStringArray, column, row),\n\n DataType::Binary => make_string_hex!(array::BinaryArray, column, row),\n\n DataType::LargeBinary => make_string_hex!(array::LargeBinaryArray, column, row),\n\n DataType::Boolean => make_string!(array::BooleanArray, column, row),\n\n DataType::Int8 => make_string!(array::Int8Array, column, row),\n\n DataType::Int16 => make_string!(array::Int16Array, column, row),\n\n DataType::Int32 => make_string!(array::Int32Array, column, row),\n\n DataType::Int64 => make_string!(array::Int64Array, column, row),\n\n DataType::UInt8 => make_string!(array::UInt8Array, column, row),\n\n DataType::UInt16 => make_string!(array::UInt16Array, column, row),\n\n DataType::UInt32 => make_string!(array::UInt32Array, column, row),\n\n DataType::UInt64 => make_string!(array::UInt64Array, column, row),\n\n DataType::Float16 => make_string!(array::Float32Array, column, row),\n\n DataType::Float32 => make_string!(array::Float32Array, column, row),\n", "file_path": "arrow/src/util/display.rs", "rank": 67, "score": 223927.15119352203 }, { "content": "///! 
Create a visual representation of record batches\n\npub fn pretty_format_batches(results: &[RecordBatch]) -> Result<String> {\n\n Ok(create_table(results)?.to_string())\n\n}\n\n\n", "file_path": "arrow/src/util/pretty.rs", "rank": 68, "score": 223236.64380280263 }, { "content": "#[inline(always)]\n\npub fn make_string_from_decimal(column: &Arc<dyn Array>, row: usize) -> Result<String> {\n\n let array = column\n\n .as_any()\n\n .downcast_ref::<array::DecimalArray>()\n\n .unwrap();\n\n\n\n let formatted_decimal = array.value_as_string(row);\n\n Ok(formatted_decimal)\n\n}\n\n\n", "file_path": "arrow/src/util/display.rs", "rank": 69, "score": 220507.48127666427 }, { "content": "/// Sort a list of `ArrayRef` using `SortOptions` provided for each array.\n\n///\n\n/// Performs a stable lexicographical sort on values and indices.\n\n///\n\n/// Returns an `ArrowError::ComputeError(String)` if any of the array type is either unsupported by\n\n/// `lexsort_to_indices` or `take`.\n\n///\n\n/// Example:\n\n///\n\n/// ```\n\n/// use std::convert::From;\n\n/// use std::sync::Arc;\n\n/// use arrow::array::{ArrayRef, StringArray, PrimitiveArray, as_primitive_array};\n\n/// use arrow::compute::kernels::sort::{SortColumn, SortOptions, lexsort};\n\n/// use arrow::datatypes::Int64Type;\n\n///\n\n/// let sorted_columns = lexsort(&vec![\n\n/// SortColumn {\n\n/// values: Arc::new(PrimitiveArray::<Int64Type>::from(vec![\n\n/// None,\n\n/// Some(-2),\n\n/// Some(89),\n\n/// Some(-64),\n\n/// Some(101),\n\n/// ])) as ArrayRef,\n\n/// options: None,\n\n/// },\n\n/// SortColumn {\n\n/// values: Arc::new(StringArray::from(vec![\n\n/// Some(\"hello\"),\n\n/// Some(\"world\"),\n\n/// Some(\",\"),\n\n/// Some(\"foobar\"),\n\n/// Some(\"!\"),\n\n/// ])) as ArrayRef,\n\n/// options: Some(SortOptions {\n\n/// descending: true,\n\n/// nulls_first: false,\n\n/// }),\n\n/// },\n\n/// ], None).unwrap();\n\n///\n\n/// assert_eq!(as_primitive_array::<Int64Type>(&sorted_columns[0]).value(1), -64);\n\n/// 
assert!(sorted_columns[0].is_null(0));\n\n/// ```\n\npub fn lexsort(columns: &[SortColumn], limit: Option<usize>) -> Result<Vec<ArrayRef>> {\n\n let indices = lexsort_to_indices(columns, limit)?;\n\n columns\n\n .iter()\n\n .map(|c| take(c.values.as_ref(), &indices, None))\n\n .collect()\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/sort.rs", "rank": 70, "score": 216918.54662639924 }, { "content": "///! Prints a visual representation of a list of column to stdout\n\npub fn print_columns(col_name: &str, results: &[ArrayRef]) -> Result<()> {\n\n println!(\"{}\", create_column(col_name, results)?);\n\n Ok(())\n\n}\n\n\n", "file_path": "arrow/src/util/pretty.rs", "rank": 71, "score": 215549.98103627554 }, { "content": "/// Method to convert from Thrift.\n\npub fn from_thrift(elements: &[SchemaElement]) -> Result<TypePtr> {\n\n let mut index = 0;\n\n let mut schema_nodes = Vec::new();\n\n while index < elements.len() {\n\n let t = from_thrift_helper(elements, index)?;\n\n index = t.0;\n\n schema_nodes.push(t.1);\n\n }\n\n if schema_nodes.len() != 1 {\n\n return Err(general_err!(\n\n \"Expected exactly one root node, but found {}\",\n\n schema_nodes.len()\n\n ));\n\n }\n\n\n\n Ok(schema_nodes.remove(0))\n\n}\n\n\n", "file_path": "parquet/src/schema/types.rs", "rank": 72, "score": 211661.10751359578 }, { "content": "#[inline]\n\npub fn max_buffer_size(\n\n encoding: Encoding,\n\n max_level: i16,\n\n num_buffered_values: usize,\n\n) -> usize {\n\n let bit_width = log2(max_level as u64 + 1) as u8;\n\n match encoding {\n\n Encoding::RLE => {\n\n RleEncoder::max_buffer_size(bit_width, num_buffered_values)\n\n + RleEncoder::min_buffer_size(bit_width)\n\n }\n\n Encoding::BIT_PACKED => {\n\n ceil((num_buffered_values * bit_width as usize) as i64, 8) as usize\n\n }\n\n _ => panic!(\"Unsupported encoding type {}\", encoding),\n\n }\n\n}\n\n\n\n/// Encoder for definition/repetition levels.\n\n/// Currently only supports RLE and BIT_PACKED (dev/null) encoding, including 
v2.\n", "file_path": "parquet/src/encodings/levels.rs", "rank": 73, "score": 211063.56906309593 }, { "content": "fn benchmark(c: &mut Criterion) {\n\n let size = 2usize.pow(15);\n\n let data = create_data(size);\n\n\n\n let bool_data = create_data_bool(size);\n\n let cap = data.iter().map(|i| i.len()).sum();\n\n let byte_cap = cap * std::mem::size_of::<u32>();\n\n\n\n c.bench_function(\"mutable iter extend_from_slice\", |b| {\n\n b.iter(|| {\n\n mutable_iter_extend_from_slice(\n\n criterion::black_box(&data),\n\n criterion::black_box(0),\n\n )\n\n })\n\n });\n\n c.bench_function(\"mutable\", |b| {\n\n b.iter(|| mutable_buffer(criterion::black_box(&data), criterion::black_box(0)))\n\n });\n\n\n", "file_path": "arrow/benches/buffer_create.rs", "rank": 74, "score": 208945.79317995167 }, { "content": "/// Parses message type as string into a Parquet [`Type`](crate::schema::types::Type)\n\n/// which, for example, could be used to extract individual columns. Returns Parquet\n\n/// general error when parsing or validation fails.\n\npub fn parse_message_type(message_type: &str) -> Result<Type> {\n\n let mut parser = Parser {\n\n tokenizer: &mut Tokenizer::from_str(message_type),\n\n };\n\n parser.parse_message_type()\n\n}\n\n\n", "file_path": "parquet/src/schema/parser.rs", "rank": 75, "score": 208348.63563613623 }, { "content": "/// Returns a prepared function optimized to filter multiple arrays.\n\n/// Creating this function requires time, but using it is faster than [filter] when the\n\n/// same filter needs to be applied to multiple arrays (e.g. 
a multi-column `RecordBatch`).\n\n/// WARNING: the nulls of `filter` are ignored and the value on its slot is considered.\n\n/// Therefore, it is considered undefined behavior to pass `filter` with null values.\n\npub fn build_filter(filter: &BooleanArray) -> Result<Filter> {\n\n let iter = SlicesIterator::new(filter);\n\n let filter_count = iter.filter_count();\n\n let chunks = iter.collect::<Vec<_>>();\n\n\n\n Ok(Box::new(move |array: &ArrayData| {\n\n match filter_count {\n\n // return all\n\n len if len == array.len() => array.clone(),\n\n 0 => ArrayData::new_empty(array.data_type()),\n\n _ => {\n\n let mut mutable = MutableArrayData::new(vec![array], false, filter_count);\n\n chunks\n\n .iter()\n\n .for_each(|(start, end)| mutable.extend(0, *start, *end));\n\n mutable.freeze()\n\n }\n\n }\n\n }))\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/filter.rs", "rank": 76, "score": 208347.93289512 }, { "content": "#[inline]\n\npub fn string_to_timestamp_nanos(s: &str) -> Result<i64> {\n\n // Fast path: RFC3339 timestamp (with a T)\n\n // Example: 2020-09-08T13:42:29.190855Z\n\n if let Ok(ts) = DateTime::parse_from_rfc3339(s) {\n\n return Ok(ts.timestamp_nanos());\n\n }\n\n\n\n // Implement quasi-RFC3339 support by trying to parse the\n\n // timestamp with various other format specifiers to to support\n\n // separating the date and time with a space ' ' rather than 'T' to be\n\n // (more) compatible with Apache Spark SQL\n\n\n\n // timezone offset, using ' ' as a separator\n\n // Example: 2020-09-08 13:42:29.190855-05:00\n\n if let Ok(ts) = DateTime::parse_from_str(s, \"%Y-%m-%d %H:%M:%S%.f%:z\") {\n\n return Ok(ts.timestamp_nanos());\n\n }\n\n\n\n // with an explicit Z, using ' ' as a separator\n\n // Example: 2020-09-08 13:42:29Z\n", "file_path": "arrow/src/compute/kernels/cast_utils.rs", "rank": 77, "score": 208343.5110989987 }, { "content": "/// Convert arrow schema to parquet schema\n\npub fn arrow_to_parquet_schema(schema: &Schema) -> 
Result<SchemaDescriptor> {\n\n let fields: Result<Vec<TypePtr>> = schema\n\n .fields()\n\n .iter()\n\n .map(|field| arrow_to_parquet_type(field).map(Arc::new))\n\n .collect();\n\n let group = Type::group_type_builder(\"arrow_schema\")\n\n .with_fields(&mut fields?)\n\n .build()?;\n\n Ok(SchemaDescriptor::new(Arc::new(group)))\n\n}\n\n\n", "file_path": "parquet/src/arrow/schema.rs", "rank": 78, "score": 208343.5110989987 }, { "content": "fn buffer_from_iter(data: &[Vec<bool>]) -> Vec<Buffer> {\n\n criterion::black_box(\n\n data.iter()\n\n .map(|vec| vec.iter().copied().collect::<Buffer>())\n\n .collect::<Vec<_>>(),\n\n )\n\n}\n\n\n", "file_path": "arrow/benches/buffer_create.rs", "rank": 79, "score": 207806.26269430114 }, { "content": "/// Method to convert to Thrift.\n\npub fn to_thrift(schema: &Type) -> Result<Vec<SchemaElement>> {\n\n if !schema.is_group() {\n\n return Err(general_err!(\"Root schema must be Group type\"));\n\n }\n\n let mut elements: Vec<SchemaElement> = Vec::new();\n\n to_thrift_helper(schema, &mut elements);\n\n Ok(elements)\n\n}\n\n\n", "file_path": "parquet/src/schema/types.rs", "rank": 80, "score": 207064.14496996434 }, { "content": "///! 
Create a visual representation of columns\n\npub fn pretty_format_columns(col_name: &str, results: &[ArrayRef]) -> Result<String> {\n\n Ok(create_column(col_name, results)?.to_string())\n\n}\n\n\n", "file_path": "arrow/src/util/pretty.rs", "rank": 81, "score": 205644.58023275388 }, { "content": "pub fn read_json_file(json_name: &str) -> Result<ArrowFile> {\n\n let json_file = File::open(json_name)?;\n\n let reader = BufReader::new(json_file);\n\n let arrow_json: Value = serde_json::from_reader(reader).unwrap();\n\n let schema = Schema::from(&arrow_json[\"schema\"])?;\n\n // read dictionaries\n\n let mut dictionaries = HashMap::new();\n\n if let Some(dicts) = arrow_json.get(\"dictionaries\") {\n\n for d in dicts\n\n .as_array()\n\n .expect(\"Unable to get dictionaries as array\")\n\n {\n\n let json_dict: ArrowJsonDictionaryBatch = serde_json::from_value(d.clone())\n\n .expect(\"Unable to get dictionary from JSON\");\n\n // TODO: convert to a concrete Arrow type\n\n dictionaries.insert(json_dict.id, json_dict);\n\n }\n\n }\n\n\n\n let mut batches = vec![];\n", "file_path": "integration-testing/src/lib.rs", "rank": 82, "score": 205200.4532333053 }, { "content": "/// Convert parquet column schema to arrow field.\n\npub fn parquet_to_arrow_field(parquet_column: &ColumnDescriptor) -> Result<Field> {\n\n let schema = parquet_column.self_type();\n\n\n\n let mut leaves = HashSet::new();\n\n leaves.insert(parquet_column.self_type() as *const Type);\n\n\n\n ParquetTypeConverter::new(schema, &leaves)\n\n .to_field()\n\n .map(|opt| opt.unwrap())\n\n}\n\n\n", "file_path": "parquet/src/arrow/schema.rs", "rank": 83, "score": 205200.4532333053 }, { "content": "#[inline]\n\nfn pad_to_8(len: u32) -> usize {\n\n (((len + 7) & !7) - len) as usize\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use std::fs::File;\n\n use std::io::Read;\n\n use std::sync::Arc;\n\n\n\n use flate2::read::GzDecoder;\n\n use ipc::MetadataVersion;\n\n\n\n use crate::array::*;\n\n use 
crate::datatypes::Field;\n\n use crate::ipc::reader::*;\n\n use crate::util::integration_util::*;\n\n\n", "file_path": "arrow/src/ipc/writer.rs", "rank": 84, "score": 204874.74810982804 }, { "content": "fn mutable_buffer_from_iter(data: &[Vec<bool>]) -> Vec<Buffer> {\n\n criterion::black_box(\n\n data.iter()\n\n .map(|vec| vec.iter().copied().collect::<MutableBuffer>().into())\n\n .collect::<Vec<_>>(),\n\n )\n\n}\n\n\n", "file_path": "arrow/benches/buffer_create.rs", "rank": 85, "score": 204545.22154845437 }, { "content": "#[inline]\n\npub fn from_ne_slice<T: FromBytes>(bs: &[u8]) -> T {\n\n let mut b = T::Buffer::default();\n\n {\n\n let b = b.as_mut();\n\n let bs = &bs[..b.len()];\n\n b.copy_from_slice(bs);\n\n }\n\n T::from_ne_bytes(b)\n\n}\n\n\n", "file_path": "parquet/src/util/bit_util.rs", "rank": 86, "score": 204122.0059532749 }, { "content": "/// Returns a non-null [BooleanArray] with whether each value of the array is not null.\n\n/// # Error\n\n/// This function never errors.\n\n/// # Example\n\n/// ```rust\n\n/// # use arrow::error::Result;\n\n/// use arrow::array::BooleanArray;\n\n/// use arrow::compute::kernels::boolean::is_not_null;\n\n/// # fn main() -> Result<()> {\n\n/// let a = BooleanArray::from(vec![Some(false), Some(true), None]);\n\n/// let a_is_not_null = is_not_null(&a)?;\n\n/// assert_eq!(a_is_not_null, BooleanArray::from(vec![true, true, false]));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn is_not_null(input: &dyn Array) -> Result<BooleanArray> {\n\n let len = input.len();\n\n\n\n let output = match input.data_ref().null_buffer() {\n\n None => {\n\n let len_bytes = ceil(len, 8);\n\n MutableBuffer::new(len_bytes)\n\n .with_bitset(len_bytes, true)\n\n .into()\n\n }\n\n Some(buffer) => buffer.bit_slice(input.offset(), len),\n\n };\n\n\n\n let data =\n\n ArrayData::new(DataType::Boolean, len, None, None, 0, vec![output], vec![]);\n\n\n\n Ok(BooleanArray::from(data))\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/boolean.rs", "rank": 
87, "score": 203767.53750381112 }, { "content": "/// Returns a non-null [BooleanArray] with whether each value of the array is null.\n\n/// # Error\n\n/// This function never errors.\n\n/// # Example\n\n/// ```rust\n\n/// # use arrow::error::Result;\n\n/// use arrow::array::BooleanArray;\n\n/// use arrow::compute::kernels::boolean::is_null;\n\n/// # fn main() -> Result<()> {\n\n/// let a = BooleanArray::from(vec![Some(false), Some(true), None]);\n\n/// let a_is_null = is_null(&a)?;\n\n/// assert_eq!(a_is_null, BooleanArray::from(vec![false, false, true]));\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn is_null(input: &dyn Array) -> Result<BooleanArray> {\n\n let len = input.len();\n\n\n\n let output = match input.data_ref().null_buffer() {\n\n None => {\n\n let len_bytes = ceil(len, 8);\n\n MutableBuffer::from_len_zeroed(len_bytes).into()\n\n }\n\n Some(buffer) => buffer_unary_not(buffer, input.offset(), len),\n\n };\n\n\n\n let data =\n\n ArrayData::new(DataType::Boolean, len, None, None, 0, vec![output], vec![]);\n\n\n\n Ok(BooleanArray::from(data))\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/boolean.rs", "rank": 88, "score": 203767.53750381112 }, { "content": "/// Returns an array of Int32/Int64 denoting the number of bytes in each string in the array.\n\n///\n\n/// * this only accepts StringArray/Utf8 and LargeString/LargeUtf8\n\n/// * length of null is null.\n\n/// * length is in number of bytes\n\npub fn length(array: &dyn Array) -> Result<ArrayRef> {\n\n match array.data_type() {\n\n DataType::Utf8 => Ok(octet_length::<i32, Int32Type>(array)),\n\n DataType::LargeUtf8 => Ok(octet_length::<i64, Int64Type>(array)),\n\n _ => Err(ArrowError::ComputeError(format!(\n\n \"length not supported for {:?}\",\n\n array.data_type()\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "arrow/src/compute/kernels/length.rs", "rank": 89, "score": 203746.54855536725 }, { "content": "/// Concatenate multiple [Array] of the same type into a single [ArrayRef].\n\npub fn 
concat(arrays: &[&dyn Array]) -> Result<ArrayRef> {\n\n if arrays.is_empty() {\n\n return Err(ArrowError::ComputeError(\n\n \"concat requires input of at least one array\".to_string(),\n\n ));\n\n } else if arrays.len() == 1 {\n\n let array = arrays[0];\n\n return Ok(array.slice(0, array.len()));\n\n }\n\n\n\n if arrays\n\n .iter()\n\n .any(|array| array.data_type() != arrays[0].data_type())\n\n {\n\n return Err(ArrowError::InvalidArgumentError(\n\n \"It is not possible to concatenate arrays of different data types.\"\n\n .to_string(),\n\n ));\n\n }\n\n\n", "file_path": "arrow/src/compute/kernels/concat.rs", "rank": 90, "score": 203746.54855536725 }, { "content": "#[inline]\n\n#[deprecated(since = \"2.0.0\", note = \"Deprecated in favor of `root_as...` methods.\")]\n\npub fn get_root_as_message<'a>(buf: &'a [u8]) -> Message<'a> {\n\n unsafe { flatbuffers::root_unchecked::<Message<'a>>(buf) }\n\n}\n\n\n", "file_path": "arrow/src/ipc/gen/Message.rs", "rank": 91, "score": 203280.47749967122 }, { "content": "#[inline]\n\n#[deprecated(since = \"2.0.0\", note = \"Deprecated in favor of `root_as...` methods.\")]\n\npub fn get_root_as_tensor<'a>(buf: &'a [u8]) -> Tensor<'a> {\n\n unsafe { flatbuffers::root_unchecked::<Tensor<'a>>(buf) }\n\n}\n\n\n", "file_path": "arrow/src/ipc/gen/Tensor.rs", "rank": 92, "score": 203280.47749967122 }, { "content": "#[inline]\n\n#[deprecated(since = \"2.0.0\", note = \"Deprecated in favor of `root_as...` methods.\")]\n\npub fn get_root_as_schema<'a>(buf: &'a [u8]) -> Schema<'a> {\n\n unsafe { flatbuffers::root_unchecked::<Schema<'a>>(buf) }\n\n}\n\n\n", "file_path": "arrow/src/ipc/gen/Schema.rs", "rank": 93, "score": 203280.47749967122 }, { "content": "#[inline]\n\n#[deprecated(since = \"2.0.0\", note = \"Deprecated in favor of `root_as...` methods.\")]\n\npub fn get_root_as_footer<'a>(buf: &'a [u8]) -> Footer<'a> {\n\n unsafe { flatbuffers::root_unchecked::<Footer<'a>>(buf) }\n\n}\n\n\n", "file_path": "arrow/src/ipc/gen/File.rs", "rank": 
94, "score": 203280.47749967122 }, { "content": "fn mutable_buffer(data: &[Vec<u32>], capacity: usize) -> Buffer {\n\n criterion::black_box({\n\n let mut result = MutableBuffer::new(capacity);\n\n\n\n data.iter().for_each(|vec| result.extend_from_slice(vec));\n\n\n\n result.into()\n\n })\n\n}\n\n\n", "file_path": "arrow/benches/buffer_create.rs", "rank": 95, "score": 201480.5379801272 }, { "content": " let lanes = T::lanes();\n\n let buffer_size = left.len() * std::mem::size_of::<T::Native>();\n\n let mut result = MutableBuffer::new(buffer_size).with_bitset(buffer_size, false);\n\n\n\n let mut result_chunks = result.typed_data_mut().chunks_exact_mut(lanes);\n\n let mut left_chunks = left.values().chunks_exact(lanes);\n\n let mut right_chunks = right.values().chunks_exact(lanes);\n\n\n\n result_chunks\n\n .borrow_mut()\n\n .zip(left_chunks.borrow_mut().zip(right_chunks.borrow_mut()))\n\n .for_each(|(result_slice, (left_slice, right_slice))| {\n\n let simd_left = T::load(left_slice);\n\n let simd_right = T::load(right_slice);\n\n let simd_result = T::bin_op(simd_left, simd_right, &simd_op);\n\n T::write(simd_result, result_slice);\n\n });\n\n\n\n let result_remainder = result_chunks.into_remainder();\n\n let left_remainder = left_chunks.remainder();\n", "file_path": "arrow/src/compute/kernels/arithmetic.rs", "rank": 98, "score": 70.47930667472674 } ]
Rust
sdk/program/src/entrypoint_deprecated.rs
rob-ti/SAFE
315d06c27d3923472ef8f94f04be4bfe762dd91a
extern crate alloc; use crate::{account_info::AccountInfo, program_error::ProgramError, pubkey::Pubkey}; use alloc::vec::Vec; use std::{ cell::RefCell, mem::size_of, rc::Rc, result::Result as ResultGeneric, slice::{from_raw_parts, from_raw_parts_mut}, }; pub type ProgramResult = ResultGeneric<(), ProgramError>; pub type ProcessInstruction = fn(program_id: &Pubkey, accounts: &[AccountInfo], instruction_data: &[u8]) -> ProgramResult; pub const SUCCESS: u64 = 0; #[macro_export] macro_rules! entrypoint_deprecated { ($process_instruction:ident) => { #[no_mangle] pub unsafe extern "C" fn entrypoint(input: *mut u8) -> u64 { let (program_id, accounts, instruction_data) = unsafe { $crate::entrypoint_deprecated::deserialize(input) }; match $process_instruction(&program_id, &accounts, &instruction_data) { Ok(()) => $crate::entrypoint_deprecated::SUCCESS, Err(error) => error.into(), } } }; } #[allow(clippy::type_complexity)] pub unsafe fn deserialize<'a>(input: *mut u8) -> (&'a Pubkey, Vec<AccountInfo<'a>>, &'a [u8]) { let mut offset: usize = 0; #[allow(clippy::cast_ptr_alignment)] let num_accounts = *(input.add(offset) as *const u64) as usize; offset += size_of::<u64>(); let mut accounts = Vec::with_capacity(num_accounts); for _ in 0..num_accounts { let dup_info = *(input.add(offset) as *const u8); offset += size_of::<u8>(); if dup_info == std::u8::MAX { #[allow(clippy::cast_ptr_alignment)] let is_signer = *(input.add(offset) as *const u8) != 0; offset += size_of::<u8>(); #[allow(clippy::cast_ptr_alignment)] let is_writable = *(input.add(offset) as *const u8) != 0; offset += size_of::<u8>(); let key: &Pubkey = &*(input.add(offset) as *const Pubkey); offset += size_of::<Pubkey>(); #[allow(clippy::cast_ptr_alignment)] let lamports = Rc::new(RefCell::new(&mut *(input.add(offset) as *mut u64))); offset += size_of::<u64>(); #[allow(clippy::cast_ptr_alignment)] let data_len = *(input.add(offset) as *const u64) as usize; offset += size_of::<u64>(); let data = Rc::new(RefCell::new({ 
from_raw_parts_mut(input.add(offset), data_len) })); offset += data_len; let owner: &Pubkey = &*(input.add(offset) as *const Pubkey); offset += size_of::<Pubkey>(); #[allow(clippy::cast_ptr_alignment)] let executable = *(input.add(offset) as *const u8) != 0; offset += size_of::<u8>(); #[allow(clippy::cast_ptr_alignment)] let rent_epoch = *(input.add(offset) as *const u64); offset += size_of::<u64>(); accounts.push(AccountInfo { is_signer, is_writable, key, lamports, data, owner, executable, rent_epoch, }); } else { accounts.push(accounts[dup_info as usize].clone()); } } #[allow(clippy::cast_ptr_alignment)] let instruction_data_len = *(input.add(offset) as *const u64) as usize; offset += size_of::<u64>(); let instruction_data = { from_raw_parts(input.add(offset), instruction_data_len) }; offset += instruction_data_len; let program_id: &Pubkey = &*(input.add(offset) as *const Pubkey); (program_id, accounts, instruction_data) }
extern crate alloc; use crate::{account_info::AccountInfo, program_error::ProgramError, pubkey::Pubkey}; use alloc::vec::Vec; use std::{ cell::RefCell, mem::size_of, rc::Rc, result::Result as ResultGeneric, slice::{from_raw_parts, from_raw_parts_mut}, }; pub type ProgramResult = ResultGeneric<(), ProgramError>; pub type ProcessInstruction = fn(program_id: &Pubkey, accounts: &[AccountInfo], instruction_data: &[u8]) -> ProgramResult; pub const SUCCESS: u64 = 0; #[macro_export] macro_rules! entrypoint_deprecated { ($process_instruction:ident) => { #[no_mangle] pub unsafe extern "C" fn entrypoint(input: *mut u8) -> u64 { let (program_id, accounts, instruction_data) = unsafe { $crate::entrypoint_deprecated::deserialize(input) }; match $process_instruction(&program_id, &accounts, &instruction_data) { Ok(()) => $crate::entrypoint_deprecated::SUCCESS, Err(error) => error.into(), } } }; } #[allow(clippy::type_complexity)] pub unsafe fn deserialize<'a>(input: *mut u8) -> (&'a Pubkey, Vec<AccountInfo<'a>>, &'a [u8]) { let mut offset: usize = 0; #[allow(clippy::cast_ptr_alignment)] let num_accounts = *(input.add(offset) as *const u64) as usize; offset += size_of::<u64>();
let mut accounts = Vec::with_capacity(num_accounts); for _ in 0..num_accounts { let dup_info = *(input.add(offset) as *const u8); offset += size_of::<u8>(); if dup_info == std::u8::MAX { #[allow(clippy::cast_ptr_alignment)] let is_signer = *(input.add(offset) as *const u8) != 0; offset += size_of::<u8>(); #[allow(clippy::cast_ptr_alignment)] let is_writable = *(input.add(offset) as *const u8) != 0; offset += size_of::<u8>(); let key: &Pubkey = &*(input.add(offset) as *const Pubkey); offset += size_of::<Pubkey>(); #[allow(clippy::cast_ptr_alignment)] let lamports = Rc::new(RefCell::new(&mut *(input.add(offset) as *mut u64))); offset += size_of::<u64>(); #[allow(clippy::cast_ptr_alignment)] let data_len = *(input.add(offset) as *const u64) as usize; offset += size_of::<u64>(); let data = Rc::new(RefCell::new({ from_raw_parts_mut(input.add(offset), data_len) })); offset += data_len; let owner: &Pubkey = &*(input.add(offset) as *const Pubkey); offset += size_of::<Pubkey>(); #[allow(clippy::cast_ptr_alignment)] let executable = *(input.add(offset) as *const u8) != 0; offset += size_of::<u8>(); #[allow(clippy::cast_ptr_alignment)] let rent_epoch = *(input.add(offset) as *const u64); offset += size_of::<u64>(); accounts.push(AccountInfo { is_signer, is_writable, key, lamports, data, owner, executable, rent_epoch, }); } else { accounts.push(accounts[dup_info as usize].clone()); } } #[allow(clippy::cast_ptr_alignment)] let instruction_data_len = *(input.add(offset) as *const u64) as usize; offset += size_of::<u64>(); let instruction_data = { from_raw_parts(input.add(offset), instruction_data_len) }; offset += instruction_data_len; let program_id: &Pubkey = &*(input.add(offset) as *const Pubkey); (program_id, accounts, instruction_data) }
function_block-function_prefix_line
[ { "content": "/// Create `AccountInfo`s\n\npub fn create_account_infos(accounts: &mut [(Pubkey, Account)]) -> Vec<AccountInfo> {\n\n accounts.iter_mut().map(Into::into).collect()\n\n}\n\n\n", "file_path": "sdk/src/account.rs", "rank": 0, "score": 425486.5331603654 }, { "content": "pub fn parse_config(data: &[u8], pubkey: &Pubkey) -> Result<ConfigAccountType, ParseAccountError> {\n\n let parsed_account = if pubkey == &solana_stake_program::config::id() {\n\n get_config_data(data)\n\n .ok()\n\n .and_then(|data| deserialize::<StakeConfig>(data).ok())\n\n .map(|config| ConfigAccountType::StakeConfig(config.into()))\n\n } else {\n\n deserialize::<ConfigKeys>(data).ok().and_then(|key_list| {\n\n if !key_list.keys.is_empty() && key_list.keys[0].0 == validator_info::id() {\n\n parse_config_data::<String>(data, key_list.keys).and_then(|validator_info| {\n\n Some(ConfigAccountType::ValidatorInfo(UiConfig {\n\n keys: validator_info.keys,\n\n config_data: serde_json::from_str(&validator_info.config_data).ok()?,\n\n }))\n\n })\n\n } else {\n\n None\n\n }\n\n })\n\n };\n\n parsed_account.ok_or(ParseAccountError::AccountNotParsable(\n\n ParsableAccount::Config,\n\n ))\n\n}\n\n\n", "file_path": "account-decoder/src/parse_config.rs", "rank": 1, "score": 397460.34227338433 }, { "content": "pub fn parse_sysvar(data: &[u8], pubkey: &Pubkey) -> Result<SysvarAccountType, ParseAccountError> {\n\n let parsed_account = {\n\n if pubkey == &sysvar::clock::id() {\n\n deserialize::<Clock>(data)\n\n .ok()\n\n .map(|clock| SysvarAccountType::Clock(clock.into()))\n\n } else if pubkey == &sysvar::epoch_schedule::id() {\n\n deserialize(data).ok().map(SysvarAccountType::EpochSchedule)\n\n } else if pubkey == &sysvar::fees::id() {\n\n deserialize::<Fees>(data)\n\n .ok()\n\n .map(|fees| SysvarAccountType::Fees(fees.into()))\n\n } else if pubkey == &sysvar::recent_blockhashes::id() {\n\n deserialize::<RecentBlockhashes>(data)\n\n .ok()\n\n .map(|recent_blockhashes| {\n\n let recent_blockhashes = 
recent_blockhashes\n\n .iter()\n\n .map(|entry| UiRecentBlockhashesEntry {\n\n blockhash: entry.blockhash.to_string(),\n", "file_path": "account-decoder/src/parse_sysvar.rs", "rank": 2, "score": 397460.34227338433 }, { "content": "pub fn read_pubkey(current: &mut usize, data: &[u8]) -> Result<Pubkey, SanitizeError> {\n\n let len = std::mem::size_of::<Pubkey>();\n\n if data.len() < *current + len {\n\n return Err(SanitizeError::IndexOutOfBounds);\n\n }\n\n let e = Pubkey::new(&data[*current..*current + len]);\n\n *current += len;\n\n Ok(e)\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 3, "score": 390950.29085459583 }, { "content": "// Only used by bench, not safe to call otherwise accounts can conflict with the\n\n// accounts cache!\n\npub fn update_accounts_bench(accounts: &Accounts, pubkeys: &[Pubkey], slot: u64) {\n\n for pubkey in pubkeys {\n\n let amount = thread_rng().gen_range(0, 10);\n\n let account = Account::new(amount, 0, &Account::default().owner);\n\n accounts.store_slow_uncached(slot, &pubkey, &account);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n // TODO: all the bank tests are bank specific, issue: 2194\n\n\n\n use super::*;\n\n use crate::rent_collector::RentCollector;\n\n use solana_sdk::{\n\n account::Account,\n\n epoch_schedule::EpochSchedule,\n\n fee_calculator::FeeCalculator,\n\n genesis_config::ClusterType,\n\n hash::Hash,\n", "file_path": "runtime/src/accounts.rs", "rank": 4, "score": 384507.7676849504 }, { "content": "pub fn allocate(pubkey: &Pubkey, space: u64) -> Instruction {\n\n let account_metas = vec![AccountMeta::new(*pubkey, true)];\n\n Instruction::new(\n\n system_program::id(),\n\n &SystemInstruction::Allocate { space },\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/program/src/system_instruction.rs", "rank": 5, "score": 382405.7575070737 }, { "content": "pub fn get_token_account_mint(data: &[u8]) -> Option<Pubkey> {\n\n if data.len() == Account::get_packed_len() {\n\n 
Some(Pubkey::new(&data[0..32]))\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_token() {\n\n let mint_pubkey = SplTokenPubkey::new(&[2; 32]);\n\n let owner_pubkey = SplTokenPubkey::new(&[3; 32]);\n\n let mut account_data = vec![0; Account::get_packed_len()];\n\n let mut account = Account::unpack_unchecked(&account_data).unwrap();\n\n account.mint = mint_pubkey;\n\n account.owner = owner_pubkey;\n", "file_path": "account-decoder/src/parse_token.rs", "rank": 6, "score": 364692.5155429057 }, { "content": "pub fn add_genesis_accounts(genesis_config: &mut GenesisConfig, mut issued_lamports: u64) {\n\n // add_stakes() and add_validators() award tokens for rent exemption and\n\n // to cover an initial transfer-free period of the network\n\n\n\n issued_lamports += add_stakes(\n\n genesis_config,\n\n &CREATOR_STAKER_INFOS,\n\n &UNLOCKS_HALF_AT_9_MONTHS,\n\n ) + add_stakes(\n\n genesis_config,\n\n &SERVICE_STAKER_INFOS,\n\n &UNLOCKS_ALL_AT_9_MONTHS,\n\n ) + add_stakes(\n\n genesis_config,\n\n &FOUNDATION_STAKER_INFOS,\n\n &UNLOCKS_ALL_DAY_ZERO,\n\n ) + add_stakes(genesis_config, &GRANTS_STAKER_INFOS, &UNLOCKS_ALL_DAY_ZERO)\n\n + add_stakes(\n\n genesis_config,\n\n &COMMUNITY_STAKER_INFOS,\n", "file_path": "genesis/src/genesis_accounts.rs", "rank": 7, "score": 364453.9186091137 }, { "content": "pub fn setup_bank_and_vote_pubkeys(num_vote_accounts: usize, stake: u64) -> (Bank, Vec<Pubkey>) {\n\n // Create some voters at genesis\n\n let validator_voting_keypairs: Vec<_> = (0..num_vote_accounts)\n\n .map(|_| ValidatorVoteKeypairs::new_rand())\n\n .collect();\n\n\n\n let vote_pubkeys: Vec<_> = validator_voting_keypairs\n\n .iter()\n\n .map(|k| k.vote_keypair.pubkey())\n\n .collect();\n\n let GenesisConfigInfo { genesis_config, .. 
} =\n\n genesis_utils::create_genesis_config_with_vote_accounts(\n\n 10_000,\n\n &validator_voting_keypairs,\n\n vec![stake; validator_voting_keypairs.len()],\n\n );\n\n let bank = Bank::new(&genesis_config);\n\n (bank, vote_pubkeys)\n\n}\n\n\n", "file_path": "runtime/src/bank_utils.rs", "rank": 8, "score": 364261.5274336372 }, { "content": "pub fn add_genesis_accounts(genesis_config: &mut GenesisConfig) -> u64 {\n\n config::add_genesis_account(genesis_config)\n\n}\n\n\n\n#[macro_use]\n\nextern crate solana_frozen_abi_macro;\n", "file_path": "programs/stake/src/lib.rs", "rank": 9, "score": 348539.46194183285 }, { "content": "pub fn add_genesis_account(genesis_config: &mut GenesisConfig) -> u64 {\n\n let mut account = create_config_account(vec![], &Config::default(), 0);\n\n let lamports = genesis_config.rent.minimum_balance(account.data.len());\n\n\n\n account.lamports = lamports.max(1);\n\n\n\n genesis_config.add_account(id(), account);\n\n\n\n lamports\n\n}\n\n\n", "file_path": "programs/stake/src/config.rs", "rank": 10, "score": 348539.46194183285 }, { "content": "pub fn read_u8(current: &mut usize, data: &[u8]) -> Result<u8, SanitizeError> {\n\n if data.len() < *current + 1 {\n\n return Err(SanitizeError::IndexOutOfBounds);\n\n }\n\n let e = data[*current];\n\n *current += 1;\n\n Ok(e)\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 11, "score": 345493.9858170379 }, { "content": "/// Returns the highest index after computing a weighted shuffle.\n\n/// Saves doing any sorting for O(n) max calculation.\n\n// TODO: Remove in favor of rand::distributions::WeightedIndex.\n\npub fn weighted_best(weights_and_indexes: &[(u64, usize)], seed: [u8; 32]) -> usize {\n\n if weights_and_indexes.is_empty() {\n\n return 0;\n\n }\n\n let mut rng = ChaChaRng::from_seed(seed);\n\n let total_weight: u64 = weights_and_indexes.iter().map(|x| x.0).sum();\n\n let mut lowest_weight = std::u128::MAX;\n\n let mut best_index = 0;\n\n for v in weights_and_indexes 
{\n\n // This generates an \"inverse\" weight but it avoids floating point math\n\n let x = (total_weight / v.0)\n\n .to_u64()\n\n .expect(\"values > u64::max are not supported\");\n\n // capture the u64 into u128s to prevent overflow\n\n let computed_weight = rng.gen_range(1, u128::from(std::u16::MAX)) * u128::from(x);\n\n // The highest input weight maps to the lowest computed weight\n\n if computed_weight < lowest_weight {\n\n lowest_weight = computed_weight;\n\n best_index = v.1;\n\n }\n", "file_path": "core/src/weighted_shuffle.rs", "rank": 12, "score": 344220.7887758761 }, { "content": "pub fn to_account(feature: &Feature, account: &mut Account) -> Option<()> {\n\n bincode::serialize_into(&mut account.data[..], feature).ok()\n\n}\n\n\n", "file_path": "sdk/src/feature.rs", "rank": 13, "score": 340768.55388496135 }, { "content": "pub fn vote_account_stakes(bank: &Bank) -> HashMap<Pubkey, u64> {\n\n bank.vote_accounts()\n\n .into_iter()\n\n .map(|(id, (stake, _))| (id, stake))\n\n .collect()\n\n}\n\n\n", "file_path": "ledger/src/staking_utils.rs", "rank": 14, "score": 340710.0957360729 }, { "content": "pub fn create_account(lamports: u64) -> RefCell<Account> {\n\n RefCell::new(\n\n Account::new_data_with_space(\n\n lamports,\n\n &Versions::new_current(State::Uninitialized),\n\n State::size(),\n\n &crate::system_program::id(),\n\n )\n\n .expect(\"nonce_account\"),\n\n )\n\n}\n\n\n", "file_path": "sdk/src/nonce_account.rs", "rank": 15, "score": 337496.7258716462 }, { "content": "pub fn finalize(account_pubkey: &Pubkey, program_id: &Pubkey) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*account_pubkey, true),\n\n AccountMeta::new_readonly(rent::id(), false),\n\n ];\n\n Instruction::new(*program_id, &LoaderInstruction::Finalize, account_metas)\n\n}\n", "file_path": "sdk/program/src/loader_instruction.rs", "rank": 16, "score": 334857.51581629645 }, { "content": "pub fn append_u8(buf: &mut Vec<u8>, data: u8) {\n\n let start = buf.len();\n\n 
buf.resize(buf.len() + 1, 0);\n\n buf[start] = data;\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 17, "score": 328096.2179019577 }, { "content": "pub fn transfer(from_pubkey: &Pubkey, to_pubkey: &Pubkey, lamports: u64) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*from_pubkey, true),\n\n AccountMeta::new(*to_pubkey, false),\n\n ];\n\n Instruction::new(\n\n system_program::id(),\n\n &SystemInstruction::Transfer { lamports },\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/program/src/system_instruction.rs", "rank": 18, "score": 326885.6668197557 }, { "content": "pub fn create_account(feature: &Feature, lamports: u64) -> Account {\n\n let data_len = Feature::size_of().max(bincode::serialized_size(feature).unwrap() as usize);\n\n let mut account = Account::new(lamports, data_len, &id());\n\n to_account(feature, &mut account).unwrap();\n\n account\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn feature_deserialize_none() {\n\n let just_initialized = Account::new(42, Feature::size_of(), &id());\n\n assert_eq!(\n\n from_account(&just_initialized),\n\n Some(Feature { activated_at: None })\n\n );\n\n }\n\n}\n", "file_path": "sdk/src/feature.rs", "rank": 19, "score": 326387.28563984565 }, { "content": "pub fn token_amount_to_ui_amount(amount: u64, decimals: u8) -> UiTokenAmount {\n\n // Use `amount_to_ui_amount()` once spl_token is bumped to a version that supports it: https://github.com/solana-labs/solana-program-library/pull/211\n\n let amount_decimals = amount as f64 / 10_usize.pow(decimals as u32) as f64;\n\n UiTokenAmount {\n\n ui_amount: amount_decimals,\n\n decimals,\n\n amount: amount.to_string(),\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct UiMint {\n\n pub mint_authority: Option<String>,\n\n pub supply: StringAmount,\n\n pub decimals: u8,\n\n pub is_initialized: bool,\n\n pub freeze_authority: 
Option<String>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct UiMultisig {\n\n pub num_required_signers: u8,\n\n pub num_valid_signers: u8,\n\n pub is_initialized: bool,\n\n pub signers: Vec<String>,\n\n}\n\n\n", "file_path": "account-decoder/src/parse_token.rs", "rank": 20, "score": 325520.4100338195 }, { "content": "#[allow(dead_code)]\n\npub fn sol_log_params(accounts: &[AccountInfo], data: &[u8]) {\n\n for (i, account) in accounts.iter().enumerate() {\n\n msg!(\"AccountInfo\");\n\n msg!(0, 0, 0, 0, i);\n\n msg!(\"- Is signer\");\n\n msg!(0, 0, 0, 0, account.is_signer);\n\n msg!(\"- Key\");\n\n account.key.log();\n\n msg!(\"- Lamports\");\n\n msg!(0, 0, 0, 0, account.lamports());\n\n msg!(\"- Account data length\");\n\n msg!(0, 0, 0, 0, account.data_len());\n\n msg!(\"- Owner\");\n\n account.owner.log();\n\n }\n\n msg!(\"Instruction data\");\n\n sol_log_slice(data);\n\n}\n\n\n\n/// Print the remaining compute units the program may consume\n", "file_path": "sdk/program/src/log.rs", "rank": 21, "score": 324968.78923351044 }, { "content": "/// Serialize a `Sysvar` into an `Account`'s data.\n\npub fn to_account<S: Sysvar>(sysvar: &S, account: &mut Account) -> Option<()> {\n\n bincode::serialize_into(&mut account.data[..], sysvar).ok()\n\n}\n\n\n\n/// Return the information required to construct an `AccountInfo`. 
Used by the\n\n/// `AccountInfo` conversion implementations.\n\nimpl solana_program::account_info::Account for Account {\n\n fn get(&mut self) -> (&mut u64, &mut [u8], &Pubkey, bool, Epoch) {\n\n (\n\n &mut self.lamports,\n\n &mut self.data,\n\n &self.owner,\n\n self.executable,\n\n self.rent_epoch,\n\n )\n\n }\n\n}\n\n\n", "file_path": "sdk/src/account.rs", "rank": 22, "score": 324524.8246404568 }, { "content": "#[inline(never)]\n\npub fn recurse(data: &mut [u8]) {\n\n if data.len() <= 1 {\n\n return;\n\n }\n\n recurse(&mut data[1..]);\n\n msg!(line!(), 0, 0, 0, data[0]);\n\n}\n\n\n\n/// # Safety\n\n#[inline(never)]\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn entrypoint(input: *mut u8) -> u64 {\n\n msg!(\"Call depth\");\n\n let depth = *(input.add(16) as *mut u8);\n\n msg!(line!(), 0, 0, 0, depth);\n\n let mut data = Vec::with_capacity(depth as usize);\n\n for i in 0_u8..depth {\n\n data.push(i);\n\n }\n\n recurse(&mut data);\n\n SUCCESS\n\n}\n\n\n\ncustom_panic_default!();\n", "file_path": "programs/bpf/rust/call_depth/src/lib.rs", "rank": 23, "score": 323021.34663902246 }, { "content": "/// Computes a normalized(log of actual stake) stake\n\npub fn get_stake<S: std::hash::BuildHasher>(id: &Pubkey, stakes: &HashMap<Pubkey, u64, S>) -> f32 {\n\n // cap the max balance to u32 max (it should be plenty)\n\n let bal = f64::from(u32::max_value()).min(*stakes.get(id).unwrap_or(&0) as f64);\n\n 1_f32.max((bal as f32).ln())\n\n}\n\n\n", "file_path": "core/src/crds_gossip.rs", "rank": 24, "score": 322931.14108801406 }, { "content": "fn deposit_many(bank: &Bank, pubkeys: &mut Vec<Pubkey>, num: usize) {\n\n for t in 0..num {\n\n let pubkey = solana_sdk::pubkey::new_rand();\n\n let account = Account::new((t + 1) as u64, 0, &Account::default().owner);\n\n pubkeys.push(pubkey);\n\n assert!(bank.get_account(&pubkey).is_none());\n\n bank.deposit(&pubkey, (t + 1) as u64);\n\n assert_eq!(bank.get_account(&pubkey).unwrap(), account);\n\n }\n\n}\n\n\n", "file_path": 
"runtime/benches/accounts.rs", "rank": 25, "score": 322532.97285396315 }, { "content": "pub fn create_account(lamports: u64, config: &Config) -> Account {\n\n create_config_account(vec![], config, lamports)\n\n}\n\n\n", "file_path": "programs/stake/src/config.rs", "rank": 26, "score": 321563.29694203875 }, { "content": "pub fn append_slice(buf: &mut Vec<u8>, data: &[u8]) {\n\n let start = buf.len();\n\n buf.resize(buf.len() + data.len(), 0);\n\n let end = buf.len();\n\n buf[start..end].copy_from_slice(data);\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 27, "score": 319692.26298562606 }, { "content": "pub fn process_slots(rpc_client: &RpcClient, accounts_info: &mut AccountsInfo, batch_size: u64) {\n\n let end_slot = accounts_info.slot + batch_size;\n\n loop {\n\n let start_slot = accounts_info.slot + 1;\n\n info!(\"start_slot:{} - end_slot:{}\", start_slot, end_slot);\n\n if start_slot >= end_slot {\n\n break;\n\n }\n\n let latest_available_slot = rpc_client.get_slot().unwrap_or_else(|err| {\n\n datapoint_error!(\n\n \"stake-monitor-failure\",\n\n (\"err\", format!(\"get_slot() failed: {}\", err), String)\n\n );\n\n 0\n\n });\n\n\n\n if accounts_info.slot >= latest_available_slot {\n\n info!(\"Waiting for a slot greater than {}...\", accounts_info.slot);\n\n sleep(Duration::from_secs(5));\n\n continue;\n", "file_path": "stake-monitor/src/lib.rs", "rank": 28, "score": 319330.15244659845 }, { "content": "pub fn parse_vote(data: &[u8]) -> Result<VoteAccountType, ParseAccountError> {\n\n let mut vote_state = VoteState::deserialize(data).map_err(ParseAccountError::from)?;\n\n let epoch_credits = vote_state\n\n .epoch_credits()\n\n .iter()\n\n .map(|(epoch, credits, previous_credits)| UiEpochCredits {\n\n epoch: *epoch,\n\n credits: credits.to_string(),\n\n previous_credits: previous_credits.to_string(),\n\n })\n\n .collect();\n\n let votes = vote_state\n\n .votes\n\n .iter()\n\n .map(|lockout| UiLockout {\n\n slot: lockout.slot,\n\n 
confirmation_count: lockout.confirmation_count,\n\n })\n\n .collect();\n\n let authorized_voters = vote_state\n", "file_path": "account-decoder/src/parse_vote.rs", "rank": 29, "score": 319143.700890037 }, { "content": "pub fn parse_stake(data: &[u8]) -> Result<StakeAccountType, ParseAccountError> {\n\n let stake_state: StakeState = deserialize(data)\n\n .map_err(|_| ParseAccountError::AccountNotParsable(ParsableAccount::Stake))?;\n\n let parsed_account = match stake_state {\n\n StakeState::Uninitialized => StakeAccountType::Uninitialized,\n\n StakeState::Initialized(meta) => StakeAccountType::Initialized(UiStakeAccount {\n\n meta: meta.into(),\n\n stake: None,\n\n }),\n\n StakeState::Stake(meta, stake) => StakeAccountType::Delegated(UiStakeAccount {\n\n meta: meta.into(),\n\n stake: Some(stake.into()),\n\n }),\n\n StakeState::RewardsPool => StakeAccountType::RewardsPool,\n\n };\n\n Ok(parsed_account)\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\n#[serde(rename_all = \"camelCase\", tag = \"type\", content = \"info\")]\n", "file_path": "account-decoder/src/parse_stake.rs", "rank": 30, "score": 319143.700890037 }, { "content": "pub fn renege(contract: &Pubkey, from: &Pubkey, to: &Pubkey, lamports: u64) -> Instruction {\n\n let mut account_metas = vec![\n\n AccountMeta::new(*contract, false),\n\n AccountMeta::new(*from, true),\n\n ];\n\n if from != to {\n\n account_metas.push(AccountMeta::new(*to, false));\n\n }\n\n Instruction::new(id(), &VestInstruction::Renege(lamports), account_metas)\n\n}\n\n\n", "file_path": "programs/vest/src/vest_instruction.rs", "rank": 31, "score": 318819.48063363525 }, { "content": "pub fn account_identity_ok(account: &Account) -> Result<(), Error> {\n\n if account.owner != system_program::id() {\n\n Err(Error::InvalidAccountOwner)\n\n } else if account.data.is_empty() {\n\n Err(Error::UnexpectedDataSize)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "client/src/nonce_utils.rs", "rank": 32, "score": 
318046.40717720316 }, { "content": "/// Create and sign new SystemInstruction::Transfer transaction to many destinations\n\npub fn transfer_many(from_pubkey: &Pubkey, to_lamports: &[(Pubkey, u64)]) -> Vec<Instruction> {\n\n to_lamports\n\n .iter()\n\n .map(|(to_pubkey, lamports)| transfer(from_pubkey, to_pubkey, *lamports))\n\n .collect()\n\n}\n\n\n", "file_path": "sdk/program/src/system_instruction.rs", "rank": 33, "score": 317677.99188313016 }, { "content": "// A helper function to convert a solana_sdk::pubkey::Pubkey to spl_sdk::pubkey::Pubkey\n\npub fn spl_token_v2_0_pubkey(pubkey: &Pubkey) -> SplTokenPubkey {\n\n SplTokenPubkey::from_str(&pubkey.to_string()).unwrap()\n\n}\n\n\n", "file_path": "account-decoder/src/parse_token.rs", "rank": 34, "score": 317282.3633204404 }, { "content": "// A helper function to convert a spl_sdk::pubkey::Pubkey to solana_sdk::pubkey::Pubkey\n\npub fn pubkey_from_spl_token_v2_0(pubkey: &SplTokenPubkey) -> Pubkey {\n\n Pubkey::from_str(&pubkey.to_string()).unwrap()\n\n}\n\n\n", "file_path": "account-decoder/src/parse_token.rs", "rank": 35, "score": 317282.3633204404 }, { "content": "/// Create an executable account with the given shared object name.\n\npub fn create_loadable_account(name: &str, lamports: u64) -> Account {\n\n Account {\n\n lamports,\n\n owner: id(),\n\n data: name.as_bytes().to_vec(),\n\n executable: true,\n\n rent_epoch: 0,\n\n }\n\n}\n", "file_path": "sdk/src/native_loader.rs", "rank": 36, "score": 316972.9875272148 }, { "content": "pub fn update_account<'a, I>(account: &mut Account, recent_blockhash_iter: I) -> Option<()>\n\nwhere\n\n I: IntoIterator<Item = IterItem<'a>>,\n\n{\n\n let sorted = BinaryHeap::from_iter(recent_blockhash_iter);\n\n let sorted_iter = IntoIterSorted::new(sorted);\n\n let recent_blockhash_iter = sorted_iter.take(MAX_ENTRIES);\n\n let recent_blockhashes: RecentBlockhashes = recent_blockhash_iter.collect();\n\n to_account(&recent_blockhashes, account)\n\n}\n\n\n", "file_path": 
"sdk/src/recent_blockhashes_account.rs", "rank": 37, "score": 316453.0398021208 }, { "content": "fn get_balance_at(client: &RpcClient, pubkey: &Pubkey, i: usize) -> Result<u64, ClientError> {\n\n let address = stake_accounts::derive_stake_account_address(pubkey, i);\n\n client.get_balance(&address)\n\n}\n\n\n", "file_path": "stake-accounts/src/main.rs", "rank": 38, "score": 315886.6735682485 }, { "content": "fn sort_stakes(stakes: &mut Vec<(Pubkey, u64)>) {\n\n // Sort first by stake. If stakes are the same, sort by pubkey to ensure a\n\n // deterministic result.\n\n // Note: Use unstable sort, because we dedup right after to remove the equal elements.\n\n stakes.sort_unstable_by(|(l_pubkey, l_stake), (r_pubkey, r_stake)| {\n\n if r_stake == l_stake {\n\n r_pubkey.cmp(&l_pubkey)\n\n } else {\n\n r_stake.cmp(&l_stake)\n\n }\n\n });\n\n\n\n // Now that it's sorted, we can do an O(n) dedup.\n\n stakes.dedup();\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use solana_runtime::genesis_utils::{\n", "file_path": "ledger/src/leader_schedule_utils.rs", "rank": 39, "score": 315326.2586946472 }, { "content": "/// Create an `Account` from a `Sysvar`.\n\npub fn create_account<S: Sysvar>(sysvar: &S, lamports: u64) -> Account {\n\n let data_len = S::size_of().max(bincode::serialized_size(sysvar).unwrap() as usize);\n\n let mut account = Account::new(lamports, data_len, &solana_program::sysvar::id());\n\n to_account::<S>(sysvar, &mut account).unwrap();\n\n account\n\n}\n\n\n", "file_path": "sdk/src/account.rs", "rank": 40, "score": 311903.36611744994 }, { "content": "pub fn check_recent_balance(expected_balance: u64, client: &RpcClient, pubkey: &Pubkey) {\n\n (0..5).for_each(|tries| {\n\n let balance = client\n\n .get_balance_with_commitment(pubkey, CommitmentConfig::processed())\n\n .unwrap()\n\n .value;\n\n if balance == expected_balance {\n\n return;\n\n }\n\n if tries == 4 {\n\n assert_eq!(balance, expected_balance);\n\n }\n\n 
sleep(Duration::from_millis(500));\n\n });\n\n}\n\n\n", "file_path": "cli/src/test_utils.rs", "rank": 41, "score": 311497.38245902094 }, { "content": "/// Create a new payment script.\n\npub fn payment(from: &Pubkey, to: &Pubkey, contract: &Pubkey, lamports: u64) -> Vec<Instruction> {\n\n let expr = BudgetExpr::new_payment(lamports, to);\n\n create_account(from, &contract, lamports, expr)\n\n}\n\n\n", "file_path": "programs/budget/src/budget_instruction.rs", "rank": 42, "score": 310012.1202930107 }, { "content": "pub fn construct_eth_pubkey(pubkey: &secp256k1::PublicKey) -> [u8; HASHED_PUBKEY_SERIALIZED_SIZE] {\n\n let mut addr = [0u8; HASHED_PUBKEY_SERIALIZED_SIZE];\n\n addr.copy_from_slice(&sha3::Keccak256::digest(&pubkey.serialize()[1..])[12..]);\n\n assert_eq!(addr.len(), HASHED_PUBKEY_SERIALIZED_SIZE);\n\n addr\n\n}\n\n\n", "file_path": "sdk/src/secp256k1_instruction.rs", "rank": 43, "score": 309873.3495268064 }, { "content": "// A helper function to convert spl_token_v2_0::id() as spl_sdk::pubkey::Pubkey to\n\n// solana_sdk::pubkey::Pubkey\n\npub fn spl_token_id_v2_0() -> Pubkey {\n\n Pubkey::from_str(&spl_token_v2_0::id().to_string()).unwrap()\n\n}\n\n\n", "file_path": "account-decoder/src/parse_token.rs", "rank": 44, "score": 308621.14891525544 }, { "content": "pub fn read_u16(current: &mut usize, data: &[u8]) -> Result<u16, SanitizeError> {\n\n if data.len() < *current + 2 {\n\n return Err(SanitizeError::IndexOutOfBounds);\n\n }\n\n let mut fixed_data = [0u8; 2];\n\n fixed_data.copy_from_slice(&data[*current..*current + 2]);\n\n let e = u16::from_le_bytes(fixed_data);\n\n *current += 2;\n\n Ok(e)\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 45, "score": 307093.0117682939 }, { "content": "pub fn load_genesis_accounts(file: &str, genesis_config: &mut GenesisConfig) -> io::Result<u64> {\n\n let mut lamports = 0;\n\n let accounts_file = File::open(file.to_string())?;\n\n\n\n let genesis_accounts: HashMap<String, Base64Account> =\n\n 
serde_yaml::from_reader(accounts_file)\n\n .map_err(|err| io::Error::new(io::ErrorKind::Other, format!(\"{:?}\", err)))?;\n\n\n\n for (key, account_details) in genesis_accounts {\n\n let pubkey = pubkey_from_str(key.as_str()).map_err(|err| {\n\n io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"Invalid pubkey/keypair {}: {:?}\", key, err),\n\n )\n\n })?;\n\n\n\n let owner_program_id = Pubkey::from_str(account_details.owner.as_str()).map_err(|err| {\n\n io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"Invalid owner: {}: {:?}\", account_details.owner, err),\n", "file_path": "genesis/src/main.rs", "rank": 46, "score": 306599.4853012997 }, { "content": "pub fn spl_programs(rent: &Rent) -> Vec<(Pubkey, Account)> {\n\n SPL_PROGRAMS\n\n .iter()\n\n .map(|(program_id, elf)| {\n\n (\n\n *program_id,\n\n Account {\n\n lamports: rent.minimum_balance(elf.len()).min(1),\n\n data: elf.to_vec(),\n\n owner: solana_program::bpf_loader::id(),\n\n executable: true,\n\n rent_epoch: 0,\n\n },\n\n )\n\n })\n\n .collect()\n\n}\n", "file_path": "program-test/src/programs.rs", "rank": 47, "score": 305722.8361643509 }, { "content": "// A helper function to convert spl_token_v2_0::native_mint::id() as spl_sdk::pubkey::Pubkey to\n\n// solana_sdk::pubkey::Pubkey\n\npub fn spl_token_v2_0_native_mint() -> Pubkey {\n\n Pubkey::from_str(&spl_token_v2_0::native_mint::id().to_string()).unwrap()\n\n}\n\n\n", "file_path": "account-decoder/src/parse_token.rs", "rank": 48, "score": 303668.0570320939 }, { "content": "pub fn get_account(rpc_client: &RpcClient, nonce_pubkey: &Pubkey) -> Result<Account, Error> {\n\n get_account_with_commitment(rpc_client, nonce_pubkey, CommitmentConfig::default())\n\n}\n\n\n", "file_path": "client/src/nonce_utils.rs", "rank": 49, "score": 303648.37051900255 }, { "content": "pub fn create_account_with_data<'a, I>(lamports: u64, recent_blockhash_iter: I) -> Account\n\nwhere\n\n I: IntoIterator<Item = IterItem<'a>>,\n\n{\n\n let mut account = 
create_account::<RecentBlockhashes>(&RecentBlockhashes::default(), lamports);\n\n update_account(&mut account, recent_blockhash_iter).unwrap();\n\n account\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::account::from_account;\n\n use rand::{seq::SliceRandom, thread_rng};\n\n use solana_program::{\n\n fee_calculator::FeeCalculator,\n\n hash::{Hash, HASH_BYTES},\n\n sysvar::recent_blockhashes::Entry,\n\n };\n\n\n", "file_path": "sdk/src/recent_blockhashes_account.rs", "rank": 50, "score": 303596.14459420135 }, { "content": "pub fn spl_token_amount(amount: f64, decimals: u8) -> u64 {\n\n (amount * 10_usize.pow(decimals as u32) as f64) as u64\n\n}\n\n\n", "file_path": "tokens/src/spl_token.rs", "rank": 51, "score": 301456.1463285178 }, { "content": "pub fn advance_nonce_account(nonce_pubkey: &Pubkey, authorized_pubkey: &Pubkey) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new(*nonce_pubkey, false),\n\n AccountMeta::new_readonly(recent_blockhashes::id(), false),\n\n AccountMeta::new_readonly(*authorized_pubkey, true),\n\n ];\n\n Instruction::new(\n\n system_program::id(),\n\n &SystemInstruction::AdvanceNonceAccount,\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/program/src/system_instruction.rs", "rank": 52, "score": 300957.47259563056 }, { "content": "#[cfg(feature = \"full\")]\n\npub fn write_pubkey_file(outfile: &str, pubkey: Pubkey) -> Result<(), Box<dyn std::error::Error>> {\n\n use std::io::Write;\n\n\n\n let printable = format!(\"{}\", pubkey);\n\n let serialized = serde_json::to_string(&printable)?;\n\n\n\n if let Some(outdir) = std::path::Path::new(&outfile).parent() {\n\n std::fs::create_dir_all(outdir)?;\n\n }\n\n let mut f = std::fs::File::create(outfile)?;\n\n f.write_all(&serialized.into_bytes())?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "sdk/src/pubkey.rs", "rank": 53, "score": 299894.19093894877 }, { "content": "pub fn account_request(owner: &Pubkey, new: &Pubkey) -> Instruction {\n\n let 
account_metas = vec![\n\n AccountMeta::new(*owner, true),\n\n AccountMeta::new(*new, false),\n\n ];\n\n Instruction::new(id(), &ExchangeInstruction::AccountRequest, account_metas)\n\n}\n\n\n", "file_path": "programs/exchange/src/exchange_instruction.rs", "rank": 54, "score": 299516.5407185242 }, { "content": "/// Apply account data to a contract waiting on an AccountData witness.\n\npub fn apply_account_data(witness_pubkey: &Pubkey, contract: &Pubkey, to: &Pubkey) -> Instruction {\n\n let account_metas = vec![\n\n AccountMeta::new_readonly(*witness_pubkey, false),\n\n AccountMeta::new(*contract, false),\n\n AccountMeta::new(*to, false),\n\n ];\n\n Instruction::new(id(), &BudgetInstruction::ApplyAccountData, account_metas)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::budget_expr::BudgetExpr;\n\n\n\n #[test]\n\n fn test_budget_instruction_verify() {\n\n let alice_pubkey = solana_sdk::pubkey::new_rand();\n\n let bob_pubkey = solana_sdk::pubkey::new_rand();\n\n let budget_pubkey = solana_sdk::pubkey::new_rand();\n\n payment(&alice_pubkey, &bob_pubkey, &budget_pubkey, 1); // No panic! 
indicates success.\n", "file_path": "programs/budget/src/budget_instruction.rs", "rank": 55, "score": 298855.3206031608 }, { "content": "pub fn set_owner(account_pubkey: &Pubkey, old_pubkey: &Pubkey, new_pubkey: &Pubkey) -> Instruction {\n\n let keys = vec![\n\n AccountMeta::new(*account_pubkey, false),\n\n AccountMeta::new(*old_pubkey, true),\n\n ];\n\n Instruction::new(crate::id(), &new_pubkey, keys)\n\n}\n", "file_path": "programs/ownable/src/ownable_instruction.rs", "rank": 56, "score": 298349.80319717596 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\npub fn multicast(sock: &UdpSocket, packet: &mut [u8], dests: &[&SocketAddr]) -> io::Result<usize> {\n\n let count = dests.len();\n\n for a in dests {\n\n sock.send_to(packet, a)?;\n\n }\n\n\n\n Ok(count)\n\n}\n\n\n", "file_path": "streamer/src/sendmmsg.rs", "rank": 57, "score": 297506.4822508603 }, { "content": "fn identify_my_partition(partitions: &[u8], index: u64, size: u64) -> usize {\n\n let mut my_partition = 0;\n\n let mut watermark = 0;\n\n for (i, p) in partitions.iter().enumerate() {\n\n watermark += *p;\n\n if u64::from(watermark) >= index * 100 / size {\n\n my_partition = i;\n\n break;\n\n }\n\n }\n\n\n\n my_partition\n\n}\n\n\n", "file_path": "net-shaper/src/main.rs", "rank": 58, "score": 296018.1224856989 }, { "content": "#[cfg(feature = \"full\")]\n\npub fn new_rand() -> Pubkey {\n\n Pubkey::new(&rand::random::<[u8; PUBKEY_BYTES]>())\n\n}\n\n\n", "file_path": "sdk/src/pubkey.rs", "rank": 59, "score": 293458.3700916247 }, { "content": "pub fn mark_disabled(batches: &mut Vec<Packets>, r: &[Vec<u8>]) {\n\n batches.iter_mut().zip(r).for_each(|(b, v)| {\n\n b.packets\n\n .iter_mut()\n\n .zip(v)\n\n .for_each(|(p, f)| p.meta.discard = *f == 0)\n\n });\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use solana_perf::packet::Packet;\n\n\n\n #[test]\n\n fn test_mark_disabled() {\n\n let mut batch = Packets::default();\n\n batch.packets.push(Packet::default());\n\n let mut batches: 
Vec<Packets> = vec![batch];\n\n mark_disabled(&mut batches, &[vec![0]]);\n\n assert_eq!(batches[0].packets[0].meta.discard, true);\n\n mark_disabled(&mut batches, &[vec![1]]);\n\n assert_eq!(batches[0].packets[0].meta.discard, false);\n\n }\n\n}\n", "file_path": "core/src/sigverify.rs", "rank": 60, "score": 292863.85406460945 }, { "content": "type VoteAccountsHashMap = HashMap<Pubkey, (u64 /*stake*/, ArcVoteAccount)>;\n\n\n\nimpl From<VoteAccountsHashMap> for VoteAccounts {\n\n fn from(vote_accounts: VoteAccountsHashMap) -> Self {\n\n Self {\n\n vote_accounts,\n\n staked_nodes: RwLock::default(),\n\n staked_nodes_once: Once::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl Borrow<VoteAccountsHashMap> for VoteAccounts {\n\n fn borrow(&self) -> &VoteAccountsHashMap {\n\n &self.vote_accounts\n\n }\n\n}\n\n\n\nimpl FromIterator<(Pubkey, (u64 /*stake*/, ArcVoteAccount))> for VoteAccounts {\n\n fn from_iter<I>(iter: I) -> Self\n", "file_path": "runtime/src/vote_account.rs", "rank": 61, "score": 292673.46343467047 }, { "content": "pub fn store_current_index(data: &mut [u8], instruction_index: u16) {\n\n let last_index = data.len() - 2;\n\n data[last_index..last_index + 2].copy_from_slice(&instruction_index.to_le_bytes());\n\n}\n\n\n", "file_path": "sdk/program/src/sysvar/instructions.rs", "rank": 62, "score": 292628.5215226378 }, { "content": "pub fn append_u16(buf: &mut Vec<u8>, data: u16) {\n\n let start = buf.len();\n\n buf.resize(buf.len() + 2, 0);\n\n let end = buf.len();\n\n buf[start..end].copy_from_slice(&data.to_le_bytes());\n\n}\n\n\n", "file_path": "sdk/program/src/serialize_utils.rs", "rank": 63, "score": 289897.42079539807 }, { "content": "#[cfg(feature = \"full\")]\n\npub fn read_pubkey_file(infile: &str) -> Result<Pubkey, Box<dyn std::error::Error>> {\n\n let f = std::fs::File::open(infile.to_string())?;\n\n let printable: String = serde_json::from_reader(f)?;\n\n\n\n use std::str::FromStr;\n\n Ok(Pubkey::from_str(&printable)?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests 
{\n\n use super::*;\n\n use std::fs::remove_file;\n\n\n\n #[test]\n\n fn test_read_write_pubkey() -> Result<(), Box<dyn std::error::Error>> {\n\n let filename = \"test_pubkey.json\";\n\n let pubkey = solana_sdk::pubkey::new_rand();\n\n write_pubkey_file(filename, pubkey)?;\n\n let read = read_pubkey_file(filename)?;\n\n assert_eq!(read, pubkey);\n\n remove_file(filename)?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "sdk/src/pubkey.rs", "rank": 64, "score": 289837.3831102719 }, { "content": "// Return a pubkey for an argument that can itself be parsed into a pubkey,\n\n// or is a filename that can be read as a keypair\n\npub fn pubkey_of(matches: &ArgMatches<'_>, name: &str) -> Option<Pubkey> {\n\n value_of(matches, name).or_else(|| keypair_of(matches, name).map(|keypair| keypair.pubkey()))\n\n}\n\n\n", "file_path": "clap-utils/src/input_parsers.rs", "rank": 65, "score": 288359.6107030669 }, { "content": "type DashMapVersionHash = DashMap<Pubkey, (u64, Hash)>;\n\n\n\nlazy_static! {\n\n // FROZEN_ACCOUNT_PANIC is used to signal local_cluster that an AccountsDB panic has occurred,\n\n // as |cargo test| cannot observe panics in other threads\n\n pub static ref FROZEN_ACCOUNT_PANIC: Arc<AtomicBool> = Arc::new(AtomicBool::new(false));\n\n}\n\n\n\npub enum ScanStorageResult<R, B> {\n\n Cached(Vec<R>),\n\n Stored(B),\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct ErrorCounters {\n\n pub total: usize,\n\n pub account_in_use: usize,\n\n pub account_loaded_twice: usize,\n\n pub account_not_found: usize,\n\n pub blockhash_not_found: usize,\n", "file_path": "runtime/src/accounts_db.rs", "rank": 66, "score": 287807.63597612845 }, { "content": "/// Assuming layout is\n\n/// signature: Signature\n\n/// signed_msg: {\n\n/// type: ShredType\n\n/// slot: u64,\n\n/// ...\n\n/// }\n\n/// Signature is the first thing in the packet, and slot is the first thing in the signed message.\n\npub fn verify_shred_cpu(packet: &Packet, slot_leaders: &HashMap<u64, [u8; 32]>) -> Option<u8> {\n\n 
let sig_start = 0;\n\n let sig_end = size_of::<Signature>();\n\n let slot_start = sig_end + size_of::<ShredType>();\n\n let slot_end = slot_start + size_of::<u64>();\n\n let msg_start = sig_end;\n\n if packet.meta.discard {\n\n return Some(0);\n\n }\n\n trace!(\"slot start and end {} {}\", slot_start, slot_end);\n\n if packet.meta.size < slot_end {\n\n return Some(0);\n\n }\n\n let slot: u64 = limited_deserialize(&packet.data[slot_start..slot_end]).ok()?;\n\n let msg_end = if packet.meta.repair {\n\n packet.meta.size.saturating_sub(SIZE_OF_NONCE)\n\n } else {\n\n packet.meta.size\n\n };\n\n trace!(\"slot {}\", slot);\n", "file_path": "ledger/src/sigverify_shreds.rs", "rank": 67, "score": 286698.8971448759 }, { "content": "#[allow(clippy::same_item_push)]\n\npub fn create_ticks(num_ticks: u64, hashes_per_tick: u64, mut hash: Hash) -> Vec<Entry> {\n\n let mut ticks = Vec::with_capacity(num_ticks as usize);\n\n for _ in 0..num_ticks {\n\n let new_tick = next_entry_mut(&mut hash, hashes_per_tick, vec![]);\n\n ticks.push(new_tick);\n\n }\n\n\n\n ticks\n\n}\n\n\n", "file_path": "ledger/src/entry.rs", "rank": 68, "score": 286566.93796903215 }, { "content": "#[allow(clippy::result_unit_err)]\n\npub fn decode_len(bytes: &[u8]) -> Result<(usize, usize), ()> {\n\n let mut len = 0;\n\n let mut size = 0;\n\n for byte in bytes.iter() {\n\n match visit_byte(*byte, len, size) {\n\n VisitResult::More(l, s) => {\n\n len = l;\n\n size = s;\n\n }\n\n VisitResult::Done(len, size) => return Ok((len, size)),\n\n VisitResult::Err => return Err(()),\n\n }\n\n }\n\n Err(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use assert_matches::assert_matches;\n", "file_path": "sdk/program/src/short_vec.rs", "rank": 69, "score": 286399.67168172356 }, { "content": "pub fn timestamp() -> u64 {\n\n let now = SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"create timestamp in timing\");\n\n duration_as_ms(&now)\n\n}\n\n\n\npub const SECONDS_PER_YEAR: f64 = 365.242_199 
* 24.0 * 60.0 * 60.0;\n\n\n", "file_path": "sdk/src/timing.rs", "rank": 70, "score": 283366.612230759 }, { "content": "pub fn from_account(account: &Account) -> Option<Feature> {\n\n if account.owner != id() {\n\n None\n\n } else {\n\n bincode::deserialize(&account.data).ok()\n\n }\n\n}\n\n\n", "file_path": "sdk/src/feature.rs", "rank": 71, "score": 282159.30863514927 }, { "content": "fn resize_vec(keyvec: &mut PinnedVec<u8>) -> usize {\n\n //HACK: Pubkeys vector is passed along as a `Packets` buffer to the GPU\n\n //TODO: GPU needs a more opaque interface, which can handle variable sized structures for data\n\n //Pad the Pubkeys buffer such that it is bigger than a buffer of Packet sized elems\n\n let num_in_packets = (keyvec.len() + (size_of::<Packet>() - 1)) / size_of::<Packet>();\n\n keyvec.resize(num_in_packets * size_of::<Packet>(), 0u8);\n\n num_in_packets\n\n}\n\n\n", "file_path": "ledger/src/sigverify_shreds.rs", "rank": 72, "score": 282148.9440189023 }, { "content": "pub fn pubkeys_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<Pubkey>> {\n\n matches.values_of(name).map(|values| {\n\n values\n\n .map(|value| {\n\n value.parse::<Pubkey>().unwrap_or_else(|_| {\n\n read_keypair_file(value)\n\n .expect(\"read_keypair_file failed\")\n\n .pubkey()\n\n })\n\n })\n\n .collect()\n\n })\n\n}\n\n\n", "file_path": "clap-utils/src/input_parsers.rs", "rank": 73, "score": 280075.55934477743 }, { "content": "#[cfg(test)]\n\nfn check_accounts(accounts: &Accounts, pubkeys: &[Pubkey], num: usize) {\n\n for _ in 1..num {\n\n let idx = thread_rng().gen_range(0, num - 1);\n\n let ancestors = vec![(0, 0)].into_iter().collect();\n\n let account = accounts.load_slow(&ancestors, &pubkeys[idx]);\n\n let account1 = Some((\n\n Account::new((idx + 1) as u64, 0, &Account::default().owner),\n\n 0,\n\n ));\n\n assert_eq!(account, account1);\n\n }\n\n}\n\n\n", "file_path": "runtime/src/serde_snapshot/tests.rs", "rank": 74, "score": 280070.8986074164 }, { "content": 
"#[allow(clippy::same_item_push)]\n\npub fn create_random_ticks(num_ticks: u64, max_hashes_per_tick: u64, mut hash: Hash) -> Vec<Entry> {\n\n let mut ticks = Vec::with_capacity(num_ticks as usize);\n\n for _ in 0..num_ticks {\n\n let hashes_per_tick = thread_rng().gen_range(1, max_hashes_per_tick);\n\n let new_tick = next_entry_mut(&mut hash, hashes_per_tick, vec![]);\n\n ticks.push(new_tick);\n\n }\n\n\n\n ticks\n\n}\n\n\n", "file_path": "ledger/src/entry.rs", "rank": 75, "score": 279643.73636326834 }, { "content": "pub fn max_ticks_per_n_shreds(num_shreds: u64, shred_data_size: Option<usize>) -> u64 {\n\n let ticks = create_ticks(1, 0, Hash::default());\n\n max_entries_per_n_shred(&ticks[0], num_shreds, shred_data_size)\n\n}\n\n\n", "file_path": "ledger/src/shred.rs", "rank": 76, "score": 278284.7522795877 }, { "content": "pub fn next_entry_mut(start: &mut Hash, num_hashes: u64, transactions: Vec<Transaction>) -> Entry {\n\n let entry = Entry::new(&start, num_hashes, transactions);\n\n *start = entry.hash;\n\n entry\n\n}\n\n\n", "file_path": "ledger/src/entry.rs", "rank": 77, "score": 277159.38235380984 }, { "content": "/// Return a list of contract messages and a list of vesting-date/lamports pairs.\n\npub fn create_vesting_schedule(start_date: Date<Utc>, mut lamports: u64) -> Vec<(Date<Utc>, u64)> {\n\n let mut schedule = vec![];\n\n\n\n // 1/3 vest after one year from start date.\n\n let (mut stipend, remainder) = div(lamports, 3);\n\n stipend += remainder;\n\n\n\n let dt = get_month(start_date, 12);\n\n schedule.push((dt, stipend));\n\n\n\n lamports -= stipend;\n\n\n\n // Remaining 66% vest monthly after one year.\n\n let payments = 24u32;\n\n let (stipend, remainder) = div(lamports, u64::from(payments));\n\n for n in 0..payments {\n\n let mut stipend = stipend;\n\n if u64::from(n) < remainder {\n\n stipend += 1;\n\n }\n", "file_path": "programs/vest/src/vest_schedule.rs", "rank": 78, "score": 274885.1224492913 }, { "content": "pub fn 
copy_return_values(sig_lens: &[Vec<u32>], out: &PinnedVec<u8>, rvs: &mut Vec<Vec<u8>>) {\n\n let mut num = 0;\n\n for (vs, sig_vs) in rvs.iter_mut().zip(sig_lens.iter()) {\n\n for (v, sig_v) in vs.iter_mut().zip(sig_vs.iter()) {\n\n if *sig_v == 0 {\n\n *v = 0;\n\n } else {\n\n let mut vout = 1;\n\n for _ in 0..*sig_v {\n\n if 0 == out[num] {\n\n vout = 0;\n\n }\n\n num += 1;\n\n }\n\n *v = vout;\n\n }\n\n if *v != 0 {\n\n trace!(\"VERIFIED PACKET!!!!!\");\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "perf/src/sigverify.rs", "rank": 79, "score": 273964.5782283932 }, { "content": "pub fn assign(pubkey: &Pubkey, owner: &Pubkey) -> Instruction {\n\n let account_metas = vec![AccountMeta::new(*pubkey, true)];\n\n Instruction::new(\n\n system_program::id(),\n\n &SystemInstruction::Assign { owner: *owner },\n\n account_metas,\n\n )\n\n}\n\n\n", "file_path": "sdk/program/src/system_instruction.rs", "rank": 80, "score": 273825.7590077565 }, { "content": "pub fn is_upgrade_instruction(instruction_data: &[u8]) -> bool {\n\n 3 == instruction_data[0]\n\n}\n\n\n", "file_path": "sdk/program/src/bpf_loader_upgradeable.rs", "rank": 81, "score": 273176.7676553803 }, { "content": "fn position(keys: &[Pubkey], key: &Pubkey) -> u8 {\n\n keys.iter().position(|k| k == key).unwrap() as u8\n\n}\n\n\n", "file_path": "sdk/program/src/message.rs", "rank": 82, "score": 272735.314096972 }, { "content": "pub fn fee_calculator_of(account: &Account) -> Option<FeeCalculator> {\n\n let state = StateMut::<Versions>::state(account)\n\n .ok()?\n\n .convert_to_current();\n\n match state {\n\n State::Initialized(data) => Some(data.fee_calculator),\n\n _ => None,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::pubkey::Pubkey;\n\n\n\n #[test]\n\n fn test_verify_bad_account_owner_fails() {\n\n let program_id = Pubkey::new_unique();\n\n assert_ne!(program_id, crate::system_program::id());\n\n let account = Account::new_data_with_space(\n\n 42,\n\n 
&Versions::new_current(State::Uninitialized),\n\n State::size(),\n\n &program_id,\n\n )\n\n .expect(\"nonce_account\");\n\n assert!(!verify_nonce_account(&account, &Hash::default()));\n\n }\n\n}\n", "file_path": "sdk/src/nonce_account.rs", "rank": 83, "score": 271440.46760732244 }, { "content": "/// Initialize the vote_state for a vote account\n\n/// Assumes that the account is being init as part of a account creation or balance transfer and\n\n/// that the transaction must be signed by the staker's keys\n\npub fn initialize_account<S: std::hash::BuildHasher>(\n\n vote_account: &KeyedAccount,\n\n vote_init: &VoteInit,\n\n signers: &HashSet<Pubkey, S>,\n\n clock: &Clock,\n\n) -> Result<(), InstructionError> {\n\n let versioned = State::<VoteStateVersions>::state(vote_account)?;\n\n\n\n if !versioned.is_uninitialized() {\n\n return Err(InstructionError::AccountAlreadyInitialized);\n\n }\n\n\n\n // node must agree to accept this vote account\n\n verify_authorized_signer(&vote_init.node_pubkey, signers)?;\n\n\n\n vote_account.set_state(&VoteStateVersions::new_current(VoteState::new(\n\n vote_init, clock,\n\n )))\n\n}\n\n\n", "file_path": "programs/vote/src/vote_state/mod.rs", "rank": 84, "score": 271284.89467477315 }, { "content": "pub fn parse_nonce(data: &[u8]) -> Result<UiNonceState, ParseAccountError> {\n\n let nonce_state: Versions = bincode::deserialize(data)\n\n .map_err(|_| ParseAccountError::from(InstructionError::InvalidAccountData))?;\n\n let nonce_state = nonce_state.convert_to_current();\n\n match nonce_state {\n\n State::Uninitialized => Ok(UiNonceState::Uninitialized),\n\n State::Initialized(data) => Ok(UiNonceState::Initialized(UiNonceData {\n\n authority: data.authority.to_string(),\n\n blockhash: data.blockhash.to_string(),\n\n fee_calculator: data.fee_calculator.into(),\n\n })),\n\n }\n\n}\n\n\n\n/// A duplicate representation of NonceState for pretty JSON serialization\n\n#[derive(Debug, Serialize, Deserialize, PartialEq)]\n\n#[serde(rename_all = 
\"camelCase\", tag = \"type\", content = \"info\")]\n\npub enum UiNonceState {\n\n Uninitialized,\n\n Initialized(UiNonceData),\n", "file_path": "account-decoder/src/parse_nonce.rs", "rank": 85, "score": 269300.114319757 }, { "content": "pub fn create_genesis_config_with_vote_accounts_and_cluster_type(\n\n mint_lamports: u64,\n\n voting_keypairs: &[impl Borrow<ValidatorVoteKeypairs>],\n\n stakes: Vec<u64>,\n\n cluster_type: ClusterType,\n\n) -> GenesisConfigInfo {\n\n assert!(!voting_keypairs.is_empty());\n\n assert_eq!(voting_keypairs.len(), stakes.len());\n\n\n\n let mint_keypair = Keypair::new();\n\n let voting_keypair =\n\n Keypair::from_bytes(&voting_keypairs[0].borrow().vote_keypair.to_bytes()).unwrap();\n\n\n\n let genesis_config = create_genesis_config_with_leader_ex(\n\n mint_lamports,\n\n &mint_keypair.pubkey(),\n\n &voting_keypairs[0].borrow().node_keypair.pubkey(),\n\n &voting_keypairs[0].borrow().vote_keypair.pubkey(),\n\n &voting_keypairs[0].borrow().stake_keypair.pubkey(),\n\n stakes[0],\n", "file_path": "runtime/src/genesis_utils.rs", "rank": 86, "score": 269135.1514476971 }, { "content": "fn slice_hash(slice: &[u8], hash_index: u64) -> u64 {\n\n let mut hasher = FnvHasher::with_key(hash_index);\n\n hasher.write(slice);\n\n hasher.finish()\n\n}\n\n\n\nimpl<T: AsRef<[u8]>> BloomHashIndex for T {\n\n fn hash_at_index(&self, hash_index: u64) -> u64 {\n\n slice_hash(self.as_ref(), hash_index)\n\n }\n\n}\n\n\n\npub struct AtomicBloom<T> {\n\n num_bits: u64,\n\n keys: Vec<u64>,\n\n bits: Vec<AtomicU64>,\n\n _phantom: PhantomData<T>,\n\n}\n\n\n\nimpl<T: BloomHashIndex> From<Bloom<T>> for AtomicBloom<T> {\n", "file_path": "runtime/src/bloom.rs", "rank": 87, "score": 268965.74561407743 }, { "content": "// Return pubkey/signature pairs for a string of the form pubkey=signature\n\npub fn pubkeys_sigs_of(matches: &ArgMatches<'_>, name: &str) -> Option<Vec<(Pubkey, Signature)>> {\n\n matches.values_of(name).map(|values| {\n\n values\n\n 
.map(|pubkey_signer_string| {\n\n let mut signer = pubkey_signer_string.split('=');\n\n let key = Pubkey::from_str(signer.next().unwrap()).unwrap();\n\n let sig = Signature::from_str(signer.next().unwrap()).unwrap();\n\n (key, sig)\n\n })\n\n .collect()\n\n })\n\n}\n\n\n\n// Return a signer from matches at `name`\n", "file_path": "clap-utils/src/input_parsers.rs", "rank": 88, "score": 268567.98968304927 }, { "content": "/// Invoke a cross-program instruction\n\n///\n\n/// Note that the program id of the instruction being issued must also be included in\n\n/// `account_infos`.\n\npub fn invoke(instruction: &Instruction, account_infos: &[AccountInfo]) -> ProgramResult {\n\n invoke_signed(instruction, account_infos, &[])\n\n}\n\n\n", "file_path": "sdk/program/src/program.rs", "rank": 89, "score": 268505.54090425756 }, { "content": "/// Create a `Sysvar` from an `Account`'s data.\n\npub fn from_account<S: Sysvar>(account: &Account) -> Option<S> {\n\n bincode::deserialize(&account.data).ok()\n\n}\n\n\n", "file_path": "sdk/src/account.rs", "rank": 90, "score": 268361.0763575471 }, { "content": "pub fn lamports_of_sol(matches: &ArgMatches<'_>, name: &str) -> Option<u64> {\n\n value_of(matches, name).map(sol_to_lamports)\n\n}\n\n\n", "file_path": "clap-utils/src/input_parsers.rs", "rank": 91, "score": 267960.04864489776 }, { "content": "/// Convenience function for working with keyed accounts in tests\n\npub fn with_test_keyed_account<F>(lamports: u64, signer: bool, f: F)\n\nwhere\n\n F: Fn(&KeyedAccount),\n\n{\n\n let pubkey = Pubkey::new_unique();\n\n let account = create_account(lamports);\n\n let keyed_account = KeyedAccount::new(&pubkey, signer, &account);\n\n f(&keyed_account)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{\n\n account_utils::State as AccountUtilsState,\n\n keyed_account::KeyedAccount,\n\n nonce::{self, State},\n\n nonce_account::verify_nonce_account,\n\n system_instruction::NonceError,\n\n 
sysvar::recent_blockhashes::create_test_recent_blockhashes,\n", "file_path": "sdk/src/nonce_keyed_account.rs", "rank": 92, "score": 267297.5235245988 }, { "content": "pub fn compute_hash_time_ns(hashes_sample_size: u64) -> u64 {\n\n info!(\"Running {} hashes...\", hashes_sample_size);\n\n let mut v = Hash::default();\n\n let start = Instant::now();\n\n for _ in 0..hashes_sample_size {\n\n v = hash(&v.as_ref());\n\n }\n\n start.elapsed().as_nanos() as u64\n\n}\n\n\n", "file_path": "ledger/src/poh.rs", "rank": 93, "score": 267169.1309094374 }, { "content": "pub fn state_from_account(account: &Account) -> Result<State, Error> {\n\n account_identity_ok(account)?;\n\n StateMut::<Versions>::state(account)\n\n .map_err(|_| Error::InvalidAccountData)\n\n .map(|v| v.convert_to_current())\n\n}\n\n\n", "file_path": "client/src/nonce_utils.rs", "rank": 94, "score": 265532.57868448575 }, { "content": "pub fn data_from_account(account: &Account) -> Result<Data, Error> {\n\n account_identity_ok(account)?;\n\n state_from_account(account).and_then(|ref s| data_from_state(s).map(|d| d.clone()))\n\n}\n\n\n", "file_path": "client/src/nonce_utils.rs", "rank": 95, "score": 265532.57868448575 }, { "content": "// fun fact: rustc is very close to make this const fn.\n\npub fn bootstrap_validator_stake_lamports() -> u64 {\n\n StakeState::get_rent_exempt_reserve(&Rent::default())\n\n}\n\n\n\npub struct ValidatorVoteKeypairs {\n\n pub node_keypair: Keypair,\n\n pub vote_keypair: Keypair,\n\n pub stake_keypair: Keypair,\n\n}\n\n\n\nimpl ValidatorVoteKeypairs {\n\n pub fn new(node_keypair: Keypair, vote_keypair: Keypair, stake_keypair: Keypair) -> Self {\n\n Self {\n\n node_keypair,\n\n vote_keypair,\n\n stake_keypair,\n\n }\n\n }\n\n\n\n pub fn new_rand() -> Self {\n", "file_path": "runtime/src/genesis_utils.rs", "rank": 96, "score": 265255.20923192275 }, { "content": "pub fn get_system_account_kind(account: &Account) -> Option<SystemAccountKind> {\n\n if 
system_program::check_id(&account.owner) {\n\n if account.data.is_empty() {\n\n Some(SystemAccountKind::System)\n\n } else if account.data.len() == nonce::State::size() {\n\n match account.state().ok()? {\n\n nonce::state::Versions::Current(state) => match *state {\n\n nonce::State::Initialized(_) => Some(SystemAccountKind::Nonce),\n\n _ => None,\n\n },\n\n }\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "runtime/src/system_instruction_processor.rs", "rank": 97, "score": 264521.4189169924 }, { "content": "pub fn terminate(contract: &Pubkey, from: &Pubkey, to: &Pubkey) -> Instruction {\n\n let mut account_metas = vec![\n\n AccountMeta::new(*contract, false),\n\n AccountMeta::new(*from, true),\n\n ];\n\n if from != to {\n\n account_metas.push(AccountMeta::new(*to, false));\n\n }\n\n Instruction::new(id(), &VestInstruction::Terminate, account_metas)\n\n}\n\n\n", "file_path": "programs/vest/src/vest_instruction.rs", "rank": 98, "score": 264038.1755454575 } ]
Rust
src/config.rs
AmaranthineCodices/whimsy
e46684a0e66277a18694324268e8635cdc22f054
use std::default::Default; use std::path::{Path, PathBuf}; use crate::keybind; lazy_static::lazy_static! { pub static ref DEFAULT_CONFIG_PATH: PathBuf = { let mut cfg_dir = dirs::config_dir().expect("Could not find user configuration directory."); cfg_dir.push("whimsy"); cfg_dir.push("whimsy.yaml"); cfg_dir }; } #[derive(Debug, thiserror::Error)] pub enum ConfigReadError { #[error("could not read config file: {0}")] IoError(std::io::Error), #[error("could not deserialize config file contents: {0}")] DeserializeError(serde_yaml::Error), } #[derive(Debug, Clone, Eq, PartialEq, serde::Serialize, serde::Deserialize)] #[serde(default)] pub struct ConfigDirectives { #[serde(rename = "live-reload")] pub live_reload_configuration: bool, } impl Default for ConfigDirectives { fn default() -> Self { ConfigDirectives { live_reload_configuration: false, } } } #[derive(Debug, Copy, Clone, serde::Serialize, serde::Deserialize)] #[serde(rename_all = "kebab-case")] pub enum Direction { Up, Left, Right, Down, } #[derive(Debug, Copy, Clone, serde::Serialize, serde::Deserialize)] #[serde(rename_all = "kebab-case")] pub enum Metric { Percent(f32), Absolute(f32), } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] #[serde(rename_all = "kebab-case")] pub enum Action { Push { direction: Direction, fraction: f32, }, Nudge { direction: Direction, distance: Metric, }, } #[derive(Debug, serde::Serialize, serde::Deserialize)] pub struct Binding { pub key: keybind::Key, pub modifiers: Vec<keybind::Modifier>, pub action: Action, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[serde(default)] pub struct Config { pub directives: ConfigDirectives, pub bindings: Vec<Binding>, } impl Default for Config { fn default() -> Self { Config { directives: ConfigDirectives::default(), bindings: vec![ Binding { key: keybind::Key::Left, modifiers: vec![keybind::Modifier::Super, keybind::Modifier::Shift], action: Action::Push { direction: Direction::Left, fraction: 2.0, }, }, Binding { 
key: keybind::Key::Left, modifiers: vec![ keybind::Modifier::Super, keybind::Modifier::Shift, keybind::Modifier::Alt, ], action: Action::Nudge { direction: Direction::Left, distance: Metric::Absolute(100.0), }, }, ], } } } pub fn read_config_from_file(path: &dyn AsRef<Path>) -> Result<Option<Config>, ConfigReadError> { if !path.as_ref().exists() { return Ok(None); } let config_string = std::fs::read_to_string(path).map_err(|e| ConfigReadError::IoError(e))?; serde_yaml::from_str(&config_string).map_err(|e| ConfigReadError::DeserializeError(e)) } pub fn create_default_config() -> std::io::Result<()> { let default_config = Config::default(); let default_path: &PathBuf = &DEFAULT_CONFIG_PATH; let config_string = serde_yaml::to_string(&default_config).unwrap(); std::fs::create_dir_all(&default_path.parent().unwrap())?; std::fs::write(&default_path, &config_string)?; Ok(()) }
use std::default::Default; use std::path::{Path, PathBuf}; use crate::keybind; lazy_static::lazy_static! { pub static ref DEFAULT_CONFIG_PATH: PathBuf = { let mut cfg_dir = dirs::config_dir().expect("Could not find user configuration directory."); cfg_dir.push("whimsy"); cfg_dir.push("whimsy.yaml"); cfg_dir }; } #[derive(Debug, thiserror::Error)] pub enum ConfigReadError { #[error("could not read config file: {0}")] IoError(std::io::Error), #[error("could not deserialize config file contents: {0}")] DeserializeError(serde_yaml::Error), } #[derive(Debug, Clone, Eq, PartialEq, serde::Serialize, serde::Deserialize)] #[serde(default)] pub struct ConfigDirectives { #[serde(rename = "live-reload")] pub live_reload_configuration: bool, } impl Default for ConfigDirectives { fn default() -> Self { ConfigDirectives { live_reload_configuration: false, } } } #[derive(Debug, Copy, Clone, serde::Serialize, serde::Deserialize)] #[serde(rename_all = "kebab-case")] pub enum Direction { Up, Left, Right, Down, } #[derive(Debug, Copy, Clone, serde::Serialize, serde::Deserialize)] #[serde(rename_all = "kebab-case")] pub enum Metric { Percent(f32), Absolute(f32), } #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] #[serde(rename_all = "kebab-case")] pub enum Action { Push { direction: Direction, fraction: f32, }, Nudge { direction: Direction, distance: Metric, }, } #[derive(Debug, serde::Serialize, serde::Deserialize)] pub struct Binding { pub key: keybind::Key, pub modifiers: Vec<keybind::Modifier>, pub action: Action, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[serde(default)] pub struct Config { pub directives: ConfigDirectives, pub bindings: Vec<Binding>, } impl Default for Config {
} pub fn read_config_from_file(path: &dyn AsRef<Path>) -> Result<Option<Config>, ConfigReadError> { if !path.as_ref().exists() { return Ok(None); } let config_string = std::fs::read_to_string(path).map_err(|e| ConfigReadError::IoError(e))?; serde_yaml::from_str(&config_string).map_err(|e| ConfigReadError::DeserializeError(e)) } pub fn create_default_config() -> std::io::Result<()> { let default_config = Config::default(); let default_path: &PathBuf = &DEFAULT_CONFIG_PATH; let config_string = serde_yaml::to_string(&default_config).unwrap(); std::fs::create_dir_all(&default_path.parent().unwrap())?; std::fs::write(&default_path, &config_string)?; Ok(()) }
fn default() -> Self { Config { directives: ConfigDirectives::default(), bindings: vec![ Binding { key: keybind::Key::Left, modifiers: vec![keybind::Modifier::Super, keybind::Modifier::Shift], action: Action::Push { direction: Direction::Left, fraction: 2.0, }, }, Binding { key: keybind::Key::Left, modifiers: vec![ keybind::Modifier::Super, keybind::Modifier::Shift, keybind::Modifier::Alt, ], action: Action::Nudge { direction: Direction::Left, distance: Metric::Absolute(100.0), }, }, ], } }
function_block-full_function
[ { "content": "fn modifier_to_flag_code(modifier: &Modifier) -> isize {\n\n match modifier {\n\n Modifier::Control => winuser::MOD_CONTROL,\n\n Modifier::Alt => winuser::MOD_ALT,\n\n Modifier::Shift => winuser::MOD_SHIFT,\n\n Modifier::Super => winuser::MOD_WIN,\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, serde::Serialize, serde::Deserialize)]\n\n#[repr(i32)]\n\n#[serde(rename_all = \"kebab-case\")]\n\npub enum Key {\n\n Backspace = 0x08,\n\n Tab = 0x09,\n\n Clear = 0x0C,\n\n #[serde(alias = \"return\")]\n\n Enter = 0x0D,\n\n Pause = 0x13,\n\n CapsLock = 0x14,\n", "file_path": "src/keybind.rs", "rank": 2, "score": 47456.998334640346 }, { "content": "pub fn get_focused_window() -> Option<Window> {\n\n unsafe {\n\n let handle = winuser::GetForegroundWindow();\n\n\n\n if !handle.is_null() {\n\n Some(Window::from_window_handle(handle))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Monitor {\n\n handle: MonitorHandle,\n\n}\n\n\n\nimpl Monitor {\n\n pub fn get_work_area(&self) -> Win32Result<Rect> {\n\n unsafe {\n\n let mut monitor_info: winuser::MONITORINFO = std::mem::zeroed();\n\n // u32 cast is safe, usize will be 32 bits or larger on all platforms we care about.\n\n monitor_info.cbSize = std::mem::size_of::<winuser::MONITORINFO>() as u32;\n\n\n\n evaluate_fallible_winapi!(winuser::GetMonitorInfoW(self.handle, &mut monitor_info));\n\n Ok(Rect::from_win32_rect(monitor_info.rcWork))\n\n }\n\n }\n\n}\n", "file_path": "src/window.rs", "rank": 3, "score": 46818.53120205638 }, { "content": "fn main() -> Result<()> {\n\n color_eyre::install()?;\n\n dotenv::dotenv()?;\n\n pretty_env_logger::init();\n\n\n\n let cli_options = cli::CliOptions::from_args();\n\n match cli_options.command {\n\n Some(cli::CliCommand::OpenConfigFile) => {\n\n let config_path = &config::DEFAULT_CONFIG_PATH;\n\n\n\n if !config_path.exists() {\n\n log::info!(\"Whimsy configuration file {} does not exist, writing a default configuration file to this location.\", 
config_path.display());\n\n if let Err(error) = config::create_default_config() {\n\n log::error!(\n\n \"Unable to write default configuration file to {}.\\nError: {}\",\n\n config_path.display(),\n\n error\n\n );\n\n std::process::exit(1);\n\n }\n", "file_path": "src/main.rs", "rank": 4, "score": 27794.92368596595 }, { "content": " config::Metric::Percent(fraction) => match direction {\n\n config::Direction::Up | config::Direction::Down => {\n\n height as f32 * fraction\n\n }\n\n config::Direction::Left | config::Direction::Right => {\n\n width as f32 * fraction\n\n }\n\n },\n\n } as i32;\n\n\n\n let nudged_rect = starting_rect.nudge(direction, absolute_distance);\n\n\n\n active_window.set_rect(nudged_rect).unwrap();\n\n log::debug!(\n\n \"Nudged active window {:?}px in direction {:?} - new rect {:?}\",\n\n absolute_distance,\n\n direction,\n\n nudged_rect\n\n );\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 11, "score": 17.452034078648552 }, { "content": "\n\n return Ok(());\n\n }\n\n Some(cli::CliCommand::RegenerateConfigFile) => {\n\n config::create_default_config()?;\n\n\n\n return Ok(());\n\n }\n\n None => {}\n\n }\n\n\n\n let config_path = cli_options.config_file;\n\n\n\n let config = config::read_config_from_file(&config_path)?.unwrap_or_default();\n\n let mut kb = keybind::Keybinds::new();\n\n let mut kb_bindings = HashMap::new();\n\n\n\n for binding in &config.bindings {\n\n let binding_id = kb\n\n .register_keybind(binding.key, &binding.modifiers)\n", "file_path": "src/main.rs", "rank": 12, "score": 15.325593561171452 }, { "content": " let monitor_work_area = monitor.get_work_area().unwrap();\n\n let pushed_rect = monitor_work_area.slice_rect(direction, fraction); \n\n log::debug!(\n\n \"Pushed active window (direction {:?}, fraction {:?}) to rect {:?}\",\n\n direction,\n\n fraction,\n\n pushed_rect\n\n );\n\n active_window.set_rect(pushed_rect).unwrap();\n\n }\n\n }\n\n config::Action::Nudge {\n\n 
direction,\n\n distance,\n\n } => {\n\n if let Some(mut active_window) = window::get_focused_window() {\n\n let starting_rect = active_window.get_rect().unwrap();\n\n let (width, height) = starting_rect.wh();\n\n let absolute_distance = match distance {\n\n config::Metric::Absolute(value) => value,\n", "file_path": "src/main.rs", "rank": 13, "score": 14.407456450167366 }, { "content": "}\n\n\n\npub enum KeybindMessage {\n\n Quit,\n\n BindActivated(i32),\n\n}\n\n\n\nimpl Keybinds {\n\n pub fn new() -> Keybinds {\n\n Keybinds {\n\n bind_id_increment: 0,\n\n active_binds: HashSet::new(),\n\n }\n\n }\n\n\n\n // FIXME: Use an actual enum instead of an integer code for better error checking.\n\n // Need to figure out how to autogenerate this.\n\n pub fn register_keybind(&mut self, key_code: Key, modifiers: &[Modifier]) -> Result<i32, ()> {\n\n let id = self.bind_id_increment;\n\n let modifier_flags = modifiers.iter().fold(0, |accumulator, modifier| {\n", "file_path": "src/keybind.rs", "rank": 14, "score": 12.363735271720397 }, { "content": "use std::path::PathBuf;\n\n\n\nuse structopt::StructOpt;\n\n\n\n#[derive(StructOpt, Debug)]\n\npub enum CliCommand {\n\n #[structopt(\n\n name = \"open-cfg\",\n\n about = \"Opens the whimsy configuration file in the default text editor for YAML files.\"\n\n )]\n\n OpenConfigFile,\n\n #[structopt(\n\n name = \"regenerate-cfg\",\n\n about = \"Restores the whimsy configuration file to the default.\"\n\n )]\n\n RegenerateConfigFile,\n\n}\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(about, author)]\n", "file_path": "src/cli.rs", "rank": 15, "score": 10.895850156741849 }, { "content": "pub struct CliOptions {\n\n #[structopt(subcommand)]\n\n pub command: Option<CliCommand>,\n\n #[structopt(\n\n short,\n\n long,\n\n env = \"WHIMSY_CFG\",\n\n default_value(crate::config::DEFAULT_CONFIG_PATH.to_str().unwrap())\n\n )]\n\n /// The path to the whimsy configuration file to use.\n\n pub config_file: PathBuf,\n\n}\n", "file_path": "src/cli.rs", 
"rank": 16, "score": 10.60540518919027 }, { "content": "\n\n Rect::xyxy(left, top, right, bottom)\n\n }\n\n\n\n pub fn wh(&self) -> (i32, i32) {\n\n (\n\n (self.right - self.left).abs(),\n\n (self.bottom - self.top).abs(),\n\n )\n\n }\n\n\n\n pub fn slice_rect(&self, direction: Direction, slice_factor: f32) -> Rect {\n\n let (width, height) = self.wh();\n\n let width_slice = ((width as f32) / slice_factor) as i32;\n\n let height_slice = ((height as f32) / slice_factor) as i32;\n\n\n\n match direction {\n\n Direction::Up => Rect::xywh(self.left, self.top, width, height_slice),\n\n Direction::Left => Rect::xywh(self.left, self.top, width_slice, height),\n\n Direction::Right => Rect::xywh(\n", "file_path": "src/window.rs", "rank": 17, "score": 10.442393040365829 }, { "content": " .unwrap();\n\n kb_bindings.insert(binding_id, binding);\n\n }\n\n\n\n loop {\n\n match kb.poll_message_loop().unwrap() {\n\n keybind::KeybindMessage::Quit => {\n\n log::debug!(\"Stopping keybind message polling due to a quit message\");\n\n break;\n\n }\n\n keybind::KeybindMessage::BindActivated(id) => {\n\n let &binding = kb_bindings.get(&id).unwrap();\n\n\n\n match binding.action {\n\n config::Action::Push {\n\n direction,\n\n fraction,\n\n } => {\n\n if let Some(mut active_window) = window::get_focused_window() {\n\n let monitor = active_window.get_monitor();\n", "file_path": "src/main.rs", "rank": 18, "score": 10.212198581032943 }, { "content": "use std::collections::HashSet;\n\n\n\nuse winapi::um::winuser;\n\n\n\n#[derive(Debug, Copy, Clone, serde::Serialize, serde::Deserialize)]\n\n#[serde(rename_all = \"kebab-case\")]\n\npub enum Modifier {\n\n #[serde(alias = \"ctrl\")]\n\n Control,\n\n Alt,\n\n Shift,\n\n #[serde(alias = \"win\")]\n\n Super,\n\n}\n\n\n", "file_path": "src/keybind.rs", "rank": 19, "score": 9.90260168177779 }, { "content": " Direction::Right => Rect::xywh(self.left + amount, self.top, width, height),\n\n }\n\n }\n\n\n\n fn from_win32_rect(rect: Win32Rect) -> Rect {\n\n 
Rect::xyxy(rect.left, rect.top, rect.right, rect.bottom)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Window {\n\n handle: WindowHandle,\n\n}\n\n\n\nimpl Window {\n\n fn from_window_handle(handle: WindowHandle) -> Window {\n\n Window { handle }\n\n }\n\n\n\n pub fn get_rect(&self) -> Win32Result<Rect> {\n", "file_path": "src/window.rs", "rank": 20, "score": 8.08408720442824 }, { "content": " self.left + width - width_slice,\n\n self.top,\n\n width_slice,\n\n height,\n\n ),\n\n Direction::Down => Rect::xywh(\n\n self.left,\n\n self.top + height - height_slice,\n\n width,\n\n height_slice,\n\n ),\n\n }\n\n }\n\n\n\n pub fn nudge(&self, direction: Direction, amount: i32) -> Rect {\n\n let (width, height) = self.wh();\n\n match direction {\n\n Direction::Up => Rect::xywh(self.left, self.top - amount, width, height),\n\n Direction::Down => Rect::xywh(self.left, self.top + amount, width, height),\n\n Direction::Left => Rect::xywh(self.left - amount, self.top, width, height),\n", "file_path": "src/window.rs", "rank": 21, "score": 7.493750171320309 }, { "content": "Copyright 2020 Lily Brown <[email protected]>\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\n\n", "file_path": "license.md", "rank": 22, "score": 7.252073261722033 }, { "content": " accumulator | modifier_to_flag_code(modifier)\n\n }) | winuser::MOD_NOREPEAT;\n\n\n\n log::debug!(\n\n \"Registering keybind with internal ID {:?}, modifiers {:?}, and virtual keycode {:?}.\",\n\n id,\n\n 
modifiers,\n\n key_code\n\n );\n\n\n\n unsafe {\n\n evaluate_fallible_winapi!(winuser::RegisterHotKey(\n\n std::ptr::null_mut(),\n\n id,\n\n // cast is safe, the maximum value of modifier_flags is 0x400F\n\n modifier_flags as u32,\n\n key_code as u32\n\n ));\n\n }\n\n\n", "file_path": "src/keybind.rs", "rank": 23, "score": 6.304546155326624 }, { "content": " (right, left)\n\n };\n\n\n\n let (y0, y1) = if top < bottom {\n\n (top, bottom)\n\n } else {\n\n (bottom, top)\n\n };\n\n\n\n Rect {\n\n left: x0,\n\n top: y0,\n\n right: x1,\n\n bottom: y1,\n\n }\n\n }\n\n\n\n pub fn xywh(left: i32, top: i32, width: i32, height: i32) -> Rect {\n\n let right = left + width;\n\n let bottom = top + height;\n", "file_path": "src/window.rs", "rank": 24, "score": 6.248817360263928 }, { "content": "/// Rustified abstraction layer over winapi for interacting with (top-level) windows.\n\nuse winapi::um::winuser;\n\n\n\nuse crate::config::Direction;\n\n\n", "file_path": "src/window.rs", "rank": 25, "score": 6.120410842023919 }, { "content": " }\n\n\n\n // Use Win32 to spawn the system's text editor.\n\n // EDITOR is not always set on Windows machines, so it's not the\n\n // right choice here.\n\n unsafe {\n\n use std::os::windows::prelude::*;\n\n let path_os_str = config_path.as_os_str();\n\n let mut path_bytes: Vec<u16> = path_os_str.encode_wide().collect();\n\n\n\n // The OsStr slice is not null-terminated, so we need to null-\n\n // terminate the byte sequence before we give it to Win32.\n\n path_bytes.push(0);\n\n\n\n let result_code = winapi::um::shellapi::ShellExecuteW(\n\n std::ptr::null_mut(),\n\n std::ptr::null(),\n\n path_bytes[..].as_ptr(),\n\n std::ptr::null(),\n\n std::ptr::null(),\n", "file_path": "src/main.rs", "rank": 26, "score": 6.059555939783151 }, { "content": " unsafe {\n\n let mut winapi_rect: Win32Rect = std::mem::zeroed();\n\n evaluate_fallible_winapi!(winuser::GetWindowRect(self.handle, &mut winapi_rect));\n\n\n\n Ok(Rect {\n\n left: winapi_rect.left,\n\n top: 
winapi_rect.top,\n\n right: winapi_rect.right,\n\n bottom: winapi_rect.bottom,\n\n })\n\n }\n\n }\n\n\n\n pub fn set_rect(&mut self, rect: Rect) -> Win32Result<()> {\n\n // Size and position will change, Z order will not. We don't want to activate the window,\n\n // and this call should be non-blocking.\n\n let flags = winuser::SWP_NOZORDER | winuser::SWP_NOACTIVATE | winuser::SWP_ASYNCWINDOWPOS;\n\n let (w, h) = rect.wh();\n\n\n\n unsafe {\n", "file_path": "src/window.rs", "rank": 27, "score": 6.036463358108197 }, { "content": " self.active_binds.insert(id);\n\n self.bind_id_increment += 1;\n\n Ok(id)\n\n }\n\n\n\n pub fn poll_message_loop(&self) -> Result<KeybindMessage, ()> {\n\n unsafe {\n\n let mut msg: winuser::MSG = std::mem::zeroed();\n\n\n\n // Do not use evaluate_fallible_winapi! because GetMessage has different return values than it can accept.\n\n let result = winuser::GetMessageW(\n\n &mut msg,\n\n std::ptr::null_mut(),\n\n winuser::WM_HOTKEY,\n\n winuser::WM_HOTKEY,\n\n );\n\n\n\n match result {\n\n // Result code 0 is a WM_QUIT message; we should stop the loop here.\n\n 0 => Ok(KeybindMessage::Quit),\n", "file_path": "src/keybind.rs", "rank": 28, "score": 5.809602892033512 }, { "content": "// util before all others due to the macros it contains\n\nmod util;\n\n\n\nmod cli;\n\nmod config;\n\nmod keybind;\n\nmod window;\n\n\n\nuse std::collections::HashMap;\n\n\n\nuse color_eyre::eyre::Result;\n\n\n\nuse structopt::StructOpt;\n\n\n\nuse winapi::shared::winerror;\n\nuse winapi::um::shellapi;\n\nuse winapi::um::winuser;\n\n\n", "file_path": "src/main.rs", "rank": 29, "score": 5.4737508549272045 }, { "content": " evaluate_fallible_winapi!(winuser::SetWindowPos(\n\n self.handle,\n\n std::ptr::null_mut(),\n\n rect.left,\n\n rect.top,\n\n w,\n\n h,\n\n flags\n\n ));\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn get_monitor(&self) -> Monitor {\n\n unsafe {\n\n Monitor {\n\n handle: winuser::MonitorFromWindow(self.handle, winuser::MONITOR_DEFAULTTONEAREST),\n\n 
}\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/window.rs", "rank": 30, "score": 4.516040198180841 }, { "content": " winuser::SW_SHOWNORMAL,\n\n // Ugly: ShellExecute returns a fake HINSTANCE for backwards compatibility.\n\n // It is actually a result code. We need to convert it to an i32 in order\n\n // to actually check it.\n\n // For more information on this mess, check the Win32 API documentation\n\n // for ShellExecuteW:\n\n // https://docs.microsoft.com/en-us/windows/win32/api/shellapi/nf-shellapi-shellexecutew#return-value\n\n ) as usize as i32;\n\n\n\n // A result code less than 32 indicates failure.\n\n if result_code <= 32 {\n\n match result_code as u32 {\n\n winerror::ERROR_FILE_NOT_FOUND => println!(\"Unable to open the configuration file at {}: file not found\", config_path.display()),\n\n shellapi::SE_ERR_ACCESSDENIED => println!(\"Unable to open the configuration file at {}: whimsy cannot access the configuration file (access denied).\", config_path.display()),\n\n _ => println!(\"Unable to open the configuration file at {} due to an internal OS error.\\nError code: {}\", config_path.display(), result_code),\n\n }\n\n\n\n std::process::exit(1);\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 31, "score": 4.369292512161307 }, { "content": " F12 = 0x7B,\n\n F13 = 0x7C,\n\n F14 = 0x7D,\n\n F15 = 0x7E,\n\n F16 = 0x7F,\n\n F17 = 0x80,\n\n F18 = 0x81,\n\n F19 = 0x82,\n\n F20 = 0x83,\n\n F21 = 0x84,\n\n F22 = 0x85,\n\n F23 = 0x86,\n\n F24 = 0x87,\n\n NumLock = 0x90,\n\n ScrollLock = 0x91,\n\n}\n\n\n\npub struct Keybinds {\n\n bind_id_increment: i32,\n\n active_binds: HashSet<i32>,\n", "file_path": "src/keybind.rs", "rank": 32, "score": 3.1000716994906234 }, { "content": " // -1 is an error.\n\n -1 => {\n\n log::error!(\n\n \"Error from GetMessageW: {}\",\n\n winapi::um::errhandlingapi::GetLastError(),\n\n );\n\n\n\n Err(())\n\n }\n\n // Anything else is a successful message retrieval; if this is the case, `msg`\n\n // is safe to read.\n\n _ => {\n\n 
debug_assert!(msg.message == winuser::WM_HOTKEY, \"The keybind message loop only handles WM_HOTKEY messages, but it has received a message that is not a hotkey message.\");\n\n let id = msg.wParam as i32;\n\n\n\n if !self.active_binds.contains(&id) {\n\n log::error!(\"Unregistered keybind with ID {} was fired.\", id);\n\n return Err(());\n\n }\n\n\n\n log::trace!(\"Keybind {} pressed\", id);\n\n Ok(KeybindMessage::BindActivated(id))\n\n }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/keybind.rs", "rank": 33, "score": 2.8189321078036524 }, { "content": " Escape = 0x1B,\n\n Space = 0x20,\n\n PageUp = 0x21,\n\n PageDown = 0x22,\n\n End = 0x23,\n\n Home = 0x24,\n\n Left = 0x25,\n\n Up = 0x26,\n\n Right = 0x27,\n\n Down = 0x28,\n\n Select = 0x29,\n\n PrintScreen = 0x2C,\n\n Insert = 0x2D,\n\n Delete = 0x2E,\n\n #[serde(alias = \"0\")]\n\n Zero = 0x30,\n\n #[serde(alias = \"1\")]\n\n One = 0x31,\n\n #[serde(alias = \"2\")]\n\n Two = 0x32,\n", "file_path": "src/keybind.rs", "rank": 34, "score": 2.725005604449841 }, { "content": "# whimsy\n\n\n\nA window manager for Windows with an emphasis on ease of use and flexibility.\n\n\n", "file_path": "readme.md", "rank": 35, "score": 2.4146151698346006 }, { "content": "#![macro_use]\n\nmacro_rules! evaluate_fallible_winapi {\n\n ($e:expr) => {\n\n let winapi_success: i32 = $e;\n\n\n\n if winapi_success == 0 {\n\n log::error!(\n\n \"Error from winapi in expression {}: {}\",\n\n stringify!($e),\n\n winapi::um::errhandlingapi::GetLastError(),\n\n );\n\n\n\n return Err(());\n\n }\n\n };\n\n}\n", "file_path": "src/util.rs", "rank": 36, "score": 1.6353816581072989 } ]
Rust
src/codegen.rs
tamaroning/ironcc
c75f6dfc300ee44154cd68b5d7f3801bfeb40279
extern crate llvm_sys as llvm; use self::llvm::core::*; use self::llvm::prelude::*; use crate::node; use crate::types; /* use self::llvm::execution_engine; use self::llvm::target::*; use llvm::execution_engine::LLVMCreateExecutionEngineForModule; use llvm::execution_engine::LLVMDisposeExecutionEngine; use llvm::execution_engine::LLVMGetFunctionAddress; use llvm::execution_engine::LLVMLinkInMCJIT; use std::mem; */ use node::{BinaryOps, UnaryOps, AST}; use std::collections::HashMap; use std::ffi::CString; use std::ptr; use types::Type; #[derive(Debug)] pub struct VarInfo { ty: Type, llvm_val: LLVMValueRef, } impl VarInfo { pub fn new(ty: Type, llvm_val: LLVMValueRef) -> VarInfo { VarInfo { ty: ty, llvm_val: llvm_val, } } } pub struct Codegen { context: LLVMContextRef, module: LLVMModuleRef, builder: LLVMBuilderRef, cur_func: Option<LLVMValueRef>, local_varmap: Vec<HashMap<String, VarInfo>>, } pub unsafe fn cstr(s: &'static str) -> CString { CString::new(s).unwrap() } pub unsafe fn is_exist_terminator(builder: LLVMBuilderRef) -> bool { LLVMIsATerminatorInst(LLVMGetLastInstruction(LLVMGetInsertBlock(builder))) != ptr::null_mut() } pub unsafe fn inside_load(ast: &AST) -> &AST { match ast { AST::Load(node) => node, _ => panic!("Error: ast load"), } } impl Codegen { pub unsafe fn new(mod_name: &str) -> Codegen { let c_mod_name = CString::new(mod_name).unwrap(); Codegen { context: LLVMContextCreate(), module: LLVMModuleCreateWithNameInContext(c_mod_name.as_ptr(), LLVMContextCreate()), builder: LLVMCreateBuilderInContext(LLVMContextCreate()), cur_func: None, local_varmap: Vec::new(), } } pub unsafe fn typecast(&mut self, val: LLVMValueRef, to: LLVMTypeRef) -> LLVMValueRef { let from = LLVMTypeOf(val); match LLVMGetTypeKind(from) { llvm::LLVMTypeKind::LLVMPointerTypeKind => match LLVMGetTypeKind(to) { llvm::LLVMTypeKind::LLVMIntegerTypeKind => { return LLVMBuildPtrToInt(self.builder, val, to, cstr("cast").as_ptr()); } _ => panic!(), }, _ => val, } } pub unsafe fn 
type_to_llvmty(&self, ty: &Type) -> LLVMTypeRef { match &ty { Type::Int => LLVMInt32Type(), Type::Ptr(basety) => LLVMPointerType(self.type_to_llvmty(&*basety), 0), Type::Func(ret_type, param_types, _) => LLVMFunctionType( self.type_to_llvmty(ret_type), || -> *mut LLVMTypeRef { let mut param_llvm_types: Vec<LLVMTypeRef> = Vec::new(); for param_type in param_types { param_llvm_types.push(self.type_to_llvmty(param_type)); } param_llvm_types.as_mut_slice().as_mut_ptr() }(), param_types.len() as u32, 0, ), _ => panic!("Unsupported type"), } } pub unsafe fn dump_module(&self) { LLVMDumpModule(self.module); } pub unsafe fn write_llvm_bc(&mut self) { llvm::bit_writer::LLVMWriteBitcodeToFile(self.module, cstr("a.bc").as_ptr()); } pub unsafe fn gen_program(&mut self, program: Vec<AST>) { for top_level in program { match top_level { AST::FuncDef(func_ty, func_name, body) => { self.gen_func_def(func_ty, func_name, body); } _ => panic!("Unsupported node type"), } } LLVMDisposeBuilder(self.builder); /* //JIT exec // build engine let mut ee = mem::uninitialized(); let mut out = mem::zeroed(); //LLVMLinkInMCJIT(); LLVM_InitializeNativeTarget(); LLVM_InitializeNativeAsmParser(); LLVM_InitializeNativeAsmParser(); //LLVMInitializeX LLVMCreateExecutionEngineForModule(&mut ee, self.module, &mut out); let addr = LLVMGetFunctionAddress(ee, b"main\0".as_ptr() as *const _); let f: extern "C" fn() -> i32 = mem::transmute(addr); println!("ret = {}", f()); LLVMDisposeExecutionEngine(ee); LLVMContextDispose(self.context); */ } pub unsafe fn gen_func_def(&mut self, func_ty: Box<Type>, func_name: String, body: Box<AST>) { let func_ty = self.type_to_llvmty(&func_ty); let func = LLVMAddFunction(self.module, CString::new(func_name.as_str()).unwrap().as_ptr(), func_ty); let bb_entry = LLVMAppendBasicBlock(func, cstr("entry").as_ptr()); LLVMPositionBuilderAtEnd(self.builder, bb_entry); self.cur_func = Some(func); self.local_varmap.push(HashMap::new()); self.gen(&*body); if 
!is_exist_terminator(self.builder) { LLVMBuildRet(self.builder, self.make_int(0, false).unwrap().0); } self.local_varmap.pop(); } pub unsafe fn gen(&mut self, ast: &AST) -> Option<(LLVMValueRef, Option<Type>)> { match &ast { AST::Block(ref block) => self.gen_block(block), AST::UnaryOp(ref ast, ref op) => self.gen_unary_op(&**ast, &*op), AST::BinaryOp(ref lhs, ref rhs, ref op) => self.gen_binary_op(&**lhs, &**rhs, &*op), AST::Int(ref n) => self.make_int(*n as u64, false), AST::If(ref cond, ref then, ref els) => self.gen_if(&**cond, &**then, &**els), AST::For(ref init, ref cond, ref step, ref body) => self.gen_for(&**init, &**cond, &**step, &**body), AST::Return(None) => Some((LLVMBuildRetVoid(self.builder), None)), AST::Return(Some(ref val)) => self.gen_return(val), AST::Load(ref expr) => self.gen_load(expr), AST::Variable(ref name) => self.gen_var(name), AST::VariableDecl(ref ty, ref name, ref init_opt) => { self.gen_local_var_decl(ty, name, init_opt) } _ => None, } } pub unsafe fn gen_block(&mut self, block: &Vec<AST>) -> Option<(LLVMValueRef, Option<Type>)> { for ast in block { self.gen(ast); } None } pub unsafe fn gen_local_var_decl( &mut self, ty: &Type, name: &String, init_opt: &Option<Box<AST>>, ) -> Option<(LLVMValueRef, Option<Type>)> { let func = self.cur_func.unwrap(); let builder = LLVMCreateBuilderInContext(self.context); let entry_bb = LLVMGetEntryBasicBlock(func); let first_inst = LLVMGetFirstInstruction(entry_bb); if first_inst == ptr::null_mut() { LLVMPositionBuilderAtEnd(builder, entry_bb); } else { LLVMPositionBuilderBefore(builder, first_inst); } let llvm_ty = self.type_to_llvmty(ty); let var = LLVMBuildAlloca(builder, llvm_ty, CString::new(name.as_str()).unwrap().as_ptr()); self.local_varmap .last_mut() .unwrap() .insert(name.clone(), VarInfo::new(ty.clone(), var)); if let Some(init) = init_opt { self.gen_assign(&AST::Variable(name.clone()), &init); } None } pub unsafe fn gen_unary_op( &mut self, ast: &AST, op: &UnaryOps, ) -> 
Option<(LLVMValueRef, Option<Type>)> { let res = match op { UnaryOps::Plus => self.gen(ast), UnaryOps::Minus => { let val = self.gen(ast).unwrap().0; let neg = LLVMBuildNeg(self.builder, val, cstr("neg").as_ptr()); Some((neg, Some(Type::Int))) } UnaryOps::Addr => self.gen(inside_load(ast)), UnaryOps::Deref => self.gen_load(ast), _ => panic!("Unsupported unary op"), }; res } pub unsafe fn gen_binary_op( &mut self, lhs: &AST, rhs: &AST, op: &BinaryOps, ) -> Option<(LLVMValueRef, Option<Type>)> { if let BinaryOps::Assign = op { return self.gen_assign(&inside_load(lhs), rhs); } let (lhs_val, lhs_ty) = self.gen(&*lhs).unwrap(); let (rhs_val, rhs_ty) = self.gen(&*rhs).unwrap(); let lhs_ty = lhs_ty.unwrap(); let rhs_ty = rhs_ty.unwrap(); if matches!(&lhs_ty, Type::Ptr(_)) { return self.gen_ptr_binary_op(lhs_val, rhs_val, lhs_ty, op); } else if matches!(&rhs_ty, Type::Ptr(_)) { return self.gen_ptr_binary_op(lhs_val, rhs_val, rhs_ty, op); } self.gen_int_binary_op(&lhs_val, &rhs_val, lhs_ty, op) } pub unsafe fn gen_ptr_binary_op( &mut self, lhs_val: LLVMValueRef, rhs_val: LLVMValueRef, ty: Type, op: &BinaryOps, ) -> Option<(LLVMValueRef, Option<Type>)> { let mut numidx = vec![match *op { BinaryOps::Add => rhs_val, BinaryOps::Sub => LLVMBuildSub( self.builder, self.make_int(0, true).unwrap().0, rhs_val, cstr("sub").as_ptr(), ), _ => panic!(), }]; let ret = LLVMBuildGEP( self.builder, lhs_val, numidx.as_mut_slice().as_mut_ptr(), 1, cstr("add").as_ptr(), ); Some((ret, Some(ty))) } pub unsafe fn gen_int_binary_op( &mut self, lhs_val: &LLVMValueRef, rhs_val: &LLVMValueRef, ty: Type, op: &BinaryOps, ) -> Option<(LLVMValueRef, Option<Type>)> { let res = match op { BinaryOps::Add => LLVMBuildAdd(self.builder, *lhs_val, *rhs_val, cstr("add").as_ptr()), BinaryOps::Sub => LLVMBuildSub(self.builder, *lhs_val, *rhs_val, cstr("sub").as_ptr()), BinaryOps::Mul => LLVMBuildMul(self.builder, *lhs_val, *rhs_val, cstr("mul").as_ptr()), BinaryOps::Div => LLVMBuildSDiv(self.builder, *lhs_val, 
*rhs_val, cstr("sdiv").as_ptr()), BinaryOps::Eq => LLVMBuildICmp( self.builder, llvm::LLVMIntPredicate::LLVMIntEQ, *lhs_val, *rhs_val, cstr("eql").as_ptr(), ), BinaryOps::Ne => LLVMBuildICmp( self.builder, llvm::LLVMIntPredicate::LLVMIntNE, *lhs_val, *rhs_val, cstr("ne").as_ptr(), ), BinaryOps::Lt => LLVMBuildICmp( self.builder, llvm::LLVMIntPredicate::LLVMIntSLT, *lhs_val, *rhs_val, cstr("lt").as_ptr(), ), BinaryOps::Le => LLVMBuildICmp( self.builder, llvm::LLVMIntPredicate::LLVMIntSLE, *lhs_val, *rhs_val, cstr("le").as_ptr(), ), _ => panic!("Unsupported bianry op"), }; Some((res, Some(ty))) } pub unsafe fn gen_load(&mut self, ast: &AST) -> Option<(LLVMValueRef, Option<Type>)> { match ast { AST::Variable(ref name) => { let (val, ty) = self.gen(ast).unwrap(); let ty = ty.unwrap(); let ret = LLVMBuildLoad(self.builder, val, cstr("var").as_ptr()); match ty { Type::Ptr(origin_ty) => Some((ret, Some(*origin_ty))), _ => panic!(), } }, _ => { let (val, ty) = self.gen(ast).unwrap(); let ret = LLVMBuildLoad(self.builder, val, cstr("var").as_ptr()); Some((ret, Some(Type::Ptr(Box::new(ty.unwrap()))))) }, } } pub unsafe fn gen_var(&mut self, name: &String) -> Option<(LLVMValueRef, Option<Type>)> { if self.local_varmap.is_empty() { panic!(); } let mut i = (self.local_varmap.len() - 1) as isize; while i >= 0 { let var_info_opt = self.local_varmap[i as usize].get(name); match var_info_opt { Some(ref var_info) => { return Some(( var_info.llvm_val, Some(Type::Ptr(Box::new(var_info.ty.clone()))), )); } _ => (), } i -= 1; } panic!("local variable not found"); } pub unsafe fn gen_assign( &mut self, lhs: &AST, rhs: &AST, ) -> Option<(LLVMValueRef, Option<Type>)> { let (rhs_val, ty) = self.gen(rhs).unwrap(); let (dst, dst_ty) = self.gen(lhs).unwrap(); LLVMBuildStore(self.builder, rhs_val, dst); let load = LLVMBuildLoad(self.builder, dst, cstr("load").as_ptr()); Some((load, dst_ty)) } pub unsafe fn gen_if(&mut self, cond: &AST, then: &AST, els: &AST) -> Option<(LLVMValueRef, 
Option<Type>)> { let cond_val = self.gen(cond).unwrap().0; let func = self.cur_func.unwrap(); let bb_then = LLVMAppendBasicBlock(func, cstr("then").as_ptr()); let bb_else = LLVMAppendBasicBlock(func, cstr("else").as_ptr()); let bb_endif = LLVMAppendBasicBlock(func, cstr("endif").as_ptr()); LLVMBuildCondBr(self.builder, cond_val, bb_then, bb_else); LLVMPositionBuilderAtEnd(self.builder, bb_then); self.gen(then); if !is_exist_terminator(self.builder) { LLVMBuildBr(self.builder, bb_endif); } LLVMPositionBuilderAtEnd(self.builder, bb_else); self.gen(els); if !is_exist_terminator(self.builder) { LLVMBuildBr(self.builder, bb_endif); } LLVMPositionBuilderAtEnd(self.builder, bb_endif); None } pub unsafe fn gen_for(&mut self, init: &AST, cond: &AST, step: &AST, body: &AST) -> Option<(LLVMValueRef, Option<Type>)> { self.gen(init); let func = self.cur_func.unwrap(); let bb_begin = LLVMAppendBasicBlock(func, cstr("begin").as_ptr()); let bb_body = LLVMAppendBasicBlock(func, cstr("body").as_ptr()); let bb_update = LLVMAppendBasicBlock(func, cstr("update").as_ptr()); let bb_end = LLVMAppendBasicBlock(func, cstr("end").as_ptr()); LLVMBuildBr(self.builder, bb_begin); LLVMPositionBuilderAtEnd(self.builder, bb_begin); let cond_val = self.gen(cond).unwrap().0; LLVMBuildCondBr(self.builder, cond_val, bb_body, bb_end); LLVMPositionBuilderAtEnd(self.builder, bb_body); self.gen(body); if !is_exist_terminator(self.builder) { LLVMBuildBr(self.builder, bb_update); } LLVMPositionBuilderAtEnd(self.builder, bb_update); self.gen(step); if !is_exist_terminator(self.builder) { LLVMBuildBr(self.builder, bb_begin); } LLVMPositionBuilderAtEnd(self.builder, bb_end); None } pub unsafe fn gen_return(&mut self, ast: &AST) -> Option<(LLVMValueRef, Option<Type>)> { let ret_val = self.gen(ast); LLVMBuildRet(self.builder, ret_val.unwrap().0); None } pub unsafe fn make_int( &mut self, n: u64, is_unsigned: bool, ) -> Option<(LLVMValueRef, Option<Type>)> { Some(( LLVMConstInt(LLVMInt32Type(), n, if is_unsigned 
{ 1 } else { 0 }), Some(Type::Int), )) } }
extern crate llvm_sys as llvm; use self::llvm::core::*; use self::llvm::prelude::*; use crate::node; use crate::types; /* use self::llvm::execution_engine; use self::llvm::target::*; use llvm::execution_engine::LLVMCreateExecutionEngineForModule; use llvm::execution_engine::LLVMDisposeExecutionEngine; use llvm::execution_engine::LLVMGetFunctionAddress; use llvm::execution_engine::LLVMLinkInMCJIT; use std::mem; */ use node::{BinaryOps, UnaryOps, AST}; use std::collections::HashMap; use std::ffi::CString; use std::ptr; use types::Type; #[derive(Debug)] pub struct VarInfo { ty: Type, llvm_val: LLVMValueRef, } impl VarInfo { pub fn new(ty: Type, llvm_val: LLVMValueRef) -> VarInfo { VarInfo { ty: ty, llvm_val: llvm_val, } } } pub struct Codegen { context: LLVMContextRef, module: LLVMModuleRef, builder: LLVMBuilderRef, cur_func: Option<LLVMValueRef>, local_varmap: Vec<HashMap<String, VarInfo>>, } pub unsafe fn cstr(s: &'static str) -> CString { CString::new(s).unwrap() } pub unsafe fn is_exist_terminator(builder: LLVMBuilderRef) -> bool { LLVMIsATerminatorInst(LLVMGetLastInstruction(LLVMGetInsertBlock(builder))) != ptr::null_mut() } pub unsafe fn inside_load(ast: &AST) -> &AST { match ast { AST::Load(node) => node, _ => panic!("Error: ast load"), } } impl Codegen { pub unsafe fn new(mod_name: &str) -> Codegen { let c_mod_name = CString::new(mod_name).unwrap(); Codegen { context: LLVMContextCreate(), module: LLVMModuleCreateWithNameInContext(c_mod_name.as_ptr(), LLVMContextCreate()), builder: LLVMCreateBuilderInContext(LLVMContextCreate()), cur_func: None, local_varmap: Vec::new(), } } pub unsafe fn typecast(&mut self, val: LLVMValueRef, to: LLVMTypeRef) -> LLVMValueRef { let from = LLVMTypeOf(val); match LLVMGetTypeKind(from) { llvm::LLVMTypeKind::LLVMPointerTypeKind => match LLVMGetTypeKind(to) { llvm::LLVMTypeKind::LLVMIntegerTypeKind => { return LLVMBuildPtrToInt(self.builder, val, to, cstr("cast").as_ptr()); } _ => panic!(), }, _ => val, } } pub unsafe fn 
type_to_llvmty(&self, ty: &Type) -> LLVMTypeRef { match &ty { Type::Int => LLVMInt32Type(), Type::Ptr(basety) => LLVMPointerType(self.type_to_llvmty(&*basety), 0), Type::Func(ret_type, param_types, _) => LLVMFunctionType( self.type_to_llvmty(ret_type), || -> *mut LLVMTypeRef { let mut param_llvm_types: Vec<LLVMTypeRef> = Vec::new(); for param_type in param_types { param_llvm_types.push(self.type_to_llvmty(param_type)); } param_llvm_types.as_mut_slice().as_mut_ptr() }(), param_types.len() as u32, 0, ), _ => panic!("Unsupported type"), } } pub unsafe fn dump_module(&self) { LLVMDumpModule(self.module); } pub unsafe fn write_llvm_bc(&mut self) { llvm::bit_writer::LLVMWriteBitcodeToFile(self.module, cstr("a.bc").as_ptr()); } pub unsafe fn gen_program(&mut self, program: Vec<AST>) { for top_level in program { match top_level { AST::FuncDef(func_ty, func_name, body) => { self.gen_func_def(func_ty, func_name, body); } _ => panic!("Unsupported node type"), } } LLVMDisposeBuilder(self.builder); /* //JIT exec // build engine let mut ee = mem::uninitialized(); let mut out = mem::zeroed(); //LLVMLinkInMCJIT(); LLVM_InitializeNativeTarget(); LLVM_InitializeNativeAsmParser(); LLVM_InitializeNativeAsmParser(); //LLVMInitializeX LLVMCreateExecutionEngineForModule(&mut ee, self.module, &mut out); let addr = LLVMGetFunctionAddress(ee, b"main\0".as_ptr() as *const _); let f: extern "C" fn() -> i32 = mem::transmute(addr); println!("ret = {}", f()); LLVMDisposeExecutionEngine(ee); LLVMContextDispose(self.context); */ } pub unsafe fn gen_func_def(&mut self, func_ty: Box<Type>, func_name: String, body: Box<AST>) { let func_ty = self.type_to_llvmty(&func_ty); let func = LLVMAddFunction(self.module, CString::new(func_name.as_str()).unwrap().as_ptr(), func_ty); let bb_entry = LLVMAppendBasicBlock(func, cstr("entry").as_ptr()); LLVMPositionBuilderAtEnd(self.builder, bb_entry); self.cur_func = Some(func); self.local_varmap.push(HashMap::new()); self.gen(&*body); if 
!is_exist_terminator(self.builder) { LLVMBuildRet(self.builder, self.make_int(0, false).unwrap().0); } self.local_varmap.pop(); } pub unsafe fn gen(&mut self, ast: &AST) -> Option<(LLVMValueRef, Option<Type>)> { match &ast { AST::Block(ref block) => self.gen_block(block), AST::UnaryOp(ref ast, ref op) => self.gen_unary_op(&**ast, &*op), AST::BinaryOp(ref lhs, ref rhs, ref op) => self.gen_binary_op(&**lhs, &**rhs, &*op), AST::Int(ref n) => self.make_int(*n as u64, false), AST::If(ref cond, ref then, ref els) => self.gen_if(&**cond, &**then, &**els), AST::For(ref init, ref cond, ref step, ref body) => self.gen_for(&**init, &**cond, &**step, &**body), AST::Return(None) => Some((LLVMBuildRetVoid(self.builder), None)), AST::Return(Some(ref val)) => self.gen_return(val), AST::Load(ref expr) => self.gen_load(expr), AST::Variable(ref name) => self.gen_var(name), AST::VariableDecl(ref ty, ref name, ref init_opt) => { self.gen_local_var_decl(ty, name, init_opt) } _ => None, } } pub unsafe fn gen_block(&mut self, block: &Vec<AST>) -> Option<(LLVMValueRef, Option<Type>)> { for ast in block { self.gen(ast); } None } pub unsafe fn gen_local_var_decl( &mut self, ty: &Type, name: &String, init_opt: &Option<Box<AST>>, ) -> Option<(LLVMValueRef, Option<Type>)> { let func = self.cur_func.unwrap(); let builder = LLVMCreateBuilderInContext(self.context); let entry_bb = LLVMGetEntryBasicBlock(func); let first_inst = LLVMGetFirstInstruction(entry_bb); if first_inst == ptr::null_mut() { LLVMPositionBuilderAtEnd(builder, entry_bb); } else { LLVMPositionBuilderBefore(builder, first_inst); } let llvm_ty = self.type_to_llvmty(ty); let var = LLVMBuildAlloca(builder, llvm_ty, CString::new(name.as_str()).unwrap().as_ptr()); self.local_varmap .last_mut() .unwrap() .insert(name.clone(), VarInfo::new(ty.clone(), var)); if let Some(init) = init_opt { self.gen_assign(&AST::Variable(name.clone()), &init); } None } pub unsafe fn gen_unary_op( &mut self, ast: &AST, op: &UnaryOps, ) -> 
Option<(LLVMValueRef, Option<Type>)> { let res = match op { UnaryOps::Plus => self.gen(ast), UnaryOps::Minus => { let val = self.gen(ast).unwrap().0; let neg = LLVMBuildNeg(self.builder, val, cstr("neg").as_ptr()); Some((neg, Some(Type::Int))) } UnaryOps::Addr => self.gen(inside_load(ast)), UnaryOps::Deref => self.gen_load(ast), _ => panic!("Unsupported unary op"), }; res } pub unsafe fn gen_binary_op( &mut self, lhs: &AST, rhs: &AST, op: &BinaryOps, ) -> Option<(LLVMValueRef, Option<Type>)> { if let BinaryOps::Assign = op { return self.gen_assign(&inside_load(lhs), rhs); } let (lhs_val, lhs_ty) = self.gen(&*lhs).unwrap(); let (rhs_val, rhs_ty) = self.gen(&*rhs).unwrap(); let lhs_ty = lhs_ty.unwrap(); let rhs_ty = rhs_ty.unwrap(); if matches!(&lhs_ty, Type::Ptr(_)) { return self.gen_ptr_binary_op(lhs_val, rhs_val, lhs_ty, op); } else if matches!(&rhs_ty, Type::Ptr(_)) { return self.gen_ptr_binary_op(lhs_val, rhs_val, rhs_ty, op); } self.gen_int_binary_op(&lhs_val, &rhs_val, lhs_ty, op) } pub unsafe fn gen_ptr_binary_op( &mut self, lhs_val: LLVMValueRef, rhs_val: LLVMValueRef, ty: Type, op: &BinaryOps, ) -> Option<(LLVMValueRef, Option<Type>)> { let mut numidx = vec![match *op { BinaryOps::Add => rhs_val, BinaryOps::Sub => LLVMBuildSub( self.builder, self.make_int(0, true).unwrap().0, rhs_val, cstr("sub").as_ptr(), ), _ => panic!(), }]; let ret = LLVMBuildGEP( self.builder, lhs_val, numidx.as_mut_slice().as_mut_ptr(), 1, cstr("add").as_ptr(), ); Some((ret, Some(ty))) } pub unsafe fn gen_int_binary_op( &mut self, lhs_val: &LLVMValueRef, rhs_val: &LLVMValueRef, ty: Type, op: &BinaryOps, ) -> Option<(LLVMValueRef, Option<Type>)> { let res = match op { BinaryOps::Add => LLVMBuildAdd(self.builder, *lhs_val, *rhs_val, cstr("add").as_ptr()), BinaryOps::Sub => LLVMBuildSub(self.builder, *lhs_val, *rhs_val, cstr("sub").as_ptr()), BinaryOps::Mul => LLVMBuildMul(self.builder, *lhs_val, *rhs_val, cstr("mul").as_ptr()), BinaryOps::Div => LLVMBuildSDiv(self.builder, *lhs_val, 
*rhs_val, cstr("sdiv").as_ptr()), BinaryOps::Eq => LLVMBuildICmp( self.builder, llvm::LLVMIntPredicate::LLVMIntEQ, *lhs_val, *rhs_val, cstr("eql").as_ptr(), ), BinaryOps::Ne => LLVMBuildICmp( self.builder, llvm::LLVMIntPredicate::LLVMIntNE, *lhs_val, *rhs_val,
pub unsafe fn gen_load(&mut self, ast: &AST) -> Option<(LLVMValueRef, Option<Type>)> { match ast { AST::Variable(ref name) => { let (val, ty) = self.gen(ast).unwrap(); let ty = ty.unwrap(); let ret = LLVMBuildLoad(self.builder, val, cstr("var").as_ptr()); match ty { Type::Ptr(origin_ty) => Some((ret, Some(*origin_ty))), _ => panic!(), } }, _ => { let (val, ty) = self.gen(ast).unwrap(); let ret = LLVMBuildLoad(self.builder, val, cstr("var").as_ptr()); Some((ret, Some(Type::Ptr(Box::new(ty.unwrap()))))) }, } } pub unsafe fn gen_var(&mut self, name: &String) -> Option<(LLVMValueRef, Option<Type>)> { if self.local_varmap.is_empty() { panic!(); } let mut i = (self.local_varmap.len() - 1) as isize; while i >= 0 { let var_info_opt = self.local_varmap[i as usize].get(name); match var_info_opt { Some(ref var_info) => { return Some(( var_info.llvm_val, Some(Type::Ptr(Box::new(var_info.ty.clone()))), )); } _ => (), } i -= 1; } panic!("local variable not found"); } pub unsafe fn gen_assign( &mut self, lhs: &AST, rhs: &AST, ) -> Option<(LLVMValueRef, Option<Type>)> { let (rhs_val, ty) = self.gen(rhs).unwrap(); let (dst, dst_ty) = self.gen(lhs).unwrap(); LLVMBuildStore(self.builder, rhs_val, dst); let load = LLVMBuildLoad(self.builder, dst, cstr("load").as_ptr()); Some((load, dst_ty)) } pub unsafe fn gen_if(&mut self, cond: &AST, then: &AST, els: &AST) -> Option<(LLVMValueRef, Option<Type>)> { let cond_val = self.gen(cond).unwrap().0; let func = self.cur_func.unwrap(); let bb_then = LLVMAppendBasicBlock(func, cstr("then").as_ptr()); let bb_else = LLVMAppendBasicBlock(func, cstr("else").as_ptr()); let bb_endif = LLVMAppendBasicBlock(func, cstr("endif").as_ptr()); LLVMBuildCondBr(self.builder, cond_val, bb_then, bb_else); LLVMPositionBuilderAtEnd(self.builder, bb_then); self.gen(then); if !is_exist_terminator(self.builder) { LLVMBuildBr(self.builder, bb_endif); } LLVMPositionBuilderAtEnd(self.builder, bb_else); self.gen(els); if !is_exist_terminator(self.builder) { 
LLVMBuildBr(self.builder, bb_endif); } LLVMPositionBuilderAtEnd(self.builder, bb_endif); None } pub unsafe fn gen_for(&mut self, init: &AST, cond: &AST, step: &AST, body: &AST) -> Option<(LLVMValueRef, Option<Type>)> { self.gen(init); let func = self.cur_func.unwrap(); let bb_begin = LLVMAppendBasicBlock(func, cstr("begin").as_ptr()); let bb_body = LLVMAppendBasicBlock(func, cstr("body").as_ptr()); let bb_update = LLVMAppendBasicBlock(func, cstr("update").as_ptr()); let bb_end = LLVMAppendBasicBlock(func, cstr("end").as_ptr()); LLVMBuildBr(self.builder, bb_begin); LLVMPositionBuilderAtEnd(self.builder, bb_begin); let cond_val = self.gen(cond).unwrap().0; LLVMBuildCondBr(self.builder, cond_val, bb_body, bb_end); LLVMPositionBuilderAtEnd(self.builder, bb_body); self.gen(body); if !is_exist_terminator(self.builder) { LLVMBuildBr(self.builder, bb_update); } LLVMPositionBuilderAtEnd(self.builder, bb_update); self.gen(step); if !is_exist_terminator(self.builder) { LLVMBuildBr(self.builder, bb_begin); } LLVMPositionBuilderAtEnd(self.builder, bb_end); None } pub unsafe fn gen_return(&mut self, ast: &AST) -> Option<(LLVMValueRef, Option<Type>)> { let ret_val = self.gen(ast); LLVMBuildRet(self.builder, ret_val.unwrap().0); None } pub unsafe fn make_int( &mut self, n: u64, is_unsigned: bool, ) -> Option<(LLVMValueRef, Option<Type>)> { Some(( LLVMConstInt(LLVMInt32Type(), n, if is_unsigned { 1 } else { 0 }), Some(Type::Int), )) } }
cstr("ne").as_ptr(), ), BinaryOps::Lt => LLVMBuildICmp( self.builder, llvm::LLVMIntPredicate::LLVMIntSLT, *lhs_val, *rhs_val, cstr("lt").as_ptr(), ), BinaryOps::Le => LLVMBuildICmp( self.builder, llvm::LLVMIntPredicate::LLVMIntSLE, *lhs_val, *rhs_val, cstr("le").as_ptr(), ), _ => panic!("Unsupported bianry op"), }; Some((res, Some(ty))) }
function_block-function_prefix_line
[ { "content": "pub fn error(line: u32, msg: &str) {\n\n println!(\" Error: line:{} {}\", line, msg);\n\n process::exit(-1);\n\n}", "file_path": "src/error.rs", "rank": 0, "score": 81268.38232700378 }, { "content": "pub fn run(filepath: String, tokens: Vec<Token>) -> Vec<AST> {\n\n let mut parser = Parser::new(filepath, tokens);\n\n let ast = parser.read_program();\n\n ast\n\n}\n\n\n\npub struct Parser {\n\n filepath: String,\n\n pos: usize,\n\n tokens: Vec<Token>,\n\n}\n\n\n\nimpl Parser {\n\n pub fn new(path: String, tok: Vec<Token>) -> Parser {\n\n Parser {\n\n filepath: path,\n\n pos: 0,\n\n tokens: tok,\n\n }\n\n }\n", "file_path": "src/parser.rs", "rank": 1, "score": 67112.11044083413 }, { "content": "pub fn run(filepath: String) -> Vec<Token> {\n\n let mut file = File::open(filepath.clone()).expect(\"File not found\");\n\n let mut content = String::new();\n\n file.read_to_string(&mut content)\n\n .expect(\"Couldn't open the file\");\n\n let mut lexer = Lexer::new(filepath.clone(), content.as_str());\n\n\n\n let mut tokens = Vec::new();\n\n loop {\n\n let token = lexer.read_token();\n\n match token {\n\n Some(Token {\n\n kind: TokenKind::Eof,\n\n ..\n\n }) => {\n\n tokens.push(token.unwrap());\n\n break;\n\n }\n\n Some(_) => {\n\n tokens.push(token.unwrap());\n", "file_path": "src/lexer.rs", "rank": 2, "score": 60628.582810363805 }, { "content": "pub fn show_version() {\n\n println!(\"ironcc version {}\", VERSION_STR);\n\n}\n\n\n", "file_path": "src/version.rs", "rank": 3, "score": 55071.90941825961 }, { "content": "pub fn show_usage() {\n\n println!(\"Usage: ironcc [options] <filepath>\");\n\n}\n", "file_path": "src/version.rs", "rank": 4, "score": 55071.90941825961 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n if args.len() < 2 {\n\n version::show_version();\n\n version::show_usage();\n\n } else {\n\n let filepath = args[1].clone();\n\n // test\n\n // tokenize\n\n let tokens = lexer::run(filepath.clone());\n\n for 
tok in &tokens {\n\n //println!(\"{:?}\", tok);\n\n }\n\n // parse\n\n let nodes = parser::run(filepath.clone(), tokens);\n\n for node in &nodes {\n\n //println!(\"{:?}\", node);\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 5, "score": 32756.420894582283 }, { "content": "#[derive(Debug, Clone)]\n\npub enum Type {\n\n Int,\n\n Ptr(Box<Type>),\n\n Array(Box<Type>, i32), // type, size\n\n Func(Box<Type>, Vec<Type>, Vec<String>), // ret type, param types, param names\n\n}\n", "file_path": "src/types.rs", "rank": 6, "score": 22362.676771036608 }, { "content": "use crate::types::Type;\n\n\n\n#[derive(Debug, Clone)]\n\npub enum AST {\n\n Int(i32),\n\n Float(f64),\n\n BinaryOp(Box<AST>, Box<AST>, BinaryOps),\n\n UnaryOp(Box<AST>, UnaryOps),\n\n Load(Box<AST>),\n\n Variable(String),\n\n VariableDecl(Type, String, Option<Box<AST>>), // type, name, init val\n\n Return(Option<Box<AST>>),\n\n //ExprStmt(Box<AST>),\n\n Block(Vec<AST>),\n\n If(Box<AST>, Box<AST>, Box<AST>), // cond, then, els\n\n For(Box<AST>, Box<AST>, Box<AST>, Box<AST>), // init, cond, step, body\n\n While(Box<AST>, Box<AST>), // cond, body\n\n FuncCall(String, Vec<AST>), // func-name, args\n\n FuncDef(Box<Type>, String, Box<AST>), // functype, func name, param types, param names, locals, body\n\n Nil, // forのcond、ifのelse、expr-stmtのexprにおいて式や文などが存在しないときに用いる\n", "file_path": "src/node.rs", "rank": 7, "score": 22235.522555019048 }, { "content": " Deref, // *\n\n Sizeof,\n\n}\n\n\n\nimpl AST {\n\n pub fn eval_const_expr(&self) -> i32 {\n\n match &self {\n\n AST::Int(n) => *n,\n\n AST::BinaryOp(l, r, op) => {\n\n let l = l.eval_const_expr();\n\n let r = r.eval_const_expr();\n\n match op {\n\n &BinaryOps::Add => l + r,\n\n &BinaryOps::Sub => l - r,\n\n &BinaryOps::Mul => l * r,\n\n &BinaryOps::Div => l / r,\n\n &BinaryOps::Eq => (l == r) as i32,\n\n &BinaryOps::Ne => (l != r) as i32,\n\n &BinaryOps::Lt => (l < r) as i32,\n\n &BinaryOps::Le => (l <= r) as i32,\n\n _ => panic!(\"Unknown operator\"),\n\n }\n\n 
}\n\n _ => panic!(\"Expected constant expression\"),\n\n }\n\n }\n\n}\n", "file_path": "src/node.rs", "rank": 8, "score": 22223.102293414267 }, { "content": "}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum BinaryOps {\n\n Add,\n\n Sub,\n\n Mul,\n\n Div,\n\n Eq, // ==\n\n Ne, // !=\n\n Lt, // <\n\n Le, // <=\n\n Assign,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum UnaryOps {\n\n Plus, // +\n\n Minus, // -\n\n Addr, // &\n", "file_path": "src/node.rs", "rank": 9, "score": 22211.187803191708 }, { "content": "# ironcc\n\nA toy C compiler written in Rust which uses LLVM as backend. \n\nironcc is aiming to suppport C99, and C11. \n\n\n\n# Install\n\n```sh\n\n$ git clone https://github.com/tamaroning/ironcc.git\n\n$ cd ironcc\n\n$ cargo build\n\n```\n\n\n\n# Usage\n\n```sh\n\n$ ironcc <file path>\n\n```\n\n\n\nTo show the usage and version, run:\n\n```sh\n\n$ ironcc\n\n```\n\n\n\n# Status\n\nironcc supports the following functions:\n\n\n\n- function definition\n\n- local variable declaration\n\n- return statement\n\n- assignment\n\n- types (int and pointer)\n\n- control syntax (if, else, for)\n\n- numerical literal\n\n- binary operations (+, -, *, /)\n\n- comparison operations (==, !=, <, >, <=, >=)\n\n- unary operations (+, -)\n\n\n\n# Syntax\n\n```\n\nprogram = top-level*\n\n\n\ntop-level = func-def\n\n\n\nfunc-def = declspec declarator \"{\" compound-stmt\n\n\n\nstmt = \"return\" expr \";\"\n\n | \"if\" \"(\" expr \")\" stmt (\"else\" stmt)?\n\n | \"for\" \"(\" expr-stmt expr? \";\" expr? \")\" stmt\n\n | \"while\" \"(\" expr \")\" stmt\n\n | \"{\" compound-stmt\n\n | expr-stmt\n\n\n\ncompound-stmt = (declaration | stmt)* \"}\"\n\n\n\ndeclaration = declspec (declarator (\"=\" expr)? (\",\" declarator (\"=\" expr)?)*)? \";\"\n\n\n\ndeclspec = \"int\"\n\n\n\ndeclarator = \"*\"* <ident> type-suffix\n\n\n\ntype-suffix = \"(\" func-params\n\n |\"[\" <num> \"]\"\n\n | ε\n\n\n\nfunc-params = (param (\",\" param)*)? 
\")\"\n\n\n\nparam = declspec declarator\n\n\n\ntype-suffix = \"(\" func-params no\n\n | \"[\" <num> \"]\" type-suffix\n\n | ε\n\n\n\nexpr-stmt = expr? \";\"\n\n\n\nexpr = assign\n\n\n\nassign = equality (\"=\" assign)?\n\n\n\nequality = relational (\"==\"|\"!=\" relational)*\n\n\n\nrelational = add ((\"<\"|\">\"|\"<=\"|\">=\") add)*\n\n\n\nadd = mul ((\"+\"|\"-\") mul)*\n\n\n\nmul = unary ((\"*\"|\"/\") unary)*\n\n\n\nunary = (\"+\" | \"-\" | \"*\" | \"&\") unary\n\n | postfix\n\n\n\npostfix = primary (\"[\" expr \"]\")*\n\n\n\nprimary = \"(\" expr \")\"\n\n | \"sizeof\" unary\n\n | <ident> func-args?\n\n | <num>\n\n\n\nfunc-call = <ident> \"(\" (assign (\",\" assign)*)? \")\"\n\n\n\n<XXX> means token.\n\n```\n\n\n\n# Todo\n\n- support arithmetic operations of pointers\n", "file_path": "README.md", "rank": 34, "score": 12195.711788442106 }, { "content": "\n\n fn read_func_def(&mut self) -> AST {\n\n let func_ty = self.read_declspec();\n\n let (func_ty, func_name) = self.read_declarator(func_ty);\n\n self.consume_expected(\"{\");\n\n let body = self.read_compound_stmt();\n\n\n\n return AST::FuncDef(Box::new(func_ty), func_name, Box::new(body));\n\n }\n\n\n\n fn read_stmt(&mut self) -> AST {\n\n if self.consume(\"return\") {\n\n if self.consume(\";\") {\n\n return AST::Return(None);\n\n } else {\n\n let expr = self.read_expr();\n\n let ret_ast = AST::Return(Some(Box::new(expr)));\n\n self.consume_expected(\";\");\n\n return ret_ast;\n\n }\n", "file_path": "src/parser.rs", "rank": 35, "score": 24.086303132367597 }, { "content": " \n\n let (ty, name) = self.read_declarator(declspec.clone());\n\n let mut init_val = None;\n\n if self.consume(\"=\") {\n\n init_val = Some(Box::new(self.read_expr()));\n\n }\n\n decls.push(AST::VariableDecl(ty, name, init_val));\n\n\n\n while self.consume(\",\") {\n\n let (ty, name) = self.read_declarator(declspec.clone());\n\n\n\n let mut init_val = None;\n\n if self.consume(\"=\") {\n\n init_val = Some(Box::new(self.read_expr()));\n\n 
}\n\n decls.push(AST::VariableDecl(ty, name, init_val));\n\n }\n\n self.consume_expected(\";\");\n\n AST::Block(decls)\n\n }\n", "file_path": "src/parser.rs", "rank": 36, "score": 22.278910414234712 }, { "content": " } else if self.consume(\"if\") {\n\n self.consume_expected(\"(\");\n\n let cond = self.read_expr();\n\n self.consume_expected(\")\");\n\n let then = self.read_stmt();\n\n let mut els = AST::Nil;\n\n if self.consume(\"else\") {\n\n els = self.read_stmt();\n\n }\n\n return AST::If(Box::new(cond), Box::new(then), Box::new(els));\n\n } else if self.consume(\"for\") {\n\n self.consume_expected(\"(\");\n\n let init = self.read_expr_stmt();\n\n let mut cond = AST::Nil;\n\n if !self.consume(\";\") {\n\n cond = self.read_expr();\n\n self.consume_expected(\";\");\n\n }\n\n let mut step = AST::Nil;\n\n if !self.consume(\")\") {\n", "file_path": "src/parser.rs", "rank": 37, "score": 20.111190480800435 }, { "content": " pub val: String,\n\n pub line: u32,\n\n}\n\n\n\nimpl Token {\n\n pub fn matches(&self, s: &str) -> bool {\n\n self.val.as_str() == s\n\n }\n\n\n\n pub fn is_eof(&self) -> bool {\n\n return match self.kind {\n\n TokenKind::Eof => true,\n\n _ => false,\n\n };\n\n }\n\n\n\n pub fn is_ident(&self) -> bool {\n\n return match self.kind {\n\n TokenKind::Ident => true,\n\n _ => false,\n", "file_path": "src/lexer.rs", "rank": 38, "score": 19.99323353305035 }, { "content": " unsafe {\n\n let mut codegen = codegen::Codegen::new(filepath.clone().as_str());\n\n codegen.gen_program(nodes);\n\n codegen.dump_module();\n\n codegen.write_llvm_bc();\n\n }\n\n\n\n \n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 39, "score": 19.595249943214696 }, { "content": "\n\n fn read_declspec(&mut self) -> Type {\n\n match self.next().val.as_str() {\n\n \"int\" => return Type::Int,\n\n _ => panic!(\"Unknown type\"),\n\n }\n\n }\n\n\n\n fn read_declarator(&mut self, mut ty: Type) -> (Type, String) {\n\n while self.consume(\"*\") {\n\n ty = Type::Ptr(Box::new(ty));\n\n }\n\n let 
name = self.read_ident();\n\n ty = self.read_type_suffix(ty);\n\n (ty, name)\n\n }\n\n\n\n fn read_type_suffix(&mut self, mut ty: Type) -> Type {\n\n if self.consume(\"[\") {\n\n let arr_sz = self.read_num();\n", "file_path": "src/parser.rs", "rank": 40, "score": 19.426377622466628 }, { "content": "use crate::lexer;\n\nuse crate::node;\n\nuse crate::node::UnaryOps;\n\nuse crate::types::Type;\n\n\n\nuse lexer::Token;\n\nuse lexer::TokenKind;\n\nuse node::{BinaryOps, AST};\n\n\n", "file_path": "src/parser.rs", "rank": 41, "score": 19.252155120213658 }, { "content": " self.consume_expected(\"]\");\n\n ty = self.read_type_suffix(ty);\n\n ty = Type::Array(Box::new(ty), arr_sz as i32);\n\n } else if self.consume(\"(\") {\n\n let (types, names) = self.read_func_params();\n\n // ret type, param types\n\n return Type::Func(Box::new(ty), types, names);\n\n }\n\n ty\n\n }\n\n\n\n fn read_func_params(&mut self) -> (Vec<Type>, Vec<String>) {\n\n let mut types = Vec::new();\n\n let mut names = Vec::new();\n\n\n\n if !self.consume(\")\") {\n\n let (ty, name) = self.read_param();\n\n types.push(ty);\n\n names.push(name);\n\n while self.consume(\",\") {\n", "file_path": "src/parser.rs", "rank": 42, "score": 18.764550308946646 }, { "content": " self.consume_expected(\")\");\n\n }\n\n AST::FuncCall(name, args)\n\n }\n\n\n\n fn read_ast_num(&mut self) -> AST {\n\n match self.next() {\n\n Token {\n\n kind: TokenKind::IntNum,\n\n val: n,\n\n ..\n\n } => AST::Int(n.parse::<i32>().unwrap()),\n\n Token {\n\n kind: TokenKind::FloatNum,\n\n val: n,\n\n ..\n\n } => AST::Float(n.parse::<f64>().unwrap()),\n\n _ => panic!(\"Numerical literal is expected\"),\n\n }\n\n }\n", "file_path": "src/parser.rs", "rank": 43, "score": 18.123585517082386 }, { "content": " let (ty, name) = self.read_param();\n\n types.push(ty);\n\n names.push(name);\n\n }\n\n self.consume_expected(\")\");\n\n }\n\n (types, names)\n\n }\n\n\n\n fn read_param(&mut self) -> (Type, String) {\n\n let ty = self.read_declspec();\n\n 
let (ty, name) = self.read_declarator(ty);\n\n (ty, name)\n\n }\n\n\n\n fn read_expr_stmt(&mut self) -> AST {\n\n if self.consume(\";\") {\n\n return AST::Nil;\n\n } else {\n\n let expr = self.read_expr();\n", "file_path": "src/parser.rs", "rank": 44, "score": 17.570323332791627 }, { "content": "\n\n pub fn next(&mut self) -> Token {\n\n let ret = self.cur();\n\n //println!(\"parse {}\", ret.val.clone());\n\n self.pos += 1;\n\n ret\n\n }\n\n\n\n // read forward\n\n pub fn consume(&mut self, s: &str) -> bool {\n\n if self.cur().matches(s) {\n\n self.next();\n\n return true;\n\n }\n\n false\n\n }\n\n\n\n pub fn consume_expected(&mut self, s: &str) {\n\n if self.cur().matches(s) {\n\n self.next();\n", "file_path": "src/parser.rs", "rank": 45, "score": 17.245286989657174 }, { "content": " return AST::UnaryOp(Box::new(ast), UnaryOps::Sizeof);\n\n } else if self.cur().is_ident() {\n\n if self.peek().matches(\"(\") {\n\n return self.read_func_call();\n\n }\n\n return AST::Load(Box::new(AST::Variable(self.read_ident())));\n\n } else {\n\n return self.read_ast_num();\n\n }\n\n }\n\n\n\n fn read_func_call(&mut self) -> AST {\n\n let name = self.read_ident();\n\n let mut args = Vec::new();\n\n self.consume_expected(\"(\");\n\n if !self.consume(\")\") {\n\n args.push(self.read_assign());\n\n while self.consume(\",\") {\n\n args.push(self.read_assign());\n\n }\n", "file_path": "src/parser.rs", "rank": 46, "score": 16.828033043484307 }, { "content": "\n\n pub fn starts_with(&self, s: &str) -> bool {\n\n String::from_iter(self.peek.clone().take(s.len())) == s.to_string()\n\n }\n\n\n\n pub fn read_symbol(&mut self) -> Token {\n\n // multicharacter symbols\n\n let ops = vec![\"==\", \"!=\", \"<=\", \">=\"];\n\n for op in ops {\n\n if self.starts_with(op) {\n\n self.advance_by(2);\n\n return Token {\n\n kind: TokenKind::Symbol,\n\n val: op.to_string(),\n\n line: self.cur_line,\n\n };\n\n }\n\n }\n\n // single character symbols\n\n let sym = self.peek_next().unwrap().to_string();\n", 
"file_path": "src/lexer.rs", "rank": 47, "score": 16.66295473704187 }, { "content": " } else {\n\n panic!(\"Expected {}, but found {}\", s, self.cur().val);\n\n }\n\n }\n\n\n\n //\n\n // ---------------- Generate AST ----------------\n\n //\n\n\n\n fn read_program(&mut self) -> Vec<AST> {\n\n let mut ret = Vec::new();\n\n while !self.cur().is_eof() {\n\n ret.push(self.read_top_level());\n\n }\n\n ret\n\n }\n\n\n\n fn read_top_level(&mut self) -> AST {\n\n self.read_func_def()\n\n }\n", "file_path": "src/parser.rs", "rank": 48, "score": 16.14711966766803 }, { "content": " let mut ret = self.read_primary();\n\n // x[y] is short for *(x+y)\n\n if self.consume(\"[\") {\n\n let rhs = self.read_expr();\n\n self.consume_expected(\"]\");\n\n ret = AST::UnaryOp(\n\n Box::new(AST::BinaryOp(Box::new(ret), Box::new(rhs), BinaryOps::Add)),\n\n UnaryOps::Deref,\n\n );\n\n }\n\n ret\n\n }\n\n\n\n fn read_primary(&mut self) -> AST {\n\n if self.consume(\"(\") {\n\n let ast = self.read_expr();\n\n self.consume_expected(\")\");\n\n return ast;\n\n } else if self.consume(\"sizeof\") {\n\n let ast = self.read_unary();\n", "file_path": "src/parser.rs", "rank": 49, "score": 16.034292201274607 }, { "content": " step = self.read_expr();\n\n self.consume(\")\");\n\n }\n\n let body = self.read_stmt();\n\n return AST::For(\n\n Box::new(init),\n\n Box::new(cond),\n\n Box::new(step),\n\n Box::new(body),\n\n );\n\n } else if self.consume(\"while\") {\n\n self.consume_expected(\"(\");\n\n let cond = self.read_expr();\n\n self.consume_expected(\")\");\n\n let body = self.read_stmt();\n\n return AST::While(Box::new(cond), Box::new(body));\n\n } else if self.consume(\"{\") {\n\n return self.read_compound_stmt();\n\n } else {\n\n return self.read_expr_stmt();\n", "file_path": "src/parser.rs", "rank": 50, "score": 15.48123533995976 }, { "content": "\n\n pub fn peek_next(&mut self) -> Option<char> {\n\n //println!(\"lex '{}'\", self.peek.peek().unwrap());\n\n self.peek_pos += 1;\n\n 
self.peek.next()\n\n }\n\n\n\n // advance by n characters\n\n pub fn advance_by(&mut self, n: usize) {\n\n for _ in 0..n {\n\n self.peek_next();\n\n }\n\n }\n\n\n\n // read forward expected string\n\n pub fn skip_token(&mut self, s: &str) {\n\n if self.read_token().unwrap().val != s {\n\n panic!(\"Expected {}\", s);\n\n }\n\n }\n", "file_path": "src/lexer.rs", "rank": 51, "score": 14.762833039850133 }, { "content": " self.consume_expected(\";\");\n\n return expr;\n\n }\n\n }\n\n\n\n fn read_expr(&mut self) -> AST {\n\n self.read_assign()\n\n }\n\n\n\n fn read_assign(&mut self) -> AST {\n\n let mut ret = self.read_equality();\n\n if self.consume(\"=\") {\n\n let rhs = self.read_assign();\n\n ret = AST::BinaryOp(Box::new(ret), Box::new(rhs), BinaryOps::Assign);\n\n }\n\n ret\n\n }\n\n\n\n fn read_equality(&mut self) -> AST {\n\n let mut ast = self.read_relational();\n", "file_path": "src/parser.rs", "rank": 52, "score": 13.547167154396547 }, { "content": " break;\n\n }\n\n }\n\n ast\n\n }\n\n\n\n fn read_unary(&mut self) -> AST {\n\n if self.consume(\"+\") {\n\n return AST::UnaryOp(Box::new(self.read_unary()), UnaryOps::Plus);\n\n } else if self.consume(\"-\") {\n\n return AST::UnaryOp(Box::new(self.read_unary()), UnaryOps::Minus);\n\n } else if self.consume(\"&\") {\n\n return AST::UnaryOp(Box::new(self.read_unary()), UnaryOps::Addr);\n\n } else if self.consume(\"*\") {\n\n return AST::UnaryOp(Box::new(self.read_unary()), UnaryOps::Deref);\n\n }\n\n self.read_postfix()\n\n }\n\n\n\n fn read_postfix(&mut self) -> AST {\n", "file_path": "src/parser.rs", "rank": 53, "score": 13.427262832865406 }, { "content": "\n\n fn read_num(&mut self) -> f64 {\n\n match self.next() {\n\n Token {\n\n kind: TokenKind::IntNum,\n\n val: n,\n\n ..\n\n } => n.parse::<f64>().unwrap(),\n\n Token {\n\n kind: TokenKind::FloatNum,\n\n val: n,\n\n ..\n\n } => n.parse::<f64>().unwrap(),\n\n _ => panic!(\"Numerical literal is expected\"),\n\n }\n\n }\n\n\n\n fn read_ident(&mut self) -> String 
{\n\n self.next().val\n\n }\n\n}\n", "file_path": "src/parser.rs", "rank": 54, "score": 12.94133499694958 }, { "content": " };\n\n }\n\n\n\n pub fn is_num(&self) -> bool {\n\n return match self.kind {\n\n TokenKind::IntNum | TokenKind::FloatNum => true,\n\n _ => false,\n\n };\n\n }\n\n}\n\n\n\npub struct Lexer<'a> {\n\n cur_line: u32,\n\n filepath: String,\n\n peek: iter::Peekable<str::Chars<'a>>,\n\n peek_pos: usize,\n\n}\n\n\n", "file_path": "src/lexer.rs", "rank": 55, "score": 12.716054577733363 }, { "content": " }\n\n _ => panic!(\"Lexer error\"),\n\n }\n\n }\n\n tokens\n\n}\n\n\n\nimpl<'a> Lexer<'a> {\n\n pub fn new(path: String, input: &'a str) -> Lexer<'a> {\n\n Lexer {\n\n cur_line: 0,\n\n filepath: path,\n\n peek: input.chars().peekable(),\n\n peek_pos: 0,\n\n }\n\n }\n\n\n\n pub fn get_filepath(&self) -> String {\n\n self.filepath.clone()\n\n }\n", "file_path": "src/lexer.rs", "rank": 56, "score": 11.246589963098582 }, { "content": "extern crate ironcc;\n\nuse ironcc::codegen;\n\nuse ironcc::lexer;\n\nuse ironcc::parser;\n\nuse ironcc::version;\n\n\n\nuse std::env;\n\n\n", "file_path": "src/main.rs", "rank": 57, "score": 11.074507914672033 }, { "content": "\n\n fn read_include_directive(&mut self) {\n\n self.skip_token(\"<\");\n\n let mut filename = String::new();\n\n while !(*self.peek.peek().unwrap() == '>') {\n\n filename.push(self.peek_next().unwrap());\n\n }\n\n self.skip_token(\">\");\n\n println!(\"include: {}\", filename);\n\n // TODO: implement #include here\n\n }\n\n\n\n pub fn read_token(&mut self) -> Option<Token> {\n\n match self.peek.peek() {\n\n Some(&c) => match c {\n\n 'a'..='z' | 'A'..='Z' => Some(self.read_string_token()),\n\n '+' | '-' | '*' | '/' | '(' | ')' | '=' | '<' | '>' | '!' | '&' | ',' | ';'\n\n | '{' | '}' | '[' | ']' | '.' 
=> Some(self.read_symbol()),\n\n '0'..='9' => Some(self.read_num()),\n\n ' ' | '\\t' | '\\r' => {\n", "file_path": "src/lexer.rs", "rank": 58, "score": 10.992482160033278 }, { "content": " val: s,\n\n line: self.cur_line,\n\n }\n\n } else {\n\n Token {\n\n kind: TokenKind::IntNum,\n\n val: s,\n\n line: self.cur_line,\n\n }\n\n }\n\n }\n\n\n\n fn read_directive(&mut self) {\n\n let dir_string = self.read_token().unwrap().val;\n\n if dir_string == \"include\" {\n\n self.read_include_directive();\n\n } else {\n\n panic!(\"Unknown direvtive\");\n\n }\n\n }\n", "file_path": "src/lexer.rs", "rank": 59, "score": 10.888148571969067 }, { "content": " loop {\n\n if self.consume(\"+\") {\n\n ast = AST::BinaryOp(Box::new(ast), Box::new(self.read_mul()), BinaryOps::Add);\n\n } else if self.consume(\"-\") {\n\n ast = AST::BinaryOp(Box::new(ast), Box::new(self.read_mul()), BinaryOps::Sub);\n\n } else {\n\n break;\n\n }\n\n }\n\n ast\n\n }\n\n\n\n fn read_mul(&mut self) -> AST {\n\n let mut ast = self.read_unary();\n\n loop {\n\n if self.consume(\"*\") {\n\n ast = AST::BinaryOp(Box::new(ast), Box::new(self.read_unary()), BinaryOps::Mul);\n\n } else if self.consume(\"/\") {\n\n ast = AST::BinaryOp(Box::new(ast), Box::new(self.read_unary()), BinaryOps::Div);\n\n } else {\n", "file_path": "src/parser.rs", "rank": 60, "score": 10.592192478668222 }, { "content": " Token {\n\n kind: TokenKind::Symbol,\n\n val: sym,\n\n line: self.cur_line,\n\n }\n\n }\n\n\n\n pub fn read_newline(&mut self) -> Token {\n\n self.cur_line += 1;\n\n Token {\n\n kind: TokenKind::NewLine,\n\n val: \"\".to_string(),\n\n line: self.cur_line,\n\n }\n\n }\n\n\n\n // ident, keyword\n\n pub fn read_string_token(&mut self) -> Token {\n\n let mut string = String::new();\n\n loop {\n", "file_path": "src/lexer.rs", "rank": 61, "score": 9.833767587266982 }, { "content": " match self.peek.peek() {\n\n Some(&c) => match c {\n\n 'a'..='z' | 'A'..='Z' | '0'..='9' => string.push(c),\n\n _ => break,\n\n },\n\n _ => break,\n\n 
}\n\n self.peek_next();\n\n }\n\n let tk = match string.as_str() {\n\n \"sizeof\" | \"int\" | \"if\" | \"else\" | \"for\" | \"while\" => TokenKind::Keyword,\n\n _ => TokenKind::Ident,\n\n };\n\n Token {\n\n kind: tk,\n\n val: string,\n\n line: self.cur_line,\n\n }\n\n }\n\n\n", "file_path": "src/lexer.rs", "rank": 62, "score": 9.699602202912239 }, { "content": " }\n\n }\n\n\n\n fn read_compound_stmt(&mut self) -> AST {\n\n let mut v = Vec::new();\n\n while !self.consume(\"}\") {\n\n let ast;\n\n if self.cur().matches(\"int\") {\n\n ast = self.read_declaration();\n\n } else {\n\n ast = self.read_stmt();\n\n }\n\n v.push(ast);\n\n }\n\n AST::Block(v)\n\n }\n\n\n\n fn read_declaration(&mut self) -> AST {\n\n let mut decls = Vec::new();\n\n let declspec = self.read_declspec();\n", "file_path": "src/parser.rs", "rank": 63, "score": 9.651689543504993 }, { "content": " pub fn read_num(&mut self) -> Token {\n\n let mut s = String::new();\n\n let mut is_float = false;\n\n loop {\n\n match self.peek.peek() {\n\n Some(&c) => match c {\n\n '0'..='9' => s.push(c),\n\n '.' 
=> {\n\n s.push(c);\n\n is_float = true;\n\n }\n\n _ => break,\n\n },\n\n _ => break,\n\n }\n\n self.peek_next();\n\n }\n\n if is_float {\n\n Token {\n\n kind: TokenKind::FloatNum,\n", "file_path": "src/lexer.rs", "rank": 64, "score": 8.803749986794967 }, { "content": " fn read_relational(&mut self) -> AST {\n\n let mut ast = self.read_add();\n\n loop {\n\n if self.consume(\"<\") {\n\n ast = AST::BinaryOp(Box::new(ast), Box::new(self.read_add()), BinaryOps::Lt);\n\n } else if self.consume(\"<=\") {\n\n ast = AST::BinaryOp(Box::new(ast), Box::new(self.read_add()), BinaryOps::Le);\n\n } else if self.consume(\">\") {\n\n ast = AST::BinaryOp(Box::new(self.read_add()), Box::new(ast), BinaryOps::Lt);\n\n } else if self.consume(\">=\") {\n\n ast = AST::BinaryOp(Box::new(self.read_add()), Box::new(ast), BinaryOps::Le);\n\n } else {\n\n break;\n\n }\n\n }\n\n ast\n\n }\n\n\n\n fn read_add(&mut self) -> AST {\n\n let mut ast = self.read_mul();\n", "file_path": "src/parser.rs", "rank": 65, "score": 8.532821623658785 }, { "content": "pub mod codegen;\n\npub mod error;\n\npub mod lexer;\n\npub mod node;\n\npub mod parser;\n\npub mod types;\n\npub mod version;\n", "file_path": "src/lib.rs", "rank": 66, "score": 8.04553517259326 }, { "content": "use std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::iter;\n\nuse std::iter::FromIterator;\n\nuse std::str;\n\n\n\n#[derive(Debug, Clone)]\n\npub enum TokenKind {\n\n IntNum,\n\n FloatNum,\n\n Symbol,\n\n Keyword,\n\n Ident,\n\n NewLine, // not used\n\n Eof,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Token {\n\n pub kind: TokenKind,\n", "file_path": "src/lexer.rs", "rank": 67, "score": 7.744375871911039 }, { "content": " self.peek_next();\n\n self.read_token()\n\n }\n\n '\\n' => {\n\n self.peek_next();\n\n self.read_newline();\n\n self.read_token()\n\n }\n\n '#' => {\n\n self.peek_next();\n\n self.read_directive();\n\n self.read_token()\n\n }\n\n // TODO: unexpected character\n\n _ => panic!(\"Unknown character: '{}' code: 
{}\", c, c as u8),\n\n },\n\n // TODO: None always means Eof?\n\n _ => Some(Token {\n\n kind: TokenKind::Eof,\n\n val: \"\".to_string(),\n\n line: self.cur_line,\n\n }),\n\n }\n\n }\n\n}\n", "file_path": "src/lexer.rs", "rank": 68, "score": 7.249121348588262 }, { "content": "\n\n pub fn get_filepath(&self) -> String {\n\n self.filepath.clone()\n\n }\n\n\n\n // for LL(1)\n\n pub fn cur(&self) -> Token {\n\n if self.pos < self.tokens.len() {\n\n return self.tokens[self.pos].clone();\n\n }\n\n panic!(\"Couldn't read a token\");\n\n }\n\n\n\n // for LL(2)\n\n pub fn peek(&self) -> Token {\n\n if self.pos < self.tokens.len() {\n\n return self.tokens[self.pos + 1].clone();\n\n }\n\n panic!(\"Couldn't read a token\");\n\n }\n", "file_path": "src/parser.rs", "rank": 69, "score": 7.137282293804221 }, { "content": " loop {\n\n if self.consume(\"==\") {\n\n ast = AST::BinaryOp(\n\n Box::new(ast),\n\n Box::new(self.read_relational()),\n\n BinaryOps::Eq,\n\n );\n\n } else if self.consume(\"!=\") {\n\n ast = AST::BinaryOp(\n\n Box::new(ast),\n\n Box::new(self.read_relational()),\n\n BinaryOps::Ne,\n\n );\n\n } else {\n\n break;\n\n }\n\n }\n\n ast\n\n }\n\n\n", "file_path": "src/parser.rs", "rank": 70, "score": 6.28252729123755 }, { "content": "const VERSION_STR: &'static str = env!(\"CARGO_PKG_VERSION\");\n\n\n", "file_path": "src/version.rs", "rank": 71, "score": 6.12197260401022 }, { "content": "use std::process;\n\n\n", "file_path": "src/error.rs", "rank": 72, "score": 3.036181793577017 } ]
Rust
day07/src/main.rs
pkusensei/adventofcode2020
116b462afa8f5d05d221d312644a7b870954fc92
use std::{collections::HashMap, str::FromStr}; fn main() { let input = tools::read_input("input.txt").unwrap(); let rules = parse_rules(&input); println!("To Shiny gold: {}", count_shiny_gold(&rules)); println!( "Shiny gold contains: {}", count_contained(&rules, "shiny gold") - 1 ) } fn count_shiny_gold(rules: &HashMap<&str, Vec<(u32, &str)>>) -> usize { rules .keys() .filter(|k| contains_color(rules, k, "shiny gold")) .count() } fn count_contained(rules: &HashMap<&str, Vec<(u32, &str)>>, start: &str) -> usize { match rules.get(&start) { Some(ncpairs) => { if ncpairs.is_empty() { 1 } else { 1 + ncpairs .iter() .map(|(num, color)| (*num as usize) * count_contained(rules, color)) .sum::<usize>() } } _ => 0, } } fn contains_color(rules: &HashMap<&str, Vec<(u32, &str)>>, start: &str, target: &str) -> bool { match rules.get(&start) { Some(colors) => { if colors.iter().map(|(_, color)| color).any(|&c| c == target) { true } else { colors .iter() .map(|(_, color)| contains_color(rules, color, target)) .fold(false, |acc, i| acc || i) } } _ => false, } } fn parse_rules(lines: &[String]) -> HashMap<&str, Vec<(u32, &str)>> { lines .into_iter() .map(|s| { let mut kvpair = s.split("bags contain"); let key = kvpair.next().unwrap().trim(); let value = kvpair.next().unwrap().trim(); (key, parse_contained(value)) }) .collect() } fn parse_contained(line: &str) -> Vec<(u32, &str)> { if line.starts_with("no other") { vec![] } else { line.split(',') .map(|s| { let mut num_color_pair = s.trim().splitn(2, ' '); let num = u32::from_str(num_color_pair.next().unwrap()).unwrap(); let color = num_color_pair .next() .unwrap() .rsplitn(2, ' ') .skip(1) .next() .unwrap(); (num, color) }) .collect() } } #[cfg(test)] mod tests { use super::*; fn sample() -> Vec<String> { r#"light red bags contain 1 bright white bag, 2 muted yellow bags. dark orange bags contain 3 bright white bags, 4 muted yellow bags. bright white bags contain 1 shiny gold bag. 
muted yellow bags contain 2 shiny gold bags, 9 faded blue bags. shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags. dark olive bags contain 3 faded blue bags, 4 dotted black bags. vibrant plum bags contain 5 faded blue bags, 6 dotted black bags. faded blue bags contain no other bags. dotted black bags contain no other bags."# .split('\n') .map(|s| s.trim().to_owned()) .collect() } #[test] fn test_count_shiny_gold() { let lines = sample(); let rules = parse_rules(&lines); assert_eq!(4, count_shiny_gold(&rules)) } #[test] fn test_count_contained() { { let lines = sample(); let rules = parse_rules(&lines); assert_eq!(32, count_contained(&rules, "shiny gold") - 1); } { let lines: Vec<_> = r#"shiny gold bags contain 2 dark red bags. dark red bags contain 2 dark orange bags. dark orange bags contain 2 dark yellow bags. dark yellow bags contain 2 dark green bags. dark green bags contain 2 dark blue bags. dark blue bags contain 2 dark violet bags. dark violet bags contain no other bags."# .split('\n') .map(|s| s.trim().to_owned()) .collect(); let rules = parse_rules(&lines); assert_eq!(126, count_contained(&rules, "shiny gold") - 1); } } }
use std::{collections::HashMap, str::FromStr}; fn main() { let input = tools::read_input("input.txt").unwrap(); let rules = parse_rules(&input); println!("To Shiny gold: {}", count_shiny_gold(&rules)); println!( "Shiny gold contains: {}", count_contained(&rules, "shiny gold") - 1 ) } fn count_shiny_gold(rules: &HashMap<&str, Vec<(u32, &str)>>) -> usize { rules .keys() .filter(|k| contains_color(rules, k, "shiny gold")) .count() } fn count_contained(rules: &HashMap<&str, Vec<(u32, &str)>>, start: &str) -> usize { match rules.get(&start) { Some(ncpairs) => { if ncpairs.is_empty() { 1 } else { 1 + ncpairs .iter() .map(|(num, color)| (*num as usize) * count_contained(rules, color)) .sum::<usize>() } } _ => 0, } } fn contains_color(rules: &HashMap<&str, Vec<(u32, &str)>>, start: &str, target: &str) -> bool { match rules.get(&start) { Some(colors) => { if colors.iter().map(|(_, color)| color).any(|&c| c == target) { true } else { colors .iter() .map(|(_, color)| contains_color(rules, color, target)) .fold(false, |acc, i| acc || i) } } _ => false, } } fn parse_rules(lines: &[String]) -> HashMap<&str, Vec<(u32, &str)>> { lines .into_iter() .map(|s| { let mut kvpair = s.split("bags contain"); let key = kvpair.next().unwrap().trim(); let value = kvpair.next().unwrap().trim(); (key, parse_contained(value)) }) .collect() } fn parse_contained(line: &str) -> Vec<(u32, &str)> { if line.starts_with("no other") { vec![] } else { line.split(',') .map(|s| { let mut num_color_pair = s.trim().splitn(2, ' '); let num = u32::from_str(num_color_pair.next().unwrap()).unwrap(); let color = num_color_pair .next() .unwrap() .rsplitn(2, ' ') .skip(1) .next() .unwrap(); (num, color) }) .collect() } } #[cfg(test)] mod tests { use super::*; fn sample() -> Vec<String> { r#"light red bags contain 1 bright white bag, 2 muted yellow bags. dark orange bags contain 3 bright white bags, 4 muted yellow bags. bright white bags contain 1 shiny gold bag. 
muted yellow bags contain 2 shiny gold bags, 9 faded blue bags. shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags. dark olive bags contain 3 faded blue bags, 4 dotted black bags. vibrant plum bags contain 5 faded blue bags, 6 dotted black bags. faded blue bags contain no other bags. dotted black bags contain no other bags."# .split('\n') .map(|s| s.trim().to_owned()) .collect() } #[test] fn test_count_shiny_gold() { let lines = sample(); let rules = parse_rules(&lines); assert_eq!(4, count_shiny_gold(&rules)) } #[test] fn test_count_contained() { { let lines = sample(); let rules = parse_rules(&lines); assert_eq!(32, count_contained(&rules, "shiny gold") - 1); } { let lines: Vec<_> = r#"shiny gold bags contain 2 dark red bags. dark red bags c
s); assert_eq!(126, count_contained(&rules, "shiny gold") - 1); } } }
ontain 2 dark orange bags. dark orange bags contain 2 dark yellow bags. dark yellow bags contain 2 dark green bags. dark green bags contain 2 dark blue bags. dark blue bags contain 2 dark violet bags. dark violet bags contain no other bags."# .split('\n') .map(|s| s.trim().to_owned()) .collect(); let rules = parse_rules(&line
function_block-random_span
[ { "content": "fn group_answers(lines: &[String]) -> Vec<Vec<&str>> {\n\n let mut groups = vec![];\n\n let mut one_group = vec![];\n\n for line in lines {\n\n if line.trim().is_empty() {\n\n groups.push(one_group.clone());\n\n one_group.clear()\n\n } else {\n\n one_group.push(line.trim())\n\n }\n\n }\n\n\n\n if !one_group.is_empty() {\n\n groups.push(one_group)\n\n }\n\n groups\n\n}\n\n\n", "file_path": "day06/src/main.rs", "rank": 4, "score": 231393.46565235415 }, { "content": "fn read(input: &str) -> (Vec<&str>, Vec<&str>) {\n\n let rules: Vec<_> = input\n\n .lines()\n\n .map(str::trim)\n\n .take_while(|line| !line.is_empty())\n\n .collect();\n\n let messages: Vec<_> = input\n\n .lines()\n\n .map(str::trim)\n\n .skip_while(|line| !line.is_empty())\n\n .skip(1)\n\n .collect();\n\n (rules, messages)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "day19/src/main.rs", "rank": 6, "score": 227291.75185212636 }, { "content": "fn collect_label(mut nums: Vec<u32>) -> Result<String, Error> {\n\n let idx = nums\n\n .iter()\n\n .position(|x| *x == 1)\n\n .ok_or_else(|| format!(\"Cannot find \\\"1\\\" in {:?}\", nums))?;\n\n nums.rotate_left(idx);\n\n nums.remove(0);\n\n let res = nums\n\n .into_iter()\n\n .map(|num| std::char::from_digit(num, 10))\n\n .collect::<Option<Vec<_>>>()\n\n .and_then(|v| Some(v.into_iter().collect::<String>()))\n\n .ok_or_else(|| \"Cannot build string\")?;\n\n Ok(res)\n\n}\n\n\n", "file_path": "day23/src/main.rs", "rank": 7, "score": 223791.7437479996 }, { "content": "fn get_nums(input: &str) -> Vec<u32> {\n\n let mut nums: Vec<_> = input\n\n .split_ascii_whitespace()\n\n .map(|s| u32::from_str(s.trim()).unwrap())\n\n .collect();\n\n nums.sort();\n\n nums.insert(0, 0);\n\n nums.push(nums.last().cloned().unwrap() + 3);\n\n nums\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const SAMPLE1: &str = r#\"16\n\n 10\n\n 15\n\n 5\n\n 1\n", "file_path": "day10/src/main.rs", "rank": 8, "score": 
222872.04498764916 }, { "content": "fn read_line(line: &str) -> Result<(Vec<&str>, Vec<&str>), Error> {\n\n let mut pair = line.split(\"(contains\");\n\n let ingredients = pair\n\n .next()\n\n .and_then(|s| Some(s.trim().split_ascii_whitespace().collect()))\n\n .ok_or_else(|| \"Invalid input\")?;\n\n let allergens = pair\n\n .next()\n\n .and_then(|s| s.trim().strip_suffix(')'))\n\n .and_then(|s| Some(s.split(\", \").collect()))\n\n .ok_or_else(|| \"Invalid input\")?;\n\n Ok((ingredients, allergens))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const SAMPLE: &str = r#\"mxmxvkd kfcds sqjhc nhms (contains dairy, fish)\n\n trh fvjkl sbzzf mxmxvkd (contains dairy)\n", "file_path": "day21/src/main.rs", "rank": 9, "score": 221062.57159793394 }, { "content": "fn read(input: &str) -> Result<Vec<(Vec<&str>, Vec<&str>)>, Error> {\n\n input.lines().map(read_line).collect()\n\n}\n\n\n", "file_path": "day21/src/main.rs", "rank": 10, "score": 217767.32009320759 }, { "content": "fn count_valid(rules: &[String], messages: &[&str]) -> usize {\n\n messages\n\n .into_iter()\n\n .filter(|m| rules.iter().any(|r| r == *m))\n\n .count()\n\n}\n\n\n", "file_path": "day19/src/main.rs", "rank": 11, "score": 216534.896044573 }, { "content": "fn read(input: &str) -> Result<(usize, Vec<usize>), Error> {\n\n let mut lines = input.split_terminator('\\n');\n\n let threshold = usize::from_str(\n\n lines\n\n .next()\n\n .ok_or_else(|| -> Error { \"Empty input\".into() })?\n\n .trim(),\n\n )?;\n\n let nums: Vec<_> = lines\n\n .next()\n\n .ok_or_else(|| -> Error { \"Insufficient input\".into() })?\n\n .split(',')\n\n .filter_map(|s| usize::from_str(s.trim()).ok())\n\n .collect();\n\n Ok((threshold, nums))\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 12, "score": 210442.73387057922 }, { "content": "fn read_stamps(input: &str) -> Result<Vec<(usize, usize)>, Error> {\n\n let line = input\n\n .split_terminator('\\n')\n\n .skip(1)\n\n .next()\n\n .ok_or_else(|| -> Error { 
\"Insufficient input\".into() })?;\n\n let mut stamps: Vec<_> = line\n\n .split(',')\n\n .enumerate()\n\n .filter_map(|(idx, s)| match usize::from_str(s.trim()) {\n\n Ok(num) => Some((idx, num)),\n\n _ => None,\n\n })\n\n .collect();\n\n stamps.sort_unstable_by_key(|(idx, _)| *idx);\n\n Ok(stamps)\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 13, "score": 206559.18321806495 }, { "content": "fn build_rule(rules: &HashMap<u32, Rule>, rule_id: u32) -> Vec<String> {\n\n fn concat_strs(left: Vec<String>, right: Vec<String>) -> Vec<String> {\n\n left.into_iter()\n\n .cartesian_product(right.into_iter())\n\n .map(|(l, r)| {\n\n let mut res = l.clone();\n\n res.push_str(&r);\n\n res\n\n })\n\n .collect()\n\n }\n\n fn build_strs_from_ids(rules: &HashMap<u32, Rule>, ids: &[u32]) -> Vec<String> {\n\n let mut res = build_rule(rules, ids[0]);\n\n for id in &ids[1..] {\n\n let sub_rules = build_rule(rules, *id);\n\n res = concat_strs(res, sub_rules);\n\n }\n\n res\n\n }\n\n\n", "file_path": "day19/src/main.rs", "rank": 14, "score": 204105.88709829852 }, { "content": "fn count_valid_loop(rules: &HashMap<u32, Rule>, messages: &[&str]) -> usize {\n\n messages\n\n .into_iter()\n\n .filter(|message| {\n\n let mut count_42 = 0;\n\n let mut remainder = **message;\n\n let mut result = check_valid(rules, &rules[&42], remainder);\n\n\n\n while let Ok(new_remainder) = result {\n\n count_42 += 1;\n\n remainder = new_remainder;\n\n result = check_valid(rules, &rules[&42], remainder);\n\n }\n\n\n\n if count_42 < 2 {\n\n return false;\n\n }\n\n\n\n let mut count_31 = 0;\n\n result = check_valid(rules, &rules[&31], remainder);\n", "file_path": "day19/src/main.rs", "rank": 15, "score": 201765.72600049843 }, { "content": "fn find_ingredients<'a>(lines: &'a [(Vec<&str>, Vec<&str>)]) -> HashMap<&'a str, HashSet<&'a str>> {\n\n let mut ai_map: HashMap<&str, HashSet<&str>> = HashMap::new();\n\n\n\n for (ingredients, allergens) in lines {\n\n for allergen in allergens {\n\n match 
ai_map.get_mut(allergen) {\n\n Some(v) => {\n\n let ing: HashSet<_> = ingredients.iter().cloned().collect();\n\n *v = v.intersection(&ing).cloned().collect()\n\n }\n\n None => {\n\n ai_map.insert(*allergen, ingredients.into_iter().cloned().collect());\n\n }\n\n }\n\n }\n\n }\n\n\n\n ai_map\n\n}\n\n\n", "file_path": "day21/src/main.rs", "rank": 16, "score": 200659.57353819086 }, { "content": "fn match_ingredients(ai_map: &mut HashMap<&str, HashSet<&str>>) {\n\n let total = ai_map.len();\n\n let mut unique_ingredients = HashSet::new();\n\n\n\n while unique_ingredients.len() < total {\n\n for (_, ingredients) in ai_map.iter_mut() {\n\n if ingredients.len() == 1 {\n\n let ing = ingredients.iter().next().cloned().unwrap();\n\n unique_ingredients.insert(ing);\n\n } else {\n\n ingredients.retain(|x| !unique_ingredients.contains(x));\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "day21/src/main.rs", "rank": 17, "score": 199520.19730114547 }, { "content": "fn get_tiles(input: &str) -> HashMap<(i32, i32, i32), bool> {\n\n let mut tiles: HashMap<_, bool> = HashMap::new();\n\n for line in input.lines() {\n\n let tile = find_tile(line);\n\n match tiles.get_mut(&tile) {\n\n Some(v) => *v = !(*v),\n\n None => {\n\n tiles.insert(tile, true); // true as flipped once -> black\n\n }\n\n }\n\n }\n\n tiles\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 18, "score": 195305.8637719765 }, { "content": "fn read_value(line: &str) -> Result<(usize, usize), Error> {\n\n let mut kvpair = line.split(\" = \");\n\n let k = kvpair\n\n .next()\n\n .ok_or_else(|| -> Error { \"Invalid value input\".into() })?\n\n .strip_prefix(\"mem[\")\n\n .ok_or_else(|| -> Error { \"Invalid value input\".into() })?\n\n .strip_suffix(']')\n\n .ok_or_else(|| -> Error { \"Invalid value input\".into() })?;\n\n let key = usize::from_str(k)?;\n\n let v = kvpair\n\n .next()\n\n .ok_or_else(|| -> Error { \"Invalid value input\".into() })?;\n\n let value = usize::from_str(v)?;\n\n Ok((key, 
value))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "day14/src/main.rs", "rank": 19, "score": 193350.53790045422 }, { "content": "fn parse_rule(line: &str) -> Result<(u32, Rule), Error> {\n\n let rule_id: u32 = line\n\n .split(':')\n\n .next()\n\n .ok_or_else(|| \"Invalid line of rule\")?\n\n .parse()?;\n\n let rule_str = line\n\n .split(':')\n\n .skip(1)\n\n .next()\n\n .ok_or_else(|| \"Invalid line of rule\")?\n\n .trim();\n\n let rule_set = if rule_str.contains('|') {\n\n let nums: Vec<_> = rule_str\n\n .split('|')\n\n .map(|pair| {\n\n pair.trim()\n\n .split_ascii_whitespace()\n\n .map(|s| s.trim().parse())\n\n .collect::<Result<Vec<_>, _>>()\n", "file_path": "day19/src/main.rs", "rank": 20, "score": 192241.09928869508 }, { "content": "fn parse_line(line: &str) -> Result<Vec<Token>, Error> {\n\n let mut chars = line.chars().filter(|c| !c.is_ascii_whitespace()).peekable();\n\n let mut tokens = vec![];\n\n while let Some(c) = chars.next() {\n\n match c {\n\n '+' => tokens.push(Token::Op(BinaryOp::Add)),\n\n '*' => tokens.push(Token::Op(BinaryOp::Mul)),\n\n '(' => tokens.push(Token::LeftParen),\n\n ')' => tokens.push(Token::RightParen),\n\n '0'..='9' => {\n\n let mut num = String::new();\n\n num.push(c);\n\n while let Some(next_char) = chars.peek() {\n\n if next_char.is_ascii_digit() {\n\n num.push(*next_char);\n\n chars.next();\n\n } else {\n\n break;\n\n }\n\n }\n", "file_path": "day18/src/main.rs", "rank": 21, "score": 190810.20553045208 }, { "content": "fn find_corners(mut tiles: Vec<Tile>) -> (Vec<usize>, HashMap<usize, Tile>) {\n\n let neighbors: HashMap<_, _> = tiles\n\n .iter()\n\n .map(|tile| tile.find_neighbors(&tiles))\n\n .collect();\n\n for tile in tiles.iter_mut() {\n\n tile.neighbors = neighbors[&tile.id].clone()\n\n }\n\n let corners = tiles\n\n .iter()\n\n .filter_map(|tile| match tile.neighbors.len() {\n\n 2 => Some(tile.id),\n\n _ => None,\n\n })\n\n .collect();\n\n let tiles = tiles.into_iter().map(|t| (t.id, 
t)).collect();\n\n (corners, tiles)\n\n}\n\n\n", "file_path": "day20/src/main.rs", "rank": 22, "score": 188986.56008815736 }, { "content": "fn read(input: &str) -> Result<(VecDeque<usize>, VecDeque<usize>), Error> {\n\n let p1 = input\n\n .lines()\n\n .take_while(|line| !line.trim().is_empty())\n\n .skip(1)\n\n .map(|num| num.trim().parse())\n\n .collect::<Result<_, _>>()?;\n\n\n\n let p2 = input\n\n .lines()\n\n .skip_while(|line| !line.trim().is_empty())\n\n .skip(2)\n\n .map(|num| num.trim().parse())\n\n .collect::<Result<_, _>>()?;\n\n\n\n Ok((p1, p2))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "day22/src/main.rs", "rank": 23, "score": 185251.6089743559 }, { "content": "fn read(input: &str, split_pattern: &str) -> Result<Vec<Tile>, Error> {\n\n let tiles: Vec<_> = input.split(split_pattern).collect();\n\n\n\n let mut res = vec![];\n\n for tile in tiles {\n\n let lines: Vec<_> = tile.split('\\n').collect();\n\n let id = lines[0]\n\n .strip_prefix(\"Tile \")\n\n .and_then(|s| s.trim().strip_suffix(':'))\n\n .ok_or_else(|| \"Invalid input\")?\n\n .parse()?;\n\n\n\n let mut dots = [[false; 10]; 10];\n\n for (row, line) in lines.into_iter().skip(1).enumerate() {\n\n for (col, ch) in line.trim().char_indices() {\n\n dots[row][col] = ch == '#'; // '#' becomes `true`\n\n }\n\n }\n\n res.push(Tile {\n\n id,\n", "file_path": "day20/src/main.rs", "rank": 24, "score": 184758.35191616172 }, { "content": "fn read(input: &str) -> Result<Vec<u32>, Error> {\n\n Ok(input\n\n .chars()\n\n .map(|c| c.to_digit(10))\n\n .collect::<Option<Vec<_>>>()\n\n .ok_or_else(|| format!(\"Invalid input: {}\", input))?)\n\n}\n\n\n", "file_path": "day23/src/main.rs", "rank": 25, "score": 184462.64512264094 }, { "content": "fn parse_rules(lines: &[&str]) -> Result<HashMap<u32, Rule>, Error> {\n\n lines.into_iter().map(|line| parse_rule(line)).collect()\n\n}\n\n\n", "file_path": "day19/src/main.rs", "rank": 26, "score": 183352.89889903326 }, { "content": "fn parse_ticket(line: &str) 
-> Result<Vec<u32>, Error> {\n\n Ok(line\n\n .split(',')\n\n .map(|n| n.trim().parse())\n\n .collect::<Result<Vec<u32>, _>>()?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const SAMPLE1: &str = r#\"class: 1-3 or 5-7\n\n row: 6-11 or 33-44\n\n seat: 13-40 or 45-50\n\n \n\n your ticket:\n\n 7,1,14\n\n \n\n nearby tickets:\n\n 7,3,47\n", "file_path": "day16/src/main.rs", "rank": 27, "score": 181473.08123914007 }, { "content": "fn insert_back(mut nums: Vec<u32>, dest_idx: usize, picked: Vec<u32>) -> Vec<u32> {\n\n for num in picked.into_iter().rev() {\n\n nums.insert(dest_idx + 1, num)\n\n }\n\n nums\n\n}\n\n\n", "file_path": "day23/src/main.rs", "rank": 28, "score": 179449.98060945605 }, { "content": "fn order_ingredients(ai_map: &HashMap<&str, HashSet<&str>>) -> String {\n\n ai_map\n\n .into_iter()\n\n .map(|(allergen, ingredients)| {\n\n let ingredient = ingredients.into_iter().cloned().next().unwrap();\n\n (*allergen, ingredient)\n\n })\n\n .sorted_by_key(|(a, _)| *a)\n\n .map(|(_, ing)| ing)\n\n .join(\",\")\n\n}\n\n\n", "file_path": "day21/src/main.rs", "rank": 29, "score": 177270.73435431824 }, { "content": "fn count_black(tiles: &HashMap<(i32, i32, i32), bool>) -> usize {\n\n tiles.values().filter(|&&v| v == true).count()\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 30, "score": 176991.93926416733 }, { "content": "fn get_passports(lines: &[String]) -> Vec<Passport> {\n\n let mut entries = HashMap::new();\n\n let mut res = vec![];\n\n for line in lines {\n\n if line.trim().is_empty() {\n\n res.push(Passport {\n\n entries: entries.clone(),\n\n });\n\n entries.clear()\n\n } else {\n\n line.split_ascii_whitespace().for_each(|s| {\n\n let mut kvpair = s.split(':');\n\n match (kvpair.next(), kvpair.next()) {\n\n (Some(k), Some(v)) => {\n\n entries.insert(k.trim().to_owned(), v.trim().to_owned());\n\n }\n\n _ => (),\n\n }\n\n })\n\n }\n\n }\n\n if !entries.is_empty() {\n\n res.push(Passport { entries })\n\n }\n\n res\n\n}\n\n\n", 
"file_path": "day04/src/main.rs", "rank": 31, "score": 174364.3113575853 }, { "content": "// Totally stole this\n\nfn play_p2(mut nums: Vec<u32>, moves: u32, mut current: u32) -> Vec<u32> {\n\n for _ in 0..moves {\n\n let a = nums[current as usize];\n\n let b = nums[a as usize];\n\n let c = nums[b as usize];\n\n let mut dest = if current.saturating_sub(1) <= 0 {\n\n nums.len() as u32 - 1\n\n } else {\n\n current - 1\n\n };\n\n\n\n while dest == a || dest == b || dest == c {\n\n dest = dest.saturating_sub(1);\n\n if dest <= 0 {\n\n dest = nums.len() as u32 - 1\n\n }\n\n }\n\n\n\n nums[current as usize] = nums[c as usize];\n\n let tmp = nums[dest as usize];\n\n nums[dest as usize] = a;\n\n nums[c as usize] = tmp;\n\n current = nums[current as usize]\n\n }\n\n nums\n\n}\n\n\n", "file_path": "day23/src/main.rs", "rank": 32, "score": 169589.78798453446 }, { "content": "fn p1(input: &str) -> Result<usize, Error> {\n\n let mut memory = HashMap::new();\n\n let mut and_or = (0, 0);\n\n for line in input.lines() {\n\n let line = line.trim();\n\n match line.strip_prefix(\"mask = \") {\n\n Some(m) => {\n\n and_or =\n\n m.bytes()\n\n .rev()\n\n .enumerate()\n\n .fold((usize::MAX, 0), |(and, or), (idx, byte)| match byte {\n\n b'0' => (and & !(1 << idx), or),\n\n b'1' => (and, or | 1 << idx),\n\n _ => (and, or),\n\n })\n\n }\n\n None => {\n\n let (k, v) = read_value(line)?;\n\n let value = v & and_or.0 | and_or.1;\n\n memory.insert(k, value);\n\n }\n\n }\n\n }\n\n Ok(memory.values().sum())\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 33, "score": 169206.58197066863 }, { "content": "fn p2(input: &str) -> Result<usize, Error> {\n\n let mut memory = HashMap::new();\n\n let mut float_address = vec![];\n\n let mut whitelist = 0;\n\n for line in input.lines() {\n\n let line = line.trim();\n\n match line.strip_prefix(\"mask = \") {\n\n Some(mask) => {\n\n float_address.clear();\n\n let mut float_base = 0;\n\n let mut float_bits = vec![];\n\n whitelist = 0;\n\n\n\n 
mask.bytes()\n\n .rev()\n\n .enumerate()\n\n .for_each(|(idx, byte)| match byte {\n\n b'0' => whitelist |= 1 << idx,\n\n b'1' => float_base |= 1 << idx,\n\n b'X' => float_bits.push(idx),\n", "file_path": "day14/src/main.rs", "rank": 34, "score": 169206.58197066863 }, { "content": "fn p2(input: &str) -> Result<usize, Error> {\n\n let input_nums = read(input)?;\n\n let mut nums = vec![0; 1_000_001];\n\n\n\n for i in 0..input_nums.len() - 1 {\n\n nums[input_nums[i] as usize] = input_nums[i + 1]\n\n }\n\n nums[*input_nums.last().unwrap() as usize] = 10;\n\n for i in input_nums.len() + 1..1_000_000 {\n\n nums[i] = (i + 1) as u32;\n\n }\n\n nums[1_000_000] = input_nums[0];\n\n nums = play_p2(nums, 10_000_000, input_nums[0]);\n\n Ok(nums[1] as usize * nums[nums[1] as usize] as usize)\n\n}\n\n\n", "file_path": "day23/src/main.rs", "rank": 35, "score": 169206.58197066863 }, { "content": "fn evaluate(tokens: &[Token], mut idx: usize, no_precedence: bool, deepdown: bool) -> (i64, usize) {\n\n let mut res = 0;\n\n let mut op = BinaryOp::Add;\n\n while idx < tokens.len() {\n\n match tokens[idx] {\n\n Token::Num(num) => match op {\n\n BinaryOp::Add => res += num,\n\n BinaryOp::Mul => res *= num,\n\n },\n\n Token::Op(bin_op) => match bin_op {\n\n BinaryOp::Add => op = BinaryOp::Add,\n\n BinaryOp::Mul => {\n\n if no_precedence {\n\n op = BinaryOp::Mul;\n\n } else if deepdown {\n\n return (res, idx - 1);\n\n } else {\n\n let (sub_result, i) = evaluate(tokens, idx + 1, no_precedence, true);\n\n res *= sub_result;\n\n idx = i\n", "file_path": "day18/src/main.rs", "rank": 36, "score": 165111.84376586875 }, { "content": "fn play(mut p1: VecDeque<usize>, mut p2: VecDeque<usize>) -> VecDeque<usize> {\n\n while !p1.is_empty() && !p2.is_empty() {\n\n let card1 = p1.pop_front().unwrap();\n\n let card2 = p2.pop_front().unwrap();\n\n if card1 > card2 {\n\n p1.push_back(card1);\n\n p1.push_back(card2)\n\n } else {\n\n p2.push_back(card2);\n\n p2.push_back(card1)\n\n }\n\n }\n\n if 
p1.is_empty() {\n\n p2\n\n } else {\n\n p1\n\n }\n\n}\n\n\n", "file_path": "day22/src/main.rs", "rank": 37, "score": 162190.5567141306 }, { "content": "fn scan_error_rate(rules: &HashMap<&str, (u32, u32, u32, u32)>, nearby: &Vec<Vec<u32>>) -> u32 {\n\n nearby\n\n .iter()\n\n .filter_map(|ticket| check_invalid_ticket(rules, ticket))\n\n .sum()\n\n}\n\n\n", "file_path": "day16/src/main.rs", "rank": 38, "score": 161066.10329696306 }, { "content": "fn parse_rule(line: &str) -> Result<(&str, (u32, u32, u32, u32)), Error> {\n\n let mut kvpair = line.split(':');\n\n let field = kvpair.next().ok_or_else(|| \"Invalid rule input\")?.trim();\n\n let limits = kvpair\n\n .next()\n\n .ok_or_else(|| \"Invalid rule input\")?\n\n .trim()\n\n .split(\"or\")\n\n .map(|pair| pair.trim().split('-').map(|num| num.parse()))\n\n .flatten()\n\n .collect::<Result<Vec<_>, _>>()?;\n\n Ok((field, (limits[0], limits[1], limits[2], limits[3])))\n\n}\n\n\n", "file_path": "day16/src/main.rs", "rank": 39, "score": 160468.87875448033 }, { "content": "fn play(mut nums: Vec<u32>, moves: u32) -> Result<Vec<u32>, Error> {\n\n let mut current = nums[0];\n\n for _ in 0..moves {\n\n let (left, picked) = pick_up(nums, current)?;\n\n let (dest_idx, _) = find_dest(&left, current)?;\n\n nums = insert_back(left, dest_idx, picked);\n\n let current_idx = index_of(&nums, current)?;\n\n current = nums[(current_idx + 1) % nums.len()];\n\n }\n\n Ok(nums)\n\n}\n\n\n", "file_path": "day23/src/main.rs", "rank": 40, "score": 159106.26139709612 }, { "content": "fn solve(input: &[usize], target_length: usize) -> usize {\n\n let mut nums = Vec::with_capacity(target_length);\n\n let mut positions = HashMap::new();\n\n for (idx, num) in input.iter().enumerate() {\n\n nums.push(*num);\n\n positions.insert(*num, idx);\n\n }\n\n nums.push(0);\n\n for i in input.len()..target_length {\n\n let v = match positions.get(&nums[i]) {\n\n Some(idx) => i - idx,\n\n None => 0,\n\n };\n\n nums.push(v);\n\n positions.insert(nums[i], i);\n\n 
}\n\n nums[target_length - 1]\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "day15/src/main.rs", "rank": 41, "score": 157867.13639656277 }, { "content": "fn check_valid(s: &str) -> bool {\n\n let parts: Vec<_> = s.split(' ').collect();\n\n if parts.len() != 3 {\n\n return false;\n\n }\n\n if let Ok((lower, upper)) = find_limits(parts[0]) {\n\n if let Ok(ch) = find_letter(parts[1]) {\n\n let count = parts[2].matches(ch).count() as u8;\n\n return lower <= count && count <= upper;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "day02/src/main.rs", "rank": 42, "score": 157531.8234292791 }, { "content": "fn p1(input: &str) -> i64 {\n\n input\n\n .lines()\n\n .map(parse_line)\n\n .filter_map(Result::ok)\n\n .map(|tokens| evaluate(&tokens, 0, true, false).0)\n\n .sum()\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 43, "score": 156857.20297279512 }, { "content": "fn p2(input: &str) -> i64 {\n\n input\n\n .lines()\n\n .map(parse_line)\n\n .filter_map(Result::ok)\n\n .map(|tokens| evaluate(&tokens, 0, false, false).0)\n\n .sum()\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 44, "score": 156857.20297279512 }, { "content": "fn play_rec(mut p1: VecDeque<usize>, mut p2: VecDeque<usize>) -> (u8, VecDeque<usize>) {\n\n let mut p1seen = HashSet::new();\n\n let mut p2seen = HashSet::new();\n\n\n\n while !p1.is_empty() && !p2.is_empty() {\n\n if p1seen.contains(&p1) || p2seen.contains(&p2) {\n\n return (1, p1);\n\n } else {\n\n p1seen.insert(p1.clone());\n\n p2seen.insert(p2.clone());\n\n }\n\n\n\n let card1 = p1.pop_front().unwrap();\n\n let card2 = p2.pop_front().unwrap();\n\n\n\n let w = if card1 <= p1.len() && card2 <= p2.len() {\n\n let new_p1 = p1.iter().take(card1).cloned().collect();\n\n let new_p2 = p2.iter().take(card2).cloned().collect();\n\n play_rec(new_p1, new_p2).0\n\n } else {\n", "file_path": "day22/src/main.rs", "rank": 45, "score": 156474.5622606495 }, { "content": "fn check_valid_position(s: &str) -> bool {\n\n let parts: Vec<_> = s.split(' 
').collect();\n\n if parts.len() != 3 {\n\n return false;\n\n }\n\n if let Ok((lower, upper)) = find_limits(parts[0]) {\n\n if let Ok(ch) = find_letter(parts[1]) {\n\n let count = parts[2]\n\n .char_indices()\n\n .filter(|pair| {\n\n pair.1 == ch && (pair.0 as u8 == lower - 1 || pair.0 as u8 == upper - 1)\n\n })\n\n .count();\n\n return count == 1;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "day02/src/main.rs", "rank": 46, "score": 154360.26924623482 }, { "content": "fn count_intersect(answers: &[&str]) -> usize {\n\n let full: BTreeSet<char> = ('a'..='z').into_iter().collect();\n\n answers\n\n .into_iter()\n\n .map(|answer| answer.chars().collect::<BTreeSet<_>>())\n\n .fold(full, |intersect, s| {\n\n intersect.intersection(&s).cloned().collect::<BTreeSet<_>>()\n\n })\n\n .len()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn sample() -> Vec<String> {\n\n \"abc\n\n\n\n a\n\n b\n", "file_path": "day06/src/main.rs", "rank": 47, "score": 153997.6454317846 }, { "content": "fn get_seat_map(input: &str) -> Result<HashMap<(u8, u8), Status>, Error> {\n\n let seats = input\n\n .split_ascii_whitespace()\n\n .enumerate()\n\n .map(|(row, line)| {\n\n line.trim()\n\n .char_indices()\n\n .map(move |(col, ch)| -> Result<((u8, u8), Status), Error> {\n\n Ok(((col as u8, row as u8), Status::new(ch)?))\n\n })\n\n })\n\n .flatten()\n\n .collect::<Result<HashMap<_, _>, Error>>()?;\n\n Ok(seats)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "day11/src/main.rs", "rank": 48, "score": 151928.69074368678 }, { "content": "fn count_answers_group(answers: &[&str]) -> usize {\n\n answers\n\n .into_iter()\n\n .map(|answer| answer.chars())\n\n .flatten()\n\n .collect::<BTreeSet<_>>()\n\n .len()\n\n}\n\n\n", "file_path": "day06/src/main.rs", "rank": 49, "score": 150130.56399214975 }, { "content": "fn flip(mut tiles: HashMap<(i32, i32, i32), bool>, times: u32) -> HashMap<(i32, i32, i32), bool> {\n\n for _ in 0..times {\n\n let expanded: 
Vec<_> = tiles.keys().map(get_neighbors).flatten().unique().collect();\n\n let new_tiles = expanded\n\n .into_iter()\n\n .map(|tile| {\n\n let count = get_neighbors(&tile)\n\n .into_iter()\n\n .filter(|t| match tiles.get(t) {\n\n Some(true) => true,\n\n _ => false,\n\n })\n\n .count();\n\n match tiles.get(&tile) {\n\n Some(true) => {\n\n if count == 0 || count > 2 {\n\n (tile, true)\n\n } else {\n\n (tile, false)\n\n }\n", "file_path": "day24/src/main.rs", "rank": 50, "score": 148492.71722729038 }, { "content": "fn count_trees(rows: &[String], dx: usize, dy: usize) -> usize {\n\n let mut x = 0;\n\n rows.iter()\n\n .step_by(dy)\n\n .filter(|row| {\n\n let idx = x;\n\n x = (x + dx) % row.len();\n\n row.chars().nth(idx) == Some('#')\n\n })\n\n .count()\n\n}\n\n\n", "file_path": "day03/src/main.rs", "rank": 51, "score": 148164.8261802942 }, { "content": "// https://www.redblobgames.com/grids/hexagons/#coordinates-cube\n\nfn parse_line(line: &[char]) -> Vec<(i32, i32, i32)> {\n\n if line.is_empty() {\n\n return vec![];\n\n }\n\n match line[0] {\n\n 'w' => {\n\n let mut rest = parse_line(&line[1..]);\n\n rest.insert(0, (-1, 1, 0));\n\n rest\n\n }\n\n 'e' => {\n\n let mut rest = parse_line(&line[1..]);\n\n rest.insert(0, (1, -1, 0));\n\n rest\n\n }\n\n 'n' => {\n\n let mut rest = parse_line(&line[2..]);\n\n match line[1] {\n\n 'w' => rest.insert(0, (0, 1, -1)),\n\n 'e' => rest.insert(0, (1, 0, -1)),\n", "file_path": "day24/src/main.rs", "rank": 52, "score": 145699.43341173517 }, { "content": "fn find_tile(line: &str) -> (i32, i32, i32) {\n\n let line: Vec<_> = line.trim().chars().collect();\n\n parse_line(&line)\n\n .into_iter()\n\n .fold((0, 0, 0), |acc, i| (acc.0 + i.0, acc.1 + i.1, acc.2 + i.2))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_find_tile() {\n\n const S1: &str = \"esew\";\n\n assert_eq!((0, -1, 1), find_tile(&S1));\n\n const S2: &str = \"nwwswee\";\n\n assert_eq!((0, 0, 0), find_tile(&S2));\n\n }\n\n\n\n const 
SAMPLE: &str = r#\"sesenwnenenewseeswwswswwnenewsewsw\n", "file_path": "day24/src/main.rs", "rank": 53, "score": 141239.82725145618 }, { "content": "fn try_find_window(nums: &[usize], num: usize) -> Option<&[usize]> {\n\n for start in 0..nums.len() - 1 {\n\n for end in start + 2..nums.len() {\n\n let window = &nums[start..end];\n\n let sum = window.into_iter().cloned().sum::<usize>();\n\n if sum == num {\n\n return Some(window);\n\n } else if sum > num {\n\n break;\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "day09/src/main.rs", "rank": 54, "score": 135689.28138484506 }, { "content": "pub fn read_input<P: AsRef<Path>>(p: P) -> Result<Vec<String>, Error> {\n\n let file = File::open(p)?;\n\n let reader = BufReader::new(file);\n\n let lines = reader.lines().collect::<Result<Vec<_>, _>>()?;\n\n Ok(lines)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn it_works() {\n\n assert_eq!(2 + 2, 4);\n\n }\n\n}\n", "file_path": "tools/src/lib.rs", "rank": 55, "score": 135387.7901227149 }, { "content": "fn get_dimensions(input: &str) -> Result<(u8, u8), Error> {\n\n let row_count = input.split_ascii_whitespace().count();\n\n let col_count = input\n\n .split_ascii_whitespace()\n\n .next()\n\n .ok_or_else(|| -> Error { \"Empty input\".into() })?\n\n .len();\n\n Ok((col_count as u8, row_count as u8))\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 56, "score": 135224.8812075237 }, { "content": "fn reach_stable(mut seats: HashMap<(u8, u8), Status>) -> usize {\n\n let new_seats = get_changed_seats(&seats);\n\n if new_seats.is_empty() {\n\n seats\n\n .iter()\n\n .filter(|(_, status)| **status == Status::Occupied)\n\n .count()\n\n } else {\n\n seats.extend(new_seats.into_iter());\n\n reach_stable(seats)\n\n }\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 57, "score": 130705.76041492358 }, { "content": "fn count_arrangements(nums: &[u32]) -> usize {\n\n fn inner(nums: &[u32], result: &mut HashMap<usize, usize>, idx: usize) -> usize {\n\n if idx >= 
nums.len() - 1 {\n\n 1\n\n } else if let Some(&r) = result.get(&idx) {\n\n r\n\n } else {\n\n let count = nums\n\n .iter()\n\n .skip(idx + 1)\n\n .enumerate()\n\n .take(3)\n\n .filter_map(|(i, v)| {\n\n if v - nums[idx] <= 3 {\n\n Some(inner(nums, result, idx + i + 1))\n\n } else {\n\n None\n\n }\n\n })\n\n .sum();\n\n result.insert(idx, count);\n\n count\n\n }\n\n }\n\n\n\n let mut result = HashMap::new();\n\n inner(nums, &mut result, 0)\n\n}\n\n\n", "file_path": "day10/src/main.rs", "rank": 58, "score": 129486.61234170286 }, { "content": "fn read_window(nums: &[usize], start_idx: usize, length: usize) -> &[usize] {\n\n &nums[start_idx..start_idx + length]\n\n}\n\n\n", "file_path": "day09/src/main.rs", "rank": 59, "score": 126717.03254464826 }, { "content": "fn pick_up(nums: Vec<u32>, current: u32) -> Result<(Vec<u32>, Vec<u32>), Error> {\n\n let mut picked = vec![];\n\n let current_idx = index_of(&nums, current)?;\n\n for i in 0..=2 {\n\n let idx = (current_idx + i + 1) % nums.len();\n\n picked.push(nums[idx]);\n\n }\n\n let left: Vec<_> = nums\n\n .into_iter()\n\n .filter(|num| !picked.contains(num))\n\n .collect();\n\n\n\n Ok((left, picked))\n\n}\n\n\n", "file_path": "day23/src/main.rs", "rank": 60, "score": 124458.92902397343 }, { "content": "fn index_of(nums: &[u32], num: u32) -> Result<usize, Error> {\n\n Ok(nums\n\n .iter()\n\n .position(|x| *x == num)\n\n .ok_or_else(|| format!(\"Cannot find {} in {:?}\", num, nums))?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const SAMPLE: &str = \"389125467\";\n\n\n\n #[test]\n\n fn test_p1() {\n\n let nums = read(SAMPLE).unwrap();\n\n let move_10 = play(nums.clone(), 10).unwrap();\n\n assert_eq!(\"92658374\", collect_label(move_10).unwrap());\n\n let move_100 = play(nums, 100).unwrap();\n\n assert_eq!(\"67384529\", collect_label(move_100).unwrap());\n\n }\n\n\n\n #[test]\n\n fn test_p2() {\n\n assert_eq!(149245887792, p2(SAMPLE).unwrap())\n\n }\n\n}\n", "file_path": "day23/src/main.rs", "rank": 
61, "score": 122150.23757179637 }, { "content": "fn find_loop_size(sub_num: u64, target: u64) -> u64 {\n\n let mut value = 1;\n\n let mut count = 0;\n\n while value != target {\n\n value *= sub_num;\n\n value %= DENOM;\n\n count += 1\n\n }\n\n count\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 62, "score": 114743.30245018125 }, { "content": "fn find_num(s: &str, lower: u32, upper: u32) -> u32 {\n\n match s {\n\n \"F\" | \"L\" => lower,\n\n \"B\" | \"R\" => upper,\n\n\n\n _ => match s.chars().next() {\n\n Some('F') | Some('L') => find_num(&s[1..], lower, (upper + lower) / 2),\n\n Some('B') | Some('R') => find_num(&s[1..], (upper + lower) / 2 + 1, upper),\n\n _ => panic!(\"Invalid input\"),\n\n },\n\n }\n\n}\n\n\n", "file_path": "day05/src/main.rs", "rank": 63, "score": 111075.6726883807 }, { "content": "fn reach_stable_visible(mut seats: HashMap<(u8, u8), Status>, max_col: u8, max_row: u8) -> usize {\n\n let new_seats = get_changed_seats_visible(&seats, max_col, max_row);\n\n if new_seats.is_empty() {\n\n seats\n\n .iter()\n\n .filter(|(_, status)| **status == Status::Occupied)\n\n .count()\n\n } else {\n\n seats.extend(new_seats.into_iter());\n\n reach_stable_visible(seats, max_col, max_row)\n\n }\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 64, "score": 108361.79919516134 }, { "content": "fn find_sum(nums: &[usize]) -> Option<usize> {\n\n if let Some(s) = nums.iter().min() {\n\n if let Some(l) = nums.iter().max() {\n\n return Some(s + l);\n\n }\n\n }\n\n None\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n const sample: &str = r#\"35\n\n 20\n\n 15\n\n 25\n\n 47\n\n 40\n\n 62\n\n 55\n", "file_path": "day09/src/main.rs", "rank": 65, "score": 107638.20806197691 }, { "content": "fn calc_score(p: &VecDeque<usize>) -> usize {\n\n p.into_iter()\n\n .rev()\n\n .enumerate()\n\n .map(|(idx, num)| (idx + 1) * num)\n\n .sum()\n\n}\n\n\n", "file_path": "day22/src/main.rs", "rank": 66, "score": 107275.29085425512 }, { "content": "fn 
find_invalid(nums: &[usize], length: usize) -> Option<(usize, usize)> {\n\n for idx in length..nums.len() {\n\n let window = read_window(nums, idx - length, length);\n\n let sums: Vec<_> = window\n\n .into_iter()\n\n .combinations(2)\n\n .map(|com| com.iter().cloned().sum::<usize>())\n\n .collect();\n\n let num = nums[idx];\n\n if sums.contains(&num) {\n\n continue;\n\n } else {\n\n return Some((idx, num));\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "day09/src/main.rs", "rank": 67, "score": 105756.97155446289 }, { "content": "fn read_input<P: AsRef<Path>>(p: P) -> Result<Vec<u32>, Error> {\n\n let lines = tools::read_input(p)?;\n\n let mut numbers = lines\n\n .iter()\n\n .map(|s| u32::from_str(s))\n\n .collect::<Result<Vec<_>, _>>()?;\n\n numbers.sort();\n\n Ok(numbers)\n\n}\n\n\n", "file_path": "day01/src/main.rs", "rank": 68, "score": 104104.13310465266 }, { "content": "fn find_lowest(threshold: usize, nums: &[usize]) -> Result<(usize, usize), Error> {\n\n nums.into_iter()\n\n .map(|&num| (num, (threshold / num + 1) * num))\n\n .min_by_key(|(_num, higher_num)| *higher_num)\n\n .ok_or_else(|| \"Cannot find lowest number\".into())\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 69, "score": 103023.3982867647 }, { "content": "fn get_seat_id(s: &str) -> u32 {\n\n let row = find_num(&s[..7], 0, 127);\n\n let col = find_num(&s[7..], 0, 7);\n\n row * 8 + col\n\n}\n\n\n", "file_path": "day05/src/main.rs", "rank": 70, "score": 100426.4140686228 }, { "content": "fn find_diff(nums: &[u32], diff: u32) -> usize {\n\n nums[0..nums.len() - 1]\n\n .into_iter()\n\n .zip(nums[1..].into_iter())\n\n .filter_map(\n\n |(left, right)| {\n\n if right - left == diff {\n\n Some(())\n\n } else {\n\n None\n\n }\n\n },\n\n )\n\n .count()\n\n}\n\n\n", "file_path": "day10/src/main.rs", "rank": 71, "score": 94959.0860179265 }, { "content": "fn find_letter(s: &str) -> Result<char, Error> {\n\n if let Some(res) = s.strip_suffix(':') {\n\n if res.len() == 1 {\n\n if let Some(ch) = 
res.chars().nth(0) {\n\n return Ok(ch);\n\n }\n\n }\n\n }\n\n Err(\"Invalid policy letter\".into())\n\n}\n\n\n", "file_path": "day02/src/main.rs", "rank": 72, "score": 94006.53757685976 }, { "content": "fn main() {\n\n let loop1 = find_loop_size(7, PUB_KEY1);\n\n let loop2 = find_loop_size(7, PUB_KEY2);\n\n let ek1 = find_encryption_key(PUB_KEY2, loop1);\n\n let ek2 = find_encryption_key(PUB_KEY1, loop2);\n\n assert_eq!(ek1, ek2);\n\n assert_eq!(15467093, ek2);\n\n\n\n println!(\"ALL DONE!\");\n\n}\n\n\n", "file_path": "day25/src/main.rs", "rank": 73, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = include_str!(\"..\\\\input.txt\");\n\n let (timestamp, ids) = read(input).unwrap();\n\n let (id, time) = find_lowest(timestamp, &ids).unwrap();\n\n debug_assert_eq!(3035, id * (time - timestamp));\n\n let stamps = read_stamps(input).unwrap();\n\n debug_assert_eq!(725169163285238, find_repeat(&stamps).unwrap());\n\n println!(\"All done\")\n\n}\n\n\n", "file_path": "day13/src/main.rs", "rank": 75, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = include_str!(\"..\\\\input.txt\");\n\n let tiles = read(&input, \"\\r\\n\\r\\n\").unwrap();\n\n let (corners, _tiles) = find_corners(tiles);\n\n assert_eq!(28057939502729, corners.into_iter().product::<usize>());\n\n}\n\n\n", "file_path": "day20/src/main.rs", "rank": 76, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let nums: Vec<_> = include_str!(r#\"..\\input.txt\"#)\n\n .lines()\n\n .map(|s| usize::from_str(s.trim()).unwrap())\n\n .collect();\n\n let (idx, num) = find_invalid(&nums, 25).unwrap();\n\n debug_assert_eq!(num, 1038347917);\n\n let window = try_find_window(&nums[0..idx], num).unwrap();\n\n debug_assert_eq!(137394018, find_sum(window).unwrap());\n\n println!(\"All done!\")\n\n}\n\n\n", "file_path": "day09/src/main.rs", "rank": 77, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = include_str!(r#\"..\\input.txt\"#);\n\n let nums = 
get_nums(&input);\n\n let ones = find_diff(&nums, 1);\n\n let threes = find_diff(&nums, 3);\n\n debug_assert_eq!(2376, ones * threes);\n\n debug_assert_eq!(129586085429248, count_arrangements(&nums));\n\n println!(\"All done.\");\n\n}\n\n\n", "file_path": "day10/src/main.rs", "rank": 78, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = \"389547612\";\n\n let nums = read(input).unwrap();\n\n assert_eq!(\"45286397\", collect_label(play(nums, 100).unwrap()).unwrap());\n\n assert_eq!(836763710, p2(input).unwrap());\n\n println!(\"All done\");\n\n}\n\n\n", "file_path": "day23/src/main.rs", "rank": 79, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = include_str!(\"..\\\\input.txt\");\n\n assert_eq!(24650385570008, p1(input));\n\n assert_eq!(158183007916215, p2(input));\n\n println!(\"All done\")\n\n}\n\n\n", "file_path": "day18/src/main.rs", "rank": 80, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = include_str!(\"..\\\\input.txt\");\n\n debug_assert_eq!(5875750429995, p1(input).unwrap());\n\n debug_assert_eq!(5272149590143, p2(input).unwrap());\n\n println!(\"All done\")\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 81, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = include_str!(\"..\\\\input.txt\");\n\n let (p1, p2) = read(input).unwrap();\n\n {\n\n let w = play(p1.clone(), p2.clone());\n\n assert_eq!(31455, calc_score(&w));\n\n }\n\n {\n\n let (_, w) = play_rec(p1, p2);\n\n assert_eq!(32528, calc_score(&w));\n\n }\n\n}\n\n\n", "file_path": "day22/src/main.rs", "rank": 82, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = tools::read_input(\"input.txt\").unwrap();\n\n println!(\n\n \"No. of yes: {}\",\n\n group_answers(&input)\n\n .iter()\n\n .map(|g| count_answers_group(g))\n\n .sum::<usize>()\n\n );\n\n println!(\n\n \"No. 
of intersect: {}\",\n\n group_answers(&input)\n\n .iter()\n\n .map(|g| count_intersect(g))\n\n .sum::<usize>()\n\n );\n\n}\n\n\n", "file_path": "day06/src/main.rs", "rank": 83, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = include_str!(\"..\\\\input.txt\");\n\n let (r, messages) = read(input);\n\n let rule_map = parse_rules(&r).unwrap();\n\n let rules = build_rule(&rule_map, 0);\n\n assert_eq!(222, count_valid(&rules, &messages));\n\n assert_eq!(339, count_valid_loop(&rule_map, &messages));\n\n println!(\"All done\")\n\n}\n\n\n", "file_path": "day19/src/main.rs", "rank": 84, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = include_str!(\"..\\\\input.txt\");\n\n let (rules, ticket, nearby) = parse(input).unwrap();\n\n assert_eq!(23044, scan_error_rate(&rules, &nearby));\n\n assert_eq!(3765150732757, p2(&rules, &ticket, &nearby));\n\n println!(\"All done\")\n\n}\n\n\n", "file_path": "day16/src/main.rs", "rank": 85, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = include_str!(r#\"..\\input.txt\"#);\n\n let seats = get_seat_map(input).unwrap();\n\n debug_assert_eq!(2406, reach_stable(seats.clone()));\n\n let (col_count, row_count) = get_dimensions(input).unwrap();\n\n debug_assert_eq!(\n\n 2149,\n\n reach_stable_visible(seats, col_count - 1, row_count - 1)\n\n );\n\n\n\n println!(\"All done\");\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 86, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = tools::read_input(\"input.txt\").unwrap();\n\n let ids: BTreeSet<_> = input.into_iter().map(|s| get_seat_id(&s)).collect();\n\n\n\n println!(\"Highest seat id: {}\", ids.iter().max().unwrap());\n\n println!(\"Seat id: {}\", find_seat_id(&ids));\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_find_num() {\n\n assert_eq!(find_num(\"FBFBBFF\", 0, 127), 44);\n\n assert_eq!(find_num(\"RLR\", 0, 7), 5)\n\n }\n\n\n\n #[test]\n\n fn 
test_find_seat_id() {\n\n assert_eq!(get_seat_id(\"FBFBBFFRLR\"), 357);\n\n assert_eq!(get_seat_id(\"BFFFBBFRRR\"), 567);\n\n assert_eq!(get_seat_id(\"FFFBBBFRRR\"), 119);\n\n assert_eq!(get_seat_id(\"BBFFBBFRLL\"), 820);\n\n }\n\n}\n", "file_path": "day05/src/main.rs", "rank": 87, "score": 92053.74991164418 }, { "content": "fn main() {\n\n assert_eq!(1618, solve(&[0, 13, 1, 8, 6, 15], 2020));\n\n assert_eq!(548531, solve(&[0, 13, 1, 8, 6, 15], 30000000));\n\n println!(\"All done\")\n\n}\n\n\n", "file_path": "day15/src/main.rs", "rank": 88, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = include_str!(\"..\\\\input.txt\");\n\n let tiles = get_tiles(input);\n\n assert_eq!(326, count_black(&tiles));\n\n let tiles = flip(tiles, 100);\n\n assert_eq!(3979, count_black(&tiles));\n\n}\n\n\n", "file_path": "day24/src/main.rs", "rank": 89, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = include_str!(\"..\\\\input.txt\");\n\n let lines = read(input).unwrap();\n\n let mut ai_map = find_ingredients(&lines);\n\n assert_eq!(1945, count_non_allergy(&ai_map, &lines));\n\n match_ingredients(&mut ai_map);\n\n assert_eq!(\n\n \"pgnpx,srmsh,ksdgk,dskjpq,nvbrx,khqsk,zbkbgp,xzb\",\n\n order_ingredients(&ai_map)\n\n );\n\n println!(\"All done\")\n\n}\n\n\n", "file_path": "day21/src/main.rs", "rank": 90, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = include_str!(r#\"..\\input.txt\"#);\n\n let insts = Instruction::read(input).unwrap();\n\n {\n\n let mut ship = Ship::new();\n\n ship.apply_insts(&insts, Ship::apply_inst).unwrap();\n\n debug_assert_eq!(2879, ship.manhattan_dist())\n\n }\n\n {\n\n let mut ship = Ship::new();\n\n ship.apply_insts(&insts, Ship::apply_inst_wp).unwrap();\n\n debug_assert_eq!(178986, ship.manhattan_dist())\n\n }\n\n println!(\"All done\")\n\n}\n\n\n", "file_path": "day12/src/main.rs", "rank": 91, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = 
tools::read_input(\"input.txt\").unwrap();\n\n let lines: Vec<&str> = input.iter().map(|s| s.as_str()).collect();\n\n let mut console = Console::from_strings(&lines).unwrap();\n\n println!(\"Acc before loop: {}\", console.run().unwrap());\n\n println!(\"Acc after fix: {}\", console.fix_instruction().unwrap());\n\n}\n\n\n", "file_path": "day08/src/main.rs", "rank": 92, "score": 92053.74991164418 }, { "content": "fn main() {\n\n let input = include_str!(\"..\\\\input.txt\");\n\n assert_eq!(237, p1::run(input));\n\n assert_eq!(2448, p2::run(input));\n\n println!(\"All done\")\n\n}\n\n\n\nmod p1 {\n\n use super::*;\n\n\n\n #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\n struct Coord {\n\n x: i32,\n\n y: i32,\n\n z: i32,\n\n }\n\n\n\n #[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n enum State {\n\n Active,\n", "file_path": "day17/src/main.rs", "rank": 93, "score": 92053.74991164418 }, { "content": "fn find_limits(s: &str) -> Result<(u8, u8), Error> {\n\n let numbers = s\n\n .split('-')\n\n .map(u8::from_str)\n\n .collect::<Result<Vec<_>, _>>()?;\n\n if numbers.len() != 2 {\n\n Err(\"Invalid policy limits\".into())\n\n } else {\n\n Ok((numbers[0], numbers[1]))\n\n }\n\n}\n\n\n", "file_path": "day02/src/main.rs", "rank": 94, "score": 90252.07539811985 }, { "content": "fn find_encryption_key(sub_num: u64, loop_size: u64) -> u64 {\n\n let mut value = 1;\n\n for _ in 0..loop_size {\n\n value *= sub_num;\n\n value %= DENOM;\n\n }\n\n value\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_find_loop_size() {\n\n assert_eq!(8, find_loop_size(7, 5764801));\n\n assert_eq!(11, find_loop_size(7, 17807724));\n\n }\n\n\n\n #[test]\n\n fn test_find_encryption_key() {\n\n assert_eq!(14897079, find_encryption_key(17807724, 8));\n\n assert_eq!(14897079, find_encryption_key(5764801, 11));\n\n }\n\n}\n", "file_path": "day25/src/main.rs", "rank": 95, "score": 88299.24850623427 }, { "content": "fn get_surrouding(col: u8, row: u8) -> Vec<(u8, u8)> 
{\n\n (col.saturating_sub(1)..=col.saturating_add(1))\n\n .into_iter()\n\n .cartesian_product((row.saturating_sub(1)..=row.saturating_add(1)).into_iter())\n\n .filter(|(x, y)| *x != col || *y != row)\n\n .collect()\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 96, "score": 85873.59422050524 }, { "content": "fn match_fields<'a>(\n\n rules: &'a HashMap<&'a str, (u32, u32, u32, u32)>,\n\n valid_tickets: &'a Vec<Vec<u32>>,\n\n) -> HashMap<&'a str, usize> {\n\n let length = valid_tickets[0].len();\n\n let mut tmp: HashMap<&str, Vec<_>> = HashMap::new();\n\n\n\n for (field, limit) in rules {\n\n for idx in 0..length {\n\n if valid_tickets.iter().all(|ticket| {\n\n let value = ticket[idx];\n\n (limit.0 <= value && value <= limit.1) || (limit.2 <= value && value <= limit.3)\n\n }) {\n\n match tmp.get_mut(field) {\n\n Some(v) => v.push(idx),\n\n None => {\n\n tmp.insert(*field, vec![idx]);\n\n }\n\n }\n\n }\n", "file_path": "day16/src/main.rs", "rank": 97, "score": 85627.50596176874 }, { "content": "fn find_dest(nums: &[u32], current: u32) -> Result<(usize, u32), Error> {\n\n let min = nums\n\n .iter()\n\n .cloned()\n\n .min()\n\n .ok_or_else(|| format!(\"Duplicate values in {:?}\", nums))?;\n\n let max = nums\n\n .iter()\n\n .cloned()\n\n .max()\n\n .ok_or_else(|| format!(\"Duplicate values in {:?}\", nums))?;\n\n\n\n let mut res = current - 1;\n\n while !nums.contains(&res) {\n\n if res < min {\n\n res = max\n\n } else {\n\n res -= 1\n\n }\n\n }\n\n let idx = nums\n\n .into_iter()\n\n .position(|x| *x == res)\n\n .ok_or_else(|| format!(\"Cannot find {} in {:?}\", res, nums))?;\n\n Ok((idx, res))\n\n}\n\n\n", "file_path": "day23/src/main.rs", "rank": 98, "score": 85626.01840651735 }, { "content": "fn count_non_allergy(\n\n ai_map: &HashMap<&str, HashSet<&str>>,\n\n lines: &[(Vec<&str>, Vec<&str>)],\n\n) -> usize {\n\n let allergic: HashSet<_> = ai_map.values().flatten().cloned().collect();\n\n lines\n\n .into_iter()\n\n .map(|(ingredients, _)| {\n\n 
ingredients\n\n .into_iter()\n\n .filter(|item| !allergic.contains(**item))\n\n .count()\n\n })\n\n .sum()\n\n}\n\n\n", "file_path": "day21/src/main.rs", "rank": 99, "score": 84236.73688331769 } ]
Rust
azure_sdk_cosmos/src/resource_quota.rs
guywaldman/azure-sdk-for-rust
638d0322d1f397be253b609963cc1961324bdf04
use crate::errors::TokenParsingError; #[derive(Debug, Clone, PartialEq, PartialOrd)] pub enum ResourceQuota { Databases(u64), StoredProcedures(u64), Collections(u64), DocumentSize(u64), DocumentsSize(u64), DocumentsCount(i64), CollectionSize(u64), Users(u64), Permissions(u64), Triggers(u64), Functions(u64), ClientEncryptionKeys(u64), } const DATABASES: &str = "databases="; const STORED_PROCEDURES: &str = "storedProcedures="; const COLLECTIONS: &str = "collections="; const DOCUMENT_SIZE: &str = "documentSize="; const DOCUMENTS_SIZE: &str = "documentsSize="; const DOCUMENTS_COUNT: &str = "documentsCount="; const COLLECTION_SIZE: &str = "collectionSize="; const USERS: &str = "users="; const PERMISSIONS: &str = "permissions="; const TRIGGERS: &str = "triggers="; const FUNCTIONS: &str = "functions="; const CLIENT_ENCRYPTION_KEYS: &str = "clientEncryptionKeys="; pub(crate) fn resource_quotas_from_str(s: &str) -> Result<Vec<ResourceQuota>, failure::Error> { debug!("resource_quotas_from_str(\"{}\") called", s); let tokens: Vec<&str> = s.split(';').collect(); let mut v = Vec::with_capacity(tokens.len()); for token in tokens.into_iter().filter(|token| !token.is_empty()) { debug!("processing token == {}", token); if token.starts_with(DATABASES) { v.push(ResourceQuota::Databases(str::parse( &token[DATABASES.len()..], )?)); } else if token.starts_with(STORED_PROCEDURES) { v.push(ResourceQuota::StoredProcedures(str::parse( &token[STORED_PROCEDURES.len()..], )?)); } else if token.starts_with(COLLECTIONS) { v.push(ResourceQuota::Collections(str::parse( &token[COLLECTIONS.len()..], )?)); } else if token.starts_with(DOCUMENT_SIZE) { v.push(ResourceQuota::DocumentSize(str::parse( &token[DOCUMENT_SIZE.len()..], )?)); } else if token.starts_with(DOCUMENTS_SIZE) { v.push(ResourceQuota::DocumentsSize(str::parse( &token[DOCUMENTS_SIZE.len()..], )?)); } else if token.starts_with(DOCUMENTS_COUNT) { v.push(ResourceQuota::DocumentsCount(str::parse( &token[DOCUMENTS_COUNT.len()..], )?)); } 
else if token.starts_with(COLLECTION_SIZE) { v.push(ResourceQuota::CollectionSize(str::parse( &token[COLLECTION_SIZE.len()..], )?)); } else if token.starts_with(USERS) { v.push(ResourceQuota::Users(str::parse(&token[USERS.len()..])?)); } else if token.starts_with(PERMISSIONS) { v.push(ResourceQuota::Permissions(str::parse( &token[PERMISSIONS.len()..], )?)); } else if token.starts_with(TRIGGERS) { v.push(ResourceQuota::Triggers(str::parse( &token[TRIGGERS.len()..], )?)); } else if token.starts_with(FUNCTIONS) { v.push(ResourceQuota::Functions(str::parse( &token[FUNCTIONS.len()..], )?)); } else if token.starts_with(CLIENT_ENCRYPTION_KEYS) { v.push(ResourceQuota::ClientEncryptionKeys(str::parse( &token[CLIENT_ENCRYPTION_KEYS.len()..], )?)); } else { return Err(TokenParsingError::UnsupportedToken { token: token.to_string(), s: s.to_owned(), } .into()); } debug!("v == {:#?}", v); } Ok(v) } #[cfg(test)] mod tests { use super::*; #[test] fn parse_resource_quota() { let resource_quota = resource_quotas_from_str("storedProcedures=25;").unwrap(); assert_eq!(resource_quota, vec![ResourceQuota::StoredProcedures(25)]); let resource_quota = resource_quotas_from_str( "databases=100;collections=5000;users=500000;permissions=2000000;clientEncryptionKeys=13;", ) .unwrap(); assert_eq!( resource_quota, vec![ ResourceQuota::Databases(100), ResourceQuota::Collections(5000), ResourceQuota::Users(500000), ResourceQuota::Permissions(2000000), ResourceQuota::ClientEncryptionKeys(13) ] ); let resource_quota = resource_quotas_from_str("collections=27;").unwrap(); assert_eq!(resource_quota, vec![ResourceQuota::Collections(27)]); let resource_quota = resource_quotas_from_str("documentSize=0;documentsSize=2;collectionSize=3;").unwrap(); assert_eq!( resource_quota, vec![ ResourceQuota::DocumentSize(0), ResourceQuota::DocumentsSize(2), ResourceQuota::CollectionSize(3) ] ); let resource_quota = resource_quotas_from_str("users=500000;").unwrap(); assert_eq!(resource_quota, 
vec![ResourceQuota::Users(500000)]); let resource_quota = resource_quotas_from_str("permissions=2000000;").unwrap(); assert_eq!(resource_quota, vec![ResourceQuota::Permissions(2000000)]); let resource_quota = resource_quotas_from_str("triggers=25;").unwrap(); assert_eq!(resource_quota, vec![ResourceQuota::Triggers(25)]); let resource_quota = resource_quotas_from_str("functions=26;").unwrap(); assert_eq!(resource_quota, vec![ResourceQuota::Functions(26)]); let resource_quota = resource_quotas_from_str("clientEncryptionKeys=13;").unwrap(); assert_eq!(resource_quota, vec![ResourceQuota::ClientEncryptionKeys(13)]); } }
use crate::errors::TokenParsingError; #[derive(Debug, Clone, PartialEq, PartialOrd)] pub enum ResourceQuota { Databases(u64), StoredProcedures(u64), Collections(u64), DocumentSize(u64), DocumentsSize(u64), DocumentsCount(i64), CollectionSize(u64), Users(u64), Permissions(u64), Triggers(u64), Functions(u64), ClientEncryptionKeys(u64), } const DATABASES: &str = "databases="; const STORED_PROCEDURES: &str = "storedProcedures="; const COLLECTIONS: &str = "collections="; const DOCUMENT_SIZE: &str = "documentSize="; const DOCUMENTS_SIZE: &str = "documentsSize="; const DOCUMENTS_COUNT: &str = "documentsCount="; const COLLECTION_SIZE: &str = "collectionSize="; const USERS: &str = "users="; const PERMISSIONS: &str = "permissions="; const TRIGGERS: &str = "triggers="; const FUNCTIONS: &str = "functions="; const CLIENT_ENCRYPTION_KEYS: &str = "clientEncryptionKeys="; pub(crate) fn resource_quotas_from_str(s: &str) -> Result<Vec<ResourceQuota>, failure::Error> { debug!("resource_quotas_from_str(\"{}\") called", s); let tokens: Vec<&str> = s.split(';').collect(); let mut v = Vec::with_capacity(tokens.len()); for token in tokens.into_iter().filter(|token| !token.is_empty()) { debug!("processing token == {}", token); if token.starts_with(DATABASES) { v.push(ResourceQuota::Databases(str::parse( &token[DATABASES.len()..], )?)); } else if token.starts_with(STORED_PROCEDURES) { v.push(ResourceQuota::StoredProcedures(str::parse( &token[STORED_PROCEDURES.len()..], )?)); } else if token.starts_with(COLLECTIONS) { v.push(ResourceQuota::Collections(str::parse( &token[COLLECTIONS.len()..], )?)); } else if token.starts_with(DOCUMENT_SIZE) { v.push(ResourceQuota::DocumentSize(str::parse( &token[DOCUMENT_SIZE.len()..], )?)); } else if token.starts_with(DOCUMENTS_SIZE) { v.push(ResourceQuota::DocumentsSize(str::parse( &token[DOCUMENTS_SIZE.len()..], )?)); } else if token.starts_with(DOCUMENTS_COUNT) { v.push(ResourceQuota::DocumentsCount(str::parse( &token[DOCUMENTS_COUNT.len()..], )?)); } 
else if token.starts_with(COLLECTION_SIZE) { v.push(ResourceQuota::CollectionSize(str::parse( &token[COLLECTION_SIZE.len()..], )?)); } else if token.starts_with(USERS) { v.push(ResourceQuota::Users(str::parse(&token[USERS.len()..])?)); } else if token.starts_with(PERMISSIONS) { v.push(ResourceQuota::Permissions(str::parse( &token[PERMISSIONS.len()..], )?)); } else if token.starts_with(TRIGGERS) { v.push(ResourceQuota::Triggers(str::parse( &token[TRIGGERS.len()..], )?)); } else if token.starts_with(FUNCTIONS) { v.push(ResourceQuota::Functions(str::parse( &token[FUNCTIONS.len()..], )?)); } else if token.starts_with(CLIENT_ENCRYPTION_KEYS) { v.push(ResourceQuota::ClientEncryptionKeys(str::parse( &token[CLIENT_ENCRYPTION_KEYS.len()..], )?)); } else { return Err(TokenParsingError::UnsupportedToken { token: token.to_string(), s: s.to_owned(), } .into()); } debug!("v == {:#?}", v); } Ok(v) } #[cfg(test)] mod tests { use super::*; #[test]
}
fn parse_resource_quota() { let resource_quota = resource_quotas_from_str("storedProcedures=25;").unwrap(); assert_eq!(resource_quota, vec![ResourceQuota::StoredProcedures(25)]); let resource_quota = resource_quotas_from_str( "databases=100;collections=5000;users=500000;permissions=2000000;clientEncryptionKeys=13;", ) .unwrap(); assert_eq!( resource_quota, vec![ ResourceQuota::Databases(100), ResourceQuota::Collections(5000), ResourceQuota::Users(500000), ResourceQuota::Permissions(2000000), ResourceQuota::ClientEncryptionKeys(13) ] ); let resource_quota = resource_quotas_from_str("collections=27;").unwrap(); assert_eq!(resource_quota, vec![ResourceQuota::Collections(27)]); let resource_quota = resource_quotas_from_str("documentSize=0;documentsSize=2;collectionSize=3;").unwrap(); assert_eq!( resource_quota, vec![ ResourceQuota::DocumentSize(0), ResourceQuota::DocumentsSize(2), ResourceQuota::CollectionSize(3) ] ); let resource_quota = resource_quotas_from_str("users=500000;").unwrap(); assert_eq!(resource_quota, vec![ResourceQuota::Users(500000)]); let resource_quota = resource_quotas_from_str("permissions=2000000;").unwrap(); assert_eq!(resource_quota, vec![ResourceQuota::Permissions(2000000)]); let resource_quota = resource_quotas_from_str("triggers=25;").unwrap(); assert_eq!(resource_quota, vec![ResourceQuota::Triggers(25)]); let resource_quota = resource_quotas_from_str("functions=26;").unwrap(); assert_eq!(resource_quota, vec![ResourceQuota::Functions(26)]); let resource_quota = resource_quotas_from_str("clientEncryptionKeys=13;").unwrap(); assert_eq!(resource_quota, vec![ResourceQuota::ClientEncryptionKeys(13)]); }
function_block-full_function
[ { "content": "pub fn with_azure_sas(account: &str, sas_token: &str) -> KeyClient {\n\n let client = hyper::Client::builder().build(HttpsConnector::new());\n\n let params = get_sas_token_parms(sas_token);\n\n\n\n KeyClient::new(\n\n account.to_owned(),\n\n String::new(),\n\n Some(params),\n\n client,\n\n format!(\"https://{}.blob.core.windows.net\", account),\n\n format!(\"https://{}.table.core.windows.net\", account),\n\n format!(\"https://{}.queue.core.windows.net\", account),\n\n )\n\n}\n\n\n", "file_path": "azure_sdk_storage_core/src/client.rs", "rank": 0, "score": 261055.4275256693 }, { "content": " function updateMetadataCallback(err, documents, responseOptions) {\n\n if (err) throw new Error(\\\"Error\\\" + err.message);\n\n if (documents.length != 1) throw 'Unable to find metadata document';\n\n var metadataDocument = documents[0];\n\n\n\n // update metadata\n\n metadataDocument.createdDocuments += 1; metadataDocument.createdNames += \\\" \\\" + createdDocument.id;\n\n var accept = collection.replaceDocument(metadataDocument._self,\n\n metadataDocument,\n\n function(err, docReplaced) {\n\n if (err) throw\\ \"Unable to update metadata, abort\\\";\n\n });\n\n if (!accept) throw\\ \"Unable to update metadata, abort\\\";\n\n return;\n\n }\n\n}\"#;\n\n\n\n#[tokio::test]\n\nasync fn trigger() -> Result<(), Box<dyn Error>> {\n\n const DATABASE_NAME: &str = \"test-cosmos-db-trigger\";\n", "file_path": "azure_sdk_cosmos/tests/trigger.rs", "rank": 1, "score": 243601.30982793157 }, { "content": "pub trait UserDefinedFunctionClient<C, D, COLL>: HasCollectionClient<C, D, COLL>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n{\n\n fn user_defined_function_name(&self) -> &str;\n\n\n\n fn create_user_defined_function(\n\n &self,\n\n ) -> requests::CreateOrReplaceUserDefinedFunctionBuilder<'_, '_, C, D, COLL, No>;\n\n fn replace_user_defined_function(\n\n &self,\n\n ) -> requests::CreateOrReplaceUserDefinedFunctionBuilder<'_, 
'_, C, D, COLL, No>;\n\n fn delete_user_defined_function(\n\n &self,\n\n ) -> requests::DeleteUserDefinedFunctionBuilder<'_, '_, C, D, COLL>;\n\n\n\n fn prepare_request(&self, method: hyper::Method) -> http::request::Builder {\n\n self.cosmos_client().prepare_request(\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 2, "score": 223399.09648543177 }, { "content": "pub fn new(account: &str, key: &str) -> KeyClient {\n\n with_access_key(account, key)\n\n}\n\n\n", "file_path": "azure_sdk_storage_core/src/client.rs", "rank": 3, "score": 222177.11750624285 }, { "content": "pub fn with_access_key(account: &str, key: &str) -> KeyClient {\n\n let client = hyper::Client::builder().build(HttpsConnector::new());\n\n\n\n KeyClient::new(\n\n account.to_owned(),\n\n key.to_owned(),\n\n None,\n\n client,\n\n format!(\"https://{}.blob.core.windows.net\", account),\n\n format!(\"https://{}.table.core.windows.net\", account),\n\n format!(\"https://{}.queue.core.windows.net\", account),\n\n )\n\n}\n\n\n", "file_path": "azure_sdk_storage_core/src/client.rs", "rank": 4, "score": 220070.16164392472 }, { "content": "pub trait HasUserDefinedFunctionClient<C, D, COLL, UDF>: HasCollectionClient<C, D, COLL>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n UDF: UserDefinedFunctionClient<C, D, COLL>,\n\n{\n\n fn user_defined_function_client(&self) -> &UDF;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 5, "score": 219334.7070287167 }, { "content": "pub trait HasTriggerClient<C, D, COLL, TRIGGER>: HasCollectionClient<C, D, COLL>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n TRIGGER: TriggerClient<C, D, COLL>,\n\n{\n\n fn trigger_client(&self) -> &TRIGGER;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 6, "score": 217827.93899287126 }, { "content": "#[inline]\n\npub fn get_json_mime_fullmetadata() -> &'static str {\n\n \"application/json; 
odata=fullmetadata\"\n\n}\n\n\n\nmod test {\n\n use super::*;\n\n\n\n struct MockClientEndpoint {\n\n account: String,\n\n key: String,\n\n }\n\n\n\n impl ClientEndpoint for MockClientEndpoint {\n\n fn account(&self) -> &str {\n\n &self.account\n\n }\n\n fn key(&self) -> &str {\n\n &self.key\n\n }\n\n }\n", "file_path": "azure_sdk_storage_core/src/rest_client.rs", "rank": 7, "score": 213345.73270734376 }, { "content": "#[inline]\n\npub fn get_default_json_mime() -> &'static str {\n\n \"application/json; charset=utf-8\"\n\n}\n\n\n", "file_path": "azure_sdk_storage_core/src/rest_client.rs", "rank": 8, "score": 213345.73270734376 }, { "content": "#[inline]\n\npub fn get_json_mime_nometadata() -> &'static str {\n\n \"application/json; odata=nometadata\"\n\n}\n\n\n", "file_path": "azure_sdk_storage_core/src/rest_client.rs", "rank": 9, "score": 213345.73270734373 }, { "content": "pub trait HasPermissionClient<C, D, USER, PERMISSION>: HasUserClient<C, D, USER>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n USER: UserClient<C, D>,\n\n PERMISSION: PermissionClient<C, D, USER>,\n\n{\n\n fn permission_client(&self) -> &PERMISSION;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 10, "score": 204673.9386104652 }, { "content": "pub fn continuation_token_from_headers_optional(\n\n headers: &HeaderMap,\n\n) -> Result<Option<String>, AzureError> {\n\n if let Some(hc) = headers.get(HEADER_CONTINUATION) {\n\n Ok(Some(hc.to_str()?.to_owned()))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "azure_sdk_core/src/lib.rs", "rank": 11, "score": 201612.56267157826 }, { "content": "#[inline]\n\npub fn inner_text(node: &Element) -> Result<&str, TraversingError> {\n\n for child in &node.children {\n\n match *child {\n\n CharacterNode(ref txt) => return Ok(txt),\n\n _ => continue,\n\n };\n\n }\n\n\n\n Ok(\"\")\n\n\n\n //debug!(\"\\n!!! 
node == {}\", node);\n\n //Err(TraversingError::TextNotFound)\n\n}\n\n\n", "file_path": "azure_sdk_core/src/parsing.rs", "rank": 12, "score": 199326.9498764153 }, { "content": " function updateMetadataCallback(err, documents, responseOptions) {\n\n if (err) throw new Error(\\\"Error\\\" + err.message);\n\n if (documents.length != 1) throw 'Unable to find metadata document';\n\n var metadataDocument = documents[0];\n\n\n\n // update metadata\n\n metadataDocument.createdDocuments += 1; metadataDocument.createdNames += \\\" \\\" + createdDocument.id;\n\n var accept = collection.replaceDocument(metadataDocument._self,\n\n metadataDocument,\n\n function(err, docReplaced) {\n\n if (err) throw\\ \"Unable to update metadata, abort\\\";\n\n });\n\n if (!accept) throw\\ \"Unable to update metadata, abort\\\";\n\n return;\n\n }\n\n}\"#;\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn Error>> {\n\n let database = std::env::args()\n", "file_path": "azure_sdk_cosmos/examples/trigger_00.rs", "rank": 13, "score": 197619.4587006807 }, { "content": "pub fn server_from_headers(headers: &HeaderMap) -> Result<&str, AzureError> {\n\n Ok(headers\n\n .get(SERVER)\n\n .ok_or_else(|| AzureError::HeaderNotFound(SERVER.to_owned()))?\n\n .to_str()?)\n\n}\n\n\n", "file_path": "azure_sdk_core/src/lib.rs", "rank": 14, "score": 197091.17129799642 }, { "content": "pub fn version_from_headers(headers: &HeaderMap) -> Result<&str, AzureError> {\n\n Ok(headers\n\n .get(VERSION)\n\n .ok_or_else(|| AzureError::HeaderNotFound(VERSION.to_owned()))?\n\n .to_str()?)\n\n}\n\n\n", "file_path": "azure_sdk_core/src/lib.rs", "rank": 15, "score": 197091.17129799642 }, { "content": "pub trait PermissionClient<C, D, USER>: HasUserClient<C, D, USER>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n USER: UserClient<C, D>,\n\n{\n\n fn permission_name(&self) -> &str;\n\n\n\n fn create_permission(&self) -> requests::CreatePermissionBuilder<'_, '_, C, D, USER>;\n\n fn get_permission(&self) -> 
requests::GetPermissionBuilder<'_, '_, C, D, USER>;\n\n fn replace_permission(&self) -> requests::ReplacePermissionBuilder<'_, '_, C, D, USER>;\n\n fn delete_permission(&self) -> requests::DeletePermissionsBuilder<'_, '_, C, D, USER>;\n\n\n\n fn prepare_request(&self, method: hyper::Method) -> http::request::Builder {\n\n self.cosmos_client().prepare_request(\n\n &format!(\n\n \"dbs/{}/users/{}/permissions\",\n\n self.database_client().database_name(),\n\n self.user_client().user_name()\n\n ),\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 16, "score": 195620.1106364777 }, { "content": "pub fn from_connection_string(connection_string: &str) -> Result<KeyClient, AzureError> {\n\n let client = hyper::Client::builder().build(HttpsConnector::new());\n\n\n\n match ConnectionString::new(connection_string)? {\n\n ConnectionString {\n\n account_name: Some(account),\n\n account_key: Some(_),\n\n sas: Some(sas_token),\n\n ..\n\n } => {\n\n log::warn!(\"Both account key and SAS defined in connection string. 
Using only the provided SAS.\");\n\n Ok(KeyClient::new(\n\n account.to_owned(),\n\n String::new(),\n\n Some(get_sas_token_parms(sas_token)),\n\n client,\n\n format!(\"https://{}.blob.core.windows.net\", account),\n\n format!(\"https://{}.table.core.windows.net\", account), \n\n format!(\"https://{}.queue.core.windows.net\", account),\n\n ))\n", "file_path": "azure_sdk_storage_core/src/client.rs", "rank": 17, "score": 192837.8103968246 }, { "content": "function updateMetadata() {\n\n var context = getContext();\n\n var collection = context.getCollection();\n\n var response = context.getResponse();\n\n var createdDocument = response.getBody();\n\n\n\n // query for metadata document\n\n var filterQuery = 'SELECT * FROM root r WHERE r.id = \\\"_metadata\\\"';\n\n var accept = collection.queryDocuments(collection.getSelfLink(), filterQuery,\n\n updateMetadataCallback);\n\n if (!accept) throw\\ \"Unable to update metadata, abort\\\";\n\n\n", "file_path": "azure_sdk_cosmos/tests/trigger.rs", "rank": 18, "score": 189384.29918635834 }, { "content": "#[inline]\n\npub fn utc_date_from_rfc2822(date: &str) -> Result<DateTime<Utc>, AzureError> {\n\n let date = DateTime::parse_from_rfc2822(date)?;\n\n Ok(DateTime::from_utc(date.naive_utc(), Utc))\n\n}\n\n\n", "file_path": "azure_sdk_core/src/lib.rs", "rank": 19, "score": 189192.04936783997 }, { "content": "pub trait TriggerClient<C, D, COLL>: HasCollectionClient<C, D, COLL>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n{\n\n fn trigger_name(&self) -> &str;\n\n\n\n fn create_trigger(&self)\n\n -> requests::CreateOrReplaceTriggerBuilder<'_, C, D, COLL, No, No, No>;\n\n fn replace_trigger(\n\n &self,\n\n ) -> requests::CreateOrReplaceTriggerBuilder<'_, C, D, COLL, No, No, No>;\n\n fn delete_trigger(&self) -> requests::DeleteTriggerBuilder<'_, '_, C, D, COLL>;\n\n\n\n fn prepare_request(&self, method: hyper::Method) -> http::request::Builder {\n\n 
self.cosmos_client().prepare_request(\n\n &format!(\n\n \"dbs/{}/colls/{}/triggers\",\n\n self.database_client().database_name(),\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 20, "score": 187414.81673966016 }, { "content": "pub trait DocumentClient<C, D, COLL>: HasCollectionClient<C, D, COLL>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n{\n\n fn document_name(&self) -> &str;\n\n fn partition_keys(&self) -> &PartitionKeys;\n\n\n\n fn get_document(&self) -> requests::GetDocumentBuilder<'_, '_, C, D, COLL>;\n\n fn delete_document(&self) -> requests::DeleteDocumentBuilder<'_, C, D, COLL>;\n\n fn list_attachments(&self) -> requests::ListAttachmentsBuilder<'_, '_, C, D, COLL>;\n\n\n\n fn prepare_request(&self, method: hyper::Method) -> http::request::Builder {\n\n self.cosmos_client().prepare_request(\n\n &format!(\n\n \"dbs/{}/colls/{}/docs\",\n\n self.database_client().database_name(),\n\n self.collection_client().collection_name()\n\n ),\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 21, "score": 187080.0175476979 }, { "content": "#[inline]\n\npub fn find_subnodes<'a>(node: &'a Element, subnode: &str) -> Vec<&'a Element> {\n\n node.children\n\n .iter()\n\n .filter(|x| match **x {\n\n ElementNode(ref mynode) => mynode.name == subnode,\n\n _ => false,\n\n })\n\n .map(|x| match *x {\n\n ElementNode(ref mynode) => mynode,\n\n _ => unreachable!(),\n\n })\n\n .collect::<Vec<_>>()\n\n}\n\n\n", "file_path": "azure_sdk_core/src/parsing.rs", "rank": 22, "score": 186807.80200395104 }, { "content": "pub trait CollectionName: std::fmt::Debug {\n\n fn name(&self) -> &str;\n\n}\n\n\n\nimpl CollectionName for Collection {\n\n fn name(&self) -> &str {\n\n &self.id\n\n }\n\n}\n\n\n\nimpl CollectionName for &str {\n\n fn name(&self) -> &str {\n\n self\n\n }\n\n}\n\n\n\nimpl CollectionName for String {\n\n fn name(&self) -> &str {\n\n self.as_ref()\n\n }\n\n}\n", "file_path": "azure_sdk_cosmos/src/collection.rs", 
"rank": 23, "score": 184446.05381600064 }, { "content": "pub trait HasDocumentClient<C, D, COLL, DOC>: HasCollectionClient<C, D, COLL>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n DOC: DocumentClient<C, D, COLL>,\n\n{\n\n fn document_client(&self) -> &DOC;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 24, "score": 183500.16828707216 }, { "content": "pub trait PermissionClientRequired<'a, C, D, USER>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n USER: UserClient<C, D>,\n\n{\n\n fn permission_client(&self) -> &'a dyn PermissionClient<C, D, USER>;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/lib.rs", "rank": 25, "score": 182664.34154006574 }, { "content": "pub trait UserDefinedFunctionName: std::fmt::Debug {\n\n fn name(&self) -> &str;\n\n}\n\n\n\nimpl UserDefinedFunctionName for &str {\n\n fn name(&self) -> &str {\n\n self\n\n }\n\n}\n\n\n\nimpl UserDefinedFunctionName for String {\n\n fn name(&self) -> &str {\n\n self.as_ref()\n\n }\n\n}\n", "file_path": "azure_sdk_cosmos/src/user_defined_function/user_defined_function_name.rs", "rank": 26, "score": 181740.32909933937 }, { "content": "pub fn session_token_from_headers(headers: &HeaderMap) -> Result<SessionToken, AzureError> {\n\n Ok(headers\n\n .get(SESSION_TOKEN)\n\n .ok_or_else(|| AzureError::HeaderNotFound(SESSION_TOKEN.to_owned()))?\n\n .to_str()?\n\n .to_owned())\n\n}\n\n\n", "file_path": "azure_sdk_core/src/lib.rs", "rank": 27, "score": 181179.28142408343 }, { "content": "function tax(income) {\n\n if (income == undefined)\n\n throw 'no input';\n\n if (income < 1000)\n\n return income * 0.1;\n\n else if (income < 10000)\n\n return income * 0.2;\n\n else\n\n return income * 0.4;\n\n}\"#;\n\n\n\n#[tokio::test]\n\nasync fn user_defined_function00() -> Result<(), Box<dyn Error>> {\n\n const DATABASE_NAME: &str = \"test-cosmos-db-udf\";\n\n const COLLECTION_NAME: &str = \"test-udf\";\n\n const USER_DEFINED_FUNCTION_NAME: &str = 
\"test\";\n\n\n\n let client = setup::initialize().unwrap();\n\n\n\n // create a temp database\n", "file_path": "azure_sdk_cosmos/tests/user_defined_function00.rs", "rank": 28, "score": 180429.52558721104 }, { "content": "pub fn with_bearer_token<'a, A, BT>(account: A, bearer_token: BT) -> BearerTokenClient<'a>\n\nwhere\n\n A: Into<Cow<'a, str>>,\n\n BT: Into<Cow<'a, str>>,\n\n{\n\n let client = hyper::Client::builder().build(HttpsConnector::new());\n\n\n\n BearerTokenClient::new(account.into(), bearer_token.into(), client)\n\n}\n\n\n", "file_path": "azure_sdk_storage_core/src/client.rs", "rank": 29, "score": 179566.497363733 }, { "content": "pub trait WithPermissionClient<'a, C, D, USER, PERMISSION>: Debug + Send + Sync\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n USER: UserClient<C, D>,\n\n PERMISSION: PermissionClient<C, D, USER>,\n\n{\n\n fn with_permission_client<IntoCowStr>(&'a self, permission_name: IntoCowStr) -> PERMISSION\n\n where\n\n IntoCowStr: Into<Cow<'a, str>>;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 30, "score": 179448.07528557297 }, { "content": "pub trait IntoPermissionClient<'a, C, D, USER, PERMISSION>: Debug + Send + Sync\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n USER: UserClient<C, D>,\n\n PERMISSION: PermissionClient<C, D, USER>,\n\n{\n\n fn into_permission_client<IntoCowStr>(self, permission_name: IntoCowStr) -> PERMISSION\n\n where\n\n IntoCowStr: Into<Cow<'a, str>>;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 31, "score": 179448.07528557297 }, { "content": "#[inline]\n\n#[cfg(feature = \"azurite_workaround\")]\n\npub fn from_azure_time(s: &str) -> Result<chrono::DateTime<chrono::Utc>, chrono::ParseError> {\n\n if let Ok(dt) = chrono::DateTime::parse_from_rfc2822(s) {\n\n let dt_utc: chrono::DateTime<chrono::Utc> = dt.with_timezone(&chrono::Utc);\n\n Ok(dt_utc)\n\n } else {\n\n log::warn!(\"Received an invalid date: {}, returning now()\", s);\n\n 
Ok(chrono::Utc::now())\n\n }\n\n}\n\n\n", "file_path": "azure_sdk_core/src/parsing.rs", "rank": 32, "score": 178294.0956687941 }, { "content": "#[inline]\n\npub fn cast_must<'a, T>(node: &'a Element, path: &[&str]) -> Result<T, TraversingError>\n\nwhere\n\n T: FromStringOptional<T>,\n\n{\n\n let node = traverse_single_must(node, path)?;\n\n let itxt = inner_text(node)?;\n\n Ok(T::from_str_optional(itxt)?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use chrono::{Datelike, Timelike};\n\n use xml::Element;\n\n\n\n const XML: &'static str = \"<?xml version=\\\"1.0\\\" encoding=\\\"UTF-8\\\"?>\n\n<EnumerationResults \\\n\n ServiceEndpoint=\\\"http://mindrust.blob.core.windows.net/\\\">\n\n \\\n\n <Containers>\n\n <Container>\n", "file_path": "azure_sdk_core/src/parsing.rs", "rank": 33, "score": 177893.86666521424 }, { "content": "#[async_trait::async_trait]\n\npub trait TokenCredential {\n\n /// Gets a `TokenResponse` for the specified resource\n\n async fn get_token(&self, resource: &str) -> Result<TokenResponse, AzureError>;\n\n}\n", "file_path": "azure_sdk_auth_aad/src/token_credentials/mod.rs", "rank": 34, "score": 177629.5837478513 }, { "content": "#[inline]\n\npub fn cast_optional<'a, T>(node: &'a Element, path: &[&str]) -> Result<Option<T>, TraversingError>\n\nwhere\n\n T: FromStringOptional<T>,\n\n{\n\n match traverse_single_optional(node, path)? 
{\n\n Some(e) => match inner_text(e) {\n\n Ok(txt) => Ok(Some(T::from_str_optional(txt)?)),\n\n Err(_) => Ok(None),\n\n },\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "azure_sdk_core/src/parsing.rs", "rank": 35, "score": 173653.56195003534 }, { "content": "fn generate_resource_link(u: &str) -> &str {\n\n static ENDING_STRINGS: &[&str] = &[\n\n \"dbs\",\n\n \"colls\",\n\n \"docs\",\n\n \"sprocs\",\n\n \"users\",\n\n \"permissions\",\n\n \"attachments\",\n\n \"pkranges\",\n\n \"udfs\",\n\n \"triggers\",\n\n ];\n\n\n\n // store the element only if it does not end with dbs, colls or docs\n\n let p = u;\n\n let len = p.len();\n\n for str_to_match in ENDING_STRINGS {\n\n let end_len = str_to_match.len();\n\n\n", "file_path": "azure_sdk_cosmos/src/clients/cosmos_struct.rs", "rank": 36, "score": 172982.39761336753 }, { "content": "pub trait CollectionSupport<'a> {\n\n type O;\n\n fn with_collection(self, collection: &'a Collection) -> Self::O;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/lib.rs", "rank": 37, "score": 171340.6399141554 }, { "content": "pub trait CollectionRequired<'a> {\n\n fn collection(&self) -> &'a Collection;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/lib.rs", "rank": 38, "score": 171340.6399141554 }, { "content": "pub fn initialize() -> Result<CosmosStruct<'static, DefaultCosmosUri>, AzureError> {\n\n let account = std::env::var(\"COSMOS_ACCOUNT\").expect(\"Set env variable COSMOS_ACCOUNT first!\");\n\n let key =\n\n std::env::var(\"COSMOS_MASTER_KEY\").expect(\"Set env variable COSMOS_MASTER_KEY first!\");\n\n\n\n let authorization_token = AuthorizationToken::new_master(&key)?;\n\n let client = ClientBuilder::new(account, authorization_token)?;\n\n\n\n Ok(client)\n\n}\n", "file_path": "azure_sdk_cosmos/tests/setup.rs", "rank": 39, "score": 170231.61811126312 }, { "content": "fn split(b: &str) -> Result<(&str, &str), AzurePathParseError> {\n\n let slash_pos = match b.find('/') {\n\n Some(p) => p,\n\n None => return 
Err(AzurePathParseError::PathSeparatorNotFoundError),\n\n };\n\n\n\n if slash_pos == 0 {\n\n return Err(AzurePathParseError::MissingContainerError);\n\n }\n\n\n\n if slash_pos + 1 == b.len() {\n\n return Err(AzurePathParseError::MissingBlobError);\n\n }\n\n\n\n if b[slash_pos + 1..].contains('/') {\n\n return Err(AzurePathParseError::MultiplePathSeparatorsFoundError);\n\n }\n\n\n\n Ok((&b[0..slash_pos], &b[slash_pos + 1..]))\n\n}\n", "file_path": "azure_sdk_storage_core/src/into_azure_path.rs", "rank": 40, "score": 169295.44985433173 }, { "content": "fn encode_str_to_sign(str_to_sign: &str, hmac_key: &str) -> String {\n\n let key = hmac::Key::new(ring::hmac::HMAC_SHA256, &base64::decode(hmac_key).unwrap());\n\n let sig = hmac::sign(&key, str_to_sign.as_bytes());\n\n\n\n // let res = hmac.result();\n\n // debug!(\"{:?}\", res.code());\n\n\n\n base64::encode(sig.as_ref())\n\n}\n\n\n", "file_path": "azure_sdk_storage_core/src/rest_client.rs", "rank": 41, "score": 169166.7066452923 }, { "content": "pub trait CollectionNameSupport<'a> {\n\n type O;\n\n fn with_collection_name(self, collection_name: &'a dyn CollectionName) -> Self::O;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/lib.rs", "rank": 42, "score": 168576.0617212374 }, { "content": "pub trait CollectionNameRequired<'a> {\n\n fn collection_name(&self) -> &'a dyn CollectionName;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/lib.rs", "rank": 43, "score": 168576.0617212374 }, { "content": "pub trait UserDefinedFunctionBodyRequired<'a> {\n\n fn body(&self) -> &'a str;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/lib.rs", "rank": 44, "score": 163365.1442534813 }, { "content": "pub trait UserDefinedFunctionBodySupport<'a> {\n\n type O;\n\n fn with_body(self, body: &'a str) -> Self::O;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/lib.rs", "rank": 45, "score": 163365.1442534813 }, { "content": "pub trait CollectionClientRequired<'a, C, D>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n{\n\n fn 
collection_client(&self) -> &'a dyn CollectionClient<C, D>;\n\n}\n\n\n\n//pub trait CollectionRequired<'a> {\n\n// fn collection(&self) -> &'a str;\n\n//}\n\n\n", "file_path": "azure_sdk_cosmos/src/lib.rs", "rank": 46, "score": 160254.3703510799 }, { "content": "pub fn main() {\n\n let master_key =\n\n std::env::var(\"STORAGE_MASTER_KEY\").expect(\"Set env variable STORAGE_MASTER_KEY first!\");\n\n\n\n let start = Utc::now() - Duration::days(1);\n\n let end = Utc::now() + Duration::days(1);\n\n\n\n let path =\n\n Url::parse(\"https://azureskdforrust.blob.core.windows.net/test/ERRORLOG.1.cut\").unwrap();\n\n\n\n let ip_range = IPRange {\n\n start: std::net::IpAddr::V4(<std::net::Ipv4Addr>::new(0, 0, 0, 0)),\n\n end: std::net::IpAddr::V4(<std::net::Ipv4Addr>::new(255, 255, 255, 255)),\n\n };\n\n\n\n let sas = BlobSASBuilder::new(&path)\n\n .with_key(&master_key)\n\n .with_validity_start(&start)\n\n .with_validity_end(&end)\n\n .with_ip_range(&ip_range)\n", "file_path": "azure_sdk_storage_core/examples/sas00.rs", "rank": 47, "score": 157460.31996591913 }, { "content": "pub fn main() {\n\n let client = get_box();\n\n println!(\"client.blob_uri() == {}\", client.blob_uri());\n\n\n\n let client_send = get_box_send();\n\n\n\n let handler = thread::spawn(move || {\n\n println!(\"client_send.blob_uri() == {}\", client_send.blob_uri());\n\n });\n\n handler.join().unwrap();\n\n\n\n let client_arc = get_arc();\n\n println!(\"client_arc.blob_uri() == {}\", client_arc.blob_uri());\n\n}\n", "file_path": "azure_sdk_storage_core/examples/box.rs", "rank": 48, "score": 157460.31996591913 }, { "content": "fn encode_str_to_sign(str_to_sign: &str, key: &[u8]) -> String {\n\n let key = hmac::Key::new(ring::hmac::HMAC_SHA256, key);\n\n let sig = hmac::sign(&key, str_to_sign.as_bytes());\n\n base64::encode(sig.as_ref())\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/clients/cosmos_struct.rs", "rank": 49, "score": 156433.56381061827 }, { "content": "#[inline]\n\npub fn traverse<'a>(\n\n 
node: &'a Element,\n\n path: &[&str],\n\n ignore_empty_leaf: bool,\n\n) -> Result<Vec<&'a Element>, TraversingError> {\n\n trace!(\n\n \"traverse(node == {:?}, path == {:?}, ignore_empty_leaf == {})\",\n\n node,\n\n path,\n\n ignore_empty_leaf\n\n );\n\n // debug!(\"path.len() == {:?}\", path.len());\n\n\n\n if path.is_empty() {\n\n let mut vec = Vec::new();\n\n vec.push(node);\n\n return Ok(vec);\n\n }\n\n\n\n let mut curnode = node;\n", "file_path": "azure_sdk_core/src/parsing.rs", "rank": 50, "score": 154907.29537127542 }, { "content": "#![cfg(all(test, feature = \"test_e2e\"))]\n\nuse azure_sdk_cosmos::collection::*;\n\nuse azure_sdk_cosmos::prelude::*;\n\nuse azure_sdk_cosmos::Offer;\n\nmod setup;\n\n\n\n#[tokio::test]\n\nasync fn create_and_delete_collection() {\n\n const DATABASE_NAME: &str = \"test-cosmos-db-create-and-delete-collection\";\n\n const COLLECTION_NAME: &str = \"test-collection-create-and-delete-collection\";\n\n\n\n let client = setup::initialize().unwrap();\n\n\n\n client\n\n .create_database()\n\n .with_database_name(&DATABASE_NAME)\n\n .execute()\n\n .await\n\n .unwrap();\n\n\n", "file_path": "azure_sdk_cosmos/tests/cosmos_collection.rs", "rank": 51, "score": 154269.09207291267 }, { "content": " let collections = database_client.list_collections().execute().await.unwrap();\n\n assert!(collections.collections.len() == 0);\n\n\n\n database_client.delete_database().execute().await.unwrap();\n\n}\n\n\n\n#[tokio::test]\n\nasync fn replace_collection() {\n\n let client = setup::initialize().unwrap();\n\n const DATABASE_NAME: &str = \"test-cosmos-db\";\n\n const COLLECTION_NAME: &str = \"test-collection\";\n\n\n\n client\n\n .create_database()\n\n .with_database_name(&DATABASE_NAME)\n\n .execute()\n\n .await\n\n .unwrap();\n\n\n\n let database_client = client.with_database_client(DATABASE_NAME);\n", "file_path": "azure_sdk_cosmos/tests/cosmos_collection.rs", "rank": 52, "score": 154259.63642013932 }, { "content": " let collections = 
database_client.list_collections().execute().await.unwrap();\n\n assert_eq!(collections.collections.len(), 1);\n\n assert_eq!(\n\n collection.collection.indexing_policy,\n\n collections.collections[0].indexing_policy\n\n );\n\n\n\n // Let's change the indexing mode!\n\n let indexes = IncludedPathIndex {\n\n kind: KeyKind::Hash,\n\n data_type: DataType::String,\n\n precision: Some(3),\n\n };\n\n\n\n let ip = IncludedPath {\n\n path: \"/*\".to_owned(),\n\n indexes: Some(vec![indexes]),\n\n };\n\n\n\n let mut new_ip = IndexingPolicy {\n", "file_path": "azure_sdk_cosmos/tests/cosmos_collection.rs", "rank": 53, "score": 154256.9371629466 }, { "content": "\n\n // try to get the previously created collection\n\n let collection_client = database_client.with_collection_client(COLLECTION_NAME);\n\n\n\n let collection_after_get = collection_client.get_collection().execute().await.unwrap();\n\n assert!(collection.collection.rid == collection_after_get.collection.rid);\n\n\n\n // check GetPartitionKeyRanges: https://docs.microsoft.com/en-us/rest/api/cosmos-db/get-partition-key-ranges\n\n collection_client\n\n .get_partition_key_ranges()\n\n .execute()\n\n .await\n\n .unwrap();\n\n\n\n // delete the collection\n\n collection_client\n\n .delete_collection()\n\n .execute()\n\n .await\n\n .unwrap();\n", "file_path": "azure_sdk_cosmos/tests/cosmos_collection.rs", "rank": 54, "score": 154249.99348854268 }, { "content": "\n\n // create a new collection\n\n let indexing_policy = IndexingPolicy {\n\n automatic: true,\n\n indexing_mode: IndexingMode::Consistent,\n\n included_paths: vec![],\n\n excluded_paths: vec![],\n\n };\n\n let collection = database_client\n\n .create_collection()\n\n .with_collection_name(&COLLECTION_NAME)\n\n .with_offer(Offer::S2)\n\n .with_partition_key(&(\"/id\".into()))\n\n .with_indexing_policy(&indexing_policy)\n\n .execute()\n\n .await\n\n .unwrap();\n\n\n\n let collection_client = database_client.with_collection_client(COLLECTION_NAME);\n\n\n", "file_path": 
"azure_sdk_cosmos/tests/cosmos_collection.rs", "rank": 55, "score": 154249.44972926332 }, { "content": " let database_client = client.with_database_client(DATABASE_NAME);\n\n\n\n // create a new collection\n\n let indexing_policy = IndexingPolicy {\n\n automatic: true,\n\n indexing_mode: IndexingMode::Consistent,\n\n included_paths: vec![],\n\n excluded_paths: vec![],\n\n };\n\n let collection = database_client\n\n .create_collection()\n\n .with_collection_name(&COLLECTION_NAME)\n\n .with_offer(Offer::S2)\n\n .with_partition_key(&(\"/id\".into()))\n\n .with_indexing_policy(&indexing_policy)\n\n .execute()\n\n .await\n\n .unwrap();\n\n let collections = database_client.list_collections().execute().await.unwrap();\n\n assert!(collections.collections.len() == 1);\n", "file_path": "azure_sdk_cosmos/tests/cosmos_collection.rs", "rank": 56, "score": 154249.41031303038 }, { "content": " automatic: true,\n\n indexing_mode: IndexingMode::Consistent,\n\n included_paths: vec![ip],\n\n excluded_paths: vec![],\n\n };\n\n\n\n new_ip\n\n .excluded_paths\n\n .push(\"/\\\"excludeme\\\"/?\".to_owned().into());\n\n\n\n let _replace_collection_reponse = collection_client\n\n .replace_collection()\n\n .with_indexing_policy(&new_ip)\n\n .with_partition_key(&(\"/id\".into()))\n\n .execute()\n\n .await\n\n .unwrap();\n\n\n\n let collections = database_client.list_collections().execute().await.unwrap();\n\n assert_eq!(collections.collections.len(), 1);\n", "file_path": "azure_sdk_cosmos/tests/cosmos_collection.rs", "rank": 57, "score": 154249.21442291708 }, { "content": " let eps: Vec<&ExcludedPath> = collections.collections[0]\n\n .indexing_policy\n\n .excluded_paths\n\n .iter()\n\n .filter(|excluded_path| excluded_path.path == \"/\\\"excludeme\\\"/?\")\n\n .collect();\n\n assert!(eps.len() > 0);\n\n\n\n database_client.delete_database().execute().await.unwrap();\n\n}\n", "file_path": "azure_sdk_cosmos/tests/cosmos_collection.rs", "rank": 58, "score": 154248.24228044998 }, { "content": 
"pub fn main() {\n\n let account_name =\n\n std::env::var(\"STORAGE_ACCOUNT\").expect(\"Set env variable STORAGE_ACCOUNT first!\");\n\n let account_key = std::env::var(\"ACCOUNT_KEY\").expect(\"Set env variable ACCOUNT_KEY first!\");\n\n let default_endpoints_protocol = std::env::var(\"DEFAULT_ENDPOINTS_PROTOCOL\")\n\n .expect(\"Set env variable DEFAULT_ENDPOINTS_PROTOCOL first!\");\n\n let default_endpoints_protocol = match &default_endpoints_protocol[..] {\n\n \"https\" => EndpointProtocol::Https,\n\n \"http\" => EndpointProtocol::Http,\n\n _ => panic!(\"Invalid default endpoints protocol\")\n\n };\n\n\n\n let connection_string = ConnectionStringBuilder::new()\n\n .with_account_name(&account_name)\n\n .with_account_key(&account_key)\n\n .with_default_endpoints_protocol(default_endpoints_protocol)\n\n .build();\n\n\n\n println!(\"The connection string is: '{}'\", connection_string);\n\n}\n", "file_path": "azure_sdk_storage_core/examples/connection_string_builder.rs", "rank": 59, "score": 153843.84369846142 }, { "content": "pub fn content_md5_from_headers_optional(\n\n headers: &HeaderMap,\n\n) -> Result<Option<[u8; 16]>, AzureError> {\n\n if headers.contains_key(CONTENT_MD5) {\n\n Ok(Some(content_md5_from_headers(headers)?))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct CommonStorageResponseHeaders {\n\n pub request_id: RequestId,\n\n pub client_request_id: Option<String>,\n\n pub version: String,\n\n pub date: DateTime<Utc>,\n\n pub server: String,\n\n}\n\n\n\nimpl TryFrom<&HeaderMap> for CommonStorageResponseHeaders {\n", "file_path": "azure_sdk_core/src/lib.rs", "rank": 60, "score": 153843.84369846142 }, { "content": "pub fn last_modified_from_headers_optional(\n\n headers: &HeaderMap,\n\n) -> Result<Option<DateTime<Utc>>, AzureError> {\n\n if headers.contains_key(LAST_MODIFIED) {\n\n Ok(Some(last_modified_from_headers(headers)?))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "azure_sdk_core/src/lib.rs", "rank": 
61, "score": 153843.84369846142 }, { "content": "pub fn naive_server(\n\n auth_obj: &AuthObj,\n\n port: u32,\n\n) -> Result<AuthorizationCode, ServerReceiveError> {\n\n // A very naive implementation of the redirect server.\n\n // A ripoff of https://github.com/ramosbugs/oauth2-rs/blob/master/examples/msgraph.rs, stripped\n\n // down for simplicity.\n\n let listener = TcpListener::bind(format!(\"127.0.0.1:{}\", port)).unwrap();\n\n for stream in listener.incoming() {\n\n if let Ok(mut stream) = stream {\n\n {\n\n let mut reader = BufReader::new(&stream);\n\n\n\n let mut request_line = String::new();\n\n reader.read_line(&mut request_line).unwrap();\n\n\n\n let redirect_url = match request_line.split_whitespace().nth(1) {\n\n Some(redirect_url) => redirect_url,\n\n None => {\n\n return Err(ServerReceiveError::UnexpectedRedirectUrl { url: request_line })\n", "file_path": "azure_sdk_auth_aad/src/naive_server.rs", "rank": 62, "score": 153843.84369846142 }, { "content": "pub fn content_crc64_from_headers_optional(\n\n headers: &HeaderMap,\n\n) -> Result<Option<[u8; 8]>, AzureError> {\n\n if headers.contains_key(CONTENT_CRC64) {\n\n Ok(Some(content_crc64_from_headers(headers)?))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "azure_sdk_core/src/lib.rs", "rank": 63, "score": 153843.84369846142 }, { "content": "pub fn authorize_code_flow(\n\n client_id: ClientId,\n\n client_secret: Option<ClientSecret>,\n\n tenant_id: &str,\n\n redirect_url: Url,\n\n resource: &str,\n\n) -> AuthObj {\n\n let auth_url = AuthUrl::from_url(\n\n Url::parse(&format!(\n\n \"https://login.microsoftonline.com/{}/oauth2/v2.0/authorize\",\n\n tenant_id\n\n ))\n\n .expect(\"Invalid authorization endpoint URL\"),\n\n );\n\n let token_url = TokenUrl::from_url(\n\n Url::parse(&format!(\n\n \"https://login.microsoftonline.com/{}/oauth2/v2.0/token\",\n\n tenant_id\n\n ))\n\n .expect(\"Invalid token endpoint URL\"),\n", "file_path": "azure_sdk_auth_aad/src/lib.rs", "rank": 64, "score": 
153843.84369846142 }, { "content": "pub trait UserDefinedFunctionClientRequired<'a, C, D, COLL>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n{\n\n fn user_defined_function_client(&self) -> &'a dyn UserDefinedFunctionClient<C, D, COLL>;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/lib.rs", "rank": 65, "score": 151838.68401968438 }, { "content": "#![cfg(all(test, feature = \"test_e2e\"))]\n\nuse azure_sdk_cosmos::prelude::*;\n\nuse azure_sdk_cosmos::PermissionMode;\n\n\n\nmod setup;\n\n\n\n#[tokio::test]\n\nasync fn permissions() {\n\n const DATABASE_NAME: &str = \"cosmos-test-db-permusage\";\n\n const COLLECTION_NAME: &str = \"cosmos-test-db-permusage\";\n\n const USER_NAME: &str = \"[email protected]\";\n\n const PERMISSION: &str = \"sdktest\";\n\n\n\n let client = setup::initialize().unwrap();\n\n\n\n // create a temp database\n\n let _create_database_response = client\n\n .create_database()\n\n .with_database_name(&DATABASE_NAME)\n\n .execute()\n", "file_path": "azure_sdk_cosmos/tests/permission_token_usage.rs", "rank": 66, "score": 151455.31419600197 }, { "content": " .await\n\n .unwrap();\n\n\n\n let user_client = database_client.with_user_client(USER_NAME);\n\n user_client.create_user().execute().await.unwrap();\n\n\n\n // create the RO permission\n\n let permission_client = user_client.with_permission_client(PERMISSION);\n\n let permission_mode = PermissionMode::Read(create_collection_response.clone().collection);\n\n\n\n let create_permission_response = permission_client\n\n .create_permission()\n\n .with_expiry_seconds(18000) // 5 hours, max!\n\n .execute_with_permission(&permission_mode)\n\n .await\n\n .unwrap();\n\n\n\n // change the AuthorizationToken using the token\n\n // of the permission.\n\n let new_authorization_token: AuthorizationToken = create_permission_response\n", "file_path": "azure_sdk_cosmos/tests/permission_token_usage.rs", "rank": 67, "score": 151454.2683034125 }, { "content": " 
.permission\n\n .permission_token\n\n .into();\n\n let new_client = client.with_auth_token(new_authorization_token);\n\n let new_database_client = new_client.with_database_client(DATABASE_NAME);\n\n let new_collection_client = new_database_client.with_collection_client(COLLECTION_NAME);\n\n\n\n // let's list the collection content.\n\n // This must succeed.\n\n new_database_client\n\n .with_collection_client(COLLECTION_NAME)\n\n .list_documents()\n\n .execute::<serde_json::Value>()\n\n .await\n\n .unwrap();\n\n\n\n // Now we try to insert a document with the \"read-only\"\n\n // authorization_token just created. It must fail.\n\n let data = r#\"\n\n {\n", "file_path": "azure_sdk_cosmos/tests/permission_token_usage.rs", "rank": 68, "score": 151446.09325189554 }, { "content": " .unwrap();\n\n\n\n // All includes read and write.\n\n let permission_mode = PermissionMode::All(create_collection_response.collection);\n\n let create_permission_response = permission_client\n\n .create_permission()\n\n .with_expiry_seconds(18000) // 5 hours, max!\n\n .execute_with_permission(&permission_mode)\n\n .await\n\n .unwrap();\n\n\n\n let new_authorization_token: AuthorizationToken = create_permission_response\n\n .permission\n\n .permission_token\n\n .into();\n\n let new_client = client.with_auth_token(new_authorization_token);\n\n let new_database_client = new_client.with_database_client(DATABASE_NAME);\n\n let new_collection_client = new_database_client.with_collection_client(COLLECTION_NAME);\n\n\n\n // now we have an \"All\" authorization_token\n", "file_path": "azure_sdk_cosmos/tests/permission_token_usage.rs", "rank": 69, "score": 151444.22365509748 }, { "content": " \"id\": \"Gianluigi Bombatomica\",\n\n \"age\": 43,\n\n \"phones\": [\n\n \"+39 1234567\",\n\n \"+39 2345678\"\n\n ]\n\n }\"#;\n\n let document = Document::new(serde_json::from_str::<serde_json::Value>(data).unwrap());\n\n new_collection_client\n\n .create_document()\n\n .with_is_upsert(true)\n\n 
.with_partition_keys(PartitionKeys::new().push(&\"Gianluigi Bombatomica\").unwrap())\n\n .execute_with_document(&document)\n\n .await\n\n .unwrap_err();\n\n\n\n permission_client\n\n .delete_permission()\n\n .execute()\n\n .await\n", "file_path": "azure_sdk_cosmos/tests/permission_token_usage.rs", "rank": 70, "score": 151441.894681855 }, { "content": " // so the create_document should succeed!\n\n let create_document_response = new_collection_client\n\n .create_document()\n\n .with_is_upsert(true)\n\n .with_partition_keys(PartitionKeys::new().push(&\"Gianluigi Bombatomica\").unwrap())\n\n .execute_with_document(&document)\n\n .await\n\n .unwrap();\n\n println!(\n\n \"create_document_response == {:#?}\",\n\n create_document_response\n\n );\n\n\n\n // cleanup\n\n database_client.delete_database().execute().await.unwrap();\n\n}\n", "file_path": "azure_sdk_cosmos/tests/permission_token_usage.rs", "rank": 71, "score": 151433.64374354153 }, { "content": " .await\n\n .unwrap();\n\n\n\n let database_client = client.with_database_client(DATABASE_NAME);\n\n\n\n // create a new collection\n\n let indexing_policy = IndexingPolicy {\n\n automatic: true,\n\n indexing_mode: IndexingMode::Consistent,\n\n included_paths: vec![],\n\n excluded_paths: vec![],\n\n };\n\n\n\n let create_collection_response = database_client\n\n .create_collection()\n\n .with_collection_name(&COLLECTION_NAME)\n\n .with_offer(Offer::Throughput(400))\n\n .with_partition_key(&(\"/id\".into()))\n\n .with_indexing_policy(&indexing_policy)\n\n .execute()\n", "file_path": "azure_sdk_cosmos/tests/permission_token_usage.rs", "rank": 72, "score": 151431.52231412556 }, { "content": "\n\n // change the AuthorizationToken using the token\n\n // of the permission.\n\n let new_authorization_token: AuthorizationToken = create_permission_response\n\n .permission\n\n .permission_token\n\n .into();\n\n\n\n println!(\n\n \"Replacing authorization_token with {:?}.\",\n\n new_authorization_token\n\n );\n\n let new_client = 
client.with_auth_token(new_authorization_token);\n\n\n\n // let's list the documents with the new auth token\n\n let list_documents_response = new_client\n\n .with_database_client(&database_name)\n\n .with_collection_client(&collection_name)\n\n .list_documents()\n\n .execute::<serde_json::Value>()\n", "file_path": "azure_sdk_cosmos/examples/user_permission_token.rs", "rank": 73, "score": 151300.61960702593 }, { "content": " .expect(\"please specify the user name as third command line parameter\");\n\n\n\n let authorization_token = AuthorizationToken::new_master(&master_key)?;\n\n\n\n let client = ClientBuilder::new(account, authorization_token)?;\n\n let database_client = client.with_database_client(&database_name);\n\n let collection_client = database_client.with_collection_client(&collection_name);\n\n let user_client = database_client.with_user_client(&user_name);\n\n\n\n let get_collection_response = collection_client.get_collection().execute().await?;\n\n println!(\"get_collection_response == {:#?}\", get_collection_response);\n\n\n\n let create_user_response = user_client.create_user().execute().await?;\n\n println!(\"create_user_response == {:#?}\", create_user_response);\n\n\n\n // test list documents\n\n let list_documents_response = collection_client\n\n .list_documents()\n\n .execute::<serde_json::Value>()\n\n .await\n", "file_path": "azure_sdk_cosmos/examples/user_permission_token.rs", "rank": 74, "score": 151300.59468248827 }, { "content": " .unwrap();\n\n println!(\n\n \"list_documents_response got {} document(s).\",\n\n list_documents_response.documents.len()\n\n );\n\n\n\n // create the first permission!\n\n let permission_client = user_client.with_permission_client(\"matrix\");\n\n\n\n let permission_mode = PermissionMode::Read(get_collection_response.clone().collection);\n\n\n\n let create_permission_response = permission_client\n\n .create_permission()\n\n .with_expiry_seconds(18000) // 5 hours, max!\n\n 
.execute_with_permission(&permission_mode)\n\n .await?;\n\n println!(\n\n \"create_permission_response == {:#?}\",\n\n create_permission_response\n\n );\n", "file_path": "azure_sdk_cosmos/examples/user_permission_token.rs", "rank": 75, "score": 151298.44067633685 }, { "content": " let document = Document::new(serde_json::from_str::<serde_json::Value>(data)?);\n\n println!(\n\n \"Trying to insert {:#?} into the collection with a read-only authorization_token.\",\n\n document\n\n );\n\n\n\n match new_client\n\n .with_database_client(&database_name)\n\n .with_collection_client(&collection_name)\n\n .create_document()\n\n .with_is_upsert(true)\n\n .with_partition_keys(PartitionKeys::new().push(\"Gianluigi Bombatomica\")?)\n\n .execute_with_document(&document)\n\n .await\n\n {\n\n Ok(_) => panic!(\"this should not happen!\"),\n\n Err(error) => println!(\"Insert failed: {:#?}\", error),\n\n }\n\n\n\n permission_client.delete_permission().execute().await?;\n", "file_path": "azure_sdk_cosmos/examples/user_permission_token.rs", "rank": 76, "score": 151297.61108695518 }, { "content": " new_authorization_token\n\n );\n\n let new_client = client.with_auth_token(new_authorization_token);\n\n\n\n // now we have an \"All\" authorization_token\n\n // so the create_document should succeed!\n\n let create_document_response = new_client\n\n .with_database_client(&database_name)\n\n .with_collection_client(&collection_name)\n\n .create_document()\n\n .with_is_upsert(true)\n\n .with_partition_keys(PartitionKeys::new().push(\"Gianluigi Bombatomica\")?)\n\n .execute_with_document(&document)\n\n .await?;\n\n println!(\n\n \"create_document_response == {:#?}\",\n\n create_document_response\n\n );\n\n\n\n println!(\"Cleaning up user.\");\n\n let delete_user_response = user_client.delete_user().execute().await?;\n\n println!(\"delete_user_response == {:#?}\", delete_user_response);\n\n\n\n Ok(())\n\n}\n", "file_path": "azure_sdk_cosmos/examples/user_permission_token.rs", "rank": 77, "score": 
151296.51457889553 }, { "content": "\n\n // All includes read and write.\n\n let permission_mode = PermissionMode::All(get_collection_response.collection);\n\n let create_permission_response = permission_client\n\n .create_permission()\n\n .with_expiry_seconds(18000) // 5 hours, max!\n\n .execute_with_permission(&permission_mode)\n\n .await?;\n\n println!(\n\n \"create_permission_response == {:#?}\",\n\n create_permission_response\n\n );\n\n\n\n let new_authorization_token: AuthorizationToken = create_permission_response\n\n .permission\n\n .permission_token\n\n .into();\n\n\n\n println!(\n\n \"Replacing authorization_token with {:?}.\",\n", "file_path": "azure_sdk_cosmos/examples/user_permission_token.rs", "rank": 78, "score": 151292.5824488558 }, { "content": "use azure_sdk_cosmos::prelude::*;\n\nuse azure_sdk_cosmos::PermissionMode;\n\nuse std::error::Error;\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn Error>> {\n\n // First we retrieve the account name and master key from environment variables.\n\n // We expect master keys (ie, not resource constrained)\n\n let master_key =\n\n std::env::var(\"COSMOS_MASTER_KEY\").expect(\"Set env variable COSMOS_MASTER_KEY first!\");\n\n let account = std::env::var(\"COSMOS_ACCOUNT\").expect(\"Set env variable COSMOS_ACCOUNT first!\");\n\n\n\n let database_name = std::env::args()\n\n .nth(1)\n\n .expect(\"please specify the database name as first command line parameter\");\n\n let collection_name = std::env::args()\n\n .nth(2)\n\n .expect(\"please specify the collection name as second command line parameter\");\n\n let user_name = std::env::args()\n\n .nth(3)\n", "file_path": "azure_sdk_cosmos/examples/user_permission_token.rs", "rank": 79, "score": 151287.80489036365 }, { "content": " .await\n\n .unwrap();\n\n println!(\n\n \"second list_documents_response got {} document(s).\",\n\n list_documents_response.documents.len()\n\n );\n\n\n\n // Now we try to insert a document with the \"read-only\"\n\n // 
authorization_token just created. It will fail.\n\n // The collection should have /id as partition key\n\n // for this example to work.\n\n let data = r#\"\n\n {\n\n \"id\": \"Gianluigi Bombatomica\",\n\n \"age\": 43,\n\n \"phones\": [\n\n \"+39 1234567\",\n\n \"+39 2345678\"\n\n ]\n\n }\"#;\n", "file_path": "azure_sdk_cosmos/examples/user_permission_token.rs", "rank": 80, "score": 151287.22269123144 }, { "content": "#[inline]\n\npub fn traverse_single_must<'a>(\n\n node: &'a Element,\n\n path: &[&str],\n\n) -> Result<&'a Element, TraversingError> {\n\n let vec = traverse(node, path, false)?;\n\n if vec.len() > 1 {\n\n return Err(TraversingError::MultipleNode(\n\n path[path.len() - 1].to_owned(),\n\n ));\n\n }\n\n\n\n Ok(vec[0])\n\n}\n\n\n", "file_path": "azure_sdk_core/src/parsing.rs", "rank": 81, "score": 151151.632016273 }, { "content": "pub fn traverse_single_optional<'a>(\n\n node: &'a Element,\n\n path: &[&str],\n\n) -> Result<Option<&'a Element>, TraversingError> {\n\n let vec = traverse(node, path, true)?;\n\n if vec.len() > 1 {\n\n return Err(TraversingError::MultipleNode(\n\n path[path.len() - 1].to_owned(),\n\n ));\n\n }\n\n\n\n if vec.is_empty() {\n\n return Ok(None);\n\n }\n\n\n\n Ok(Some(vec[0]))\n\n}\n\n\n", "file_path": "azure_sdk_core/src/parsing.rs", "rank": 82, "score": 151151.632016273 }, { "content": "pub trait CollectionClient<C, D>: HasDatabaseClient<C, D>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n{\n\n fn collection_name(&self) -> &str;\n\n\n\n fn get_collection(&self) -> requests::GetCollectionBuilder<'_, C, D>;\n\n fn delete_collection(&self) -> requests::DeleteCollectionBuilder<'_, C, D>;\n\n fn replace_collection(&self) -> requests::ReplaceCollectionBuilder<'_, '_, C, D, No, No>;\n\n\n\n fn list_triggers(&self) -> requests::ListTriggersBuilder<'_, '_, C, D>;\n\n fn list_stored_procedures(&self) -> requests::ListStoredProceduresBuilder<'_, '_, C, D>;\n\n fn list_user_defined_functions(\n\n &self,\n\n ) -> 
requests::ListUserDefinedFunctionsBuilder<'_, '_, C, D>;\n\n\n\n fn create_document(&self) -> requests::CreateDocumentBuilder<'_, '_, C, D, No>;\n\n fn replace_document(&self) -> requests::ReplaceDocumentBuilder<'_, '_, C, D, No, No>;\n\n fn list_documents(&self) -> requests::ListDocumentsBuilder<'_, '_, C, D>;\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 83, "score": 150707.42623345877 }, { "content": "pub trait WithCollectionClient<'a, C, D, COLL>: Debug + Send + Sync\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n{\n\n fn with_collection_client<IntoCowStr>(&'a self, collection_name: IntoCowStr) -> COLL\n\n where\n\n IntoCowStr: Into<Cow<'a, str>>;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 84, "score": 148829.30587902587 }, { "content": "pub trait IntoCollectionClient<'a, C, D, COLL>: Debug + Send + Sync\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n{\n\n fn into_collection_client<IntoCowStr>(self, collection_name: IntoCowStr) -> COLL\n\n where\n\n IntoCowStr: Into<Cow<'a, str>>;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 85, "score": 148829.30587902587 }, { "content": "mod user_defined_function_name;\n\n\n\npub use self::user_defined_function_name::UserDefinedFunctionName;\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct UserDefinedFunction {\n\n pub id: String,\n\n #[serde(rename = \"_rid\")]\n\n pub rid: String,\n\n #[serde(rename = \"_ts\")]\n\n pub ts: u64,\n\n #[serde(rename = \"_self\")]\n\n pub _self: String,\n\n #[serde(rename = \"_etag\")]\n\n pub etag: String,\n\n pub body: String,\n\n}\n\n\n\nimpl UserDefinedFunctionName for UserDefinedFunction {\n\n fn name(&self) -> &str {\n\n &self.id\n\n }\n\n}\n", "file_path": "azure_sdk_cosmos/src/user_defined_function/mod.rs", "rank": 86, "score": 148444.2961100019 }, { "content": "pub trait HasCollectionClient<C, D, COLL>: 
HasDatabaseClient<C, D>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n{\n\n fn collection_client(&self) -> &COLL;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 87, "score": 147486.09085465877 }, { "content": "function tax(income) {\n\n if (income == undefined)\n\n throw 'no input';\n\n if (income < 1000)\n\n return income * 0.1;\n\n else if (income < 10000)\n\n return income * 0.2;\n\n else\n\n return income * 0.4;\n\n}\"#;\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn Error>> {\n\n let database = std::env::args()\n\n .nth(1)\n\n .expect(\"please specify database name as first command line parameter\");\n\n let collection = std::env::args()\n\n .nth(2)\n\n .expect(\"please specify collection name as second command line parameter\");\n\n\n", "file_path": "azure_sdk_cosmos/examples/user_defined_function_00.rs", "rank": 88, "score": 143609.2494160074 }, { "content": "pub trait StoredProcedureClient<C, D, COLL>: HasCollectionClient<C, D, COLL>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n{\n\n fn stored_procedure_name(&self) -> &str;\n\n\n\n fn create_stored_procedure(\n\n &self,\n\n ) -> requests::CreateStoredProcedureBuilder<'_, '_, C, D, COLL, No>;\n\n fn delete_stored_procedure(&self)\n\n -> requests::DeleteStoredProcedureBuilder<'_, '_, C, D, COLL>;\n\n fn execute_stored_procedure(\n\n &self,\n\n ) -> requests::ExecuteStoredProcedureBuilder<'_, '_, C, D, COLL>;\n\n fn replace_stored_procedure(\n\n &self,\n\n ) -> requests::ReplaceStoredProcedureBuilder<'_, '_, C, D, COLL, No>;\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 89, "score": 142354.99073904543 }, { "content": "pub trait IntoUserDefinedFunctionClient<'a, C, D, COLL, UDF>: Debug + Send + Sync\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n UDF: UserDefinedFunctionClient<C, D, COLL>,\n\n{\n\n fn 
into_user_defined_function_client<IntoCowStr>(\n\n self,\n\n user_defined_function_name: IntoCowStr,\n\n ) -> UDF\n\n where\n\n IntoCowStr: Into<Cow<'a, str>>;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 90, "score": 141572.79090106825 }, { "content": "pub trait WithUserDefinedFunctionClient<'a, C, D, COLL, UDF>: Debug + Send + Sync\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n UDF: UserDefinedFunctionClient<C, D, COLL>,\n\n{\n\n fn with_user_defined_function_client<IntoCowStr>(\n\n &'a self,\n\n user_defined_function_name: IntoCowStr,\n\n ) -> UDF\n\n where\n\n IntoCowStr: Into<Cow<'a, str>>;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 91, "score": 141572.79090106825 }, { "content": "pub trait HasUserClient<C, D, USER>: HasDatabaseClient<C, D>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n USER: UserClient<C, D>,\n\n{\n\n fn user_client(&self) -> &USER;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 92, "score": 139777.92727025313 }, { "content": "pub trait HasStoredProcedureClient<C, D, COLL, SP>: HasCollectionClient<C, D, COLL>\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n SP: StoredProcedureClient<C, D, COLL>,\n\n{\n\n fn stored_procedure_client(&self) -> &SP;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 93, "score": 139535.47147502936 }, { "content": "#[allow(unknown_lints)]\n\npub fn perform_request<HCE: HyperClientEndpoint>(\n\n hyper_client_endpoint: &HCE,\n\n uri: &str,\n\n http_method: &Method,\n\n http_header_adder: &dyn Fn(Builder) -> Builder,\n\n request_body: Option<&[u8]>,\n\n service_type: ServiceType,\n\n) -> Result<hyper::client::ResponseFuture, AzureError> {\n\n let dt = chrono::Utc::now();\n\n let time = format!(\"{}\", dt.format(\"%a, %d %h %Y %T GMT\"));\n\n\n\n let url = url::Url::parse(uri)?;\n\n\n\n // for header in additional_headers.iter() 
{\n\n // debug!(\"{:?}\", header.value_string());\n\n // h.set();\n\n // }\n\n let mut request = hyper::Request::builder();\n\n request = request.method(http_method.clone()).uri(uri);\n\n\n", "file_path": "azure_sdk_storage_core/src/rest_client.rs", "rank": 94, "score": 139048.93306068945 }, { "content": "pub trait IntoUserClient<'a, C, D, USER>: Debug + Send + Sync\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n USER: UserClient<C, D>,\n\n{\n\n fn into_user_client<IntoCowStr>(self, user_name: IntoCowStr) -> USER\n\n where\n\n IntoCowStr: Into<Cow<'a, str>>;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 95, "score": 139030.43702269066 }, { "content": "pub trait WithUserClient<'a, C, D, USER>: Debug + Send + Sync\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n USER: UserClient<C, D>,\n\n{\n\n fn with_user_client<IntoCowStr>(&'a self, user_name: IntoCowStr) -> USER\n\n where\n\n IntoCowStr: Into<Cow<'a, str>>;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 96, "score": 139030.43702269066 }, { "content": "function updateMetadata() {\n\n var context = getContext();\n\n var collection = context.getCollection();\n\n var response = context.getResponse();\n\n var createdDocument = response.getBody();\n\n\n\n // query for metadata document\n\n var filterQuery = 'SELECT * FROM root r WHERE r.id = \\\"_metadata\\\"';\n\n var accept = collection.queryDocuments(collection.getSelfLink(), filterQuery,\n\n updateMetadataCallback);\n\n if (!accept) throw\\ \"Unable to update metadata, abort\\\";\n\n\n", "file_path": "azure_sdk_cosmos/examples/trigger_00.rs", "rank": 97, "score": 138936.44265404445 }, { "content": "pub trait WithTriggerClient<'a, C, D, COLL, TRIGGER>: Debug + Send + Sync\n\nwhere\n\n C: CosmosClient,\n\n D: DatabaseClient<C>,\n\n COLL: CollectionClient<C, D>,\n\n TRIGGER: TriggerClient<C, D, COLL>,\n\n{\n\n fn with_trigger_client<IntoCowStr>(&'a self, trigger_name: IntoCowStr) -> TRIGGER\n\n 
where\n\n IntoCowStr: Into<Cow<'a, str>>;\n\n}\n\n\n", "file_path": "azure_sdk_cosmos/src/traits.rs", "rank": 98, "score": 136034.06541948367 } ]
Rust
eir-fmm/src/entry_functions.rs
ein-lang/eir
70d59589efeb1f6a7e881abb28ce7aebb862018f
use super::error::CompileError; use crate::{closures, expressions, reference_count, types}; use std::collections::HashMap; const CLOSURE_NAME: &str = "_closure"; pub fn compile( module_builder: &fmm::build::ModuleBuilder, definition: &eir::ir::Definition, variables: &HashMap<String, fmm::build::TypedExpression>, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { Ok(if definition.is_thunk() { compile_thunk(module_builder, definition, variables, types)? } else { compile_non_thunk(module_builder, definition, variables, types)? }) } fn compile_non_thunk( module_builder: &fmm::build::ModuleBuilder, definition: &eir::ir::Definition, variables: &HashMap<String, fmm::build::TypedExpression>, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { module_builder.define_anonymous_function( compile_arguments(definition, types), |instruction_builder| { Ok(instruction_builder.return_(compile_body( module_builder, &instruction_builder, definition, variables, types, )?)) }, types::compile(definition.result_type(), types), fmm::types::CallingConvention::Source, ) } fn compile_thunk( module_builder: &fmm::build::ModuleBuilder, definition: &eir::ir::Definition, variables: &HashMap<String, fmm::build::TypedExpression>, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { compile_initial_thunk_entry( module_builder, definition, compile_normal_thunk_entry(module_builder, definition, types)?, compile_locked_thunk_entry(module_builder, definition, types)?, variables, types, ) } fn compile_body( module_builder: &fmm::build::ModuleBuilder, instruction_builder: &fmm::build::InstructionBuilder, definition: &eir::ir::Definition, variables: &HashMap<String, fmm::build::TypedExpression>, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { let payload_pointer = 
compile_payload_pointer(definition, types)?; let environment_pointer = if definition.is_thunk() { fmm::build::union_address(payload_pointer, 0)?.into() } else { payload_pointer }; expressions::compile( module_builder, instruction_builder, definition.body(), &variables .clone() .into_iter() .chain( definition .environment() .iter() .enumerate() .map(|(index, free_variable)| -> Result<_, CompileError> { let value = instruction_builder.load(fmm::build::record_address( environment_pointer.clone(), index, )?)?; reference_count::clone_expression( instruction_builder, &value, free_variable.type_(), types, )?; Ok((free_variable.name().into(), value)) }) .collect::<Result<Vec<_>, _>>()?, ) .chain(vec![( definition.name().into(), compile_closure_pointer(definition.type_(), types)?, )]) .chain(definition.arguments().iter().map(|argument| { ( argument.name().into(), fmm::build::variable(argument.name(), types::compile(argument.type_(), types)), ) })) .collect(), types, ) } fn compile_initial_thunk_entry( module_builder: &fmm::build::ModuleBuilder, definition: &eir::ir::Definition, normal_entry_function: fmm::build::TypedExpression, lock_entry_function: fmm::build::TypedExpression, variables: &HashMap<String, fmm::build::TypedExpression>, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { let entry_function_name = module_builder.generate_name(); let entry_function_type = types::compile_entry_function(definition, types); let arguments = compile_arguments(definition, types); module_builder.define_function( &entry_function_name, arguments.clone(), |instruction_builder| { let entry_function_pointer = compile_entry_function_pointer(definition, types)?; instruction_builder.if_( instruction_builder.compare_and_swap( entry_function_pointer.clone(), fmm::build::variable(&entry_function_name, entry_function_type.clone()), lock_entry_function.clone(), fmm::ir::AtomicOrdering::Acquire, fmm::ir::AtomicOrdering::Relaxed, ), 
|instruction_builder| -> Result<_, CompileError> { let value = compile_body( module_builder, &instruction_builder, definition, variables, types, )?; reference_count::clone_expression( &instruction_builder, &value, definition.result_type(), types, )?; instruction_builder.store( value.clone(), compile_thunk_value_pointer(definition, types)?, ); instruction_builder.store( closures::compile_normal_thunk_drop_function( module_builder, definition, types, )?, compile_drop_function_pointer(definition, types)?, ); instruction_builder.atomic_store( normal_entry_function.clone(), entry_function_pointer.clone(), fmm::ir::AtomicOrdering::Release, ); Ok(instruction_builder.return_(value)) }, |instruction_builder| { Ok(instruction_builder.return_( instruction_builder.call( instruction_builder.atomic_load( compile_entry_function_pointer(definition, types)?, fmm::ir::AtomicOrdering::Acquire, )?, arguments .iter() .map(|argument| { fmm::build::variable(argument.name(), argument.type_().clone()) }) .collect(), )?, )) }, )?; Ok(instruction_builder.unreachable()) }, types::compile(definition.result_type(), types), fmm::types::CallingConvention::Source, fmm::ir::Linkage::Internal, ) } fn compile_normal_thunk_entry( module_builder: &fmm::build::ModuleBuilder, definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { module_builder.define_anonymous_function( compile_arguments(definition, types), |instruction_builder| compile_normal_body(&instruction_builder, definition, types), types::compile(definition.result_type(), types), fmm::types::CallingConvention::Source, ) } fn compile_locked_thunk_entry( module_builder: &fmm::build::ModuleBuilder, definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { let entry_function_name = module_builder.generate_name(); module_builder.define_function( &entry_function_name, 
compile_arguments(definition, types), |instruction_builder| { instruction_builder.if_( fmm::build::comparison_operation( fmm::ir::ComparisonOperator::Equal, fmm::build::bit_cast( fmm::types::Primitive::PointerInteger, instruction_builder.atomic_load( compile_entry_function_pointer(definition, types)?, fmm::ir::AtomicOrdering::Acquire, )?, ), fmm::build::bit_cast( fmm::types::Primitive::PointerInteger, fmm::build::variable( &entry_function_name, types::compile_entry_function(definition, types), ), ), )?, |instruction_builder| Ok(instruction_builder.unreachable()), |instruction_builder| compile_normal_body(&instruction_builder, definition, types), )?; Ok(instruction_builder.unreachable()) }, types::compile(definition.result_type(), types), fmm::types::CallingConvention::Source, fmm::ir::Linkage::Internal, ) } fn compile_normal_body( instruction_builder: &fmm::build::InstructionBuilder, definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::ir::Block, CompileError> { let value = instruction_builder.load(compile_thunk_value_pointer(definition, types)?)?; reference_count::clone_expression( instruction_builder, &value, definition.result_type(), types, )?; Ok(instruction_builder.return_(value)) } fn compile_entry_function_pointer( definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { Ok(fmm::build::bit_cast( fmm::types::Pointer::new(types::compile_entry_function(definition, types)), closures::compile_entry_function_pointer(compile_closure_pointer( definition.type_(), types, )?)?, ) .into()) } fn compile_drop_function_pointer( definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { closures::compile_drop_function_pointer(compile_closure_pointer(definition.type_(), types)?) 
} fn compile_arguments( definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Vec<fmm::ir::Argument> { vec![fmm::ir::Argument::new( CLOSURE_NAME, types::compile_untyped_closure_pointer(), )] .into_iter() .chain(definition.arguments().iter().map(|argument| { fmm::ir::Argument::new(argument.name(), types::compile(argument.type_(), types)) })) .collect() } fn compile_thunk_value_pointer( definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { Ok(fmm::build::union_address(compile_payload_pointer(definition, types)?, 1)?.into()) } fn compile_payload_pointer( definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { closures::compile_environment_pointer(fmm::build::bit_cast( fmm::types::Pointer::new(types::compile_sized_closure(definition, types)), compile_untyped_closure_pointer(), )) } fn compile_closure_pointer( function_type: &eir::types::Function, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, fmm::build::BuildError> { Ok(fmm::build::bit_cast( fmm::types::Pointer::new(types::compile_unsized_closure(function_type, types)), compile_untyped_closure_pointer(), ) .into()) } fn compile_untyped_closure_pointer() -> fmm::build::TypedExpression { fmm::build::variable(CLOSURE_NAME, types::compile_untyped_closure_pointer()) }
use super::error::CompileError; use crate::{closures, expressions, reference_count, types}; use std::collections::HashMap; const CLOSURE_NAME: &str = "_closure"; pub fn compile( module_builder: &fmm::build::ModuleBuilder, definition: &eir::ir::Definition, variables: &HashMap<String, fmm::build::TypedExpression>, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { Ok(if definition.is_thunk() { compile_thunk(module_builder, definition, variables, types)? } else { compile_non_thunk(module_builder, definition, variables, types)? }) } fn compile_non_thunk( module_builder: &fmm::build::ModuleBuilder, definition: &eir::ir::Definition, variables: &HashMap<String, fmm::build::TypedExpression>, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { module_builder.define_anonymous_function( compile_arguments(definition, types), |instruction_builder| { Ok(instruction_builder.return_(compile_body( module_builder, &instruction_builder, definition, variables, types, )?)) }, types::compile(definition.result_type(), types), fmm::types::CallingConvention::Source, ) } fn compile_thunk( module_builder: &fmm::build::ModuleBuilder, definition: &eir::ir::Definition, variables: &HashMap<String, fmm::build::TypedExpression>, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { compile_initial_thunk_entry( module_builder, definition, compile_normal_thunk_entry(module_builder, definition, types)?, compile_locked_thunk_entry(module_builder, definition, types)?, variables, types, ) } fn compile_body( module_builder: &fmm::build::ModuleBuilder, instruction_builder: &fmm::build::InstructionBuilder, definition: &eir::ir::Definition, variables: &HashMap<String, fmm::build::TypedExpression>, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { let payload_pointer = 
compile_payload_pointer(definition, types)?; let environment_pointer = if definition.is_thunk() { fmm::build::union_address(payload_pointer, 0)?.into() } else { payload_pointer }; expressions::compile( module_builder, instruction_builder, definition.body(), &variables .clone() .into_iter() .chain( definition .environment() .iter() .enumerate() .map(|(index, free_variable)| -> Result<_, CompileError> { let value = instruction_builder.load(fmm::build::record_address( environment_pointer.clone(), index, )?)?; reference_count::clone_expression( instruction_builder, &value, free_variable.type_(), types, )?; Ok((free_variable.name().into(), value)) }) .collect::<Result<Vec<_>, _>>()?, ) .chain(vec![( definition.name().into(), compile_closure_pointer(definition.type_(), types)?, )]) .chain(definition.arguments().iter().map(|argument| { ( argument.name().into(), fmm::build::variable(argument.name(), types::compile(argument.type_(), types)), ) })) .collect(), types, ) } fn compile_initial_thunk_entry( module_builder: &fmm::build::ModuleBuilder, definition: &eir::ir::Definition, normal_entry_function: fmm::build::TypedExpression, lock_entry_function: fmm::build::TypedExpression, variables: &HashMap<String, fmm::build::TypedExpression>, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { let entry_function_name = module_builder.generate_name(); let entry_function_type = types::compile_entry_function(definition, types); let arguments = compile_arguments(definition, types); module_builder.define_function( &entry_function_name, arguments.clone(), |instruction_builder| { let entry_function_pointer = compile_entry_function_pointer(definition, types)?; instruction_builder.if_( instruction_builder.compare_and_swap( entry_function_pointer.clone(), fmm::build::variable(&entry_function_name, entry_function_type.clone()), lock_entry_function.clone(), fmm::ir::AtomicOrdering::Acquire, fmm::ir::AtomicOrdering::Relaxed, ), 
|instruction_builder| -> Result<_, CompileError> { let value = compile_body( module_builder, &instruction_builder, definition, variables, types, )?; reference_count::clone_expression( &instruction_builder, &value, definition.result_type(), types, )?; instruction_builder.store( value.clone(), compile_thunk_value_pointer(definition, types)?, ); instruction_builder.store( closures::compile_normal_thunk_drop_function( module_builder, definition, types, )?, compile_drop_function_pointer(definition, types)?, ); instruction_builder.atomic_store( normal_entry_function.clone(), entry_function_pointer.clone(), fmm::ir::AtomicOrdering::Release, ); Ok(instruction_builder.return_(value)) }, |instruction_builder| {
}, )?; Ok(instruction_builder.unreachable()) }, types::compile(definition.result_type(), types), fmm::types::CallingConvention::Source, fmm::ir::Linkage::Internal, ) } fn compile_normal_thunk_entry( module_builder: &fmm::build::ModuleBuilder, definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { module_builder.define_anonymous_function( compile_arguments(definition, types), |instruction_builder| compile_normal_body(&instruction_builder, definition, types), types::compile(definition.result_type(), types), fmm::types::CallingConvention::Source, ) } fn compile_locked_thunk_entry( module_builder: &fmm::build::ModuleBuilder, definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { let entry_function_name = module_builder.generate_name(); module_builder.define_function( &entry_function_name, compile_arguments(definition, types), |instruction_builder| { instruction_builder.if_( fmm::build::comparison_operation( fmm::ir::ComparisonOperator::Equal, fmm::build::bit_cast( fmm::types::Primitive::PointerInteger, instruction_builder.atomic_load( compile_entry_function_pointer(definition, types)?, fmm::ir::AtomicOrdering::Acquire, )?, ), fmm::build::bit_cast( fmm::types::Primitive::PointerInteger, fmm::build::variable( &entry_function_name, types::compile_entry_function(definition, types), ), ), )?, |instruction_builder| Ok(instruction_builder.unreachable()), |instruction_builder| compile_normal_body(&instruction_builder, definition, types), )?; Ok(instruction_builder.unreachable()) }, types::compile(definition.result_type(), types), fmm::types::CallingConvention::Source, fmm::ir::Linkage::Internal, ) } fn compile_normal_body( instruction_builder: &fmm::build::InstructionBuilder, definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::ir::Block, CompileError> { let value 
= instruction_builder.load(compile_thunk_value_pointer(definition, types)?)?; reference_count::clone_expression( instruction_builder, &value, definition.result_type(), types, )?; Ok(instruction_builder.return_(value)) } fn compile_entry_function_pointer( definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { Ok(fmm::build::bit_cast( fmm::types::Pointer::new(types::compile_entry_function(definition, types)), closures::compile_entry_function_pointer(compile_closure_pointer( definition.type_(), types, )?)?, ) .into()) } fn compile_drop_function_pointer( definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { closures::compile_drop_function_pointer(compile_closure_pointer(definition.type_(), types)?) } fn compile_arguments( definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Vec<fmm::ir::Argument> { vec![fmm::ir::Argument::new( CLOSURE_NAME, types::compile_untyped_closure_pointer(), )] .into_iter() .chain(definition.arguments().iter().map(|argument| { fmm::ir::Argument::new(argument.name(), types::compile(argument.type_(), types)) })) .collect() } fn compile_thunk_value_pointer( definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { Ok(fmm::build::union_address(compile_payload_pointer(definition, types)?, 1)?.into()) } fn compile_payload_pointer( definition: &eir::ir::Definition, types: &HashMap<String, eir::types::RecordBody>, ) -> Result<fmm::build::TypedExpression, CompileError> { closures::compile_environment_pointer(fmm::build::bit_cast( fmm::types::Pointer::new(types::compile_sized_closure(definition, types)), compile_untyped_closure_pointer(), )) } fn compile_closure_pointer( function_type: &eir::types::Function, types: &HashMap<String, eir::types::RecordBody>, ) -> 
Result<fmm::build::TypedExpression, fmm::build::BuildError> { Ok(fmm::build::bit_cast( fmm::types::Pointer::new(types::compile_unsized_closure(function_type, types)), compile_untyped_closure_pointer(), ) .into()) } fn compile_untyped_closure_pointer() -> fmm::build::TypedExpression { fmm::build::variable(CLOSURE_NAME, types::compile_untyped_closure_pointer()) }
Ok(instruction_builder.return_( instruction_builder.call( instruction_builder.atomic_load( compile_entry_function_pointer(definition, types)?, fmm::ir::AtomicOrdering::Acquire, )?, arguments .iter() .map(|argument| { fmm::build::variable(argument.name(), argument.type_().clone()) }) .collect(), )?, ))
call_expression
[ { "content": "fn infer_in_let(let_: &Let, variables: &HashMap<String, Type>) -> Let {\n\n Let::new(\n\n let_.name(),\n\n let_.type_().clone(),\n\n infer_in_expression(let_.bound_expression(), variables),\n\n infer_in_expression(\n\n let_.expression(),\n\n &variables\n\n .clone()\n\n .drain()\n\n .chain(vec![(let_.name().into(), let_.type_().clone())])\n\n .collect(),\n\n ),\n\n )\n\n}\n\n\n", "file_path": "eir/src/analysis/environment_inference.rs", "rank": 0, "score": 195462.19586262584 }, { "content": "pub fn compile_environment(\n\n definition: &eir::ir::Definition,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> fmm::types::Record {\n\n fmm::types::Record::new(\n\n definition\n\n .environment()\n\n .iter()\n\n .map(|argument| compile(argument.type_(), types))\n\n .collect(),\n\n )\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 1, "score": 191125.02128062054 }, { "content": "pub fn compile_type_information_global_variable(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n type_: &eir::types::Type,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<(), CompileError> {\n\n module_builder.define_variable(\n\n types::compile_type_id(type_),\n\n fmm::build::record(vec![\n\n reference_count::compile_variant_clone_function(module_builder, type_, types)?,\n\n reference_count::compile_variant_drop_function(module_builder, type_, types)?,\n\n ]),\n\n false,\n\n fmm::ir::Linkage::Weak,\n\n None,\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": "eir-fmm/src/type_information.rs", "rank": 2, "score": 182874.63874933188 }, { "content": "pub fn compile_tag(type_: &eir::types::Type) -> fmm::build::TypedExpression {\n\n fmm::build::variable(types::compile_type_id(type_), types::compile_variant_tag())\n\n}\n\n\n", "file_path": "eir-fmm/src/variants.rs", "rank": 3, "score": 180874.45061563363 }, { "content": "pub fn compile_unsized_environment() -> fmm::types::Record {\n\n fmm::types::Record::new(vec![])\n\n}\n\n\n", "file_path": 
"eir-fmm/src/types.rs", "rank": 4, "score": 176560.11239690648 }, { "content": "fn collect_from_definition(definition: &Definition) -> HashSet<Type> {\n\n collect_from_expression(definition.body())\n\n}\n\n\n", "file_path": "eir/src/analysis/variant_type_collection.rs", "rank": 5, "score": 170377.69278675577 }, { "content": "fn infer_in_expression(expression: &Expression, variables: &HashMap<String, Type>) -> Expression {\n\n match expression {\n\n Expression::ArithmeticOperation(operation) => {\n\n infer_in_arithmetic_operation(operation, variables).into()\n\n }\n\n Expression::Case(case) => infer_in_case(case, variables).into(),\n\n Expression::CloneVariables(clone) => infer_in_clone_variables(clone, variables).into(),\n\n Expression::ComparisonOperation(operation) => {\n\n infer_in_comparison_operation(operation, variables).into()\n\n }\n\n Expression::DropVariables(drop) => infer_in_drop_variables(drop, variables).into(),\n\n Expression::FunctionApplication(application) => {\n\n infer_in_function_application(application, variables).into()\n\n }\n\n Expression::If(if_) => infer_in_if(if_, variables).into(),\n\n Expression::Let(let_) => infer_in_let(let_, variables).into(),\n\n Expression::LetRecursive(let_) => infer_in_let_recursive(let_, variables).into(),\n\n Expression::Record(record) => infer_in_record(record, variables).into(),\n\n Expression::RecordElement(element) => infer_in_record_element(element, variables).into(),\n\n Expression::Variant(variant) => infer_in_variant(variant, variables).into(),\n\n Expression::Boolean(_)\n\n | Expression::ByteString(_)\n\n | Expression::Number(_)\n\n | Expression::Variable(_) => expression.clone(),\n\n }\n\n}\n\n\n", "file_path": "eir/src/analysis/environment_inference.rs", "rank": 6, "score": 170266.7003701168 }, { "content": "fn collect_from_expression(expression: &Expression) -> HashSet<Type> {\n\n match expression {\n\n Expression::ArithmeticOperation(operation) => collect_from_expression(operation.lhs())\n\n 
.drain()\n\n .chain(collect_from_expression(operation.rhs()))\n\n .collect(),\n\n Expression::Case(case) => collect_from_case(case),\n\n Expression::CloneVariables(clone) => collect_from_expression(clone.expression()),\n\n Expression::ComparisonOperation(operation) => collect_from_expression(operation.lhs())\n\n .drain()\n\n .chain(collect_from_expression(operation.rhs()))\n\n .collect(),\n\n Expression::DropVariables(drop) => collect_from_expression(drop.expression()),\n\n Expression::FunctionApplication(application) => {\n\n collect_from_expression(application.function())\n\n .drain()\n\n .chain(collect_from_expression(application.argument()))\n\n .collect()\n\n }\n\n Expression::If(if_) => collect_from_expression(if_.condition())\n", "file_path": "eir/src/analysis/variant_type_collection.rs", "rank": 7, "score": 169959.10424995777 }, { "content": "pub fn collect_variant_types(module: &Module) -> HashSet<Type> {\n\n module\n\n .definitions()\n\n .iter()\n\n .flat_map(|definition| collect_from_definition(definition))\n\n .collect()\n\n}\n\n\n", "file_path": "eir/src/analysis/variant_type_collection.rs", "rank": 8, "score": 168636.99926950317 }, { "content": "pub fn compile_definition(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n definition: &eir::ir::Definition,\n\n global_variables: &HashMap<String, fmm::build::TypedExpression>,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<(), CompileError> {\n\n module_builder.define_variable(\n\n definition.name(),\n\n fmm::build::record(vec![\n\n entry_functions::compile(module_builder, definition, global_variables, types)?,\n\n closures::compile_drop_function(module_builder, definition, types)?,\n\n expressions::compile_arity(definition.arguments().iter().count()).into(),\n\n fmm::ir::Undefined::new(types::compile_closure_payload(definition, types)).into(),\n\n ]),\n\n definition.is_thunk(),\n\n fmm::ir::Linkage::External,\n\n None,\n\n );\n\n\n\n Ok(())\n\n}\n", "file_path": 
"eir-fmm/src/definitions.rs", "rank": 9, "score": 168412.089488034 }, { "content": "pub fn compile_type_id(type_: &eir::types::Type) -> String {\n\n format!(\"{:?}\", type_)\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 10, "score": 165329.09442350746 }, { "content": "pub fn compile(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n instruction_builder: &fmm::build::InstructionBuilder,\n\n expression: &eir::ir::Expression,\n\n variables: &HashMap<String, fmm::build::TypedExpression>,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n let compile = |expression, variables| {\n\n compile(\n\n module_builder,\n\n instruction_builder,\n\n expression,\n\n variables,\n\n types,\n\n )\n\n };\n\n\n\n Ok(match expression {\n\n eir::ir::Expression::ArithmeticOperation(operation) => compile_arithmetic_operation(\n\n module_builder,\n", "file_path": "eir-fmm/src/expressions.rs", "rank": 11, "score": 162613.49119346487 }, { "content": "fn infer_in_let_recursive(let_: &LetRecursive, variables: &HashMap<String, Type>) -> LetRecursive {\n\n LetRecursive::new(\n\n infer_in_local_definition(let_.definition(), variables),\n\n infer_in_expression(\n\n let_.expression(),\n\n &variables\n\n .clone()\n\n .drain()\n\n .chain(vec![(\n\n let_.definition().name().into(),\n\n let_.definition().type_().clone().into(),\n\n )])\n\n .collect(),\n\n ),\n\n )\n\n}\n\n\n", "file_path": "eir/src/analysis/environment_inference.rs", "rank": 12, "score": 162129.96306712355 }, { "content": "pub fn compile(\n\n type_: &eir::types::Type,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> fmm::types::Type {\n\n match type_ {\n\n eir::types::Type::Boolean => fmm::types::Primitive::Boolean.into(),\n\n eir::types::Type::Function(function) => {\n\n fmm::types::Pointer::new(compile_unsized_closure(function, types)).into()\n\n }\n\n eir::types::Type::Number => fmm::types::Primitive::Float64.into(),\n\n 
eir::types::Type::Record(record) => compile_record(record, types),\n\n eir::types::Type::ByteString => compile_string().into(),\n\n eir::types::Type::Variant => compile_variant().into(),\n\n }\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 13, "score": 161588.29039622642 }, { "content": "pub fn clone_expression(\n\n builder: &fmm::build::InstructionBuilder,\n\n expression: &fmm::build::TypedExpression,\n\n type_: &eir::types::Type,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<(), CompileError> {\n\n match type_ {\n\n eir::types::Type::ByteString => pointers::clone_pointer(builder, expression)?,\n\n eir::types::Type::Function(_) => functions::clone_function(builder, expression)?,\n\n eir::types::Type::Record(record) => {\n\n builder.call(\n\n fmm::build::variable(\n\n record_utilities::get_record_clone_function_name(record.name()),\n\n record_utilities::compile_record_rc_function_type(record, types),\n\n ),\n\n vec![expression.clone()],\n\n )?;\n\n }\n\n eir::types::Type::Variant => {\n\n builder.call(\n", "file_path": "eir-fmm/src/reference_count/expressions.rs", "rank": 14, "score": 161504.9155281156 }, { "content": "pub fn compile_foreign_definition(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n definition: &eir::ir::ForeignDefinition,\n\n function_type: &eir::types::Function,\n\n global_variable: &fmm::build::TypedExpression,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<(), CompileError> {\n\n // TODO Support a target calling convention.\n\n // Blocked by https://github.com/raviqqe/fmm/issues/88\n\n let foreign_function_type =\n\n types::compile_foreign_function(function_type, eir::ir::CallingConvention::Source, types);\n\n let arguments = foreign_function_type\n\n .arguments()\n\n .iter()\n\n .enumerate()\n\n .map(|(index, type_)| fmm::ir::Argument::new(format!(\"arg_{}\", index), type_.clone()))\n\n .collect::<Vec<_>>();\n\n\n\n module_builder.define_function(\n\n definition.foreign_name(),\n", 
"file_path": "eir-fmm/src/foreign_definitions.rs", "rank": 15, "score": 161014.02215685014 }, { "content": "pub fn compile_record(\n\n record: &eir::types::Record,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> fmm::types::Type {\n\n if is_record_boxed(record, types) {\n\n fmm::types::Pointer::new(fmm::types::Record::new(vec![])).into()\n\n } else {\n\n compile_unboxed_record(record, types).into()\n\n }\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 16, "score": 157408.91100787921 }, { "content": "pub fn compile_environment_pointer(\n\n closure_pointer: impl Into<fmm::build::TypedExpression>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n Ok(fmm::build::record_address(\n\n reference_count::compile_untagged_pointer(&closure_pointer.into())?,\n\n 3,\n\n )?\n\n .into())\n\n}\n\n\n", "file_path": "eir-fmm/src/closures.rs", "rank": 17, "score": 154620.26882376193 }, { "content": "pub fn compile_entry_function(\n\n definition: &eir::ir::Definition,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> fmm::types::Function {\n\n compile_entry_function_from_arguments_and_result(\n\n definition\n\n .arguments()\n\n .iter()\n\n .map(|argument| argument.type_()),\n\n definition.result_type(),\n\n types,\n\n )\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 18, "score": 153480.46744202074 }, { "content": "pub fn compile_foreign_function(\n\n function: &eir::types::Function,\n\n calling_convention: eir::ir::CallingConvention,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> fmm::types::Function {\n\n fmm::types::Function::new(\n\n function\n\n .arguments()\n\n .into_iter()\n\n .map(|type_| compile(type_, types))\n\n .collect(),\n\n compile(function.last_result(), types),\n\n compile_calling_convention(calling_convention),\n\n )\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 19, "score": 153480.46744202074 }, { "content": "pub fn compile_closure_payload(\n\n definition: &eir::ir::Definition,\n\n 
types: &HashMap<String, eir::types::RecordBody>,\n\n) -> fmm::types::Type {\n\n if definition.is_thunk() {\n\n fmm::types::Union::new(vec![\n\n compile_environment(definition, types).into(),\n\n compile(definition.result_type(), types),\n\n ])\n\n .into()\n\n } else {\n\n compile_environment(definition, types).into()\n\n }\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 20, "score": 153480.46744202074 }, { "content": "pub fn compile_unsized_closure(\n\n function: &eir::types::Function,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> fmm::types::Record {\n\n compile_raw_closure(\n\n compile_entry_function_from_arguments_and_result(\n\n function.arguments(),\n\n function.last_result(),\n\n types,\n\n ),\n\n compile_unsized_environment(),\n\n )\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 21, "score": 153480.46744202074 }, { "content": "pub fn compile_unboxed_record(\n\n record: &eir::types::Record,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> fmm::types::Record {\n\n fmm::types::Record::new(\n\n types[record.name()]\n\n .elements()\n\n .iter()\n\n .map(|type_| compile(type_, types))\n\n .collect(),\n\n )\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 22, "score": 153480.46744202074 }, { "content": "pub fn compile_sized_closure(\n\n definition: &eir::ir::Definition,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> fmm::types::Record {\n\n compile_raw_closure(\n\n compile_entry_function(definition, types),\n\n compile_closure_payload(definition, types),\n\n )\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 23, "score": 153480.46744202074 }, { "content": "pub fn compile_raw_closure(\n\n entry_function: fmm::types::Function,\n\n environment: impl Into<fmm::types::Type>,\n\n) -> fmm::types::Record {\n\n fmm::types::Record::new(vec![\n\n entry_function.into(),\n\n compile_closure_drop_function().into(),\n\n compile_arity().into(),\n\n environment.into(),\n\n ])\n\n}\n\n\n", "file_path": 
"eir-fmm/src/types.rs", "rank": 24, "score": 153480.46744202074 }, { "content": "pub fn compile_curried_entry_function(\n\n function: &fmm::types::Function,\n\n arity: usize,\n\n) -> fmm::types::Function {\n\n if arity == get_arity(function) {\n\n function.clone()\n\n } else {\n\n fmm::types::Function::new(\n\n function.arguments()[..arity + FUNCTION_ARGUMENT_OFFSET].to_vec(),\n\n fmm::types::Pointer::new(compile_raw_closure(\n\n fmm::types::Function::new(\n\n function.arguments()[..FUNCTION_ARGUMENT_OFFSET]\n\n .iter()\n\n .chain(function.arguments()[arity + FUNCTION_ARGUMENT_OFFSET..].iter())\n\n .cloned()\n\n .collect::<Vec<_>>(),\n\n function.result().clone(),\n\n fmm::types::CallingConvention::Source,\n\n ),\n\n compile_unsized_environment(),\n\n )),\n\n fmm::types::CallingConvention::Source,\n\n )\n\n }\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 25, "score": 149781.01857648365 }, { "content": "pub fn compile_variant() -> fmm::types::Record {\n\n fmm::types::Record::new(vec![\n\n compile_variant_tag().into(),\n\n compile_variant_payload().into(),\n\n ])\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 26, "score": 149764.0034590665 }, { "content": "pub fn compile_string() -> fmm::types::Pointer {\n\n fmm::types::Pointer::new(fmm::types::Record::new(vec![\n\n fmm::types::Primitive::PointerInteger.into(),\n\n // The first byte of a string\n\n fmm::types::Primitive::Integer8.into(),\n\n ]))\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 27, "score": 149764.0034590665 }, { "content": "pub fn compile_arity() -> fmm::types::Primitive {\n\n fmm::types::Primitive::PointerInteger\n\n}\n", "file_path": "eir-fmm/src/types.rs", "rank": 28, "score": 149764.0034590665 }, { "content": "pub fn find_free_variables(expression: &Expression) -> HashSet<String> {\n\n find_in_expression(expression)\n\n}\n\n\n", "file_path": "eir/src/analysis/free_variables.rs", "rank": 29, "score": 148499.98363778467 }, { "content": "pub fn 
compile_variant_payload() -> fmm::types::Primitive {\n\n fmm::types::Primitive::Integer64\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 30, "score": 146558.21750083947 }, { "content": "pub fn compile_variant_tag() -> fmm::types::Pointer {\n\n fmm::types::Pointer::new(fmm::types::Record::new(vec![\n\n // clone function\n\n fmm::types::Function::new(\n\n vec![compile_variant_payload().into()],\n\n fmm::types::VOID_TYPE.clone(),\n\n fmm::types::CallingConvention::Target,\n\n )\n\n .into(),\n\n // drop function\n\n fmm::types::Function::new(\n\n vec![compile_variant_payload().into()],\n\n fmm::types::VOID_TYPE.clone(),\n\n fmm::types::CallingConvention::Target,\n\n )\n\n .into(),\n\n ]))\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 31, "score": 146558.21750083947 }, { "content": "pub fn get_record_clone_function_name(name: &str) -> String {\n\n format!(\"eir_clone_{}\", name)\n\n}\n\n\n", "file_path": "eir-fmm/src/reference_count/record_utilities.rs", "rank": 32, "score": 144712.8536785387 }, { "content": "pub fn compile_variant_clone_function(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n type_: &eir::types::Type,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n module_builder.define_function(\n\n format!(\"variant_clone_{}\", types::compile_type_id(type_)),\n\n vec![fmm::ir::Argument::new(\n\n \"_payload\",\n\n types::compile_variant_payload(),\n\n )],\n\n |builder| -> Result<_, CompileError> {\n\n let payload = fmm::build::variable(\"_payload\", types::compile_variant_payload());\n\n\n\n expressions::clone_expression(\n\n &builder,\n\n &crate::variants::compile_unboxed_payload(&builder, &payload, type_, types)?,\n\n type_,\n\n types,\n\n )?;\n\n\n\n Ok(builder.return_(fmm::ir::VOID_VALUE.clone()))\n\n },\n\n fmm::types::VOID_TYPE.clone(),\n\n fmm::types::CallingConvention::Target,\n\n fmm::ir::Linkage::Weak,\n\n )\n\n}\n\n\n", "file_path": 
"eir-fmm/src/reference_count/variants.rs", "rank": 33, "score": 144261.36840434847 }, { "content": "pub fn compile_record_clone_function(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n definition: &eir::ir::TypeDefinition,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<(), CompileError> {\n\n let record_type = eir::types::Record::new(definition.name());\n\n let fmm_record_type = types::compile_record(&record_type, types);\n\n\n\n module_builder.define_function(\n\n record_utilities::get_record_clone_function_name(definition.name()),\n\n vec![fmm::ir::Argument::new(\n\n ARGUMENT_NAME,\n\n fmm_record_type.clone(),\n\n )],\n\n |builder| -> Result<_, CompileError> {\n\n let record = fmm::build::variable(ARGUMENT_NAME, fmm_record_type.clone());\n\n\n\n if types::is_record_boxed(&record_type, types) {\n\n pointers::clone_pointer(&builder, &record)?;\n\n } else {\n", "file_path": "eir-fmm/src/reference_count/records.rs", "rank": 34, "score": 144261.36840434847 }, { "content": "// We can't type this strongly as F-- doesn't support recursive types.\n\npub fn compile_untyped_closure_pointer() -> fmm::types::Pointer {\n\n fmm::types::Pointer::new(fmm::types::Record::new(vec![]))\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 35, "score": 143515.240023828 }, { "content": "pub fn compile_closure_drop_function() -> fmm::types::Function {\n\n // The argument is a closure pointer.\n\n fmm::types::Function::new(\n\n vec![fmm::types::Primitive::PointerInteger.into()],\n\n fmm::types::VOID_TYPE.clone(),\n\n fmm::types::CallingConvention::Target,\n\n )\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 36, "score": 143511.6062463074 }, { "content": "fn compile_let(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n instruction_builder: &fmm::build::InstructionBuilder,\n\n let_: &eir::ir::Let,\n\n variables: &HashMap<String, fmm::build::TypedExpression>,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> 
Result<fmm::build::TypedExpression, CompileError> {\n\n let compile = |expression, variables| {\n\n compile(\n\n module_builder,\n\n instruction_builder,\n\n expression,\n\n variables,\n\n types,\n\n )\n\n };\n\n\n\n compile(\n\n let_.expression(),\n\n &variables\n\n .clone()\n\n .drain()\n\n .chain(vec![(\n\n let_.name().into(),\n\n compile(let_.bound_expression(), variables)?,\n\n )])\n\n .collect(),\n\n )\n\n}\n\n\n", "file_path": "eir-fmm/src/expressions.rs", "rank": 37, "score": 141019.98960979062 }, { "content": "pub fn compile_record_rc_function_type(\n\n record: &eir::types::Record,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> fmm::types::Function {\n\n fmm::types::Function::new(\n\n vec![types::compile_record(record, types)],\n\n fmm::types::VOID_TYPE.clone(),\n\n fmm::types::CallingConvention::Target,\n\n )\n\n}\n", "file_path": "eir-fmm/src/reference_count/record_utilities.rs", "rank": 38, "score": 136914.52855268953 }, { "content": "fn compile_let_recursive(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n instruction_builder: &fmm::build::InstructionBuilder,\n\n let_: &eir::ir::LetRecursive,\n\n variables: &HashMap<String, fmm::build::TypedExpression>,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n let closure_pointer = reference_count::allocate_heap(\n\n instruction_builder,\n\n types::compile_sized_closure(let_.definition(), types),\n\n )?;\n\n\n\n instruction_builder.store(\n\n closures::compile_closure_content(\n\n entry_functions::compile(module_builder, let_.definition(), variables, types)?,\n\n closures::compile_drop_function(module_builder, let_.definition(), types)?,\n\n let_.definition()\n\n .environment()\n\n .iter()\n\n .map(|free_variable| variables[free_variable.name()].clone())\n", "file_path": "eir-fmm/src/expressions.rs", "rank": 39, "score": 136783.07656283362 }, { "content": "fn infer_in_if(if_: &If, variables: &HashMap<String, Type>) -> If {\n\n 
If::new(\n\n infer_in_expression(if_.condition(), variables),\n\n infer_in_expression(if_.then(), variables),\n\n infer_in_expression(if_.else_(), variables),\n\n )\n\n}\n\n\n", "file_path": "eir/src/analysis/environment_inference.rs", "rank": 40, "score": 135878.97815449018 }, { "content": "fn infer_in_clone_variables(\n\n clone: &CloneVariables,\n\n variables: &HashMap<String, Type>,\n\n) -> CloneVariables {\n\n CloneVariables::new(\n\n clone.variables().clone(),\n\n infer_in_expression(clone.expression(), variables),\n\n )\n\n}\n\n\n", "file_path": "eir/src/analysis/environment_inference.rs", "rank": 41, "score": 133686.29866664903 }, { "content": "pub fn compile_arity(arity: usize) -> fmm::ir::Primitive {\n\n fmm::ir::Primitive::PointerInteger(arity as i64)\n\n}\n\n\n", "file_path": "eir-fmm/src/expressions.rs", "rank": 42, "score": 131733.0758022774 }, { "content": "pub fn drop_expression(\n\n builder: &fmm::build::InstructionBuilder,\n\n expression: &fmm::build::TypedExpression,\n\n type_: &eir::types::Type,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<(), CompileError> {\n\n match type_ {\n\n eir::types::Type::ByteString => pointers::drop_pointer(builder, expression, |_| Ok(()))?,\n\n eir::types::Type::Function(_) => functions::drop_function(builder, expression)?,\n\n eir::types::Type::Record(record) => {\n\n builder.call(\n\n fmm::build::variable(\n\n record_utilities::get_record_drop_function_name(record.name()),\n\n record_utilities::compile_record_rc_function_type(record, types),\n\n ),\n\n vec![expression.clone()],\n\n )?;\n\n }\n\n eir::types::Type::Variant => {\n\n builder.call(\n", "file_path": "eir-fmm/src/reference_count/expressions.rs", "rank": 43, "score": 129518.89465599916 }, { "content": "fn compile_entry_function_from_arguments_and_result<'a>(\n\n arguments: impl IntoIterator<Item = &'a eir::types::Type>,\n\n result: &eir::types::Type,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> fmm::types::Function 
{\n\n fmm::types::Function::new(\n\n vec![compile_untyped_closure_pointer().into()]\n\n .into_iter()\n\n .chain(arguments.into_iter().map(|type_| compile(type_, types)))\n\n .collect(),\n\n compile(result, types),\n\n fmm::types::CallingConvention::Source,\n\n )\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 44, "score": 127737.99973687582 }, { "content": "fn collect_from_case(case: &Case) -> HashSet<Type> {\n\n case.alternatives()\n\n .iter()\n\n .flat_map(|alternative| {\n\n vec![alternative.type_().clone()]\n\n .into_iter()\n\n .chain(collect_from_expression(alternative.expression()))\n\n })\n\n .chain(\n\n case.default_alternative()\n\n .map(|alternative| collect_from_expression(alternative.expression()))\n\n .unwrap_or_default(),\n\n )\n\n .collect()\n\n}\n", "file_path": "eir/src/analysis/variant_type_collection.rs", "rank": 45, "score": 126135.0742434179 }, { "content": "pub fn compile_declaration(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n declaration: &eir::ir::Declaration,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) {\n\n module_builder.declare_variable(\n\n declaration.name(),\n\n types::compile_unsized_closure(declaration.type_(), types),\n\n );\n\n}\n", "file_path": "eir-fmm/src/declarations.rs", "rank": 46, "score": 124867.87878717846 }, { "content": "pub fn compile(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n instruction_builder: &fmm::build::InstructionBuilder,\n\n closure_pointer: fmm::build::TypedExpression,\n\n arguments: &[fmm::build::TypedExpression],\n\n argument_types: &[&eir::types::Type],\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n compile_with_min_arity(\n\n module_builder,\n\n instruction_builder,\n\n closure_pointer,\n\n arguments,\n\n 1,\n\n argument_types,\n\n types,\n\n )\n\n}\n\n\n", "file_path": "eir-fmm/src/function_applications.rs", "rank": 47, "score": 124867.87878717846 }, { "content": "// TODO Unbox small 
non-recursive records.\n\npub fn is_record_boxed(\n\n record: &eir::types::Record,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> bool {\n\n !types[record.name()].elements().is_empty()\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 49, "score": 124384.97893690573 }, { "content": "fn infer_in_record(record: &Record, variables: &HashMap<String, Type>) -> Record {\n\n Record::new(\n\n record.type_().clone(),\n\n record\n\n .elements()\n\n .iter()\n\n .map(|element| infer_in_expression(element, variables))\n\n .collect(),\n\n )\n\n}\n\n\n", "file_path": "eir/src/analysis/environment_inference.rs", "rank": 50, "score": 124210.51932989829 }, { "content": "fn infer_in_case(case: &Case, variables: &HashMap<String, Type>) -> Case {\n\n Case::new(\n\n infer_in_expression(case.argument(), variables),\n\n case.alternatives()\n\n .iter()\n\n .map(|alternative| infer_in_alternative(alternative, variables))\n\n .collect(),\n\n case.default_alternative()\n\n .map(|alternative| infer_in_default_alternative(alternative, variables)),\n\n )\n\n}\n\n\n", "file_path": "eir/src/analysis/environment_inference.rs", "rank": 51, "score": 124210.51932989829 }, { "content": "fn infer_in_variant(variant: &Variant, variables: &HashMap<String, Type>) -> Variant {\n\n Variant::new(\n\n variant.type_().clone(),\n\n infer_in_expression(variant.payload(), variables),\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::types;\n\n use pretty_assertions::assert_eq;\n\n\n\n #[test]\n\n fn infer_empty_environment() {\n\n assert_eq!(\n\n infer_in_local_definition(\n\n &Definition::new(\n\n \"f\",\n\n vec![Argument::new(\"x\", Type::Number)],\n", "file_path": "eir/src/analysis/environment_inference.rs", "rank": 52, "score": 124210.51932989829 }, { "content": "pub fn compile_load_arity(\n\n builder: &fmm::build::InstructionBuilder,\n\n closure_pointer: impl Into<fmm::build::TypedExpression>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n 
Ok(builder.load(fmm::build::record_address(\n\n reference_count::compile_untagged_pointer(&closure_pointer.into())?,\n\n 2,\n\n )?)?)\n\n}\n\n\n", "file_path": "eir-fmm/src/closures.rs", "rank": 53, "score": 121916.33991531617 }, { "content": "pub fn compile_unboxed_payload(\n\n builder: &fmm::build::InstructionBuilder,\n\n payload: &fmm::build::TypedExpression,\n\n type_: &eir::types::Type,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n Ok(compile_union_bit_cast(\n\n builder,\n\n types::compile(type_, types),\n\n payload.clone(),\n\n )?)\n\n}\n\n\n", "file_path": "eir-fmm/src/variants.rs", "rank": 54, "score": 121916.33991531617 }, { "content": "pub fn compile_drop_function(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n definition: &eir::ir::Definition,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n compile_drop_function_with_builder(\n\n module_builder,\n\n types,\n\n |builder, environment_pointer| -> Result<_, CompileError> {\n\n let environment = builder.load(fmm::build::bit_cast(\n\n fmm::types::Pointer::new(types::compile_environment(definition, types)),\n\n environment_pointer.clone(),\n\n ))?;\n\n\n\n for (index, free_variable) in definition.environment().iter().enumerate() {\n\n reference_count::drop_expression(\n\n builder,\n\n &builder.deconstruct_record(environment.clone(), index)?,\n\n free_variable.type_(),\n\n types,\n\n )?;\n\n }\n\n\n\n Ok(())\n\n },\n\n )\n\n}\n\n\n", "file_path": "eir-fmm/src/closures.rs", "rank": 55, "score": 121916.33991531617 }, { "content": "pub fn compile_boxed_payload(\n\n builder: &fmm::build::InstructionBuilder,\n\n payload: &fmm::build::TypedExpression,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n Ok(compile_union_bit_cast(\n\n builder,\n\n types::compile_variant_payload(),\n\n payload.clone(),\n\n )?)\n\n}\n\n\n", "file_path": "eir-fmm/src/variants.rs", 
"rank": 56, "score": 121916.33991531617 }, { "content": "pub fn compile_closure_content(\n\n entry_function: impl Into<fmm::build::TypedExpression>,\n\n drop_function: impl Into<fmm::build::TypedExpression>,\n\n free_variables: Vec<fmm::build::TypedExpression>,\n\n) -> fmm::build::TypedExpression {\n\n let entry_function = entry_function.into();\n\n\n\n fmm::build::record(vec![\n\n entry_function.clone(),\n\n drop_function.into(),\n\n expressions::compile_arity(types::get_arity(\n\n entry_function.type_().to_function().unwrap(),\n\n ))\n\n .into(),\n\n fmm::build::record(free_variables).into(),\n\n ])\n\n .into()\n\n}\n\n\n", "file_path": "eir-fmm/src/closures.rs", "rank": 57, "score": 121916.33991531617 }, { "content": "pub fn clone_pointer(\n\n builder: &fmm::build::InstructionBuilder,\n\n expression: &fmm::build::TypedExpression,\n\n) -> Result<(), CompileError> {\n\n if_heap_pointer(builder, expression, |builder| {\n\n builder.atomic_operation(\n\n fmm::ir::AtomicOperator::Add,\n\n get_counter_pointer(expression)?,\n\n fmm::ir::Primitive::PointerInteger(1),\n\n fmm::ir::AtomicOrdering::Relaxed,\n\n )?;\n\n\n\n Ok(())\n\n })?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "eir-fmm/src/reference_count/pointers.rs", "rank": 58, "score": 120023.95247242044 }, { "content": "pub fn clone_function(\n\n builder: &fmm::build::InstructionBuilder,\n\n closure_pointer: &fmm::build::TypedExpression,\n\n) -> Result<(), CompileError> {\n\n pointers::clone_pointer(builder, closure_pointer)\n\n}\n\n\n", "file_path": "eir-fmm/src/reference_count/functions.rs", "rank": 59, "score": 120023.95247242044 }, { "content": "pub fn compile_load_entry_function(\n\n builder: &fmm::build::InstructionBuilder,\n\n closure_pointer: impl Into<fmm::build::TypedExpression>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n // Entry functions of thunks need to be loaded atomically\n\n // to make thunk update thread-safe.\n\n Ok(builder.atomic_load(\n\n 
compile_entry_function_pointer(closure_pointer)?,\n\n fmm::ir::AtomicOrdering::Acquire,\n\n )?)\n\n}\n\n\n", "file_path": "eir-fmm/src/closures.rs", "rank": 60, "score": 119136.8505455323 }, { "content": "pub fn compile_drop_function_pointer(\n\n closure_pointer: impl Into<fmm::build::TypedExpression>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n Ok(fmm::build::record_address(\n\n reference_count::compile_untagged_pointer(&closure_pointer.into())?,\n\n 1,\n\n )?\n\n .into())\n\n}\n\n\n", "file_path": "eir-fmm/src/closures.rs", "rank": 61, "score": 119136.8505455323 }, { "content": "pub fn compile_foreign_declaration(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n declaration: &eir::ir::ForeignDeclaration,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<(), fmm::build::BuildError> {\n\n module_builder.define_variable(\n\n declaration.name(),\n\n fmm::build::record(vec![\n\n compile_entry_function(module_builder, declaration, types)?,\n\n fmm::ir::Undefined::new(types::compile_closure_drop_function()).into(),\n\n expressions::compile_arity(declaration.type_().arguments().into_iter().count()).into(),\n\n fmm::ir::Undefined::new(types::compile_unsized_environment()).into(),\n\n ]),\n\n false,\n\n fmm::ir::Linkage::Internal,\n\n None,\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "eir-fmm/src/foreign_declarations.rs", "rank": 62, "score": 119136.85054553229 }, { "content": "pub fn compile_load_drop_function(\n\n builder: &fmm::build::InstructionBuilder,\n\n closure_pointer: impl Into<fmm::build::TypedExpression>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n Ok(builder.load(compile_drop_function_pointer(closure_pointer)?)?)\n\n}\n\n\n", "file_path": "eir-fmm/src/closures.rs", "rank": 63, "score": 119136.8505455323 }, { "content": "pub fn compile_entry_function_pointer(\n\n closure_pointer: impl Into<fmm::build::TypedExpression>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n 
Ok(fmm::build::record_address(\n\n reference_count::compile_untagged_pointer(&closure_pointer.into())?,\n\n 0,\n\n )?\n\n .into())\n\n}\n\n\n", "file_path": "eir-fmm/src/closures.rs", "rank": 64, "score": 119136.8505455323 }, { "content": "pub fn infer_environment(module: &Module) -> Module {\n\n Module::new(\n\n module.type_definitions().to_vec(),\n\n module.foreign_declarations().to_vec(),\n\n module.foreign_definitions().to_vec(),\n\n module.declarations().to_vec(),\n\n module\n\n .definitions()\n\n .iter()\n\n .map(|definition| infer_in_global_definition(definition))\n\n .collect(),\n\n )\n\n}\n\n\n", "file_path": "eir/src/analysis/environment_inference.rs", "rank": 65, "score": 118572.18351231932 }, { "content": "pub fn get_record_drop_function_name(name: &str) -> String {\n\n format!(\"eir_drop_{}\", name)\n\n}\n\n\n", "file_path": "eir-fmm/src/reference_count/record_utilities.rs", "rank": 66, "score": 118146.83467373416 }, { "content": "pub fn get_arity(type_: &fmm::types::Function) -> usize {\n\n type_.arguments().len() - FUNCTION_ARGUMENT_OFFSET\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 67, "score": 117201.10890648363 }, { "content": "pub fn compile_normal_thunk_drop_function(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n definition: &eir::ir::Definition,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n compile_drop_function_with_builder(\n\n module_builder,\n\n types,\n\n |builder, environment_pointer| -> Result<_, CompileError> {\n\n reference_count::drop_expression(\n\n builder,\n\n &builder.load(fmm::build::union_address(\n\n fmm::build::bit_cast(\n\n fmm::types::Pointer::new(types::compile_closure_payload(definition, types)),\n\n environment_pointer.clone(),\n\n ),\n\n 1,\n\n )?)?,\n\n definition.result_type(),\n\n types,\n\n )?;\n\n\n\n Ok(())\n\n },\n\n )\n\n}\n\n\n", "file_path": "eir-fmm/src/closures.rs", "rank": 68, "score": 116514.79319083385 }, { 
"content": "pub fn compile_untagged_pointer(\n\n pointer: &fmm::build::TypedExpression,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n Ok(fmm::build::bit_cast(\n\n pointer.type_().clone(),\n\n fmm::build::bitwise_operation(\n\n fmm::ir::BitwiseOperator::And,\n\n fmm::build::bit_cast(fmm::types::Primitive::PointerInteger, pointer.clone()),\n\n fmm::build::bitwise_not_operation(fmm::ir::Primitive::PointerInteger(1))?,\n\n )?,\n\n )\n\n .into())\n\n}\n\n\n", "file_path": "eir-fmm/src/reference_count/pointers.rs", "rank": 69, "score": 116514.79319083385 }, { "content": "pub fn compile_tagged_pointer(\n\n pointer: &fmm::build::TypedExpression,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n Ok(fmm::build::bit_cast(\n\n pointer.type_().clone(),\n\n fmm::build::bitwise_operation(\n\n fmm::ir::BitwiseOperator::Or,\n\n fmm::build::bit_cast(fmm::types::Primitive::PointerInteger, pointer.clone()),\n\n fmm::ir::Primitive::PointerInteger(1),\n\n )?,\n\n )\n\n .into())\n\n}\n\n\n", "file_path": "eir-fmm/src/reference_count/pointers.rs", "rank": 70, "score": 116514.79319083384 }, { "content": "pub fn compile(module: &eir::ir::Module) -> Result<fmm::ir::Module, CompileError> {\n\n eir::analysis::check_types(module)?;\n\n\n\n let module = eir::analysis::infer_environment(module);\n\n let module = eir::analysis::count_references(&module)?;\n\n\n\n eir::analysis::check_types(&module)?;\n\n\n\n let module_builder = fmm::build::ModuleBuilder::new();\n\n let types = module\n\n .type_definitions()\n\n .iter()\n\n .map(|definition| (definition.name().into(), definition.type_().clone()))\n\n .collect();\n\n\n\n for type_ in &eir::analysis::collect_variant_types(&module) {\n\n compile_type_information_global_variable(&module_builder, type_, &types)?;\n\n }\n\n\n\n for definition in module.type_definitions() {\n", "file_path": "eir-fmm/src/lib.rs", "rank": 71, "score": 114609.87584825646 }, { "content": "pub fn compile_record_drop_function(\n\n 
module_builder: &fmm::build::ModuleBuilder,\n\n definition: &eir::ir::TypeDefinition,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<(), CompileError> {\n\n let record_type = eir::types::Record::new(definition.name());\n\n let fmm_record_type = types::compile_record(&record_type, types);\n\n\n\n module_builder.define_function(\n\n record_utilities::get_record_drop_function_name(definition.name()),\n\n vec![fmm::ir::Argument::new(\n\n ARGUMENT_NAME,\n\n fmm_record_type.clone(),\n\n )],\n\n |builder| -> Result<_, CompileError> {\n\n let record = fmm::build::variable(ARGUMENT_NAME, fmm_record_type.clone());\n\n\n\n if types::is_record_boxed(&record_type, types) {\n\n pointers::drop_pointer(&builder, &record, |builder| {\n\n drop_record_elements(\n", "file_path": "eir-fmm/src/reference_count/records.rs", "rank": 72, "score": 114037.16064769521 }, { "content": "pub fn compile_drop_function_for_partially_applied_closure(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n closure_pointer_type: &fmm::types::Type,\n\n argument_types: &[(&fmm::types::Type, &eir::types::Type)],\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n compile_drop_function_with_builder(\n\n module_builder,\n\n types,\n\n |builder, environment_pointer| -> Result<_, CompileError> {\n\n let environment = builder.load(fmm::build::bit_cast(\n\n fmm::types::Pointer::new(fmm::types::Record::new(\n\n vec![closure_pointer_type.clone()]\n\n .into_iter()\n\n .chain(\n\n argument_types\n\n .iter()\n\n .map(|(fmm_type, _)| fmm_type)\n\n .cloned()\n\n .cloned(),\n", "file_path": "eir-fmm/src/closures.rs", "rank": 73, "score": 114037.16064769523 }, { "content": "pub fn compile_variant_drop_function(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n type_: &eir::types::Type,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n module_builder.define_function(\n\n 
format!(\"variant_drop_{}\", types::compile_type_id(type_)),\n\n vec![fmm::ir::Argument::new(\n\n \"_payload\",\n\n types::compile_variant_payload(),\n\n )],\n\n |builder| -> Result<_, CompileError> {\n\n let payload = fmm::build::variable(\"_payload\", types::compile_variant_payload());\n\n\n\n expressions::drop_expression(\n\n &builder,\n\n &crate::variants::compile_unboxed_payload(&builder, &payload, type_, types)?,\n\n type_,\n\n types,\n\n )?;\n\n\n\n Ok(builder.return_(fmm::ir::VOID_VALUE.clone()))\n\n },\n\n fmm::types::VOID_TYPE.clone(),\n\n fmm::types::CallingConvention::Target,\n\n fmm::ir::Linkage::Weak,\n\n )\n\n}\n", "file_path": "eir-fmm/src/reference_count/variants.rs", "rank": 74, "score": 114037.16064769523 }, { "content": "pub fn check_types(module: &Module) -> Result<(), TypeCheckError> {\n\n names::check_names(module)?;\n\n\n\n let types = module\n\n .type_definitions()\n\n .iter()\n\n .map(|definition| (definition.name(), definition.type_()))\n\n .collect();\n\n let mut variables = HashMap::<&str, Type>::new();\n\n\n\n for declaration in module.foreign_declarations() {\n\n variables.insert(declaration.name(), declaration.type_().clone().into());\n\n }\n\n\n\n for declaration in module.declarations() {\n\n variables.insert(declaration.name(), declaration.type_().clone().into());\n\n }\n\n\n\n for definition in module.definitions() {\n\n variables.insert(definition.name(), definition.type_().clone().into());\n", "file_path": "eir/src/analysis/type_check/mod.rs", "rank": 76, "score": 112571.67388182142 }, { "content": "fn check_types(definitions: &[TypeDefinition]) -> Result<(), TypeCheckError> {\n\n let mut names = HashSet::new();\n\n\n\n for definition in definitions {\n\n if names.contains(definition.name()) {\n\n return Err(TypeCheckError::DuplicateTypeNames(definition.name().into()));\n\n }\n\n\n\n names.insert(definition.name());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "eir/src/analysis/type_check/names.rs", "rank": 77, "score": 
110812.88935738373 }, { "content": "fn infer_in_global_definition(definition: &Definition) -> Definition {\n\n Definition::with_options(\n\n definition.name(),\n\n vec![],\n\n definition.arguments().to_vec(),\n\n infer_in_expression(\n\n definition.body(),\n\n &definition\n\n .arguments()\n\n .iter()\n\n .map(|argument| (argument.name().into(), argument.type_().clone()))\n\n .collect(),\n\n ),\n\n definition.result_type().clone(),\n\n definition.is_thunk(),\n\n )\n\n}\n\n\n", "file_path": "eir/src/analysis/environment_inference.rs", "rank": 78, "score": 110195.3129493046 }, { "content": "fn compile_if(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n instruction_builder: &fmm::build::InstructionBuilder,\n\n if_: &eir::ir::If,\n\n variables: &HashMap<String, fmm::build::TypedExpression>,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n let compile = |instruction_builder: &fmm::build::InstructionBuilder, expression| {\n\n compile(\n\n module_builder,\n\n instruction_builder,\n\n expression,\n\n variables,\n\n types,\n\n )\n\n };\n\n\n\n instruction_builder.if_(\n\n compile(instruction_builder, if_.condition())?,\n\n |instruction_builder| {\n\n Ok(instruction_builder.branch(compile(&instruction_builder, if_.then())?))\n\n },\n\n |instruction_builder| {\n\n Ok(instruction_builder.branch(compile(&instruction_builder, if_.else_())?))\n\n },\n\n )\n\n}\n\n\n", "file_path": "eir-fmm/src/expressions.rs", "rank": 79, "score": 109290.19542158663 }, { "content": "pub fn check_names(module: &Module) -> Result<(), TypeCheckError> {\n\n check_types(module.type_definitions())?;\n\n check_functions(module)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "eir/src/analysis/type_check/names.rs", "rank": 80, "score": 107267.83592336817 }, { "content": "fn compile_case(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n instruction_builder: &fmm::build::InstructionBuilder,\n\n case: &eir::ir::Case,\n\n variables: 
&HashMap<String, fmm::build::TypedExpression>,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<fmm::build::TypedExpression, CompileError> {\n\n let argument = compile(\n\n module_builder,\n\n instruction_builder,\n\n case.argument(),\n\n variables,\n\n types,\n\n )?;\n\n\n\n Ok(compile_alternatives(\n\n module_builder,\n\n instruction_builder,\n\n argument,\n\n case.alternatives(),\n\n case.default_alternative(),\n\n variables,\n\n types,\n\n )?\n\n .unwrap())\n\n}\n\n\n", "file_path": "eir-fmm/src/expressions.rs", "rank": 81, "score": 105931.35897495573 }, { "content": "fn compile_alternatives(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n instruction_builder: &fmm::build::InstructionBuilder,\n\n argument: fmm::build::TypedExpression,\n\n alternatives: &[eir::ir::Alternative],\n\n default_alternative: Option<&eir::ir::DefaultAlternative>,\n\n variables: &HashMap<String, fmm::build::TypedExpression>,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<Option<fmm::build::TypedExpression>, CompileError> {\n\n Ok(match alternatives {\n\n [] => default_alternative\n\n .map(|alternative| {\n\n compile(\n\n module_builder,\n\n instruction_builder,\n\n alternative.expression(),\n\n &variables\n\n .clone()\n\n .into_iter()\n\n .chain(vec![(alternative.name().into(), argument)])\n", "file_path": "eir-fmm/src/expressions.rs", "rank": 82, "score": 105931.35897495573 }, { "content": "fn find_in_definition(definition: &Definition) -> HashSet<String> {\n\n find_in_expression(definition.body())\n\n .into_iter()\n\n .filter(|variable| {\n\n variable != definition.name()\n\n && definition\n\n .arguments()\n\n .iter()\n\n .all(|argument| variable != argument.name())\n\n })\n\n .collect()\n\n}\n", "file_path": "eir/src/analysis/free_variables.rs", "rank": 83, "score": 104737.73823521685 }, { "content": "fn find_in_expression(expression: &Expression) -> HashSet<String> {\n\n match expression {\n\n Expression::ArithmeticOperation(operation) => 
find_in_expression(operation.lhs())\n\n .into_iter()\n\n .chain(find_in_expression(operation.rhs()))\n\n .collect(),\n\n Expression::Case(case) => find_in_case(case),\n\n Expression::CloneVariables(clone) => find_in_expression(clone.expression()),\n\n Expression::ComparisonOperation(operation) => find_in_expression(operation.lhs())\n\n .into_iter()\n\n .chain(find_in_expression(operation.rhs()))\n\n .collect(),\n\n Expression::DropVariables(drop) => find_in_expression(drop.expression()),\n\n Expression::FunctionApplication(application) => find_in_expression(application.function())\n\n .into_iter()\n\n .chain(find_in_expression(application.argument()))\n\n .collect(),\n\n Expression::If(if_) => find_in_expression(if_.condition())\n\n .into_iter()\n\n .chain(find_in_expression(if_.then()))\n", "file_path": "eir/src/analysis/free_variables.rs", "rank": 84, "score": 104313.42911992349 }, { "content": "fn compile_global_variables(\n\n module: &eir::ir::Module,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<HashMap<String, fmm::build::TypedExpression>, CompileError> {\n\n module\n\n .foreign_declarations()\n\n .iter()\n\n .map(|declaration| {\n\n (\n\n declaration.name().into(),\n\n fmm::build::variable(\n\n declaration.name(),\n\n fmm::types::Pointer::new(types::compile_unsized_closure(\n\n declaration.type_(),\n\n types,\n\n )),\n\n ),\n\n )\n\n })\n\n .chain(module.declarations().iter().map(|declaration| {\n", "file_path": "eir-fmm/src/lib.rs", "rank": 86, "score": 103038.11162805284 }, { "content": "fn compile_comparison_operation(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n instruction_builder: &fmm::build::InstructionBuilder,\n\n operation: &eir::ir::ComparisonOperation,\n\n variables: &HashMap<String, fmm::build::TypedExpression>,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<fmm::ir::ComparisonOperation, CompileError> {\n\n let compile = |expression| {\n\n compile(\n\n module_builder,\n\n instruction_builder,\n\n 
expression,\n\n variables,\n\n types,\n\n )\n\n };\n\n\n\n let lhs = compile(operation.lhs())?;\n\n let rhs = compile(operation.rhs())?;\n\n\n", "file_path": "eir-fmm/src/expressions.rs", "rank": 87, "score": 102780.43342897012 }, { "content": "fn compile_arithmetic_operation(\n\n module_builder: &fmm::build::ModuleBuilder,\n\n instruction_builder: &fmm::build::InstructionBuilder,\n\n operation: &eir::ir::ArithmeticOperation,\n\n variables: &HashMap<String, fmm::build::TypedExpression>,\n\n types: &HashMap<String, eir::types::RecordBody>,\n\n) -> Result<fmm::ir::ArithmeticOperation, CompileError> {\n\n let compile = |expression| {\n\n compile(\n\n module_builder,\n\n instruction_builder,\n\n expression,\n\n variables,\n\n types,\n\n )\n\n };\n\n\n\n let lhs = compile(operation.lhs())?;\n\n let rhs = compile(operation.rhs())?;\n\n\n", "file_path": "eir-fmm/src/expressions.rs", "rank": 88, "score": 102780.43342897012 }, { "content": "fn compile_calling_convention(\n\n calling_convention: eir::ir::CallingConvention,\n\n) -> fmm::types::CallingConvention {\n\n match calling_convention {\n\n eir::ir::CallingConvention::Source => fmm::types::CallingConvention::Source,\n\n eir::ir::CallingConvention::Target => fmm::types::CallingConvention::Target,\n\n }\n\n}\n\n\n", "file_path": "eir-fmm/src/types.rs", "rank": 89, "score": 101786.96243228657 }, { "content": "fn should_clone_variable(\n\n variable: &str,\n\n owned_variables: &HashMap<String, Type>,\n\n moved_variables: &HashSet<String>,\n\n) -> bool {\n\n owned_variables.contains_key(variable) && moved_variables.contains(variable)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::types::{self, Type};\n\n\n\n #[test]\n\n fn convert_record() {\n\n assert_eq!(\n\n convert_expression(\n\n &Record::new(\n\n types::Record::new(\"a\"),\n\n vec![Variable::new(\"x\").into(), Variable::new(\"x\").into()]\n", "file_path": "eir/src/analysis/reference_count/mod.rs", "rank": 90, "score": 100982.3697582033 }, { 
"content": "fn clone_variables(\n\n expression: impl Into<Expression>,\n\n cloned_variables: HashSet<String>,\n\n owned_variables: &HashMap<String, Type>,\n\n) -> Expression {\n\n let expression = expression.into();\n\n\n\n if cloned_variables.is_empty() {\n\n expression\n\n } else {\n\n CloneVariables::new(\n\n owned_variables\n\n .clone()\n\n .into_iter()\n\n .filter(|(variable, _)| cloned_variables.contains(variable.as_str()))\n\n .collect(),\n\n expression,\n\n )\n\n .into()\n\n }\n\n}\n\n\n", "file_path": "eir/src/analysis/reference_count/mod.rs", "rank": 91, "score": 100982.3697582033 }, { "content": "fn infer_in_local_definition(\n\n definition: &Definition,\n\n variables: &HashMap<String, Type>,\n\n) -> Definition {\n\n Definition::with_options(\n\n definition.name(),\n\n find_free_variables(definition.body())\n\n .iter()\n\n .filter_map(|name| {\n\n variables\n\n .get(name)\n\n .map(|type_| Argument::new(name, type_.clone()))\n\n })\n\n .collect(),\n\n definition.arguments().to_vec(),\n\n infer_in_expression(\n\n definition.body(),\n\n &variables\n\n .clone()\n\n .drain()\n", "file_path": "eir/src/analysis/environment_inference.rs", "rank": 92, "score": 100800.7839053528 }, { "content": "fn infer_in_drop_variables(\n\n drop: &DropVariables,\n\n variables: &HashMap<String, Type>,\n\n) -> DropVariables {\n\n DropVariables::new(\n\n drop.variables().clone(),\n\n infer_in_expression(drop.expression(), variables),\n\n )\n\n}\n\n\n", "file_path": "eir/src/analysis/environment_inference.rs", "rank": 93, "score": 100740.03493191215 }, { "content": "fn check_definition(\n\n definition: &Definition,\n\n variables: &HashMap<&str, Type>,\n\n types: &HashMap<&str, &types::RecordBody>,\n\n) -> Result<(), TypeCheckError> {\n\n let mut variables = variables.clone();\n\n\n\n for argument in definition\n\n .environment()\n\n .iter()\n\n .chain(definition.arguments())\n\n {\n\n variables.insert(argument.name(), argument.type_().clone());\n\n }\n\n\n\n check_equality(\n\n 
&check_expression(definition.body(), &variables, types)?,\n\n &definition.result_type().clone(),\n\n )\n\n}\n\n\n", "file_path": "eir/src/analysis/type_check/mod.rs", "rank": 94, "score": 99660.98252361159 }, { "content": "fn check_variable(\n\n variable: &Variable,\n\n variables: &HashMap<&str, Type>,\n\n) -> Result<Type, TypeCheckError> {\n\n variables\n\n .get(variable.name())\n\n .cloned()\n\n .ok_or_else(|| TypeCheckError::VariableNotFound(variable.clone()))\n\n}\n\n\n", "file_path": "eir/src/analysis/type_check/mod.rs", "rank": 95, "score": 99600.23355017093 }, { "content": "fn check_expression(\n\n expression: &Expression,\n\n variables: &HashMap<&str, Type>,\n\n types: &HashMap<&str, &types::RecordBody>,\n\n) -> Result<Type, TypeCheckError> {\n\n let check_expression = |expression, variables| check_expression(expression, variables, types);\n\n\n\n Ok(match expression {\n\n Expression::ArithmeticOperation(operation) => {\n\n check_equality(\n\n &check_expression(operation.lhs(), variables)?,\n\n &Type::Number,\n\n )?;\n\n check_equality(\n\n &check_expression(operation.rhs(), variables)?,\n\n &Type::Number,\n\n )?;\n\n\n\n Type::Number\n\n }\n", "file_path": "eir/src/analysis/type_check/mod.rs", "rank": 96, "score": 99350.29103201308 }, { "content": "fn check_equality(one: &Type, other: &Type) -> Result<(), TypeCheckError> {\n\n if one == other {\n\n Ok(())\n\n } else {\n\n Err(TypeCheckError::TypesNotMatched(one.clone(), other.clone()))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{check_types, error::*};\n\n use crate::{\n\n ir::*,\n\n types::{self, Type},\n\n };\n\n\n\n fn create_module_from_definitions(definitions: Vec<Definition>) -> Module {\n\n Module::new(vec![], vec![], vec![], vec![], definitions)\n\n }\n\n\n", "file_path": "eir/src/analysis/type_check/mod.rs", "rank": 98, "score": 98135.1523713566 } ]
Rust
src/kgen/src/ast/functions/mod.rs
capra314cabra/kaprino
7b4ce32631194955e9a9cd4e206edadbc5110da4
use inkwell::types::BasicTypeEnum; use inkwell::types::FunctionType; use inkwell::values::FunctionValue; use crate::ast::CodeGen; use crate::ast::functions::expr_function::ExprFunction; use crate::ast::functions::external_function::ExternalFunction; use crate::ast::functions::statement_function::StatementFunction; use crate::error::error_token::{ ErrorToken, FilePosition }; use crate::resolvers::parameter_resolver::KParameter; #[derive(Debug,PartialEq)] pub enum FunctionObject { ExprFunction(Box<ExprFunction>), ExternalFunction(Box<ExternalFunction>), StatementFunction(Box<StatementFunction>) } impl<'ctx> FunctionObject { pub fn codegen(&self, gen: &CodeGen<'ctx>) -> Result<(), ErrorToken> { match self { FunctionObject::ExprFunction(obj) => obj.codegen(gen), FunctionObject::ExternalFunction(obj) => obj.codegen(gen), FunctionObject::StatementFunction(obj) => obj.codegen(gen) } } } #[derive(Debug,PartialEq)] pub struct FunctionInfo { pub name: String, pub args: Vec<String>, pub types: Vec<String>, pub ret_type: String } impl FunctionInfo { pub fn new(name: String, args: Vec<String>, types: Vec<String>, ret_type: String) -> Self { FunctionInfo{ name, args, types, ret_type } } } pub trait FunctionObjectTrait { fn get_info(&self) -> &FunctionInfo; fn get_arg_types<'ctx>(&self, gen: &CodeGen<'ctx>, pos: FilePosition) -> Result<Vec<BasicTypeEnum<'ctx>>, ErrorToken> { let mut vec: Vec<BasicTypeEnum<'ctx>> = Vec::new(); let mut error_message: Option<String> = None; let type_resolver = gen.type_resolver.borrow(); for arg_type_name in self.get_info().types.iter() { let arg_type = type_resolver.find(arg_type_name); match arg_type { Some(arg_type) => { let arg_type = arg_type.get_type(gen); vec.push(arg_type); }, None => { error_message = Some(format!( "Unknown types {0} were used in declaration of arguments of the function named {1}.", arg_type_name, self.get_info().name )); break; } }; }; match error_message { Some(error_message) => { Err(ErrorToken::error(pos, 
error_message)) }, None => { Ok(vec) } } } fn get_ret_type<'ctx>(&self, gen: &CodeGen<'ctx>, pos: FilePosition) -> Result<BasicTypeEnum<'ctx>, ErrorToken> { let type_resolver = gen.type_resolver.borrow(); let ret_type = type_resolver .find(&self.get_info().ret_type) .ok_or(ErrorToken::error( pos, format!( "Unknown types were used in declaration of return value of the function named {}.", self.get_info().name ) ))?; Ok(ret_type.get_type(gen)) } fn get_func_type<'ctx>(&self, gen: &CodeGen<'ctx>, pos: FilePosition) -> Result<FunctionType<'ctx>, ErrorToken> { let arg_types = self.get_arg_types(gen, pos.clone())?; let ret_type = self.get_ret_type(gen, pos)?; let ret_type = match ret_type { BasicTypeEnum::ArrayType(val) => val.fn_type(arg_types.as_slice(), false), BasicTypeEnum::FloatType(val) => val.fn_type(arg_types.as_slice(), false), BasicTypeEnum::IntType(val) => val.fn_type(arg_types.as_slice(), false), BasicTypeEnum::PointerType(val) => val.fn_type(arg_types.as_slice(), false), BasicTypeEnum::StructType(val) => val.fn_type(arg_types.as_slice(), false), BasicTypeEnum::VectorType(val) => val.fn_type(arg_types.as_slice(), false) }; Ok(ret_type) } fn assign_args<'ctx>(&self, gen: &CodeGen<'ctx>, func: &FunctionValue<'ctx>) { let mut param_resolver = gen.param_resolver.borrow_mut(); param_resolver.add_scope(&self.get_info().name); let params = func.get_params(); for (idx, param_name) in self.get_info().args.iter().enumerate() { let allocated = gen.builder.build_alloca(params[idx].get_type(), ""); gen.builder.build_store(allocated, params[idx]); let param = KParameter::new( self.get_info().args[idx].clone(), allocated.into() ); param_resolver.add(param_name, param); }; } } pub mod expr_function; pub mod external_function; pub mod statement_function;
use inkwell::types::BasicTypeEnum; use inkwell::types::FunctionType; use inkwell::values::FunctionValue; use crate::ast::CodeGen; use crate::ast::functions::expr_function::ExprFunction; use crate::ast::functions::external_function::ExternalFunction; use crate::ast::functions::statement_function::StatementFunction; use crate::error::error_token::{ ErrorToken, FilePosition }; use crate::resolvers::parameter_resolver::KParameter; #[derive(Debug,PartialEq)] pub enum FunctionObject { ExprFunction(Box<ExprFunction>), ExternalFunction(Box<ExternalFunction>), StatementFunction(Box<StatementFunction>) } impl<'ctx> FunctionObject { pub fn codegen(&self, gen: &CodeGen<'ctx>) -> Result<(), ErrorToken> { match self { FunctionObject::ExprFunction(obj) => obj.codegen(gen), FunctionObject::ExternalFunction(obj) => obj.codegen(gen), FunctionObject::StatementFunction(obj) => obj.codegen(gen) } } } #[derive(Debug,PartialEq)] pub struct FunctionInfo { pub name: String, pub args: Vec<String>, pub types: Vec<String>, pub ret_type: String } impl FunctionInfo { pub fn new(name: String, args: Vec<String>, types: Vec<String>, ret_type: String) -> Self { FunctionInfo{ name, args, types, ret_type } } } pub trait FunctionObjectTrait { fn get_info(&self) -> &FunctionInfo; fn get_arg_types<'ctx>(&self, gen: &CodeGen<'ctx>, pos: FilePosition) -> Result<Vec<BasicTypeEnum<'ctx>>, ErrorToken> { let mut vec: Vec<BasicTypeEnum<'ctx>> = Vec::new(); let mut error_message: Option<String> = None; let type_resolver = gen.type_resolver.borrow(); for arg_type_name in self.get_info().types.iter() { let arg_type = type_resolver.find(arg_type_name); match arg_type { Some(arg_type) => { let arg_type = arg_type.get_type(gen); vec.push(arg_type); }, None => { error_message = Some(format!( "Unknown types {0} were used in declaration of arguments of the function named {1}.", arg_type_name, self.get_info().name )); break; } }; }; match error_message { Some(error_message) => { Err(ErrorToken::error(pos, 
error_message)) }, None => { Ok(vec) } } } fn get_ret_type<'ctx>(&self, gen: &CodeGen<'ctx>, pos: FilePosition) -> Result<BasicTypeEnum<'ctx>, ErrorToken> { let type_resolver = gen.type_resolver.borrow(); let ret_type = type_resolver .find(&self.get_info().ret_type) .ok_or(ErrorToken::error( pos, format!( "Unknown types w
fn get_func_type<'ctx>(&self, gen: &CodeGen<'ctx>, pos: FilePosition) -> Result<FunctionType<'ctx>, ErrorToken> { let arg_types = self.get_arg_types(gen, pos.clone())?; let ret_type = self.get_ret_type(gen, pos)?; let ret_type = match ret_type { BasicTypeEnum::ArrayType(val) => val.fn_type(arg_types.as_slice(), false), BasicTypeEnum::FloatType(val) => val.fn_type(arg_types.as_slice(), false), BasicTypeEnum::IntType(val) => val.fn_type(arg_types.as_slice(), false), BasicTypeEnum::PointerType(val) => val.fn_type(arg_types.as_slice(), false), BasicTypeEnum::StructType(val) => val.fn_type(arg_types.as_slice(), false), BasicTypeEnum::VectorType(val) => val.fn_type(arg_types.as_slice(), false) }; Ok(ret_type) } fn assign_args<'ctx>(&self, gen: &CodeGen<'ctx>, func: &FunctionValue<'ctx>) { let mut param_resolver = gen.param_resolver.borrow_mut(); param_resolver.add_scope(&self.get_info().name); let params = func.get_params(); for (idx, param_name) in self.get_info().args.iter().enumerate() { let allocated = gen.builder.build_alloca(params[idx].get_type(), ""); gen.builder.build_store(allocated, params[idx]); let param = KParameter::new( self.get_info().args[idx].clone(), allocated.into() ); param_resolver.add(param_name, param); }; } } pub mod expr_function; pub mod external_function; pub mod statement_function;
ere used in declaration of return value of the function named {}.", self.get_info().name ) ))?; Ok(ret_type.get_type(gen)) }
function_block-function_prefixed
[ { "content": "///\n\n/// Execute a function Just In Time.\n\n///\n\npub fn execute_function(text: &str, func_name: &str, arg: u32) -> Result<u32, ()> {\n\n type TestFunc = unsafe extern \"C\" fn(u32) -> u32;\n\n\n\n let context = &Context::create();\n\n let gen = CodeGen::new(context, \"test\");\n\n\n\n let text = Span::new(text);\n\n let (_, func) = function_parser(text).or(Err(()))?;\n\n\n\n func.codegen(&gen).or(Err(()))?;\n\n\n\n let execution_engine = gen.module.create_jit_execution_engine(OptimizationLevel::None).or(Err(()))?;\n\n\n\n let func: JitFunction<TestFunc> = unsafe { execution_engine.get_function(func_name) }.or(Err(()))?;\n\n let ret = unsafe { func.call(arg) };\n\n\n\n Ok(ret)\n\n}\n", "file_path": "src/kgen/src/jit/execute_function.rs", "rank": 0, "score": 200327.20387502675 }, { "content": "///\n\n/// Look for the position of file where the input value is located.\n\n///\n\n/// Never forget to give a file name before calling this function.\n\n///\n\npub fn get_position(file_name: String) -> impl Fn(Span) -> IResult<Span, FilePosition, GSError> {\n\n move |input| {\n\n let (input, pos) = position(input)?;\n\n let pos = FilePosition::from_span(file_name.clone(), &pos);\n\n Ok((input, pos))\n\n }\n\n}\n\n\n\n///\n\n/// Test for `crate::parsers::utils`.\n\n///\n\n#[cfg(test)]\n\nmod test {\n\n #[test]\n\n fn identifier_test1() {\n\n use crate::parsers::Span;\n\n use crate::parsers::utils::identifier;\n\n\n\n for name in vec![ \"m_WindowWidth\", \"load3DData!\", \"__Initialize\" ] {\n\n let val = Span::new(name);\n", "file_path": "src/kgen/src/parsers/utils/mod.rs", "rank": 1, "score": 193803.9842087858 }, { "content": "///\n\n/// Parse a type annotation for a function.\n\n///\n\npub fn function_type_parser(text: Span) -> IResult<Span, (Vec<&str>, &str), GSError> {\n\n map(\n\n tuple((\n\n tag(\"(\"),\n\n multispace0,\n\n args_inside_parser,\n\n multispace0,\n\n tag(\"->\"),\n\n multispace0,\n\n opt(identifier),\n\n multispace0,\n\n tag(\")\")\n\n 
)),\n\n |(_, _, args, _, _, _, ret, _, _): (_, _, Vec<&str>, _, _, _, Option<&str>, _, _)| {\n\n match ret {\n\n Some(ret) => {\n\n (args, ret)\n\n },\n\n None => {\n\n (args, \"\")\n\n }\n\n }\n\n }\n\n )(text)\n\n}\n\n\n\npub mod expr_function;\n\npub mod external_function;\n\npub mod statement_function;\n", "file_path": "src/kgen/src/parsers/functions/mod.rs", "rank": 2, "score": 178099.58585914405 }, { "content": "///\n\n/// Parse arguments with paren.\n\n///\n\npub fn args_parser(text: Span) -> IResult<Span, Vec<&str>, GSError> {\n\n map(\n\n tuple((\n\n tag(\"(\"),\n\n multispace0,\n\n args_inside_parser,\n\n multispace0,\n\n tag(\")\")\n\n )),\n\n |(_, _, args, _, _)| {\n\n args\n\n }\n\n )(text)\n\n}\n\n\n", "file_path": "src/kgen/src/parsers/functions/mod.rs", "rank": 3, "score": 176621.9784978371 }, { "content": "///\n\n/// Parse arguments inside paren.\n\n///\n\npub fn args_inside_parser(text: Span) -> IResult<Span, Vec<&str>, GSError> {\n\n let one_arg_parser =\n\n delimited(\n\n multispace0,\n\n identifier,\n\n multispace0\n\n );\n\n\n\n separated_list0(\n\n tag(\",\"),\n\n one_arg_parser\n\n )(text)\n\n}\n\n\n", "file_path": "src/kgen/src/parsers/functions/mod.rs", "rank": 4, "score": 173699.280868344 }, { "content": "///\n\n/// Parse a whole program.\n\n///\n\npub fn program_parser(text: Span) -> IResult<Span, Vec<FunctionObject>, GSError> {\n\n let function_with_space_parser =\n\n delimited(\n\n multispace0,\n\n function_parser,\n\n multispace0\n\n );\n\n\n\n map(\n\n tuple((\n\n many0(function_with_space_parser),\n\n eof\n\n )),\n\n |(functions, _)| {\n\n functions\n\n }\n\n )(text)\n\n}\n\n\n\npub mod exprs;\n\npub mod factors;\n\npub mod functions;\n\npub mod statements;\n\npub mod utils;\n", "file_path": "src/kgen/src/parsers/mod.rs", "rank": 5, "score": 172541.55165320047 }, { "content": "fn process() -> Result<(), Vec<ErrorToken>> {\n\n let mut app = App::new(\"Kaprino Compiler (kprc)\");\n\n app = KprcApp::generate_args(app);\n\n let matches 
= app.get_matches();\n\n\n\n let instance = KprcApp::new(&matches)?;\n\n instance.execute()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/kprc/src/main.rs", "rank": 7, "score": 159857.74516222737 }, { "content": "fn process() -> Result<(), Vec<ErrorToken>> {\n\n let matches = args_manager::get_args();\n\n\n\n if let Some(matches) = matches.subcommand_matches(\"compile\") {\n\n let instance = KprcApp::new(matches)?;\n\n instance.execute()?;\n\n\n\n Ok(())\n\n }\n\n else {\n\n Err(\n\n vec![ErrorToken::fatal_error(\"No subcommand given.\".to_string())]\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/kpr/src/main.rs", "rank": 8, "score": 159857.74516222737 }, { "content": "pub fn get_args<'a>() -> ArgMatches<'a> {\n\n let mut app = App::new(\"Kaprino Processing Runner (kpr)\")\n\n .version(crate_version!())\n\n .author(\"Akihisa Yagi <[email protected]>\")\n\n .about(\"This tool provides you many ways to control kaprino-lang.\\nFor more information, please refer to https://github.com/kaprino-lang/kaprino.\");\n\n\n\n let mut kprc_app = SubCommand::with_name(\"compile\");\n\n kprc_app = KprcApp::generate_args(kprc_app);\n\n\n\n app = app.subcommand(kprc_app);\n\n\n\n app.get_matches()\n\n}\n", "file_path": "src/kpr/src/args_manager.rs", "rank": 9, "score": 144794.9524960231 }, { "content": "///\n\n/// Parse a function into `FunctionObject`.\n\n///\n\npub fn function_parser(text: Span) -> IResult<Span, FunctionObject, GSError> {\n\n alt((\n\n expr_function_parser,\n\n external_function_parser,\n\n statement_function_parser,\n\n ))(text)\n\n}\n\n\n", "file_path": "src/kgen/src/parsers/functions/mod.rs", "rank": 10, "score": 144140.34941280016 }, { "content": "///\n\n/// Parse a function which is declared externally.\n\n///\n\npub fn external_function_parser(text: Span) -> IResult<Span, FunctionObject, GSError> {\n\n map(\n\n tuple((\n\n get_position(\"File\".to_string()),\n\n tag(\"#extern\"),\n\n multispace1,\n\n identifier,\n\n multispace0,\n\n args_parser,\n\n 
multispace0,\n\n function_type_parser\n\n )),\n\n |(pos, _, _, func_name, _, args, _, fn_type)| {\n\n let func_name = func_name.to_string();\n\n let args: Vec<String> = args.iter().map(|s| { s.to_string() }).collect();\n\n let types: Vec<String> = fn_type.0.iter().map(|s| { s.to_string() }).collect();\n\n let ret_type = fn_type.1.to_string();\n\n\n\n FunctionObject::ExternalFunction(\n\n Box::new(\n\n ExternalFunction::new(\n\n pos, func_name, args, types, ret_type\n\n )\n\n )\n\n )\n\n }\n\n )(text)\n\n}\n", "file_path": "src/kgen/src/parsers/functions/external_function.rs", "rank": 11, "score": 143374.8222720782 }, { "content": "///\n\n/// Parse a C-like function into `FunctionObject`.\n\n///\n\npub fn statement_function_parser(text: Span) -> IResult<Span, FunctionObject, GSError> {\n\n let statement_with_spaces_parser = map(\n\n tuple((\n\n statement_parser,\n\n multispace0\n\n )),\n\n |(statement, _)| {\n\n statement\n\n }\n\n );\n\n\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, _) = tag(\"#func\")(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, function_name) = identifier(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, args) = args_parser(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, function_type) = function_type_parser(text)?;\n\n let (text, _) = multispace0(text)?;\n", "file_path": "src/kgen/src/parsers/functions/statement_function.rs", "rank": 12, "score": 143371.6408393872 }, { "content": "///\n\n/// Parse a function which has only one expression.\n\n///\n\npub fn expr_function_parser(text: Span) -> IResult<Span, FunctionObject, GSError> {\n\n map(\n\n tuple((\n\n get_position(\"File\".to_string()),\n\n identifier,\n\n multispace0,\n\n args_parser,\n\n multispace0,\n\n char('='),\n\n multispace0,\n\n expr_parser,\n\n multispace0,\n\n function_type_parser\n\n )),\n\n |(pos, func_name, _, args, _, _, _, expr, _, fn_type): (FilePosition, &str, _, Vec<&str>, _, _, _, 
EvaluableObject, _, (Vec<&str>, &str))| {\n\n let func_name = func_name.to_string();\n\n let args: Vec<String> = args.iter().map(|s| { s.to_string() }).collect();\n\n let (types, ret_type) = fn_type;\n\n let types: Vec<String> = types.iter().map(|s| { s.to_string() }).collect();\n\n let ret_type = ret_type.to_string();\n", "file_path": "src/kgen/src/parsers/functions/expr_function.rs", "rank": 13, "score": 143371.13707833458 }, { "content": "///\n\n/// Parse a break statement. Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <break> ::= \"#break\"\n\n/// ```\n\n///\n\npub fn break_parser(text: Span) -> IResult<Span, StatementObject, GSError> {\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, _) = tag(\"#break\")(text)?;\n\n Ok((\n\n text,\n\n StatementObject::BreakObject(\n\n Box::new(\n\n BreakObject::new(\n\n pos\n\n )\n\n )\n\n )\n\n ))\n\n}\n", "file_path": "src/kgen/src/parsers/statements/break_object.rs", "rank": 14, "score": 119203.31009404428 }, { "content": "///\n\n/// Parse an expression which is enclosed in paren.\n\n///\n\npub fn string_parser(text: Span) -> IResult<Span, EvaluableObject, GSError> {\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, val) = delimited(\n\n tag(\"\\\"\"),\n\n escaped(\n\n none_of(\"\\\\\\\"\"),\n\n '\\\\',\n\n tag(\"\\\"\")\n\n ),\n\n tag(\"\\\"\")\n\n )(text)?;\n\n\n\n let obj = EvaluableObject::StringObject(Box::new(\n\n StringObject::new(pos, val.to_string())\n\n ));\n\n\n\n Ok((text, obj))\n\n}\n", "file_path": "src/kgen/src/parsers/factors/string_object.rs", "rank": 15, "score": 119164.94339613891 }, { "content": "///\n\n/// Parse a let statement. 
Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <let> ::= \"#let\" .* (\":=\" <expr>)* \"(\" \"<-\"\" .* \")\"\n\n/// ```\n\n///\n\npub fn let_parser(text: Span) -> IResult<Span, StatementObject, GSError> {\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, _) = tag(\"#let\")(text)?;\n\n let (text, _) = multispace1(text)?;\n\n let (text, param_name) = identifier(text)?;\n\n let (text, assign) = opt(assign_parser)(text)?;\n\n let (text, type_name) = type_parser(text)?;\n\n Ok((\n\n text,\n\n StatementObject::LetObject(\n\n Box::new(\n\n LetObject::new(\n\n pos,\n\n param_name.to_string(),\n\n type_name.to_string(),\n\n assign\n\n )\n\n )\n\n )\n\n ))\n\n}\n", "file_path": "src/kgen/src/parsers/statements/let_object.rs", "rank": 16, "score": 119032.76313035999 }, { "content": "///\n\n/// Execute a statement Just In Time.\n\n///\n\npub fn execute_statement(text: &str) -> Result<u32, ()> {\n\n type TestFunc = unsafe extern \"C\" fn() -> u32;\n\n\n\n let context = &Context::create();\n\n let gen = CodeGen::new(context, \"test\");\n\n\n\n let i32_type = gen.context.i32_type();\n\n let fn_type = i32_type.fn_type(&[i32_type.into(), i32_type.into()], false);\n\n\n\n let sum = gen.module.add_function(\"calc\", fn_type, None);\n\n let basic_block = gen.context.append_basic_block(sum, \"entry\");\n\n\n\n gen.builder.position_at_end(basic_block);\n\n\n\n let text = Span::new(text);\n\n let (_, val) = statement_parser(text).or(Err(()))?;\n\n\n\n val.codegen(&gen).or(Err(()))?;\n\n\n\n let execution_engine = gen.module.create_jit_execution_engine(OptimizationLevel::None).or(Err(()))?;\n\n\n\n let func: JitFunction<TestFunc> = unsafe { execution_engine.get_function(\"calc\") }.or(Err(()))?;\n\n let ret: u32 = unsafe { func.call() };\n\n\n\n Ok(ret)\n\n}\n", "file_path": "src/kgen/src/jit/execute_statement.rs", "rank": 17, "score": 118354.32055771447 }, { "content": "///\n\n/// Execute an expression Just In Time.\n\n///\n\npub fn 
execute_expr(text: &str) -> Result<u32, ()> {\n\n type TestFunc = unsafe extern \"C\" fn() -> u32;\n\n\n\n let context = &Context::create();\n\n let gen = CodeGen::new(context, \"test\");\n\n\n\n let i32_type = gen.context.i32_type();\n\n let fn_type = i32_type.fn_type(&[i32_type.into(), i32_type.into()], false);\n\n\n\n let sum = gen.module.add_function(\"calc\", fn_type, None);\n\n let basic_block = gen.context.append_basic_block(sum, \"entry\");\n\n gen.builder.position_at_end(basic_block);\n\n\n\n let text = Span::new(text);\n\n let (_, val) = expr_parser(text).or(Err(()))?;\n\n let val = val.codegen(&gen).or(Err(()))?;\n\n\n\n gen.builder.build_return(Some(&val));\n\n\n\n let execution_engine = gen.module.create_jit_execution_engine(OptimizationLevel::None).or(Err(()))?;\n\n\n\n let func: JitFunction<TestFunc> = unsafe { execution_engine.get_function(\"calc\") }.or(Err(()))?;\n\n let ret: u32 = unsafe { func.call() };\n\n Ok(ret)\n\n}\n", "file_path": "src/kgen/src/jit/execute_expr.rs", "rank": 18, "score": 118354.32055771447 }, { "content": "///\n\n/// Parse a factor which calls a function. 
Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <function_call> ::= .* \"(\" (<expr> (\",\" <expr>)*)* \")\"\n\n/// ```\n\n///\n\npub fn function_call_parser(text: Span) -> IResult<Span, EvaluableObject, GSError> {\n\n let expr_with_spaces =\n\n map(\n\n tuple((\n\n multispace0,\n\n expr_parser,\n\n multispace0\n\n )),\n\n |(_, statement, _)| {\n\n statement\n\n }\n\n );\n\n\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, function_name) = identifier(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, _) = tag(\"(\")(text)?;\n\n let (text, args) = separated_list0(tag(\",\"), expr_with_spaces)(text)?;\n\n let (text, _) = tag(\")\")(text)?;\n\n\n\n let obj = EvaluableObject::FunctionCallObject(Box::new(\n\n FunctionCallObject::new(pos, function_name.to_string(), args)\n\n ));\n\n Ok((text, obj))\n\n}\n", "file_path": "src/kgen/src/parsers/factors/function_call_object.rs", "rank": 19, "score": 114114.27420455501 }, { "content": "///\n\n/// Parse a type object. 
Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <type> ::= \"(\" \"<-\" .* \")\"\n\n/// ```\n\n///\n\nfn type_parser(text: Span) -> IResult<Span, &str, GSError> {\n\n let (text, _) = multispace0(text)?;\n\n let (text, _) = tag(\"(\")(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, _) = tag(\"<-\")(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, type_name) = identifier(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, _) = tag(\")\")(text)?;\n\n let (text, _) = multispace0(text)?;\n\n Ok((text, type_name))\n\n}\n\n\n", "file_path": "src/kgen/src/parsers/statements/let_object.rs", "rank": 20, "score": 99808.6636501471 }, { "content": "///\n\n/// Parse a word which is valid for the identifiers of kaprino.\n\n///\n\npub fn identifier(content: Span) -> IResult<Span, &str, GSError> {\n\n let regex = Regex::new(r\"^[a-zA-Z_]([a-zA-Z0-9_])*(!)*\");\n\n\n\n assert!(regex.is_ok());\n\n\n\n let (_, parsed) = re_capture::<(&str, ErrorKind)>(regex.unwrap())\n\n (content.fragment())\n\n .map_err(|_| {\n\n nom::Err::Error(ParseError::from_error_kind(content, ErrorKind::RegexpCapture))\n\n })?;\n\n\n\n assert_ne!(parsed.len(), 0);\n\n\n\n let parsed = parsed[0];\n\n Ok((content.slice(parsed.len()..), parsed))\n\n}\n\n\n", "file_path": "src/kgen/src/parsers/utils/mod.rs", "rank": 21, "score": 91803.6936933595 }, { "content": "///\n\n/// Parse an if statement. 
Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <if> ::= \"#if\" <expr> \"|>\" <statements> \"<|\"\n\n/// ```\n\n///\n\npub fn if_parser(text: Span) -> IResult<Span, StatementObject, GSError> {\n\n let statement_with_space_parser =\n\n map(\n\n tuple((\n\n statement_parser,\n\n multispace0\n\n )),\n\n |(statement, _)| {\n\n statement\n\n }\n\n );\n\n\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, _) = tag(\"#if\")(text)?;\n\n let (text, _) = multispace1(text)?;\n\n let (text, expr) = expr_parser(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, _) = tag(\"|>\")(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, statements) = many0(statement_with_space_parser)(text)?;\n", "file_path": "src/kgen/src/parsers/statements/if_object.rs", "rank": 22, "score": 90226.99140852015 }, { "content": "///\n\n/// Parse a factor into `EvaluableObject`.\n\n///\n\npub fn factor_parser(text: Span) -> IResult<Span, EvaluableObject, GSError> {\n\n alt((\n\n function_call_parser,\n\n numbers_parser,\n\n paren_parser,\n\n param_parser,\n\n string_parser\n\n ))(text)\n\n}\n\n\n\npub mod function_call_object;\n\npub mod numbers_object;\n\npub mod param_object;\n\npub mod paren_object;\n\npub mod string_object;\n", "file_path": "src/kgen/src/parsers/factors/mod.rs", "rank": 23, "score": 88719.0314154114 }, { "content": "///\n\n/// Parse an expression into `EvaluableObject`. 
Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <expr> ::= <term> ((\"+\" | \"-\") <term>)*\n\n/// ```\n\n///\n\npub fn expr_parser(text: Span) -> IResult<Span, EvaluableObject, GSError> {\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, left_value) = term_parser(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, right_values) = many0(term_with_op_parser)(text)?;\n\n\n\n if right_values.len() == 0 {\n\n Ok((text, left_value))\n\n }\n\n else {\n\n Ok((\n\n text,\n\n EvaluableObject::ExprObject(\n\n Box::new(\n\n ExprObject::new(pos, left_value, right_values)\n\n )\n\n )\n\n ))\n\n }\n\n}\n\n\n\npub mod exponents_object;\n\npub mod term_object;\n", "file_path": "src/kgen/src/parsers/exprs/mod.rs", "rank": 24, "score": 88719.0314154114 }, { "content": "///\n\n/// Parse a statement into `StatementObject`.\n\n///\n\npub fn statement_parser(text: Span) -> IResult<Span, StatementObject, GSError> {\n\n alt((\n\n assign_parser,\n\n break_parser,\n\n call_parser,\n\n if_parser,\n\n let_parser,\n\n loop_parser,\n\n ret_parser\n\n ))(text)\n\n}\n\n\n\npub mod assign_object;\n\npub mod break_object;\n\npub mod call_object;\n\npub mod if_object;\n\npub mod let_object;\n\npub mod loop_object;\n\npub mod ret_object;\n", "file_path": "src/kgen/src/parsers/statements/mod.rs", "rank": 25, "score": 88719.0314154114 }, { "content": "///\n\n/// Parse a parameter.\n\n///\n\npub fn param_parser(text: Span) -> IResult<Span, EvaluableObject, GSError> {\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, param) = identifier(text)?;\n\n\n\n let obj = EvaluableObject::ParamObject(Box::new(\n\n ParamObject::new(pos, param.to_string())\n\n ));\n\n\n\n Ok((text, obj))\n\n}\n", "file_path": "src/kgen/src/parsers/factors/param_object.rs", "rank": 26, "score": 87275.41400519722 }, { "content": "///\n\n/// Parse an expression which contains exponentiations into `EvaluableObject`. 
Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <exponents> ::= <factor> (\"^\" <factor>)*\n\n/// ```\n\n///\n\npub fn exponents_parser(text: Span) -> IResult<Span, EvaluableObject, GSError> {\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, left_value) = factor_parser(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, right_value) = opt(factor_with_op_parser)(text)?;\n\n let obj = match right_value {\n\n None => {\n\n left_value\n\n },\n\n Some(right_value) => {\n\n EvaluableObject::ExponentsObject(Box::new(\n\n ExponentsObject::new(pos, left_value, right_value)\n\n ))\n\n }\n\n };\n\n Ok((text, obj))\n\n}\n", "file_path": "src/kgen/src/parsers/exprs/exponents_object.rs", "rank": 27, "score": 87275.41400519722 }, { "content": "///\n\n/// Parse a ret statement. Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <ret> ::= \"#ret\" <expr>\"\n\n/// ```\n\n///\n\npub fn ret_parser(text: Span) -> IResult<Span, StatementObject, GSError> {\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, _) = tag(\"#ret\")(text)?;\n\n let (text, _) = multispace1(text)?;\n\n let (text, expr) = expr_parser(text)?;\n\n Ok((\n\n text,\n\n StatementObject::RetObject(Box::new(\n\n RetObject::new(pos, expr)\n\n ))\n\n ))\n\n}\n", "file_path": "src/kgen/src/parsers/statements/ret_object.rs", "rank": 28, "score": 87275.41400519722 }, { "content": "///\n\n/// Parse a loop statement. 
Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <loop> ::= \"#loop\" \"|>\" <statements> \"<|\"\n\n/// ```\n\n///\n\npub fn loop_parser(text: Span) -> IResult<Span, StatementObject, GSError> {\n\n let statement_with_space_parser =\n\n map(\n\n tuple((\n\n statement_parser,\n\n multispace0\n\n )),\n\n |(statement, _)| {\n\n statement\n\n }\n\n );\n\n\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, _) = tag(\"#loop\")(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, _) = tag(\"|>\")(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, statements) = many0(statement_with_space_parser)(text)?;\n\n let (text, _) = tag(\"|<\")(text)?;\n\n Ok((\n", "file_path": "src/kgen/src/parsers/statements/loop_object.rs", "rank": 29, "score": 87275.41400519722 }, { "content": "///\n\n/// Parse a call statement. Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <call> ::= \"#call\" <expr>\n\n/// ```\n\n///\n\npub fn call_parser(text: Span) -> IResult<Span, StatementObject, GSError> {\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, _) = tag(\"#call\")(text)?;\n\n let (text, _) = multispace1(text)?;\n\n let (text, expr) = expr_parser(text)?;\n\n Ok((\n\n text,\n\n StatementObject::CallObject(\n\n Box::new(\n\n CallObject::new(\n\n pos,\n\n expr\n\n )\n\n )\n\n )\n\n ))\n\n}\n", "file_path": "src/kgen/src/parsers/statements/call_object.rs", "rank": 30, "score": 87275.41400519722 }, { "content": "///\n\n/// Parse an expression which is enclosed in paren.\n\n///\n\npub fn paren_parser(text: Span) -> IResult<Span, EvaluableObject, GSError> {\n\n let (text, _ ) = char('(')(text)?;\n\n let (text, expr) = expr_parser(text)?;\n\n let (text, _) = char(')')(text)?;\n\n\n\n Ok((text, expr))\n\n}\n", "file_path": "src/kgen/src/parsers/factors/paren_object.rs", "rank": 31, "score": 87275.41400519722 }, { "content": "///\n\n/// Parse a number.\n\n///\n\npub fn numbers_parser(text: Span) -> 
IResult<Span, EvaluableObject, GSError> {\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, digit) = digit1(text)?;\n\n\n\n let obj = EvaluableObject::NumberObject(\n\n Box::new(NumberObject::new(\n\n pos,\n\n digit.fragment().parse().unwrap_or(0)\n\n ))\n\n );\n\n\n\n Ok((text, obj))\n\n}\n", "file_path": "src/kgen/src/parsers/factors/numbers_object.rs", "rank": 32, "score": 87275.41400519722 }, { "content": "///\n\n/// Parse an assign statement. Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <assign> ::= .* \":=\" <expr>\n\n/// ```\n\n///\n\npub fn assign_parser(text: Span) -> IResult<Span, StatementObject, GSError> {\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, param_name) = identifier(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, _) = tag(\":=\")(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, expr) = expr_parser(text)?;\n\n Ok((\n\n text,\n\n StatementObject::AssignObject(\n\n Box::new(\n\n AssignObject::new(\n\n pos,\n\n param_name.to_string(),\n\n expr\n\n )\n\n )\n\n )\n\n ))\n\n}\n", "file_path": "src/kgen/src/parsers/statements/assign_object.rs", "rank": 33, "score": 87275.41400519722 }, { "content": "///\n\n /// Parse a terminal into `EvaluableObject`. 
Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <term> ::= <exponents> ((\"*\" | \"/\") <exponents>)*\n\n/// ```\n\n///\n\npub fn term_parser(text: Span) -> IResult<Span, EvaluableObject, GSError> {\n\n let (text, pos) = get_position(\"File\".to_string())(text)?;\n\n let (text, left_value) = exponents_parser(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, right_values) = many0(exponents_with_op_parser)(text)?;\n\n\n\n let obj =\n\n if right_values.len() == 0 {\n\n left_value\n\n }\n\n else {\n\n EvaluableObject::TermObject(Box::new(\n\n TermObject::new(pos, left_value, right_values)\n\n ))\n\n };\n\n\n\n Ok((text, obj))\n\n}\n", "file_path": "src/kgen/src/parsers/exprs/term_object.rs", "rank": 34, "score": 87275.41400519722 }, { "content": "///\n\n/// Parse an assign object. Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <assign> ::= \":=\" <expr>\n\n/// ```\n\n///\n\nfn assign_parser(text: Span) -> IResult<Span, EvaluableObject, GSError> {\n\n let (text, _) = multispace0(text)?;\n\n let (text, _) = tag(\":=\")(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, expr) = expr_parser(text)?;\n\n Ok((text, expr))\n\n}\n\n\n", "file_path": "src/kgen/src/parsers/statements/let_object.rs", "rank": 35, "score": 74307.83413090935 }, { "content": "#[test]\n\nfn jit_execute_function_test3() {\n\n use super::execute_function::execute_function;\n\n\n\n assert_eq!(execute_function(\"#func f(x) (Z -> Z) |> #ret x * x + 10 |<\", \"f\", 5), Ok(5 * 5 + 10));\n\n}\n", "file_path": "src/kgen/src/jit/test/mod.rs", "rank": 36, "score": 60437.7659444093 }, { "content": "#[test]\n\nfn jit_execute_function_test1() {\n\n use super::execute_function::execute_function;\n\n\n\n assert_eq!(execute_function(\"f(x) = 35 (Z -> Z)\", \"f\", 0), Ok(35));\n\n}\n\n\n", "file_path": "src/kgen/src/jit/test/mod.rs", "rank": 37, "score": 60437.7659444093 }, { "content": "#[test]\n\nfn jit_execute_function_test2() {\n\n use 
super::execute_function::execute_function;\n\n\n\n assert_eq!(execute_function(\"f(x) = x * x + 10 (Z -> Z)\", \"f\", 5), Ok(5 * 5 + 10));\n\n}\n\n\n", "file_path": "src/kgen/src/jit/test/mod.rs", "rank": 38, "score": 60437.7659444093 }, { "content": "///\n\n/// Parse a factor with an exponentiation operator. Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <factor_with_op> ::= \"^\" <factor>\n\n/// ```\n\n///\n\nfn factor_with_op_parser(text: Span) -> IResult<Span, EvaluableObject, GSError> {\n\n let (text, _) = char('^')(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, factor) = factor_parser(text)?;\n\n Ok((text, factor))\n\n}\n\n\n", "file_path": "src/kgen/src/parsers/exprs/exponents_object.rs", "rank": 39, "score": 49972.76047352582 }, { "content": "///\n\n/// Parse a term with one operator. Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <term_with_op> ::= (\"+\" | \"-\") <term>\n\n/// ```\n\n///\n\nfn term_with_op_parser(text: Span) -> IResult<Span, (ExprOpKind, EvaluableObject), GSError> {\n\n let (text, op) = alt((char('+'), char('-')))(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, term) = term_parser(text)?;\n\n\n\n let op = match op {\n\n '+' => ExprOpKind::Add,\n\n '-' => ExprOpKind::Sub,\n\n _ => panic!(\"YOU FOUND A BUG IN RUST NOM!\")\n\n };\n\n\n\n Ok((text, (op, term)))\n\n}\n\n\n", "file_path": "src/kgen/src/parsers/exprs/mod.rs", "rank": 40, "score": 47963.64112201427 }, { "content": "///\n\n/// Parse an expression which contains exponentiations with one operator. 
Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <exponents_with_op> ::= (\"*\" | \"/\") <exponents>\n\n/// ```\n\n///\n\nfn exponents_with_op_parser(text: Span) -> IResult<Span, (TermOpKind, EvaluableObject), GSError> {\n\n let (text, op) = alt((char('*'), char('/')))(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, exp) = exponents_parser(text)?;\n\n\n\n let op = match op {\n\n '*' => TermOpKind::Mul,\n\n '/' => TermOpKind::Div,\n\n _ => panic!(\"YOU FOUND A BUG IN RUST NOM!\")\n\n };\n\n\n\n Ok((text, (op, exp)))\n\n}\n\n\n", "file_path": "src/kgen/src/parsers/exprs/term_object.rs", "rank": 41, "score": 47118.42408250895 }, { "content": "fn main() {\n\n match process() {\n\n Ok(_) => {\n\n println!(\"Tasks completed! (^^)/\");\n\n },\n\n Err(messages) => {\n\n for message in messages {\n\n println!(\"{}\", message);\n\n }\n\n }\n\n };\n\n}\n", "file_path": "src/kpr/src/main.rs", "rank": 42, "score": 37893.956589162386 }, { "content": "fn main() {\n\n match process() {\n\n Ok(_) => {\n\n println!(\"Tasks completed! 
(^^)/\");\n\n },\n\n Err(messages) => {\n\n for message in messages {\n\n println!(\"{}\", message);\n\n }\n\n }\n\n };\n\n}\n", "file_path": "src/kprc/src/main.rs", "rank": 43, "score": 37893.956589162386 }, { "content": " }\n\n}\n\n\n\nimpl<'ctx> ExternalFunction {\n\n ///\n\n /// Create an `ExternalFunction` instance.\n\n ///\n\n pub fn new(\n\n pos: FilePosition,\n\n func_name: String,\n\n args: Vec<String>,\n\n types: Vec<String>,\n\n ret_type: String) -> Self {\n\n let info = FunctionInfo::new(\n\n func_name, args, types, ret_type\n\n );\n\n Self {\n\n pos, info\n\n }\n\n }\n", "file_path": "src/kgen/src/ast/functions/external_function.rs", "rank": 44, "score": 36365.058045726655 }, { "content": " &self.info\n\n }\n\n}\n\n\n\nimpl<'ctx> StatementFunction {\n\n ///\n\n /// Create a `StatementFunction` instance.\n\n ///\n\n pub fn new(\n\n pos: FilePosition,\n\n func_name: String,\n\n args: Vec<String>,\n\n types: Vec<String>,\n\n ret_type: String,\n\n statements: Vec<StatementObject>) -> Self {\n\n let info = FunctionInfo::new(\n\n func_name, args, types, ret_type\n\n );\n\n Self {\n\n pos, info, statements\n", "file_path": "src/kgen/src/ast/functions/statement_function.rs", "rank": 45, "score": 36364.27423029789 }, { "content": " &self.info\n\n }\n\n}\n\n\n\nimpl<'ctx> ExprFunction {\n\n ///\n\n /// Create an `ExprFunction` instance.\n\n ///\n\n pub fn new(\n\n pos: FilePosition,\n\n func_name: String,\n\n args: Vec<String>,\n\n types: Vec<String>,\n\n ret_type: String,\n\n expr: EvaluableObject) -> Self {\n\n let info = FunctionInfo::new(\n\n func_name, args, types, ret_type\n\n );\n\n Self {\n\n pos, info, expr\n", "file_path": "src/kgen/src/ast/functions/expr_function.rs", "rank": 46, "score": 36364.006131578426 }, { "content": " }\n\n }\n\n\n\n ///\n\n /// Generate the part of the abstract syntax tree.\n\n ///\n\n pub fn codegen(&self, gen: &CodeGen<'ctx>) -> Result<(), ErrorToken> {\n\n let func_type = self.get_func_type(gen, self.pos.clone())?;\n\n\n\n 
let func = gen.module.add_function(&self.get_info().name, func_type, None);\n\n let basic_block = gen.context.append_basic_block(func, \"entry\");\n\n\n\n gen.builder.position_at_end(basic_block);\n\n\n\n self.assign_args(gen, &func);\n\n\n\n match self.expr.codegen(gen) {\n\n Ok(expr) => {\n\n gen.builder.build_return(Some(&expr));\n\n\n", "file_path": "src/kgen/src/ast/functions/expr_function.rs", "rank": 47, "score": 36357.35309471242 }, { "content": " }\n\n }\n\n\n\n ///\n\n /// Generate the part of the abstract syntax tree.\n\n ///\n\n pub fn codegen(&self, gen: &CodeGen<'ctx>) -> Result<(), ErrorToken> {\n\n let func_type = self.get_func_type(gen, self.pos.clone())?;\n\n\n\n let func = gen.module.add_function(&self.get_info().name, func_type, None);\n\n let basic_block = gen.context.append_basic_block(func, \"entry\");\n\n\n\n gen.builder.position_at_end(basic_block);\n\n\n\n self.assign_args(gen, &func);\n\n\n\n for st in &self.statements {\n\n st.codegen(gen)?;\n\n };\n\n\n\n gen.param_resolver.borrow_mut().remove_scope();\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/kgen/src/ast/functions/statement_function.rs", "rank": 48, "score": 36357.08635893627 }, { "content": "use crate::ast::CodeGen;\n\nuse crate::ast::functions::{ FunctionInfo, FunctionObjectTrait };\n\nuse crate::ast::statements::StatementObject;\n\nuse crate::error::error_token::{ ErrorToken, FilePosition };\n\n\n\n///\n\n/// `StatementFunction` is an object which represents a function declared in C-like style.\n\n///\n\n#[derive(Debug,PartialEq)]\n\npub struct StatementFunction {\n\n pos: FilePosition,\n\n info: FunctionInfo,\n\n statements: Vec<StatementObject>\n\n}\n\n\n\nimpl<'ctx> FunctionObjectTrait for StatementFunction {\n\n ///\n\n /// Get information of this function object.\n\n ///\n\n fn get_info(&self) -> &FunctionInfo {\n", "file_path": "src/kgen/src/ast/functions/statement_function.rs", "rank": 49, "score": 36355.38722062101 }, { "content": "\n\n ///\n\n /// Generate the part 
of the abstract syntax tree.\n\n ///\n\n pub fn codegen(&self, gen: &CodeGen<'ctx>) -> Result<(), ErrorToken> {\n\n let func_type = self.get_func_type(gen, self.pos.clone())?;\n\n\n\n gen.module.add_function(&self.get_info().name, func_type, Some(Linkage::External));\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/kgen/src/ast/functions/external_function.rs", "rank": 50, "score": 36354.39427172674 }, { "content": " let (text, _) = tag(\"|>\")(text)?;\n\n let (text, _) = multispace0(text)?;\n\n let (text, statements) = many0(statement_with_spaces_parser)(text)?;\n\n let (text, _) = tag(\"|<\")(text)?;\n\n\n\n let function_name = function_name.to_string();\n\n let args = args.iter().map(|s| { s.to_string() }).collect();\n\n let (types, ret_type) = function_type;\n\n let types = types.iter().map(|s| { s.to_string() }).collect();\n\n let ret_type = ret_type.to_string();\n\n\n\n let obj = FunctionObject::StatementFunction(Box::new(\n\n StatementFunction::new(pos, function_name, args, types, ret_type, statements)\n\n ));\n\n Ok((text, obj))\n\n}\n", "file_path": "src/kgen/src/parsers/functions/statement_function.rs", "rank": 51, "score": 36354.02542018131 }, { "content": "use crate::ast::CodeGen;\n\nuse crate::ast::exprs::EvaluableObject;\n\nuse crate::ast::functions::{ FunctionInfo, FunctionObjectTrait };\n\nuse crate::error::error_token::{ ErrorToken, FilePosition };\n\n\n\n///\n\n/// `ExprFunction` is an object which represents a function declared with a mathematical style, like `f(x) = x * x`.\n\n///\n\n#[derive(Debug,PartialEq)]\n\npub struct ExprFunction {\n\n pos: FilePosition,\n\n info: FunctionInfo,\n\n expr: EvaluableObject\n\n}\n\n\n\nimpl<'ctx> FunctionObjectTrait for ExprFunction {\n\n ///\n\n /// Get information of this function object.\n\n ///\n\n fn get_info(&self) -> &FunctionInfo {\n", "file_path": "src/kgen/src/ast/functions/expr_function.rs", "rank": 52, "score": 36353.848527641036 }, { "content": "use inkwell::module::Linkage;\n\nuse 
crate::ast::CodeGen;\n\nuse crate::ast::functions::{ FunctionInfo, FunctionObjectTrait };\n\nuse crate::error::error_token::{ ErrorToken, FilePosition };\n\n\n\n///\n\n/// `ExternalFunction` is an object which represents a function implemented in external resources.\n\n///\n\n#[derive(Debug,PartialEq)]\n\npub struct ExternalFunction {\n\n pos: FilePosition,\n\n info: FunctionInfo\n\n}\n\n\n\nimpl<'ctx> FunctionObjectTrait for ExternalFunction {\n\n ///\n\n /// Get information of this function object.\n\n ///\n\n fn get_info(&self) -> &FunctionInfo {\n\n &self.info\n", "file_path": "src/kgen/src/ast/functions/external_function.rs", "rank": 53, "score": 36352.65843775512 }, { "content": "use nom::bytes::complete::tag;\n\nuse nom::character::complete::multispace0;\n\nuse nom::character::complete::multispace1;\n\nuse nom::combinator::map;\n\nuse nom::IResult;\n\nuse nom::sequence::tuple;\n\nuse crate::ast::functions::external_function::ExternalFunction;\n\nuse crate::ast::functions::FunctionObject;\n\nuse crate::parsers::functions::args_parser;\n\nuse crate::parsers::functions::function_type_parser;\n\nuse crate::parsers::Span;\n\nuse crate::parsers::utils::{ identifier, get_position, GSError };\n\n\n\n///\n\n/// Parse a function which is declared externally.\n\n///\n", "file_path": "src/kgen/src/parsers/functions/external_function.rs", "rank": 54, "score": 36351.156492852344 }, { "content": "\n\n FunctionObject::ExprFunction(\n\n Box::new(\n\n ExprFunction::new(\n\n pos, func_name, args, types, ret_type, expr\n\n )\n\n )\n\n )\n\n }\n\n )(text)\n\n}\n", "file_path": "src/kgen/src/parsers/functions/expr_function.rs", "rank": 55, "score": 36350.55623322869 }, { "content": "use nom::bytes::complete::tag;\n\nuse nom::character::complete::multispace0;\n\nuse nom::combinator::map;\n\nuse nom::IResult;\n\nuse nom::multi::many0;\n\nuse nom::sequence::tuple;\n\nuse crate::ast::functions::FunctionObject;\n\nuse 
crate::ast::functions::statement_function::StatementFunction;\n\nuse crate::parsers::functions::args_parser;\n\nuse crate::parsers::functions::function_type_parser;\n\nuse crate::parsers::Span;\n\nuse crate::parsers::statements::statement_parser;\n\nuse crate::parsers::utils::{ identifier, get_position, GSError };\n\n\n\n///\n\n/// Parse a C-like function into `FunctionObject`.\n\n///\n", "file_path": "src/kgen/src/parsers/functions/statement_function.rs", "rank": 56, "score": 36349.47974525514 }, { "content": "use nom::character::complete::char;\n\nuse nom::character::complete::multispace0;\n\nuse nom::combinator::map;\n\nuse nom::IResult;\n\nuse nom::sequence::tuple;\n\nuse crate::ast::exprs::EvaluableObject;\n\nuse crate::ast::functions::expr_function::ExprFunction;\n\nuse crate::ast::functions::FunctionObject;\n\nuse crate::error::error_token::FilePosition;\n\nuse crate::parsers::exprs::expr_parser;\n\nuse crate::parsers::functions::args_parser;\n\nuse crate::parsers::functions::function_type_parser;\n\nuse crate::parsers::Span;\n\nuse crate::parsers::utils::{ identifier, get_position, GSError };\n\n\n\n///\n\n/// Parse a function which has only one expression.\n\n///\n", "file_path": "src/kgen/src/parsers/functions/expr_function.rs", "rank": 57, "score": 36348.809406192835 }, { "content": " gen.param_resolver.borrow_mut().remove_scope();\n\n\n\n Ok(())\n\n },\n\n Err(error_message) => {\n\n gen.param_resolver.borrow_mut().remove_scope();\n\n\n\n Err(error_message)\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/kgen/src/ast/functions/expr_function.rs", "rank": 58, "score": 36344.72580386585 }, { "content": "#[test]\n\nfn dictionary_test1() {\n\n use super::super::dictionary::Dictionary;\n\n\n\n let mut dic: Dictionary<i32> = Dictionary::new();\n\n dic.add_scope(\"scope1\");\n\n\n\n dic.add(\"a\", 0);\n\n\n\n dic.add_scope(\"scope2\");\n\n\n\n dic.add(\"b\", 1);\n\n\n\n assert_eq!(Some(&0), dic.find(\"a\"));\n\n assert_eq!(Some(&1), dic.find(\"b\"));\n\n\n\n 
dic.remove_scope(); // Remove scope2\n\n\n\n assert_eq!(Some(&0), dic.find(\"a\"));\n\n assert_eq!(None, dic.find(\"b\"));\n\n\n\n dic.remove_scope(); // Remove scope1\n\n\n\n assert_eq!(None, dic.find(\"a\"));\n\n assert_eq!(None, dic.find(\"b\"));\n\n}\n", "file_path": "src/kgen/src/resolvers/test/dictionary_test.rs", "rank": 59, "score": 34704.22891836884 }, { "content": "#[test]\n\nfn jit_execute_statement_test1() {\n\n use super::execute_statement::execute_statement;\n\n\n\n assert_eq!(execute_statement(\"#ret (6 - 2) * 3 + 5\"), Ok((6 - 2) * 3 + 5));\n\n}\n\n\n", "file_path": "src/kgen/src/jit/test/mod.rs", "rank": 60, "score": 34009.36627428114 }, { "content": "#[test]\n\nfn jit_execute_expr_test1() {\n\n use super::execute_expr::execute_expr;\n\n\n\n assert_eq!(execute_expr(\"(6 - 2) * 3 + 5\"), Ok((6 - 2) * 3 + 5));\n\n}\n\n\n", "file_path": "src/kgen/src/jit/test/mod.rs", "rank": 61, "score": 34009.36627428114 }, { "content": "#[test]\n\nfn jit_execute_expr_test2() {\n\n use super::execute_expr::execute_expr;\n\n\n\n assert_eq!(execute_expr(\"6 - 2 * 3 + 5\"), Ok(6 - 2 * 3 + 5));\n\n}\n\n\n", "file_path": "src/kgen/src/jit/test/mod.rs", "rank": 62, "score": 34009.36627428114 }, { "content": "use clap::App;\n\nuse clap::ArgMatches;\n\nuse clap::SubCommand;\n\nuse super::kprc::KprcApp;\n\n\n", "file_path": "src/kpr/src/args_manager.rs", "rank": 63, "score": 31804.648371673302 }, { "content": "use inkwell::types::BasicTypeEnum;\n\nuse inkwell::AddressSpace;\n\nuse crate::ast::CodeGen;\n\nuse crate::resolvers::dictionary::Dictionary;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct KMember {\n\n pub name: String,\n\n pub type_name: String\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct KType {\n\n pub type_name: String,\n\n pub members: Vec<KMember>\n\n}\n\n\n\nimpl<'ctx> KMember {\n\n pub fn new(name: String, type_name: String) -> KMember {\n\n KMember { name, type_name }\n", "file_path": "src/kgen/src/resolvers/type_resolver.rs", "rank": 64, "score": 
30873.58505539851 }, { "content": " }\n\n}\n\n\n\nimpl<'ctx> KType {\n\n pub fn get_member(&self, name: String) -> Option<&KMember> {\n\n self.members\n\n .iter()\n\n .find(|member| member.name == name)\n\n }\n\n\n\n pub fn get_type(&self, gen: &CodeGen<'ctx>) -> BasicTypeEnum<'ctx> {\n\n match self.type_name.as_str() {\n\n \"Z\" => {\n\n BasicTypeEnum::IntType(\n\n gen.context.i32_type()\n\n )\n\n },\n\n \"R\" => {\n\n BasicTypeEnum::FloatType(\n\n gen.context.f64_type()\n", "file_path": "src/kgen/src/resolvers/type_resolver.rs", "rank": 65, "score": 30872.44132915706 }, { "content": " )\n\n },\n\n \"Text\" => {\n\n BasicTypeEnum::PointerType(\n\n gen.context.i8_type().ptr_type(AddressSpace::Generic)\n\n )\n\n },\n\n _ => {\n\n BasicTypeEnum::IntType(\n\n gen.context.i32_type()\n\n )\n\n }\n\n }\n\n }\n\n}\n\n\n\npub type TypeResolver<'ctx> = Dictionary<KType>;\n\n\n\nimpl<'ctx> TypeResolver<'_> {\n\n pub fn init_default_types(&mut self) {\n", "file_path": "src/kgen/src/resolvers/type_resolver.rs", "rank": 66, "score": 30867.463511454745 }, { "content": " self.add(\n\n \"Z\",\n\n KType {\n\n type_name: \"Z\".to_string(),\n\n members: Vec::new()\n\n }\n\n );\n\n\n\n self.add(\n\n \"R\",\n\n KType {\n\n type_name: \"R\".to_string(),\n\n members: Vec::new()\n\n }\n\n );\n\n\n\n self.add(\n\n \"Text\",\n\n KType {\n\n type_name: \"Text\".to_string(),\n\n members: Vec::new()\n\n }\n\n );\n\n }\n\n}\n", "file_path": "src/kgen/src/resolvers/type_resolver.rs", "rank": 67, "score": 30864.705359799973 }, { "content": "use crate::ast::CodeGen;\n\nuse crate::error::error_token::{ FilePosition, ErrorToken };\n\n\n\n///\n\n/// `BreakObject` is an object which represents a statement with the keyword `#break`.\n\n///\n\n#[derive(Debug,PartialEq)]\n\npub struct BreakObject {\n\n pos: FilePosition\n\n}\n\n\n\nimpl<'ctx> BreakObject {\n\n ///\n\n /// Create a `BreakObject` instance.\n\n ///\n\n pub fn new(pos: FilePosition) -> Self {\n\n Self {\n\n pos\n\n }\n\n }\n", "file_path": 
"src/kgen/src/ast/statements/break_object.rs", "rank": 68, "score": 30070.227029714646 }, { "content": "\n\n ///\n\n /// Generate the part of the abstract syntax tree.\n\n ///\n\n pub fn codegen(&self, gen: &CodeGen<'ctx>) -> Result<(), ErrorToken> {\n\n if let Some(dest) = gen.loop_destinations.borrow().last() {\n\n gen.builder.build_unconditional_branch(*dest);\n\n\n\n Ok(())\n\n }\n\n else {\n\n Err(ErrorToken::error(\n\n self.pos.clone(),\n\n \"Cannot exit a loop while you aren't in loop.\".to_string()\n\n ))\n\n }\n\n }\n\n}\n", "file_path": "src/kgen/src/ast/statements/break_object.rs", "rank": 69, "score": 30063.96390646138 }, { "content": "use nom::bytes::complete::tag;\n\nuse nom::IResult;\n\nuse crate::ast::statements::break_object::BreakObject;\n\nuse crate::ast::statements::StatementObject;\n\nuse crate::parsers::Span;\n\nuse crate::parsers::utils::{ get_position, GSError };\n\n\n\n///\n\n/// Parse a break statement. Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <break> ::= \"#break\"\n\n/// ```\n\n///\n", "file_path": "src/kgen/src/parsers/statements/break_object.rs", "rank": 70, "score": 30063.92036704349 }, { "content": "use inkwell::values::BasicValueEnum;\n\nuse crate::error::error_token::{ ErrorToken, FilePosition };\n\nuse crate::ast::CodeGen;\n\n\n\n///\n\n/// `StringObject` is an object which stores a text.\n\n///\n\n#[derive(Debug,PartialEq)]\n\npub struct StringObject {\n\n pos: FilePosition,\n\n text: String\n\n}\n\n\n\nimpl<'ctx> StringObject {\n\n ///\n\n /// Create a `StringObject` instance.\n\n ///\n\n pub fn new(pos: FilePosition, text: String) -> Self {\n\n Self {\n\n pos, text\n", "file_path": "src/kgen/src/ast/exprs/string_object.rs", "rank": 71, "score": 30039.46069982579 }, { "content": " }\n\n }\n\n\n\n ///\n\n /// Generate the part of the abstract syntax tree.\n\n ///\n\n pub fn codegen(&self, gen: &CodeGen<'ctx>) -> Result<BasicValueEnum<'ctx>, ErrorToken> {\n\n let global_val = 
gen.builder.build_global_string_ptr(&self.text, \"\");\n\n\n\n Ok(BasicValueEnum::PointerValue(global_val.as_pointer_value()))\n\n }\n\n}\n", "file_path": "src/kgen/src/ast/exprs/string_object.rs", "rank": 72, "score": 30034.82371893689 }, { "content": "use nom::bytes::complete::escaped;\n\nuse nom::bytes::complete::tag;\n\nuse nom::character::complete::none_of;\n\nuse nom::IResult;\n\nuse nom::sequence::delimited;\n\nuse crate::ast::exprs::EvaluableObject;\n\nuse crate::ast::exprs::string_object::StringObject;\n\nuse crate::parsers::Span;\n\nuse crate::parsers::utils::{ get_position, GSError };\n\n\n\n///\n\n/// Parse an expression which is enclosed in paren.\n\n///\n", "file_path": "src/kgen/src/parsers/factors/string_object.rs", "rank": 73, "score": 30032.536755613375 }, { "content": "use inkwell::values::BasicValueEnum;\n\nuse crate::ast::CodeGen;\n\nuse crate::ast::exprs::EvaluableObject;\n\nuse crate::error::error_token::{ FilePosition, ErrorToken };\n\nuse crate::resolvers::parameter_resolver::KParameter;\n\n\n\n///\n\n/// `LetObject` is an object which represents a statement with the keyword `#let`.\n\n///\n\n#[derive(Debug,PartialEq)]\n\npub struct LetObject {\n\n pos: FilePosition,\n\n param_name: String,\n\n type_name: String,\n\n expr: Option<EvaluableObject>\n\n}\n\n\n\nimpl<'ctx> LetObject {\n\n ///\n\n /// Create a `LetObject` instance.\n", "file_path": "src/kgen/src/ast/statements/let_object.rs", "rank": 74, "score": 29911.821803586896 }, { "content": " ///\n\n pub fn new(pos: FilePosition, param_name: String, type_name: String, expr: Option<EvaluableObject>) -> Self {\n\n Self {\n\n pos,\n\n param_name,\n\n type_name,\n\n expr\n\n }\n\n }\n\n\n\n ///\n\n /// Allocate memories for storing parameters.\n\n ///\n\n fn allocate_param(&self, gen: &CodeGen<'ctx>) -> Result<KParameter<'ctx>, ErrorToken> {\n\n let type_value = gen.type_resolver.borrow()\n\n .find(&self.type_name)\n\n .ok_or(\n\n ErrorToken::error(\n\n self.pos.clone(),\n\n format!(\"Not 
found a type named {}\", self.type_name)\n", "file_path": "src/kgen/src/ast/statements/let_object.rs", "rank": 75, "score": 29909.543811208452 }, { "content": "\n\n let mut param_mut = gen.param_resolver.borrow_mut();\n\n param_mut.add(&self.param_name, param);\n\n\n\n if let Some(expr) = &self.expr {\n\n let param = param_mut.find(&self.param_name)\n\n .ok_or(\n\n ErrorToken::error(\n\n self.pos.clone(),\n\n format!(\"An unknown parameter named {} is referenced here\", self.param_name)\n\n )\n\n )?;\n\n\n\n match param.value {\n\n BasicValueEnum::PointerValue(val) => {\n\n gen.builder.build_store(val, expr.codegen(gen)?);\n\n Ok(())\n\n },\n\n _ => {\n\n Err(ErrorToken::error(\n", "file_path": "src/kgen/src/ast/statements/let_object.rs", "rank": 76, "score": 29909.201692530318 }, { "content": " )\n\n )?\n\n .get_type(gen);\n\n\n\n let allocated = gen.builder.build_alloca(\n\n type_value,\n\n &self.param_name\n\n );\n\n\n\n Ok(KParameter {\n\n type_id: self.type_name.clone(),\n\n value: BasicValueEnum::PointerValue(allocated)\n\n })\n\n }\n\n\n\n ///\n\n /// Generate the part of the abstract syntax tree.\n\n ///\n\n pub fn codegen(&self, gen: &CodeGen<'ctx>) -> Result<(), ErrorToken> {\n\n let param = self.allocate_param(gen)?;\n", "file_path": "src/kgen/src/ast/statements/let_object.rs", "rank": 77, "score": 29908.35078546529 }, { "content": "use nom::bytes::complete::tag;\n\nuse nom::character::complete::multispace0;\n\nuse nom::character::complete::multispace1;\n\nuse nom::combinator::opt;\n\nuse nom::IResult;\n\nuse crate::ast::exprs::EvaluableObject;\n\nuse crate::ast::statements::let_object::LetObject;\n\nuse crate::ast::statements::StatementObject;\n\nuse crate::parsers::Span;\n\nuse crate::parsers::exprs::expr_parser;\n\nuse crate::parsers::utils::{ identifier, get_position, GSError };\n\n\n\n///\n\n/// Parse an assign object. 
Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <assign> ::= \":=\" <expr>\n\n/// ```\n\n///\n", "file_path": "src/kgen/src/parsers/statements/let_object.rs", "rank": 78, "score": 29900.789808640664 }, { "content": " self.pos.clone(),\n\n \"Cannot assign some value to immutable one\".to_string()\n\n ))\n\n }\n\n }\n\n }\n\n else {\n\n Ok(())\n\n }\n\n }\n\n}\n", "file_path": "src/kgen/src/ast/statements/let_object.rs", "rank": 79, "score": 29896.800133235884 }, { "content": "use super::dictionary::Dictionary;\n\nuse inkwell::values::FunctionValue;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct KFunction<'ctx> {\n\n pub type_id: String,\n\n pub func: FunctionValue<'ctx>\n\n}\n\n\n\nimpl<'ctx> KFunction<'ctx> {\n\n pub fn new(type_id: String, func: FunctionValue<'ctx>) -> KFunction<'ctx> {\n\n KFunction { type_id, func }\n\n }\n\n}\n\n\n\npub type FunctionResolver<'ctx> = Dictionary<KFunction<'ctx>>;\n", "file_path": "src/kgen/src/resolvers/function_resolver.rs", "rank": 86, "score": 29474.775934125773 }, { "content": "use nom::branch::alt;\n\nuse nom::bytes::complete::tag;\n\nuse nom::character::complete::multispace0;\n\nuse nom::combinator::map;\n\nuse nom::combinator::opt;\n\nuse nom::IResult;\n\nuse nom::multi::separated_list0;\n\nuse nom::sequence::delimited;\n\nuse nom::sequence::tuple;\n\nuse crate::ast::functions::FunctionObject;\n\nuse crate::parsers::functions::expr_function::expr_function_parser;\n\nuse crate::parsers::functions::external_function::external_function_parser;\n\nuse crate::parsers::functions::statement_function::statement_function_parser;\n\nuse crate::parsers::Span;\n\nuse crate::parsers::utils::{ identifier, GSError };\n\n\n\n///\n\n/// Parse a function into `FunctionObject`.\n\n///\n", "file_path": "src/kgen/src/parsers/functions/mod.rs", "rank": 89, "score": 29468.981970825716 }, { "content": "use inkwell::context::Context;\n\nuse inkwell::OptimizationLevel;\n\nuse inkwell::execution_engine::JitFunction;\n\nuse 
crate::ast::CodeGen;\n\nuse crate::parsers::functions::function_parser;\n\nuse crate::parsers::Span;\n\n\n\n///\n\n/// Execute a function Just In Time.\n\n///\n", "file_path": "src/kgen/src/jit/execute_function.rs", "rank": 90, "score": 29468.27838508385 }, { "content": "use inkwell::values::BasicValueEnum;\n\nuse crate::ast::exprs::EvaluableObject;\n\nuse crate::error::error_token::{ ErrorToken, FilePosition };\n\nuse crate::ast::CodeGen;\n\n\n\n///\n\n/// `FunctionCallObject` is an object which represents an expression which tries to call a function.\n\n///\n\n#[derive(Debug,PartialEq)]\n\npub struct FunctionCallObject {\n\n pos: FilePosition,\n\n func_name: String,\n\n args: Vec<EvaluableObject>\n\n}\n\n\n\nimpl<'ctx> FunctionCallObject {\n\n ///\n\n /// Create a `FunctionCallObject` instance.\n\n ///\n\n pub fn new(pos: FilePosition, func_name: String, args: Vec<EvaluableObject>) -> Self {\n", "file_path": "src/kgen/src/ast/exprs/function_call_object.rs", "rank": 91, "score": 27885.99818472532 }, { "content": " Self {\n\n pos, func_name, args\n\n }\n\n }\n\n\n\n ///\n\n /// Generate the part of the abstract syntax tree.\n\n ///\n\n fn get_args(&self, gen: &CodeGen<'ctx>) -> Result<Vec<BasicValueEnum<'ctx>>, ErrorToken> {\n\n let mut values: Vec<BasicValueEnum<'ctx>> = Vec::new();\n\n\n\n for arg in &self.args {\n\n let value = arg.codegen(gen)?;\n\n values.push(value);\n\n };\n\n\n\n Ok(values)\n\n }\n\n\n\n ///\n", "file_path": "src/kgen/src/ast/exprs/function_call_object.rs", "rank": 92, "score": 27883.61702383607 }, { "content": " /// Generate the part of the abstract syntax tree.\n\n ///\n\n pub fn codegen(&self, gen: &CodeGen<'ctx>) -> Result<BasicValueEnum<'ctx>, ErrorToken> {\n\n let func = gen.module.get_function(&self.func_name)\n\n .ok_or(\n\n ErrorToken::error(\n\n self.pos.clone(),\n\n format!(\"Not found a function named {}\", self.func_name)\n\n )\n\n )?;\n\n\n\n let ret_val = gen.builder.build_call(func, self.get_args(gen)?.as_slice(), 
\"funccall\");\n\n\n\n ret_val.try_as_basic_value()\n\n .left()\n\n .ok_or(\n\n ErrorToken::error(\n\n self.pos.clone(),\n\n \"Not found a value which will be returned.\".to_string()\n\n )\n\n )\n\n }\n\n}\n", "file_path": "src/kgen/src/ast/exprs/function_call_object.rs", "rank": 93, "score": 27878.6572500483 }, { "content": "use nom::bytes::complete::tag;\n\nuse nom::character::complete::multispace0;\n\nuse nom::combinator::map;\n\nuse nom::IResult;\n\nuse nom::multi::separated_list0;\n\nuse nom::sequence::tuple;\n\nuse crate::ast::exprs::EvaluableObject;\n\nuse crate::ast::exprs::function_call_object::FunctionCallObject;\n\nuse crate::parsers::exprs::expr_parser;\n\nuse crate::parsers::Span;\n\nuse crate::parsers::utils::{ identifier, get_position, GSError };\n\n\n\n///\n\n/// Parse a factor which calls a function. Can be written in BNF as follow.\n\n///\n\n/// ```bnf\n\n/// <function_call> ::= .* \"(\" (<expr> (\",\" <expr>)*)* \")\"\n\n/// ```\n\n///\n", "file_path": "src/kgen/src/parsers/factors/function_call_object.rs", "rank": 94, "score": 27868.871254776517 }, { "content": "use std::fs;\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n\nuse clap::App;\n\nuse clap::Arg;\n\nuse clap::ArgMatches;\n\nuse inkwell::context::Context;\n\nuse kgen::ast::CodeGen;\n\nuse kgen::error::error_token::ErrorToken;\n\n\n\npub struct KprcApp {\n\n source_file: String,\n\n output_file: Option<String>,\n\n llvm: bool\n\n}\n\n\n\nimpl KprcApp {\n\n pub fn new(args: &ArgMatches) -> Result<Self, Vec<ErrorToken>> {\n\n let source_file = args.value_of(\"SOURCES\");\n\n let output_file = match args.value_of(\"output\") {\n", "file_path": "src/kpr/src/kprc/mod.rs", "rank": 95, "score": 22.533819553433556 }, { "content": "use inkwell::values::BasicValueEnum;\n\nuse crate::error::error_token::{ ErrorToken, FilePosition };\n\nuse crate::ast::CodeGen;\n\n\n\n///\n\n/// `ParamObject` is an object which represents a parameter.\n\n///\n\n#[derive(Debug,PartialEq)]\n\npub struct 
ParamObject {\n\n pos: FilePosition,\n\n param_name: String\n\n}\n\n\n\nimpl<'ctx> ParamObject {\n\n ///\n\n /// Create a `ParamObject` instance.\n\n ///\n\n pub fn new(pos: FilePosition, param_name: String) -> Self {\n\n Self {\n\n pos, param_name\n", "file_path": "src/kgen/src/ast/exprs/param_object.rs", "rank": 96, "score": 18.949056761584494 }, { "content": " }\n\n }\n\n\n\n ///\n\n /// Generate the part of the abstract syntax tree.\n\n ///\n\n pub fn codegen(&self, gen: &CodeGen<'ctx>) -> Result<BasicValueEnum<'ctx>, ErrorToken> {\n\n let mut resolver = gen.param_resolver.borrow_mut();\n\n let param = resolver.find_mut(&self.param_name)\n\n .ok_or(\n\n ErrorToken::error(\n\n self.pos.clone(),\n\n \"Unknown parameters\".to_string()\n\n )\n\n )?;\n\n let loaded_value = gen.builder.build_load(param.value.into_pointer_value(), \"\");\n\n\n\n Ok(loaded_value)\n\n }\n\n}\n", "file_path": "src/kgen/src/ast/exprs/param_object.rs", "rank": 97, "score": 17.611681246091223 }, { "content": " type_resolver.borrow_mut().init_default_types();\n\n\n\n CodeGen {\n\n context,\n\n module,\n\n builder,\n\n param_resolver,\n\n type_resolver,\n\n function_resolver,\n\n loop_destinations\n\n }\n\n }\n\n\n\n ///\n\n /// Parse a program and catch errors if exists.\n\n ///\n\n pub fn parse(&self, text: &str) -> Result<Vec<FunctionObject>, Vec<ErrorToken>> {\n\n let text = Span::new(text);\n\n let parsed = program_parser(text);\n\n match parsed {\n", "file_path": "src/kgen/src/ast/mod.rs", "rank": 98, "score": 17.294176873315823 }, { "content": "impl fmt::Display for FilePosition {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n if self.file_name == \"__internal\" {\n\n Ok(())\n\n }\n\n else {\n\n write!(f, \"{}:{}:{}\", self.file_name, self.line, self.pos)\n\n }\n\n }\n\n}\n\n\n\nimpl FilePosition {\n\n ///\n\n /// Create a `FilePosition` instance.\n\n ///\n\n pub fn new(file_name: String, line: u64, pos: u64, length: u64) -> Self {\n\n Self {\n\n file_name, line, 
pos, length\n\n }\n\n }\n", "file_path": "src/kgen/src/error/error_token.rs", "rank": 99, "score": 17.273859884784542 } ]
Rust
async-coap/src/message/write.rs
Luro02/rust-async-coap
6a7b592a23de0c9d86ca399bf40ecfbf0bff6e62
use super::*; pub trait MessageWrite: OptionInsert { fn set_msg_type(&mut self, tt: MsgType); fn set_msg_id(&mut self, msg_id: MsgId); fn set_msg_code(&mut self, code: MsgCode); fn set_msg_token(&mut self, token: MsgToken); fn append_payload_bytes(&mut self, body: &[u8]) -> Result<(), Error>; fn append_payload_string(&mut self, body: &str) -> Result<(), Error> { self.append_payload_bytes(body.as_bytes()) } fn append_payload_u8(&mut self, b: u8) -> Result<(), Error> { self.append_payload_bytes(&[b]) } fn append_payload_char(&mut self, c: char) -> Result<(), Error> { self.append_payload_string(c.encode_utf8(&mut [0; 4])) } fn clear(&mut self); } impl<'a> core::fmt::Write for dyn MessageWrite + 'a { fn write_str(&mut self, s: &str) -> Result<(), core::fmt::Error> { self.append_payload_string(s)?; Ok(()) } fn write_char(&mut self, c: char) -> Result<(), core::fmt::Error> { self.append_payload_char(c)?; Ok(()) } } impl<'a> std::io::Write for dyn MessageWrite + 'a { fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> { self.append_payload_bytes(buf) .map(|_| buf.len()) .map_err(|_| std::io::ErrorKind::Other.into()) } fn flush(&mut self) -> Result<(), std::io::Error> { Ok(()) } fn write_all(&mut self, buf: &[u8]) -> Result<(), std::io::Error> { self.append_payload_bytes(buf) .map_err(|_| std::io::ErrorKind::Other.into()) } } impl<'a> std::io::Write for BufferMessageEncoder<'a> { fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> { self.append_payload_bytes(buf) .map(|_| buf.len()) .map_err(|_| std::io::ErrorKind::Other.into()) } fn flush(&mut self) -> Result<(), std::io::Error> { Ok(()) } fn write_all(&mut self, buf: &[u8]) -> Result<(), std::io::Error> { self.append_payload_bytes(buf) .map_err(|_| std::io::ErrorKind::Other.into()) } } impl std::io::Write for VecMessageEncoder { fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> { self.append_payload_bytes(buf) .map(|_| buf.len()) .map_err(|_| std::io::ErrorKind::Other.into()) } fn 
flush(&mut self) -> Result<(), std::io::Error> { Ok(()) } fn write_all(&mut self, buf: &[u8]) -> Result<(), std::io::Error> { self.append_payload_bytes(buf) .map_err(|_| std::io::ErrorKind::Other.into()) } }
use super::*; pub trait MessageWrite: OptionInsert { fn set_msg_type(&mut self, tt: MsgType); fn set_msg_id(&mut self, msg_id: MsgId); fn set_msg_code(&mut self, code: MsgCode); fn set_msg_token(&mut self, token: MsgToken); fn append_payload_bytes(&mut self, body: &[u8]) -> Result<(), Error>; fn append_payload_string(&mut self, body: &str) -> Result<(), Error> { self.append_payload_bytes(body
Ok(()) } fn write_char(&mut self, c: char) -> Result<(), core::fmt::Error> { self.append_payload_char(c)?; Ok(()) } } impl<'a> std::io::Write for dyn MessageWrite + 'a { fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> { self.append_payload_bytes(buf) .map(|_| buf.len()) .map_err(|_| std::io::ErrorKind::Other.into()) } fn flush(&mut self) -> Result<(), std::io::Error> { Ok(()) } fn write_all(&mut self, buf: &[u8]) -> Result<(), std::io::Error> { self.append_payload_bytes(buf) .map_err(|_| std::io::ErrorKind::Other.into()) } } impl<'a> std::io::Write for BufferMessageEncoder<'a> { fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> { self.append_payload_bytes(buf) .map(|_| buf.len()) .map_err(|_| std::io::ErrorKind::Other.into()) } fn flush(&mut self) -> Result<(), std::io::Error> { Ok(()) } fn write_all(&mut self, buf: &[u8]) -> Result<(), std::io::Error> { self.append_payload_bytes(buf) .map_err(|_| std::io::ErrorKind::Other.into()) } } impl std::io::Write for VecMessageEncoder { fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> { self.append_payload_bytes(buf) .map(|_| buf.len()) .map_err(|_| std::io::ErrorKind::Other.into()) } fn flush(&mut self) -> Result<(), std::io::Error> { Ok(()) } fn write_all(&mut self, buf: &[u8]) -> Result<(), std::io::Error> { self.append_payload_bytes(buf) .map_err(|_| std::io::ErrorKind::Other.into()) } }
.as_bytes()) } fn append_payload_u8(&mut self, b: u8) -> Result<(), Error> { self.append_payload_bytes(&[b]) } fn append_payload_char(&mut self, c: char) -> Result<(), Error> { self.append_payload_string(c.encode_utf8(&mut [0; 4])) } fn clear(&mut self); } impl<'a> core::fmt::Write for dyn MessageWrite + 'a { fn write_str(&mut self, s: &str) -> Result<(), core::fmt::Error> { self.append_payload_string(s)?;
random
[ { "content": "/// Extension trait for option iterators that provide additional convenient accessors.\n\npub trait OptionIteratorExt<'a>: Iterator<Item = Result<(OptionNumber, &'a [u8]), Error>> {\n\n /// Moves the iterator forward until it finds a matching key or the\n\n /// spot where it should have been.\n\n ///\n\n /// If found, returns the option number and a byte slice of the value.\n\n ///\n\n /// Does not consume any options after\n\n /// the matching key.\n\n fn find_next(&mut self, key: OptionNumber) -> Option<Result<(OptionNumber, &'a [u8]), Error>>;\n\n\n\n /// Typed version of [`OptionIteratorExt::find_next`].\n\n ///\n\n /// Moves the iterator forward until it finds a matching key or the\n\n /// spot where it should have been.\n\n ///\n\n /// If found, returns the value of the option key.\n\n ///\n\n /// Does not consume any options after\n\n /// the matching key.\n\n fn find_next_of<T>(&mut self, key: OptionKey<T>) -> Option<Result<T, Error>>\n", "file_path": "async-coap/src/option/iter.rs", "rank": 0, "score": 215218.33753395686 }, { "content": "fn assert_uri_str(uri_str: &str) -> Result<(), Error> {\n\n let captures = RFC3986_APPENDIX_B\n\n .captures(uri_str)\n\n .ok_or(Error::MalformedStructure)?;\n\n\n\n let has_scheme = captures.get(2).is_some();\n\n let has_authority = captures.get(4).is_some();\n\n\n\n if !has_scheme && !has_authority {\n\n return Err(Error::MalformedStructure);\n\n }\n\n\n\n if let Some(scheme) = captures.get(2) {\n\n // Do an additional syntax check on the scheme to make sure it is valid.\n\n URI_CHECK_SCHEME\n\n .captures(scheme.as_str())\n\n .ok_or(Error::MalformedScheme)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 1, "score": 212840.52330881412 }, { "content": "fn assert_uri_ref_str(uri_str: &str) -> Result<(), Error> {\n\n // Not sure what additional checks to do in this case.\n\n RFC3986_APPENDIX_B\n\n .captures(uri_str)\n\n 
.ok_or(Error::MalformedStructure)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 2, "score": 210039.71010868304 }, { "content": "fn assert_rel_ref_str(uri_str: &str) -> Result<(), Error> {\n\n // We should not be able to parse as a URI.\n\n assert_uri_str(uri_str)\n\n .err()\n\n .map(|_| ())\n\n .ok_or(Error::Degenerate)?;\n\n\n\n // We should be able to parse as a URI-Reference\n\n assert_uri_ref_str(uri_str)\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 3, "score": 210039.71010868304 }, { "content": "/// Trait for `str` adding URI percent encoding/decoding\n\n///\n\n/// See the [module-level](index.html) documentation for more details.\n\n///\n\npub trait StrExt {\n\n /// Gets an iterator that performs general-purpose URI percent-encoding.\n\n ///\n\n /// By default, all characters described by [`IETF-RFC3986`] as `pchar`s will be escaped,\n\n /// which is appropriate for escaping path segments.\n\n /// This behavior can be modified by appending the following modifiers:\n\n ///\n\n /// * [`full()`]: Escapes all characters except those which are `unreserved`.\n\n /// * [`for_query()`]: Escaping appropriate for the query component.\n\n /// * [`for_fragment()`]: Escaping appropriate for the fragment component.\n\n ///\n\n /// The returned iterator will escape ASCII control characters.\n\n ///\n\n /// [`full()`]: struct.EscapeUri#method.full\n\n /// [`for_query()`]: struct.EscapeUri#method.for_query\n\n /// [`for_fragment()`]: struct.EscapeUri#method.for_fragment\n\n fn escape_uri(&self) -> EscapeUri<'_, EscapeUriSegment>;\n\n\n\n /// Gets an iterator that performs URI percent-decoding.\n\n ///\n", "file_path": "async-coap-uri/src/escape/mod.rs", "rank": 4, "score": 153604.57026219182 }, { "content": "/// Encodes an unsigned 32-bit number into the given buffer, returning\n\n/// the resized buffer. The returned buffer may be smaller than the\n\n/// `dst`, and may even be empty. 
The returned buffer is only as large\n\n/// as it needs to be to represent the given value.\n\npub fn encode_u32(value: u32, dst: &mut [u8]) -> &mut [u8] {\n\n if value == 0 {\n\n &mut []\n\n } else if value <= 0xFF {\n\n dst[0] = value as u8;\n\n &mut dst[..1]\n\n } else if value <= 0xFFFF {\n\n dst[0] = (value >> 8) as u8;\n\n dst[1] = value as u8;\n\n &mut dst[..2]\n\n } else if value <= 0xFFFFFF {\n\n dst[0] = (value >> 16) as u8;\n\n dst[1] = (value >> 8) as u8;\n\n dst[2] = value as u8;\n\n &mut dst[..3]\n\n } else {\n\n dst[0] = (value >> 24) as u8;\n\n dst[1] = (value >> 16) as u8;\n\n dst[2] = (value >> 8) as u8;\n\n dst[3] = value as u8;\n\n &mut dst[..4]\n\n }\n\n}\n\n\n", "file_path": "async-coap/src/util.rs", "rank": 5, "score": 152789.47830551976 }, { "content": "/// Attempts to decode the given little-endian-encoded integer to a `u32`.\n\n/// Input may be up to four bytes long. If the input is larger than four\n\n/// bytes long, returns `None`.\n\npub fn try_decode_u32(src: &[u8]) -> Option<u32> {\n\n match src.len() {\n\n 0 => Some(0u32),\n\n 1 => Some(src[0] as u32),\n\n 2 => Some(((src[0] as u32) << 8) + src[1] as u32),\n\n 3 => Some(((src[0] as u32) << 16) + ((src[1] as u32) << 8) + src[2] as u32),\n\n 4 => Some(\n\n ((src[0] as u32) << 24)\n\n + ((src[1] as u32) << 16)\n\n + ((src[2] as u32) << 8)\n\n + src[3] as u32,\n\n ),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "async-coap/src/util.rs", "rank": 6, "score": 147040.21683011408 }, { "content": "/// Attempts to decode the given little-endian-encoded integer to a `u16`.\n\n/// Input may be up to two bytes long. 
If the input is larger than two\n\n/// bytes long, returns `None`.\n\npub fn try_decode_u16(src: &[u8]) -> Option<u16> {\n\n match src.len() {\n\n 0 => Some(0u16),\n\n 1 => Some(src[0] as u16),\n\n 2 => Some(((src[0] as u16) << 8) + src[1] as u16),\n\n _ => None,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::util::*;\n\n\n\n #[test]\n\n fn encode_decode_u32() {\n\n for i in vec![\n\n 0x00, 0x01, 0x0FF, 0x100, 0x0FFFF, 0x10000, 0x0FFFFFF, 0x1000000, 0xFFFFFFFF,\n\n ] {\n\n assert_eq!(try_decode_u32(encode_u32(i, &mut [0; 4])).unwrap(), i);\n\n }\n", "file_path": "async-coap/src/util.rs", "rank": 7, "score": 147040.2168301141 }, { "content": "/// Trait for types that allow you to insert CoAP options into them.\n\npub trait OptionInsert {\n\n /// Inserts an option into the message with the given bytes as the value.\n\n /// Calling this method with out-of-order keys will incur a significant performance penalty.\n\n fn insert_option_with_bytes(&mut self, key: OptionNumber, value: &[u8]) -> Result<(), Error>;\n\n\n\n /// Inserts an option into the message with no value.\n\n /// Calling this method with out-of-order keys will incur a significant performance penalty.\n\n fn insert_option_empty(&mut self, key: OptionNumber) -> Result<(), Error> {\n\n self.insert_option_with_bytes(key, &[])\n\n }\n\n\n\n /// Inserts an option into the message with a string value.\n\n /// Calling this method with out-of-order keys will incur a significant performance penalty.\n\n fn insert_option_with_str(&mut self, key: OptionNumber, value: &str) -> Result<(), Error> {\n\n self.insert_option_with_bytes(key, value.as_bytes())\n\n }\n\n\n\n /// Inserts an option into the message with an integer value.\n\n /// Calling this method with out-of-order keys will incur a significant performance penalty.\n\n fn insert_option_with_u32(&mut self, key: OptionNumber, value: u32) -> Result<(), Error> {\n\n self.insert_option_with_bytes(key, encode_u32(value, &mut [0; 4]))\n\n }\n\n}\n\n\n", 
"file_path": "async-coap/src/option/insert.rs", "rank": 8, "score": 140820.40279977222 }, { "content": "#[proc_macro_hack]\n\npub fn assert_uri_literal(input: TokenStream) -> TokenStream {\n\n let uri_str = string_literal_from_token_stream(input);\n\n\n\n if let Some(err_pos) = UnescapeUri::new(&uri_str).first_error() {\n\n panic!(\"Malformed percent encoding at index {}\", err_pos);\n\n }\n\n\n\n if let Err(err) = assert_uri_str(&uri_str) {\n\n panic!(\"Malformed uri literal; {:?}\", err);\n\n }\n\n\n\n let gen = quote! { () };\n\n gen.into()\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 9, "score": 136111.31117951008 }, { "content": "#[proc_macro_hack]\n\npub fn assert_uri_ref_literal(input: TokenStream) -> TokenStream {\n\n let uri_str = string_literal_from_token_stream(input);\n\n\n\n if let Some(err_pos) = UnescapeUri::new(&uri_str).first_error() {\n\n panic!(\"Malformed percent encoding at index {}\", err_pos);\n\n }\n\n\n\n if let Err(err) = assert_uri_ref_str(&uri_str) {\n\n panic!(\"Malformed uri_ref literal; {:?}\", err);\n\n }\n\n\n\n let gen = quote! { () };\n\n gen.into()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n fn check_uri_str(uri_str: &str) -> Result<(), Error> {\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 10, "score": 134044.02787539476 }, { "content": "#[proc_macro_hack]\n\npub fn assert_rel_ref_literal(input: TokenStream) -> TokenStream {\n\n let uri_str = string_literal_from_token_stream(input);\n\n\n\n if let Some(err_pos) = UnescapeUri::new(&uri_str).first_error() {\n\n panic!(\"Malformed percent encoding at index {}\", err_pos);\n\n }\n\n\n\n if let Err(err) = assert_rel_ref_str(&uri_str) {\n\n panic!(\"Malformed rel_ref literal; {:?}\", err);\n\n }\n\n\n\n let gen = quote! 
{ () };\n\n gen.into()\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 11, "score": 134044.02787539476 }, { "content": "/// A flavor of `std::net::ToSocketAddrs` that allows the implementation of\n\n/// `SocketAddr` to be replaced.\n\n///\n\n/// This is necessary to enable support for things like\n\n/// CoAP-over-SMS, where socket addresses are telephone numbers.\n\npub trait ToSocketAddrs {\n\n /// Analogous to [`std::net::ToSocketAddrs::Iter`]\n\n type Iter: Iterator<Item = Self::SocketAddr>;\n\n\n\n /// The `SocketAddr` type returned by the above iterator.\n\n type SocketAddr: SocketAddrExt + Copy;\n\n\n\n /// The error type to use for errors while resolving.\n\n type Error: core::fmt::Debug;\n\n\n\n /// Analogous to [`std::net::ToSocketAddrs::to_socket_addrs`]\n\n fn to_socket_addrs(&self) -> Result<Self::Iter, Self::Error>;\n\n}\n\n\n\n/// Blanket implementation of `ToSocketAddrs` for all implementations of `std::net::ToSocketAddrs`.\n\n#[cfg(feature = \"std\")]\n\nimpl<T, I> ToSocketAddrs for T\n\nwhere\n\n T: std::net::ToSocketAddrs<Iter = I>,\n\n I: Iterator<Item = std::net::SocketAddr>,\n\n{\n\n type Iter = I;\n\n type SocketAddr = std::net::SocketAddr;\n\n type Error = std::io::Error;\n\n\n\n fn to_socket_addrs(&self) -> Result<Self::Iter, Self::Error> {\n\n std::net::ToSocketAddrs::to_socket_addrs(self)\n\n }\n\n}\n\n\n", "file_path": "async-coap/src/socketaddr.rs", "rank": 13, "score": 120211.46515447646 }, { "content": "/// An object that represents a remote CoAP endpoint with a default, overridable path.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// #\n\n/// # use std::sync::Arc;\n\n/// # use futures::{prelude::*,executor::LocalPool,task::LocalSpawnExt};\n\n/// # use async_coap::prelude::*;\n\n/// # use async_coap::datagram::{DatagramLocalEndpoint,AllowStdUdpSocket};\n\n/// #\n\n/// # // Create our asynchronous socket. 
In this case, it is just an\n\n/// # // (inefficient) wrapper around the standard rust `UdpSocket`,\n\n/// # // but that is quite adequate in this case.\n\n/// # let socket = AllowStdUdpSocket::bind(\"[::]:0\").expect(\"UDP bind failed\");\n\n/// #\n\n/// # // Create a new local endpoint from the socket we just created,\n\n/// # // wrapping it in a `Arc<>` to ensure it can live long enough.\n\n/// # let local_endpoint = Arc::new(DatagramLocalEndpoint::new(socket));\n\n/// #\n\n/// # // Create a local execution pool for running our local endpoint.\n\n/// # let mut pool = LocalPool::new();\n\n/// #\n\n/// # // Add our local endpoint to the pool, so that it\n\n/// # // can receive packets.\n\n/// # pool.spawner().spawn_local(local_endpoint\n\n/// # .clone()\n\n/// # .receive_loop_arc(null_receiver!())\n\n/// # .map(|err| panic!(\"Receive loop terminated: {}\", err))\n\n/// # );\n\n/// #\n\n/// # let future = async move {\n\n/// // Create a remote endpoint instance to represent the\n\n/// // device we wish to interact with.\n\n/// let remote_endpoint = local_endpoint\n\n/// .remote_endpoint_from_uri(uri!(\"coap://coap.me\"))\n\n/// .unwrap(); // Will only fail if the URI scheme or authority is unrecognizable\n\n///\n\n/// // Create a future that sends a request to a specific path\n\n/// // on the remote endpoint, collecting any blocks in the response\n\n/// // and returning `Ok(OwnedImmutableMessage)` upon success.\n\n/// let future = remote_endpoint.send_to(\n\n/// rel_ref!(\"large\"),\n\n/// CoapRequest::get() // This is a CoAP GET request\n\n/// .accept(ContentFormat::TEXT_PLAIN_UTF8) // We only want plaintext\n\n/// .block2(Some(Default::default())) // Enable block2 processing\n\n/// .emit_successful_collected_response() // Collect all blocks into a single message\n\n/// );\n\n///\n\n/// // Wait for the final result and print it.\n\n/// println!(\"result: {:?}\", future.await.unwrap());\n\n/// # };\n\n/// #\n\n/// # pool.run_until(future);\n\n/// ```\n\n///\n\npub 
trait RemoteEndpoint {\n\n /// The `SocketAddr` type to use with this local endpoint. This is usually\n\n /// simply `std::net::SocketAddr`, but may be different in some cases (like for CoAP-SMS\n\n /// endpoints).\n\n type SocketAddr: SocketAddrExt;\n\n\n\n /// Type used by closure that is passed into `send()`, representing the context for the\n\n /// response.\n\n type InboundContext: InboundContext<SocketAddr = Self::SocketAddr>;\n\n\n\n /// Returns a [`UriBuf`] describing the underlying destination of this remote endpoint.\n\n fn uri(&self) -> UriBuf;\n\n\n\n /// Returns a string slice containing the scheme for this `RemoteEndpoint`.\n\n fn scheme(&self) -> &'static str;\n\n\n\n /// Prevents this remote endpoint from including a `Uri-Host` option.\n\n fn remove_host_option(&mut self);\n\n\n\n /// Creates a clone of this `RemoteEndpoint` with a different relative path.\n", "file_path": "async-coap/src/remote_endpoint.rs", "rank": 14, "score": 118018.39392773343 }, { "content": "/// Trait for reading the various parts of a CoAP message.\n\npub trait MessageRead {\n\n /// Gets the message code for this message.\n\n fn msg_code(&self) -> MsgCode;\n\n\n\n /// Gets the message type for this message.\n\n fn msg_type(&self) -> MsgType;\n\n\n\n /// Gets the message id for this message.\n\n fn msg_id(&self) -> MsgId;\n\n\n\n /// Gets the message token for this message.\n\n fn msg_token(&self) -> MsgToken;\n\n\n\n /// Gets the payload as a byte slice.\n\n fn payload(&self) -> &[u8];\n\n\n\n /// Gets an iterator for processing the options of the message.\n\n fn options(&self) -> OptionIterator<'_>;\n\n\n\n /// Writes this message to the given `target` that implements [`MessageWrite`].\n", "file_path": "async-coap/src/message/read.rs", "rank": 15, "score": 118017.35597686403 }, { "content": "/// Extension trait for `SocketAddr` types that allows the local endpoint get the information\n\n/// it needs.\n\npub trait SocketAddrExt:\n\n Sized + ToSocketAddrs + Copy + 
core::fmt::Display + core::fmt::Debug + Send + Eq + Hash\n\n{\n\n /// Determines if the address in this `SocketAddr` is a multicast/broadcast address or not.\n\n fn is_multicast(&self) -> bool;\n\n\n\n /// Returns the port number for this socket.\n\n ///\n\n /// A value of zero indicates no specific value.\n\n fn port(&self) -> u16;\n\n\n\n /// Returns a version of this socket address that conforms to the address type of `local`,\n\n /// or `None` if such a conversion is not possible.\n\n ///\n\n /// This method is useful in mixed ipv6/ipv4 environments.\n\n #[allow(unused_variables)]\n\n fn conforming_to(&self, local: Self) -> Option<Self> {\n\n Some(*self)\n\n }\n\n\n", "file_path": "async-coap/src/socketaddr.rs", "rank": 16, "score": 118017.16582819614 }, { "content": "/// Encodes an option into the given buffer, including the value.\n\npub fn encode_option(\n\n buffer: &mut [u8],\n\n prev_key: OptionNumber,\n\n key: OptionNumber,\n\n value: &[u8],\n\n) -> Result<usize, Error> {\n\n let option_len = encode_option_without_value(buffer, prev_key, key, value.len())?;\n\n\n\n // The value bytes are always at the end.\n\n buffer[option_len - value.len()..option_len].copy_from_slice(value);\n\n\n\n return Ok(option_len);\n\n}\n\n\n", "file_path": "async-coap/src/message/codec.rs", "rank": 17, "score": 116636.75861572474 }, { "content": "/// Inserts an option into an option list. 
Very slow unless called sequentially.\n\npub fn insert_option(\n\n buffer: &mut [u8],\n\n mut len: usize,\n\n last_option: OptionNumber,\n\n key: OptionNumber,\n\n value: &[u8],\n\n) -> Result<(usize, OptionNumber), Error> {\n\n if value.len() > MAX_OPTION_VALUE_SIZE {\n\n return Err(Error::InvalidArgument);\n\n }\n\n\n\n if key >= last_option {\n\n // This is the easy case: A simple append is adequate.\n\n len += encode_option(&mut buffer[len..], last_option, key, value)?;\n\n return Ok((len, key));\n\n }\n\n\n\n // What follows will only happen if this method is called with a property key\n\n // out-of-order. Hopefully this should only happen rarely, as there is a\n\n // significant performance penalty for doing so. This approach does have a\n", "file_path": "async-coap/src/message/codec.rs", "rank": 18, "score": 116636.75861572474 }, { "content": "/// Trait for objects that represent logical URI-references. Useful for generic programming.\n\n///\n\npub trait AnyUriRef {\n\n /// Returns a `UriRawComponents` instance which contains all of the components for this\n\n /// URI reference.\n\n ///\n\n /// This is the only method that is required to be implemented---all other methods have\n\n /// defaults in place which use this method, but they may be inefficient.\n\n #[must_use]\n\n fn components(&self) -> UriRawComponents<'_>;\n\n\n\n /// Returns true if the underlying URI-reference is actually the empty reference.\n\n #[must_use]\n\n fn is_empty(&self) -> bool {\n\n self.components().is_empty()\n\n }\n\n\n\n /// Gets the [`UriType`] of the underlying URI-reference.\n\n ///\n\n /// [`UriType`]: enum.UriType.html\n\n #[must_use]\n\n fn uri_type(&self) -> UriType {\n", "file_path": "async-coap-uri/src/any_uri_ref.rs", "rank": 19, "score": 115941.48874253237 }, { "content": "/// Extension class for additional helper methods for `OptionInsertExt`.\n\npub trait OptionInsertExt {\n\n /// Inserts an option into the message with a value of the appropriate type.\n\n /// 
Calling this method with out-of-order keys will incur a significant performance penalty.\n\n fn insert_option<'a, T>(&mut self, key: OptionKey<T>, value: T) -> Result<(), Error>\n\n where\n\n T: Into<OptionValue<'a>>;\n\n}\n\n\n\nimpl<O> OptionInsertExt for O\n\nwhere\n\n O: OptionInsert + ?Sized,\n\n{\n\n fn insert_option<'a, T>(&mut self, key: OptionKey<T>, value: T) -> Result<(), Error>\n\n where\n\n T: Into<OptionValue<'a>>,\n\n {\n\n match value.into() {\n\n OptionValue::Integer(x) => self.insert_option_with_u32(key.0, x),\n\n OptionValue::Bytes(x) => self.insert_option_with_bytes(key.0, x),\n\n OptionValue::ETag(x) => self.insert_option_with_bytes(key.0, x.as_bytes()),\n\n }\n\n }\n\n}\n", "file_path": "async-coap/src/option/insert.rs", "rank": 20, "score": 115932.06731681112 }, { "content": "/// A trait for asynchronous datagram sockets.\n\n///\n\n/// This is an empty convenience trait that requires several additional traits to be implemented:\n\n/// [`DatagramSocketTypes`], [`AsyncSendTo`], [`AsyncRecvFrom`], [`MulticastSocket`],\n\n/// and [`Send`]+[`Sync`].\n\n///\n\n/// Implementations of this trait can be used with [`DatagramLocalEndpoint`].\n\npub trait AsyncDatagramSocket:\n\n DatagramSocketTypes + AsyncSendTo + AsyncRecvFrom + MulticastSocket + Send + Sync\n\n{\n\n}\n\n\n", "file_path": "async-coap/src/datagram/async_socket.rs", "rank": 21, "score": 113970.68966389743 }, { "content": "/// Marker trait for identifying that this `SendDesc` is for *multicast* requests.\n\n/// Also contains multicast-specific extensions.\n\npub trait SendDescMulticast {}\n\n\n", "file_path": "async-coap/src/send_desc/mod.rs", "rank": 22, "score": 113965.74230536743 }, { "content": "/// Marker trait for identifying that this `SendDesc` is for *unicast* requests.\n\n/// Also contains unicast-specific combinators, such as [`block2()`][SendDescUnicast::block2].\n\npub trait SendDescUnicast {\n\n /// Returns a send descriptor that will perform Block2 processing.\n\n ///\n\n 
/// Note that just adding this to your send descriptor chain alone is unlikely to do what\n\n /// you want. You've got three options:\n\n ///\n\n /// * Add a call to [`emit_successful_collected_response`][UnicastBlock2::emit_successful_collected_response]\n\n /// immediately after the call to this method. This will cause the message to be reconstructed from the blocks\n\n /// and returned as a value from the future from `send`. You can optionally add an\n\n /// [`inspect`][SendDescExt::inspect] combinator to get some feedback as the message is being\n\n /// reconstructed from all of the individual block messages.\n\n /// * Add a call to [`emit_successful_response`][SendDescExt::emit_successful_response] along\n\n /// with using `send_to_stream` instead of `send`. This will give you a `Stream` that will\n\n /// contain all of the individual block messages in the stream.\n\n /// * [Add your own handler][SendDescExt::use_handler] to do whatever you need to do, returning\n\n /// `ResponseStatus::SendNext` until all of the blocks have been received. This is\n\n /// useful if you want to avoid memory allocation.\n\n ///\n\n /// There may be other valid combinations of combinators, depending on what you are trying\n\n /// to do.\n", "file_path": "async-coap/src/send_desc/mod.rs", "rank": 23, "score": 113965.52833099998 }, { "content": "/// Decodes one option from a `core::slice::Iter`, which can be obtained from a byte slice.\n\n/// The iterator is then advanced to the next option.\n\n///\n\n/// Will return `Ok(None)` if it either encounters the end-of-options marker (0xFF) or if the\n\n/// given iterator has been fully consumed.\n\npub fn decode_option<'a>(\n\n iter: &mut core::slice::Iter<'a, u8>,\n\n last_option: OptionNumber,\n\n) -> Result<Option<(OptionNumber, &'a [u8])>, Error> {\n\n // TODO(#5): Improve performance.\n\n macro_rules! 
try_next {\n\n ($iter:expr, $none:expr) => {\n\n match ($iter).next() {\n\n Some(x) => *x,\n\n None => return $none,\n\n }\n\n };\n\n }\n\n\n\n let header: u8 = try_next!(iter, Ok(None));\n\n\n\n if header == 0xFF {\n\n // End of options marker.\n\n return Ok(None);\n\n }\n", "file_path": "async-coap/src/message/codec.rs", "rank": 24, "score": 112604.22748442713 }, { "content": "/// Encodes all parts of an option into the given buffer *except* the value. All other parts,\n\n/// including the value length, are encoded. This is typically used directly when inserting\n\n/// options, otherwise `encode_option()` (which writes the value) is typically a better fit.\n\npub fn encode_option_without_value(\n\n buffer: &mut [u8],\n\n prev_key: OptionNumber,\n\n key: OptionNumber,\n\n value_len: usize,\n\n) -> Result<usize, Error> {\n\n if prev_key > key {\n\n return Err(Error::InvalidArgument);\n\n }\n\n\n\n let calc_len = calc_option_size(prev_key, key, value_len);\n\n if calc_len > buffer.len() {\n\n eprintln!(\"calc_len:{}, blen:{}\", calc_len, buffer.len());\n\n return Err(Error::OutOfSpace);\n\n }\n\n\n\n if value_len > MAX_OPTION_VALUE_SIZE {\n\n eprintln!(\"value_len:{}, max:{}\", value_len, MAX_OPTION_VALUE_SIZE);\n\n return Err(Error::InvalidArgument);\n\n }\n", "file_path": "async-coap/src/message/codec.rs", "rank": 25, "score": 112593.33403264932 }, { "content": "/// Trait representing a local (as opposed to remote) CoAP endpoint. Allows for sending and\n\n/// receiving CoAP requests.\n\n///\n\n/// # Implementations\n\n///\n\n/// `LocalEndpoint` is a trait, which allows for multiple back-end implementations.\n\n/// `async-coap` comes with two: [`NullLocalEndpoint`] and [`DatagramLocalEndpoint`].\n\n///\n\n/// [`NullLocalEndpoint`] does what you might expect: nothing. Attempts to send\n\n/// requests always results in [`Error::ResponseTimeout`] and [`LocalEndpoint::receive`]\n\n/// will block indefinitely. 
Creating an instance of it is quite straightforward:\n\n///\n\n/// [`NullLocalEndpoint`]: crate::null::NullLocalEndpoint\n\n/// [`DatagramLocalEndpoint`]: crate::datagram::DatagramLocalEndpoint\n\n///\n\n/// ```\n\n/// use std::sync::Arc;\n\n/// use async_coap::null::NullLocalEndpoint;\n\n///\n\n/// let local_endpoint = Arc::new(NullLocalEndpoint);\n\n/// ```\n\n///\n\n/// If you want to do something more useful, then [`DatagramLocalEndpoint`] is likely\n\n/// what you are looking for. It takes an instance of [`AsyncDatagramSocket`] at construction:\n\n///\n\n/// [`AsyncDatagramSocket`]: crate::datagram::AsyncDatagramSocket\n\n///\n\n/// ```\n\n/// use std::sync::Arc;\n\n/// use async_coap::prelude::*;\n\n/// use async_coap::datagram::{DatagramLocalEndpoint,AllowStdUdpSocket};\n\n///\n\n/// // `AllowStdUdpSocket`, which is a (inefficient) wrapper around the\n\n/// // standard rust `UdpSocket`. It is convenient for testing and for examples\n\n/// // but should not be used in production code.\n\n/// let socket = AllowStdUdpSocket::bind(\"[::]:0\").expect(\"UDP bind failed\");\n\n///\n\n/// // Create a new local endpoint from the socket instance we just created,\n\n/// // wrapping it in a `Arc<>` to ensure it can live long enough.\n\n/// let local_endpoint = Arc::new(DatagramLocalEndpoint::new(socket));\n\n/// ```\n\n///\n\n/// # Client Usage\n\n///\n\n/// Before you can start sending requests and receiving responses, you\n\n/// will need to make sure that the [`LocalEndpoint::receive`] method\n\n/// gets called repeatedly. 
The easiest way to do that is to add the\n\n/// [`std::future::Future`] returned by [`LocalEndpointExt::receive_loop_arc`]\n\n/// to an execution pool:\n\n///\n\n/// ```\n\n/// # use std::sync::Arc;\n\n/// # use async_coap::prelude::*;\n\n/// # use async_coap::datagram::{DatagramLocalEndpoint, AllowStdUdpSocket, LoopbackSocket};\n\n/// # use async_coap::null::NullLocalEndpoint;\n\n/// #\n\n/// # let local_endpoint = Arc::new(NullLocalEndpoint);\n\n/// #\n\n/// use futures::{prelude::*,executor::ThreadPool,task::Spawn,task::SpawnExt};\n\n///\n\n/// let mut pool = ThreadPool::new().expect(\"Unable to create thread pool\");\n\n///\n\n/// // We use a receiver handler of `null_receiver!()` because this instance\n\n/// // will be used purely as a client, not a server.\n\n/// pool.spawn(local_endpoint\n\n/// .clone()\n\n/// .receive_loop_arc(null_receiver!())\n\n/// .map(|_|unreachable!())\n\n/// );\n\n/// ```\n\n///\n\n/// Once the `Arc<LocalEndpint>` has been added to an execution pool, the `run_until` method\n\n/// on the pool can be used to block execution of the futures emitted by `LocalEndpoint`:\n\n///\n\n/// ```\n\n/// # use std::sync::Arc;\n\n/// # use futures::{prelude::*,executor::LocalPool,task::LocalSpawnExt};\n\n/// # use async_coap::prelude::*;\n\n/// # use async_coap::datagram::{DatagramLocalEndpoint, AllowStdUdpSocket, LoopbackSocket};\n\n/// # use async_coap::null::NullLocalEndpoint;\n\n/// #\n\n/// # // Using a NullLocalEndpoint since this is just a simple usage example.\n\n/// # let local_endpoint = Arc::new(NullLocalEndpoint);\n\n/// # let mut local_pool = LocalPool::new();\n\n/// #\n\n/// # local_pool.spawner().spawn_local(local_endpoint\n\n/// # .clone()\n\n/// # .receive_loop_arc(null_receiver!())\n\n/// # .map(|_|unreachable!())\n\n/// # );\n\n///\n\n/// let result = local_pool.run_until(\n\n/// local_endpoint.send(\n\n/// \"coap.me:5683\",\n\n/// CoapRequest::get() // This is a CoAP GET request\n\n/// .emit_any_response() // Return the first 
response we get\n\n/// )\n\n/// );\n\n///\n\n/// println!(\"result: {:?}\", result);\n\n/// ```\n\n///\n\n/// Or, more naturally, the returned futures can be used directly in `async` blocks:\n\n///\n\n/// ```\n\n/// # use std::sync::Arc;\n\n/// # use futures::{prelude::*,executor::LocalPool,task::LocalSpawnExt};\n\n/// # use async_coap::prelude::*;\n\n/// # use async_coap::datagram::{DatagramLocalEndpoint, AllowStdUdpSocket, LoopbackSocket};\n\n/// # use async_coap::null::NullLocalEndpoint;\n\n/// #\n\n/// # // Using a NullLocalEndpoint since this is just a simple usage example.\n\n/// # let local_endpoint = Arc::new(NullLocalEndpoint);\n\n/// # let mut pool = LocalPool::new();\n\n/// #\n\n/// # pool.spawner().spawn_local(local_endpoint\n\n/// # .clone()\n\n/// # .receive_loop_arc(null_receiver!())\n\n/// # .map(|_|unreachable!())\n\n/// # );\n\n/// #\n\n/// # let future =\n\n/// async move {\n\n/// let future = local_endpoint.send(\n\n/// \"coap.me:5683\",\n\n/// CoapRequest::get() // This is a CoAP GET request\n\n/// .emit_any_response() // Return the first response we get\n\n/// );\n\n///\n\n/// // Wait for the final result and print it.\n\n/// println!(\"result: {:?}\", future.await);\n\n/// }\n\n/// # ;\n\n/// #\n\n/// # pool.run_until(future);\n\n/// ```\n\n///\n\n/// # Server Usage\n\n///\n\n/// In order to serve resources for other devices to interact with, you will\n\n/// need to replace the [`null_receiver!`] we were using earlier with something\n\n/// more substantial. 
The method takes a closure as an argument, and the closure\n\n/// itself has a single argument: a borrowed [`RespondableInboundContext`].\n\n///\n\n/// For example, to have our server return a response for a request instead of\n\n/// just returning an error, we could use the following function as our receive handler:\n\n///\n\n/// ```\n\n/// use async_coap::prelude::*;\n\n/// use async_coap::{RespondableInboundContext, Error};\n\n///\n\n/// fn receive_handler<T: RespondableInboundContext>(context: &T) -> Result<(),Error> {\n\n/// context.respond(|msg_out|{\n\n/// msg_out.set_msg_code(MsgCode::SuccessContent);\n\n/// msg_out.insert_option(option::CONTENT_FORMAT, ContentFormat::TEXT_PLAIN_UTF8)?;\n\n/// msg_out.append_payload_string(\"Successfully fetched!\")?;\n\n/// Ok(())\n\n/// })?;\n\n/// Ok(())\n\n/// }\n\n/// # use std::sync::Arc;\n\n/// # use futures::{prelude::*,executor::LocalPool,task::LocalSpawnExt};\n\n/// # use async_coap::datagram::{DatagramLocalEndpoint, AllowStdUdpSocket, LoopbackSocket, LoopbackSocketAddr};\n\n/// # use async_coap::null::NullLocalEndpoint;\n\n/// # use async_coap::message::MessageRead;\n\n/// #\n\n/// # let local_endpoint = Arc::new(DatagramLocalEndpoint::new(LoopbackSocket::new()));\n\n/// # let mut pool = LocalPool::new();\n\n/// #\n\n/// # pool.spawner().spawn_local(local_endpoint.clone().receive_loop_arc(receive_handler).map(|_|unreachable!()));\n\n/// #\n\n/// # let result = pool.run_until(\n\n/// # local_endpoint.send(\n\n/// # LoopbackSocketAddr::Unicast,\n\n/// # CoapRequest::get() // This is a CoAP GET request\n\n/// # .emit_any_response() // Return the first response we get\n\n/// # )\n\n/// # );\n\n/// # println!(\"result: {:?}\", result);\n\n/// # let result = result.unwrap();\n\n/// # assert_eq!(result.msg_code(), MsgCode::SuccessContent);\n\n/// # assert_eq!(result.msg_type(), MsgType::Ack);\n\n/// ```\n\n///\n\n/// However, that's actually not super useful: it returns a successful result for\n\n/// every possible 
request: including bogus ones. Let's say that we wanted to expose a\n\n/// resource that lives at \"`/test`\" on our server, returning a [`4.04 Not Found`](MsgCode::ClientErrorNotFound)\n\n/// for every other request. That might look something like this:\n\n///\n\n/// ```\n\n/// use async_coap::prelude::*;\n\n/// use async_coap::{RespondableInboundContext, Error, LinkFormatWrite, LINK_ATTR_TITLE};\n\n/// use core::fmt::Write; // For `write!()`\n\n/// use core::borrow::Borrow;\n\n/// use option::CONTENT_FORMAT;\n\n///\n\n/// fn receive_handler<T: RespondableInboundContext>(context: &T) -> Result<(),Error> {\n\n/// let msg = context.message();\n\n/// let uri = msg.options().extract_uri()?;\n\n/// let decoded_path = uri.raw_path().unescape_uri().skip_slashes().to_cow();\n\n///\n\n/// match (msg.msg_code(), decoded_path.borrow()) {\n\n/// // Handle GET /test\n\n/// (MsgCode::MethodGet, \"test\") => context.respond(|msg_out| {\n\n/// msg_out.set_msg_code(MsgCode::SuccessContent);\n\n/// msg_out.insert_option(CONTENT_FORMAT, ContentFormat::TEXT_PLAIN_UTF8);\n\n/// write!(msg_out,\"Successfully fetched {:?}!\", uri.as_str())?;\n\n/// Ok(())\n\n/// }),\n\n///\n\n/// // Handle GET /.well-known/core, for service discovery.\n\n/// (MsgCode::MethodGet, \".well-known/core\") => context.respond(|msg_out| {\n\n/// msg_out.set_msg_code(MsgCode::SuccessContent);\n\n/// msg_out.insert_option(CONTENT_FORMAT, ContentFormat::APPLICATION_LINK_FORMAT);\n\n/// LinkFormatWrite::new(msg_out)\n\n/// .link(uri_ref!(\"/test\"))\n\n/// .attr(LINK_ATTR_TITLE, \"Test Resource\")\n\n/// .finish()?;\n\n/// Ok(())\n\n/// }),\n\n///\n\n/// // Handle unsupported methods\n\n/// (_, \"test\") | (_, \".well-known/core\") => context.respond(|msg_out| {\n\n/// msg_out.set_msg_code(MsgCode::ClientErrorMethodNotAllowed);\n\n/// write!(msg_out,\"Method \\\"{:?}\\\" Not Allowed\", msg.msg_code())?;\n\n/// Ok(())\n\n/// }),\n\n///\n\n/// // Everything else is a 4.04\n\n/// (_, _) => context.respond(|msg_out| 
{\n\n/// msg_out.set_msg_code(MsgCode::ClientErrorNotFound);\n\n/// write!(msg_out,\"{:?} Not Found\", uri.as_str())?;\n\n/// Ok(())\n\n/// }),\n\n/// }\n\n/// }\n\n/// # use std::sync::Arc;\n\n/// # use futures::{prelude::*,executor::LocalPool,task::LocalSpawnExt};\n\n/// # use async_coap::datagram::{DatagramLocalEndpoint, AllowStdUdpSocket, LoopbackSocket, LoopbackSocketAddr};\n\n/// # use async_coap::null::NullLocalEndpoint;\n\n/// # use async_coap::message::MessageRead;\n\n/// # use std::borrow::Cow;\n\n/// #\n\n/// # let local_endpoint = Arc::new(DatagramLocalEndpoint::new(LoopbackSocket::new()));\n\n/// # let mut pool = LocalPool::new();\n\n/// #\n\n/// # pool.spawner().spawn_local(local_endpoint\n\n/// # .clone()\n\n/// # .receive_loop_arc(receive_handler)\n\n/// # .map(|_|unreachable!())\n\n/// # );\n\n/// #\n\n/// # let result = pool.run_until(\n\n/// # local_endpoint.send(\n\n/// # LoopbackSocketAddr::Unicast,\n\n/// # CoapRequest::get() // This is a CoAP GET request\n\n/// # .uri_host_path(None, rel_ref!(\"test\")) // Add a path to the message\n\n/// # .emit_any_response() // Return the first response we get\n\n/// # )\n\n/// # );\n\n/// # println!(\"result: {:?}\", result);\n\n/// # let result = result.unwrap();\n\n/// # assert_eq!(result.msg_code(), MsgCode::SuccessContent);\n\n/// # assert_eq!(result.msg_type(), MsgType::Ack);\n\n/// #\n\n/// #\n\n/// # let result = pool.run_until(\n\n/// # local_endpoint.send(\n\n/// # LoopbackSocketAddr::Unicast,\n\n/// # CoapRequest::post() // This is a CoAP POST request\n\n/// # .uri_host_path(None, rel_ref!(\"test\")) // Add a path to the message\n\n/// # .emit_successful_response() // Return the first successful response we get\n\n/// # .inspect(|cx| {\n\n/// # // Inspect here since we currently can't do\n\n/// # // a detailed check in the return value.\n\n/// # assert_eq!(cx.message().msg_code(), MsgCode::ClientErrorMethodNotAllowed);\n\n/// # assert_eq!(cx.message().msg_type(), MsgType::Ack);\n\n/// # 
})\n\n/// # )\n\n/// # );\n\n/// # println!(\"result: {:?}\", result);\n\n/// # assert_eq!(result.err(), Some(Error::ClientRequestError));\n\n/// #\n\n/// # let result = pool.run_until(\n\n/// # local_endpoint.send(\n\n/// # LoopbackSocketAddr::Unicast,\n\n/// # CoapRequest::get() // This is a CoAP GET request\n\n/// # .emit_successful_response() // Return the first successful response we get\n\n/// # .uri_host_path(None, rel_ref!(\"/foobar\"))\n\n/// # .inspect(|cx| {\n\n/// # // Inspect here since we currently can't do\n\n/// # // a detailed check in the return value.\n\n/// # assert_eq!(cx.message().msg_code(), MsgCode::ClientErrorNotFound);\n\n/// # assert_eq!(cx.message().msg_type(), MsgType::Ack);\n\n/// # })\n\n/// # )\n\n/// # );\n\n/// # println!(\"result: {:?}\", result);\n\n/// # assert_eq!(result.err(), Some(Error::ResourceNotFound));\n\n/// ```\n\n///\n\npub trait LocalEndpoint: Sized {\n\n /// The `SocketAddr` type to use with this local endpoint. This is usually\n\n /// simply `std::net::SocketAddr`, but may be different in some cases (like for CoAP-SMS\n\n /// endpoints).\n\n type SocketAddr: SocketAddrExt\n\n + ToSocketAddrs<SocketAddr = Self::SocketAddr, Error = Self::SocketError>;\n\n\n\n /// The error type associated with errors generated by socket and address-lookup operations.\n\n /// Typically, this is `std::io::Error`, but it may be different if `Self::SocketAddr` isn't\n\n /// `std::net::SocketAddr`.\n\n type SocketError: core::fmt::Debug;\n\n\n\n /// The trait representing the default transmission parameters to use.\n\n type DefaultTransParams: TransParams;\n\n\n\n /// Type used by closure that is passed into `send()`, representing the context for the\n\n /// response.\n\n type InboundContext: InboundContext<SocketAddr = Self::SocketAddr>;\n\n\n\n /// Type used by closure that is passed into `receive()`, representing the context for\n", "file_path": "async-coap/src/local_endpoint.rs", "rank": 26, "score": 111785.42660817702 }, { "content": 
"/// Represents the context for processing an inbound message.\n\npub trait InboundContext: Send {\n\n /// The `SocketAddr` type from the associated `LocalEndpoint`.\n\n type SocketAddr: SocketAddrExt;\n\n\n\n /// Returns a copy of the remote address of the inbound message.\n\n fn remote_socket_addr(&self) -> Self::SocketAddr;\n\n\n\n /// Indicates if the endpoint thinks this message is a duplicate. This is used\n\n /// for non-idempotent methods (like POST) to determine if the operation should\n\n /// have real effects or if it should just go through the motions without changing\n\n /// state. Duplicates are generally only passed through when the underlying transport\n\n /// doesn't support support storing sent replies for this purpose.\n\n fn is_dupe(&self) -> bool;\n\n\n\n /// Returns a reference to a MessageRead trait to inspect the content\n\n /// of the inbound message.\n\n fn message(&self) -> &dyn MessageRead;\n\n}\n\n\n", "file_path": "async-coap/src/inbound_context.rs", "rank": 27, "score": 111767.26285535688 }, { "content": "/// A convenience trait for `Arc<>` that makes it easier to construct `ArcGuard<>` instances.\n\n///\n\n/// See [Module Documentation](index.html) for more information.\n\npub trait ArcGuardExt<RC> {\n\n /// Convenience method for constructing `ArcGuard<>` instances.\n\n ///\n\n /// See [Module Documentation](index.html) for more information.\n\n fn guard<'head, F, T>(&self, getter: F) -> ArcGuard<RC, T>\n\n where\n\n F: FnOnce(&'head RC) -> T,\n\n RC: 'head,\n\n T: 'head;\n\n}\n\n\n\nimpl<RC> ArcGuardExt<RC> for Arc<RC> {\n\n fn guard<'head, F, T>(&self, getter: F) -> ArcGuard<RC, T>\n\n where\n\n F: FnOnce(&'head RC) -> T,\n\n RC: 'head,\n\n T: 'head,\n\n {\n\n ArcGuard::new(self.clone(), getter)\n\n }\n\n}\n", "file_path": "async-coap/src/arc_guard.rs", "rank": 28, "score": 109800.80707709398 }, { "content": "/// Trait implemented by a \"socket\" that describes the underlying `SocketAddr` and socket error\n\n/// types as 
associated types.\n\npub trait DatagramSocketTypes: Unpin {\n\n /// The \"`SocketAddr`\" type used by this \"socket\". Typically [`std::net::SocketAddr`].\n\n type SocketAddr: SocketAddrExt\n\n + core::fmt::Display\n\n + core::fmt::Debug\n\n + std::string::ToString\n\n + ToSocketAddrs<SocketAddr = Self::SocketAddr, Error = Self::Error>\n\n + Send\n\n + Unpin\n\n + Copy;\n\n\n\n /// The error type for errors emitted from this socket. Typically [`std::io::Error`].\n\n type Error: std::fmt::Display + std::fmt::Debug;\n\n\n\n /// Returns the local `SocketAddr` of this \"socket\".\n\n fn local_addr(&self) -> Result<Self::SocketAddr, Self::Error>;\n\n\n\n /// Performs a blocking hostname lookup.\n\n fn lookup_host(\n\n host: &str,\n\n port: u16,\n\n ) -> Result<std::vec::IntoIter<Self::SocketAddr>, Self::Error>\n\n where\n\n Self: Sized;\n\n}\n\n\n", "file_path": "async-coap/src/datagram/async_socket.rs", "rank": 29, "score": 107935.3795820543 }, { "content": "/// Extension trait which implements additional helper methods.\n\npub trait RemoteEndpointExt: RemoteEndpoint {\n\n /// Sends an application-level ping to to one or more addresses specified by `dest`.\n\n /// The first response received causes the future to emit `Ok(())`.\n\n fn ping(&self) -> BoxFuture<'_, Result<(), Error>> {\n\n self.send(Ping::new())\n\n }\n\n\n\n /// Analogous to [`LocalEndpointExt::send_as_stream`], except using this `RemoteEndpoint` for\n\n /// the destination SocketAddr and path.\n\n fn send_as_stream<'a, R, SD>(&'a self, send_desc: SD) -> SendAsStream<'a, R>\n\n where\n\n SD: SendDesc<Self::InboundContext, R> + 'a,\n\n R: Send + 'a,\n\n {\n\n let (sender, receiver) = futures::channel::mpsc::channel::<Result<R, Error>>(10);\n\n\n\n SendAsStream {\n\n receiver,\n\n send_future: self.send(SendAsStreamDesc::new(send_desc, sender)),\n\n }\n", "file_path": "async-coap/src/remote_endpoint.rs", "rank": 30, "score": 107931.1334121429 }, { "content": "/// Extension trait for [`LocalEndpoint`] which 
implements additional helper methods.\n\npub trait LocalEndpointExt: LocalEndpoint {\n\n /// Sends a message where multiple responses are expected, returned as a [`SendAsStream`].\n\n ///\n\n /// In this version of [`LocalEndpoint::send`], the `send_desc` can return\n\n /// [`ResponseStatus::Done`] from its handler multiple times, with the results being emitted\n\n /// from the returned [`SendAsStream`].\n\n ///\n\n /// The stream can be cleanly ended by the handler eventually returning\n\n /// [`Error::ResponseTimeout`] or [`Error::Cancelled`], neither of which will be emitted\n\n /// as an error.\n\n fn send_as_stream<'a, S, R, SD>(&'a self, dest: S, send_desc: SD) -> SendAsStream<'a, R>\n\n where\n\n S: ToSocketAddrs<SocketAddr = Self::SocketAddr, Error = Self::SocketError> + 'a,\n\n SD: SendDesc<Self::InboundContext, R> + 'a,\n\n R: Send + 'a,\n\n {\n\n let (sender, receiver) = futures::channel::mpsc::channel::<Result<R, Error>>(10);\n\n\n\n SendAsStream {\n\n receiver,\n", "file_path": "async-coap/src/local_endpoint.rs", "rank": 31, "score": 107931.03451677304 }, { "content": "#[doc(hidden)]\n\npub trait NeedsEscape: Clone {\n\n fn byte_needs_escape(b: u8) -> bool {\n\n Self::char_needs_escape(b as char) || (b & 0x80) != 0\n\n }\n\n fn char_needs_escape(c: char) -> bool;\n\n fn escape_space_as_plus() -> bool {\n\n false\n\n }\n\n}\n\n\n\n/// A zero-sized implementor of [`NeedsEscape`] that escapes all reserved characters.\n\n///\n\n/// Its behavior is subject to change and is not considered stable.\n\n#[doc(hidden)]\n\n#[derive(Default, Copy, Clone, Debug)]\n\npub struct EscapeUriFull;\n\nimpl NeedsEscape for EscapeUriFull {\n\n fn char_needs_escape(c: char) -> bool {\n\n !is_char_uri_unreserved(c)\n\n }\n", "file_path": "async-coap-uri/src/escape/escape_uri.rs", "rank": 32, "score": 107926.14974522349 }, { "content": "/// Represents the context for processing an inbound request that can be responded to.\n\npub trait RespondableInboundContext: InboundContext 
{\n\n /// Indicates if the inbound request was a multicast request or not. Multicast\n\n /// requests have additional response timing requirements in order to avoid\n\n /// congestion.\n\n fn is_multicast(&self) -> bool;\n\n\n\n /// Indicates if this inbound request is from a real inbound request or if it\n\n /// is a fake request that is being generated internally to solicit a response.\n\n /// Fake requests are only generated for the `GET` method.\n\n fn is_fake(&self) -> bool;\n\n\n\n /// Responds to this inbound request using a message generated from `msg_gen`.\n\n /// The `msg_id` and `msg_token` fields will be automatically populated.\n\n /// This method will return the value returned by `msg_gen`.\n\n fn respond<F>(&self, msg_gen: F) -> Result<(), Error>\n\n where\n\n F: Fn(&mut dyn MessageWrite) -> Result<(), Error>;\n\n}\n", "file_path": "async-coap/src/inbound_context.rs", "rank": 33, "score": 107926.14974522349 }, { "content": "#[doc(hidden)]\n\npub trait TryOptionValueFrom<'a>: Sized {\n\n fn try_option_value_from(buffer: &'a [u8]) -> Option<Self>;\n\n}\n\n\n\nimpl<'a> TryOptionValueFrom<'a> for ETag {\n\n fn try_option_value_from(buffer: &'a [u8]) -> Option<Self> {\n\n if buffer.len() <= ETag::MAX_LEN {\n\n Some(ETag::new(buffer))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> TryOptionValueFrom<'a> for &'a [u8] {\n\n fn try_option_value_from(buffer: &'a [u8]) -> Option<Self> {\n\n Some(buffer)\n\n }\n\n}\n\n\n", "file_path": "async-coap/src/option/value.rs", "rank": 34, "score": 106400.84814821734 }, { "content": "/// Trait for providing `recv_from` functionality for asynchronous, datagram-based sockets.\n\n///\n\n/// The value returned on success is a tuple of the following:\n\n///\n\n/// ```\n\n/// # use std::net::SocketAddr;\n\n/// # fn ignore_this_line\n\n/// #\n\n/// (bytes_written: usize,\n\n/// remote_socket_addr: SocketAddr,\n\n/// local_socket_addr: Option<SocketAddr>)\n\n/// #\n\n/// # {} // ignore this line\n\n/// ```\n\n///\n\n/// 
`local_socket_addr` indicates the local address that the packet was sent to, and may not be\n\n/// supported. If this isn't supported, `local_socket_addr` will be set to `None`.\n\npub trait AsyncRecvFrom: DatagramSocketTypes {\n\n /// A non-blocking[^1], `poll_*` version of [`std::net::UdpSocket::recv_from`] that can\n\n /// optionally provide the destination (local) `SocketAddr`.\n\n ///\n\n /// If you need to receive a packet from within an async block, see\n\n /// [`AsyncRecvFrom::recv_from`], which returns a [`Future`][std::future::Future].\n\n ///\n\n /// [^1]: Note that while the spirit of this method intends for it to be non-blocking,\n\n /// [`AllowStdUdpSocket`] can in fact block execution depending on the state of the\n\n /// underlying [`std::net::UdpSocket`].\n\n fn poll_recv_from(\n\n self: Pin<&Self>,\n\n cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<Result<(usize, Self::SocketAddr, Option<Self::SocketAddr>), Self::Error>>;\n\n\n\n /// Returns a future that uses [`poll_recv_from`][AsyncRecvFrom::poll_recv_from].\n\n fn recv_from<'a, 'b>(&'a self, buf: &'b mut [u8]) -> RecvFromFuture<'a, 'b, Self> {\n\n RecvFromFuture {\n\n socket: self,\n\n buffer: buf,\n\n }\n\n }\n\n}\n\n\n", "file_path": "async-coap/src/datagram/async_socket.rs", "rank": 35, "score": 106159.2747689122 }, { "content": "/// Extension trait for [`AnyUriRef`] that provides methods that cannot be overridden from\n\n/// their default implementations.\n\n///\n\n/// This trait is automatically implemented for all types that implement [`AnyUriRef`].\n\npub trait AnyUriRefExt: AnyUriRef {\n\n /// Wraps this `AnyUriRef` instance in a [`UriDisplay`] object for use with formatting\n\n /// macros like `write!` and `format!`.\n\n ///\n\n /// The resulting instance will ultimately use the [`AnyUriRef::write_to_unsafe`] method\n\n /// to render the URI-reference.\n\n ///\n\n /// This method is similar to the [`display`][display-path] method on [`std::path::Path`].\n\n ///\n\n /// 
[display-path]: std::path::Path::display\n\n ///\n\n /// ## Example\n\n ///\n\n /// ```\n\n /// use async_coap_uri::prelude::*;\n\n ///\n\n /// let uri_ref = uri_ref!(\"http://example.com/\");\n\n ///\n\n /// println!(\"uri_ref = {}\", uri_ref.display());\n\n /// ```\n", "file_path": "async-coap-uri/src/any_uri_ref.rs", "rank": 36, "score": 106154.89935327673 }, { "content": "/// Trait that provides methods for joining/leaving multicast groups.\n\npub trait MulticastSocket: DatagramSocketTypes {\n\n /// The \"address\" type for this socket.\n\n ///\n\n /// Note that this is different than a `SocketAddr`, which also includes a port number.\n\n /// This is just the address.\n\n type IpAddr;\n\n\n\n /// Attempts to join the given multicast group.\n\n fn join_multicast<A>(&self, addr: A) -> Result<(), Self::Error>\n\n where\n\n A: std::convert::Into<Self::IpAddr>;\n\n\n\n /// Attempts to leave the given multicast group.\n\n fn leave_multicast<A>(&self, addr: A) -> Result<(), Self::Error>\n\n where\n\n A: std::convert::Into<Self::IpAddr>;\n\n}\n", "file_path": "async-coap/src/datagram/async_socket.rs", "rank": 37, "score": 106154.36774572995 }, { "content": "/// Trait for providing `sent_to` functionality for asynchronous, datagram-based sockets.\n\npub trait AsyncSendTo: DatagramSocketTypes {\n\n /// A non-blocking[^1], `poll_*` version of `std::net::UdpSocket::send_to`.\n\n ///\n\n /// [^1]: Note that while the spirit of this method intends for it to be non-blocking,\n\n /// [`AllowStdUdpSocket`] can block execution depending on the implementation details\n\n /// of the underlying [`std::net::UdpSocket`].\n\n fn poll_send_to<B>(\n\n self: Pin<&Self>,\n\n cx: &mut Context<'_>,\n\n buf: &[u8],\n\n addr: B,\n\n ) -> Poll<Result<usize, Self::Error>>\n\n where\n\n B: super::ToSocketAddrs<SocketAddr = Self::SocketAddr, Error = Self::Error>;\n\n\n\n /// Returns a future that uses [`AsyncSendTo::poll_send_to`].\n\n fn send_to<'a, 'b, B>(&'a self, buf: &'b [u8], addr: B) -> 
SendToFuture<'a, 'b, Self>\n\n where\n\n B: super::ToSocketAddrs<SocketAddr = Self::SocketAddr, Error = Self::Error>,\n\n {\n", "file_path": "async-coap/src/datagram/async_socket.rs", "rank": 38, "score": 106154.31879357906 }, { "content": "#[doc(hidden)]\n\npub trait TransParams: Default + Copy + Sync + Send + Unpin {\n\n fn max_outbound_packet_length(&self) -> usize {\n\n Self::MAX_OUTBOUND_PACKET_LENGTH\n\n }\n\n\n\n fn coap_max_retransmit(&self) -> u32 {\n\n Self::COAP_MAX_RETRANSMIT\n\n }\n\n\n\n fn coap_ack_timeout(&self) -> Duration {\n\n Self::COAP_ACK_TIMEOUT\n\n }\n\n\n\n fn coap_ack_random_factor(&self) -> f32 {\n\n Self::COAP_ACK_RANDOM_FACTOR\n\n }\n\n\n\n fn coap_nstart(&self) -> u32 {\n\n Self::COAP_NSTART\n\n }\n", "file_path": "async-coap/src/trans_params.rs", "rank": 39, "score": 93824.68165177229 }, { "content": "fn string_literal_from_token_stream(input: TokenStream) -> String {\n\n use syn::LitStr;\n\n\n\n if let Some(nom) = syn::parse::<LitStr>(input.clone()).ok() {\n\n return nom.value();\n\n }\n\n\n\n panic!(\"Expected string literal, got {:?}\", input);\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 40, "score": 92466.04961227752 }, { "content": "/// # Send Descriptor Trait\n\n///\n\n/// Types implementing this trait can be passed to the `send*` methods of [`LocalEndpoint`]\n\n/// and [`RemoteEndpoint`], and can define almost every aspect of how a message transaction\n\n/// is handled.\n\n///\n\n/// See the [module level documentation](index.html) for more information on typical usage\n\n/// patterns.\n\n///\n\n/// ## Internals\n\n///\n\n/// There are several methods in this trait, but three of them are critical:\n\n///\n\n/// * [`write_options`](SendDesc::write_options)\\: Defines which options are going to be\n\n/// included in the outbound message.\n\n/// * [`write_payload`](SendDesc::write_payload)\\: Defines the contents of the payload for the\n\n/// outbound message.\n\n/// * 
[`handler`](SendDesc::handler)\\: Handles inbound reply messages, as well as error conditions.\n\n///\n\npub trait SendDesc<IC, R = (), TP = StandardCoapConstants>: Send\n\nwhere\n\n IC: InboundContext,\n\n R: Send,\n\n TP: TransParams,\n\n{\n\n /// **Experimental**: Gets custom transmission parameters.\n\n fn trans_params(&self) -> Option<TP> {\n\n None\n\n }\n\n\n\n /// **Experimental**: Used for determining if the given option seen in the reply message\n\n /// is supported or not.\n\n ///\n\n /// Response messages with any options that cause this\n\n /// method to return false will be rejected.\n\n ///\n\n fn supports_option(&self, option: OptionNumber) -> bool {\n\n !option.is_critical()\n\n }\n", "file_path": "async-coap/src/send_desc/mod.rs", "rank": 41, "score": 89222.24139005573 }, { "content": "/// Combinator extension trait for Send Descriptors.\n\npub trait SendDescExt<IC, R, TP>: SendDesc<IC, R, TP> + Sized\n\nwhere\n\n IC: InboundContext,\n\n R: Send,\n\n TP: TransParams,\n\n{\n\n /// Adds zero or more instances of the option `key`, using values coming from `viter`.\n\n ///\n\n /// This method allows you to conditionally add options to a send descriptor. 
For example,\n\n /// you could convert an `Option` to an iterator (using `into_iterator()`) and pass it to\n\n /// this method: if the `Option` is `None` then no coap option will be added.\n\n fn add_option_iter<K, I>(self, key: OptionKey<K>, viter: I) -> AddOption<Self, K, I, IC>\n\n where\n\n I: IntoIterator<Item = K> + Send + Clone,\n\n K: Send + Clone,\n\n {\n\n AddOption {\n\n inner: self,\n\n key,\n\n viter,\n", "file_path": "async-coap/src/send_desc/mod.rs", "rank": 42, "score": 80539.542434861 }, { "content": "/// Calculates the encoded size of a CoAP option.\n\npub fn calc_option_size(prev_key: OptionNumber, key: OptionNumber, mut value_len: usize) -> usize {\n\n if value_len >= 269 {\n\n value_len += 2;\n\n } else if value_len >= 13 {\n\n value_len += 1;\n\n }\n\n\n\n let option_delta = (key - prev_key) as u16;\n\n\n\n if option_delta >= 269 {\n\n value_len += 3;\n\n } else if option_delta >= 13 {\n\n value_len += 2;\n\n } else {\n\n value_len += 1;\n\n }\n\n\n\n return value_len;\n\n}\n\n\n", "file_path": "async-coap/src/message/codec.rs", "rank": 43, "score": 77388.71052883167 }, { "content": "#[derive(Copy, Clone, Eq, PartialEq)]\n\nenum Error {\n\n #[allow(unused)]\n\n EncodingError,\n\n MalformedStructure,\n\n MalformedScheme,\n\n Degenerate,\n\n}\n\n\n\nimpl std::fmt::Debug for Error {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {\n\n match self {\n\n Error::EncodingError => f.write_str(\"Encoding Error\"),\n\n Error::MalformedStructure => f.write_str(\"The structure of the URI is not recognized.\"),\n\n Error::MalformedScheme => f.write_str(\"The scheme of the URI is malformed.\"),\n\n Error::Degenerate => {\n\n f.write_str(\"This relative reference could be confused with a URI.\")\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "async-coap-uri/proc-macros/src/lib.rs", "rank": 44, "score": 64168.45627724329 }, { "content": "#[test]\n\nfn test_uri() {\n\n let _ = uri!(\"https://www.example.com/\");\n\n}\n\n\n", 
"file_path": "async-coap-uri/tests/macros.rs", "rank": 45, "score": 61766.357465573245 }, { "content": "#[test]\n\nfn uri_regex() {\n\n {\n\n let captures = RFC3986_APPENDIX_B\n\n .captures(\"http://www.ics.uci.edu/pub/ietf/uri/#Related\")\n\n .expect(\"Should have matched regex\");\n\n assert_eq!(\"http:\", &captures[1]);\n\n assert_eq!(\"http\", &captures[2]);\n\n assert_eq!(\"//www.ics.uci.edu\", &captures[3]);\n\n assert_eq!(\"www.ics.uci.edu\", &captures[4]);\n\n assert_eq!(\"/pub/ietf/uri/\", &captures[5]);\n\n assert_eq!(None, captures.get(6));\n\n assert_eq!(None, captures.get(7));\n\n assert_eq!(\"#Related\", &captures[8]);\n\n assert_eq!(\"Related\", &captures[9]);\n\n }\n\n {\n\n let captures = RFC3986_APPENDIX_B\n\n .captures(\"coap+sms://username:[email protected]:1234?query&d=3#frag\")\n\n .expect(\"Should have matched regex\");\n\n assert_eq!(\"coap+sms:\", &captures[1]);\n", "file_path": "async-coap-uri/src/test.rs", "rank": 46, "score": 61766.357465573245 }, { "content": "#[test]\n\nfn escaped_starts_with_1() {\n\n let s = \"bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on\";\n\n assert_eq!(\n\n s.unescape_uri().starts_with(\"blåbær///syltetøy\"),\n\n Some(\"bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y\".len())\n\n );\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 47, "score": 60668.152738508965 }, { "content": "#[test]\n\nfn test_rel_ref() {\n\n let _ = rel_ref!(\"a/b/c?q=foobar#frag\");\n\n}\n\n\n", "file_path": "async-coap-uri/tests/macros.rs", "rank": 48, "score": 60668.152738508965 }, { "content": "/// Helper function for implementing option insertion.\n\n/// Return value is a tuple of several fields:\n\n///\n\n/// * `split_index` (`usize`) The index where the new option should be inserted.\n\n/// * `prev_option_key` (`OptionNumber`) The option number of the option immediately before the split.\n\n/// * `next_key` (`OptionNumber`) The option number of the option immediately after the split.\n\n/// * `next_value_len` (`usize`) The 
length of the value of the option immediately after the split.\n\n/// * `next_option_size` (`usize`) The length of the entire option immediately after the split.\n\n///\n\nfn insert_split_helper(\n\n buffer: &[u8],\n\n key: OptionNumber,\n\n) -> (usize, OptionNumber, OptionNumber, usize, usize) {\n\n // This is the key for the option immediately prior to\n\n // the option we are adding.\n\n let mut prev_option_key = OptionNumber(0);\n\n\n\n // This marks at what index we will split the two halves.\n\n let mut split_index;\n\n\n\n let mut iter = OptionIterator::new(buffer);\n\n\n\n loop {\n\n split_index = iter.as_slice().as_ptr() as usize - buffer.as_ptr() as usize;\n\n\n\n let (next_key, next_value) = iter\n\n .next()\n\n .expect(&format!(\n\n \"Unexpected end of options (prev: {}, iter: {:?})\",\n", "file_path": "async-coap/src/message/codec.rs", "rank": 49, "score": 60668.152738508965 }, { "content": "#[test]\n\nfn test_uri_ref() {\n\n let _ = uri_ref!(\"a/b/c?q=foobar#frag\");\n\n}\n", "file_path": "async-coap-uri/tests/macros.rs", "rank": 50, "score": 60668.152738508965 }, { "content": "#[test]\n\nfn escaped_starts_with_3() {\n\n let s = \"/1/2/3/\";\n\n assert_eq!(s.unescape_uri().starts_with(\"/1/\"), Some(3));\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 51, "score": 60668.152738508965 }, { "content": "#[test]\n\nfn escaped_starts_with_2() {\n\n let s = \"/bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on\";\n\n assert_eq!(s.unescape_uri().starts_with(\"blåbær///syltetøy\"), None);\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 52, "score": 60668.152738508965 }, { "content": "#[test]\n\nfn escaped_starts_with_4() {\n\n let s = \"/1/\";\n\n assert_eq!(s.unescape_uri().starts_with(\"/1/\"), Some(3));\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 53, "score": 60668.152738508965 }, { "content": "#[test]\n\nfn escaped_starts_with_0() {\n\n let s = 
\"bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on\";\n\n assert_eq!(s.unescape_uri().starts_with(\"blåbær/%2F/syltetøy\"), None);\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 54, "score": 60668.152738508965 }, { "content": "#[test]\n\nfn unescape_uri_cow_2() {\n\n let s = \"needs%20unescaping\";\n\n let cow = s.unescape_uri().to_cow();\n\n\n\n assert_ne!(cow, s);\n\n assert_eq!(cow, \"needs unescaping\");\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 55, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn escaped_path_starts_with_4() {\n\n let s = \"/1/\";\n\n assert_eq!(s.unescape_uri().skip_slashes().starts_with(\"/1/\"), Some(3));\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 56, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn escaped_path_starts_with_2() {\n\n let s = \"/bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on\";\n\n assert_eq!(\n\n s.unescape_uri()\n\n .skip_slashes()\n\n .starts_with(\"blåbær/%2F/syltetøy\"),\n\n None\n\n );\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 57, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn escape_uri_cow_2() {\n\n let s = \"needs escaping\";\n\n let cow = s.escape_uri().to_cow();\n\n\n\n assert_ne!(cow, s);\n\n assert_eq!(cow, \"needs%20escaping\");\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 58, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn escape_uri_cow_1() {\n\n let s = \"needs-no-escaping\";\n\n let cow = s.escape_uri().to_cow();\n\n\n\n assert_eq!(cow, s);\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 59, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn unescape_uri_cow_1() {\n\n let s = \"needs-no-unescaping\";\n\n let cow = s.unescape_uri().to_cow();\n\n\n\n assert_eq!(cow, s);\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 60, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn 
escaped_path_starts_with_3() {\n\n let s = \"/1/2/3/\";\n\n assert_eq!(s.unescape_uri().skip_slashes().starts_with(\"/1/\"), Some(3));\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 61, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn escaped_path_starts_with_1() {\n\n let s = \"bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y/and/on/and/on\";\n\n assert_eq!(\n\n s.unescape_uri()\n\n .skip_slashes()\n\n .starts_with(\"blåbær/%2F/syltetøy\"),\n\n Some(\"bl%C3%A5b%C3%A6r/%2F/syltet%C3%B8y\".len())\n\n );\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 62, "score": 59629.22092698778 }, { "content": "#[test]\n\nfn unescape_uri_path_cow_1() {\n\n let s = \"needs/no/unescaping\";\n\n let cow = s.unescape_uri().skip_slashes().to_cow();\n\n\n\n assert_eq!(cow, s);\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 63, "score": 58644.88946022018 }, { "content": "#[test]\n\nfn try_unescape_uri_cow_1() {\n\n let s = \"needs-no-unescaping\";\n\n let cow = s.unescape_uri().try_to_cow();\n\n\n\n assert_eq!(cow, Ok(Cow::from(s)));\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 64, "score": 58644.88946022018 }, { "content": "#[test]\n\nfn try_unescape_uri_cow_2() {\n\n let s = \"needs%20unescaping\";\n\n let cow = s.unescape_uri().try_to_cow();\n\n\n\n assert_ne!(cow, Ok(Cow::from(s)));\n\n assert_eq!(cow, Ok(Cow::from(\"needs unescaping\")));\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 65, "score": 58644.88946022018 }, { "content": "#[test]\n\nfn try_unescape_uri_cow_3() {\n\n let s = \"bad%10escaping\";\n\n let cow = s.unescape_uri().try_to_cow();\n\n\n\n assert_eq!(cow, Err(6));\n\n}\n\n\n\nmacro_rules! 
test_escape_unescape {\n\n ( $NAME:ident, $UNESCAPED:expr, $ESCAPED:expr ) => {\n\n #[test]\n\n fn $NAME() {\n\n assert_eq!(\n\n &$UNESCAPED.escape_uri().to_string(),\n\n $ESCAPED,\n\n \"Failed on escape_uri().to_string()\"\n\n );\n\n assert_eq!(\n\n &$ESCAPED.unescape_uri().to_string(),\n\n $UNESCAPED,\n\n \"Failed on unescape_uri().to_string()\"\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 66, "score": 58644.88946022018 }, { "content": "#[test]\n\nfn unescape_uri_path_cow_2() {\n\n let s = \"this/%20does%20/need%2Funescaping\";\n\n let cow = s.unescape_uri().skip_slashes().to_cow();\n\n\n\n assert_ne!(cow, s);\n\n assert_eq!(cow, \"this/ does /need%2Funescaping\");\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/test.rs", "rank": 67, "score": 58644.88946022018 }, { "content": "fn is_char_uri_pchar(c: char) -> bool {\n\n is_char_uri_unreserved(c) || is_char_uri_sub_delim(c) || c == ':' || c == '@'\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/escape_uri.rs", "rank": 68, "score": 50680.18732067075 }, { "content": "fn is_char_uri_unreserved(c: char) -> bool {\n\n c.is_ascii_alphanumeric() || c == '-' || c == '.' || c == '_' || c == '~'\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/escape_uri.rs", "rank": 69, "score": 50680.18732067075 }, { "content": "fn is_char_uri_fragment(c: char) -> bool {\n\n is_char_uri_pchar(c) || c == '/' || c == '?' 
|| c == '#'\n\n}\n\n\n\n#[derive(Debug, Eq, PartialEq, Copy, Clone)]\n\npub(super) enum EscapeUriState {\n\n Normal,\n\n OutputHighNibble(u8),\n\n OutputLowNibble(u8),\n\n}\n\n\n\n/// An internal, unstable trait that is used to adjust the behavior of [`EscapeUri`].\n\n///\n\n/// It is subject to change and is not considered stable.\n", "file_path": "async-coap-uri/src/escape/escape_uri.rs", "rank": 70, "score": 50680.18732067075 }, { "content": "fn is_char_uri_quote(c: char) -> bool {\n\n c != '+' && (is_char_uri_pchar(c) || c == '/' || c == '?')\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/escape_uri.rs", "rank": 71, "score": 50680.18732067075 }, { "content": "fn is_char_uri_sub_delim(c: char) -> bool {\n\n c == '!'\n\n || c == '$'\n\n || c == '&'\n\n || c == '\\''\n\n || c == '('\n\n || c == ')'\n\n || c == '*'\n\n || c == '+'\n\n || c == ','\n\n || c == ';'\n\n || c == '='\n\n}\n\n\n", "file_path": "async-coap-uri/src/escape/escape_uri.rs", "rank": 72, "score": 49836.11703993633 }, { "content": "\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {\n\n <Self as Debug>::fmt(self, f)\n\n }\n\n}\n\n\n\nimpl Default for Error {\n\n fn default() -> Self {\n\n Error::Unspecified\n\n }\n\n}\n\n\n\nimpl Extend<Result<(), Error>> for Error {\n\n fn extend<T: IntoIterator<Item = Result<(), Error>>>(&mut self, iter: T) {\n\n if let Some(Err(err)) = iter.into_iter().next() {\n\n *self = err;\n\n }\n\n }\n\n}\n", "file_path": "async-coap/src/error.rs", "rank": 73, "score": 43494.08220347607 }, { "content": "}\n\n\n\n#[cfg(feature = \"std\")]\n\nimpl std::convert::From<std::io::Error> for Error {\n\n fn from(_: std::io::Error) -> Self {\n\n Error::IOError\n\n }\n\n}\n\n\n\nimpl std::convert::From<Error> for core::fmt::Error {\n\n fn from(_: Error) -> Self {\n\n core::fmt::Error\n\n }\n\n}\n\n\n\nimpl From<std::fmt::Error> for crate::Error {\n\n fn from(_err: std::fmt::Error) -> Self {\n\n Error::OutOfSpace\n\n 
}\n\n}\n", "file_path": "async-coap/src/error.rs", "rank": 74, "score": 43489.657188207835 }, { "content": "// Copyright 2019 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// https://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\nuse std::fmt::{Debug, Display, Formatter};\n\n\n\n/// Type for errors encountered while sending or receiving CoAP requests and responses.\n\n#[derive(Debug, Eq, PartialEq, Hash, Copy, Clone)]\n\npub enum Error {\n", "file_path": "async-coap/src/error.rs", "rank": 75, "score": 43488.87245181893 }, { "content": " /// One or more of the supplied arguments are not valid for the given operation.\n\n InvalidArgument,\n\n\n\n /// There is not enough space in the given buffer to complete the operation.\n\n OutOfSpace,\n\n\n\n /// An error was encountered while attempting to parse the data.\n\n ParseFailure,\n\n\n\n /// Operation timed out waiting for a response.\n\n ResponseTimeout,\n\n\n\n /// The response was well-formed, but not appropriate for the given request.\n\n BadResponse,\n\n\n\n /// The [message code][async-coap::message::MsgCode] was not recognized by this\n\n /// version of rust-async-coap.\n\n UnknownMessageCode,\n\n\n\n /// A critical option present in the message was not supported.\n", "file_path": "async-coap/src/error.rs", "rank": 76, "score": 43486.44579841163 }, { "content": " /// The response indicated that the request was forbidden.\n\n Forbidden,\n\n\n\n /// The response indicated an unspecified client error.\n\n 
ClientRequestError,\n\n\n\n /// The response indicated an unspecified server error.\n\n ServerError,\n\n\n\n /// The transaction was reset.\n\n Reset,\n\n\n\n /// More than one instance of an option marked as non-repeatable was encountered.\n\n OptionNotRepeatable,\n\n\n\n /// The given URI scheme is not supported by the associated local endpoint.\n\n UnsupportedUriScheme,\n\n\n\n /// An unspecified error has occurred.\n\n Unspecified,\n", "file_path": "async-coap/src/error.rs", "rank": 77, "score": 43483.80817542839 }, { "content": " UnhandledCriticalOption,\n\n\n\n /// An I/O error occurred while performing this operation.\n\n IOError,\n\n\n\n /// This operation has been cancelled.\n\n Cancelled,\n\n\n\n /// Unable to look up the given host because it was not found.\n\n HostNotFound,\n\n\n\n /// Unable to look up the given host for an unspecified reason.\n\n HostLookupFailure,\n\n\n\n /// The response indicated that the given resource was not found.\n\n ResourceNotFound,\n\n\n\n /// The response indicated that the request was unauthorized.\n\n Unauthorized,\n\n\n", "file_path": "async-coap/src/error.rs", "rank": 78, "score": 43482.84133779072 }, { "content": " self.len as usize\n\n }\n\n\n\n /// Returns true if the length of this token is zero.\n\n pub fn is_empty(&self) -> bool {\n\n self.len == 0\n\n }\n\n\n\n /// Returns a byte slice containing this token.\n\n pub fn as_bytes(&self) -> &[u8] {\n\n &self.bytes[..self.len as usize]\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for MsgToken {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n for b in self.as_bytes() {\n\n write!(f, \"{:02X}\", b)?;\n\n }\n\n Ok(())\n", "file_path": "async-coap/src/message/token.rs", "rank": 79, "score": 42203.33261977065 }, { "content": "#[derive(Debug, Eq, PartialEq, Hash, Copy, Clone, Ord, PartialOrd)]\n\npub struct MsgToken {\n\n len: u8,\n\n bytes: [u8; 8],\n\n}\n\n\n\nimpl MsgToken {\n\n /// Constant representing an empty token.\n\n pub const EMPTY: 
MsgToken = MsgToken {\n\n len: 0u8,\n\n bytes: [0; 8],\n\n };\n\n\n\n /// Creates a new token from the given byte slice.\n\n pub fn new(x: &[u8]) -> MsgToken {\n\n MsgToken::from(x)\n\n }\n\n\n\n /// Returns the length of this token.\n\n pub fn len(&self) -> usize {\n", "file_path": "async-coap/src/message/token.rs", "rank": 80, "score": 42202.90826858462 }, { "content": " }\n\n}\n\n\n\nimpl Default for MsgToken {\n\n fn default() -> Self {\n\n MsgToken::EMPTY\n\n }\n\n}\n\n\n\nimpl Deref for MsgToken {\n\n type Target = [u8];\n\n\n\n fn deref(&self) -> &Self::Target {\n\n self.as_bytes()\n\n }\n\n}\n\n\n\nimpl core::cmp::PartialEq<[u8]> for MsgToken {\n\n fn eq(&self, other: &[u8]) -> bool {\n\n self.as_bytes() == other\n", "file_path": "async-coap/src/message/token.rs", "rank": 81, "score": 42202.3734733396 }, { "content": "impl core::convert::From<u16> for MsgToken {\n\n fn from(x: u16) -> Self {\n\n core::convert::Into::into(x as u32)\n\n }\n\n}\n\n\n\nimpl core::convert::From<&[u8]> for MsgToken {\n\n // Note: this will panic if x is too big.\n\n fn from(x: &[u8]) -> Self {\n\n let mut bytes = [0u8; 8];\n\n let len = x.len();\n\n bytes[..len].copy_from_slice(x);\n\n MsgToken {\n\n len: len as u8,\n\n bytes,\n\n }\n\n }\n\n}\n", "file_path": "async-coap/src/message/token.rs", "rank": 82, "score": 42201.94051770604 }, { "content": " }\n\n}\n\n\n\nimpl core::convert::From<u32> for MsgToken {\n\n fn from(x: u32) -> Self {\n\n let mut bytes = [0u8; 8];\n\n let len = encode_u32(x, &mut bytes).len();\n\n MsgToken {\n\n len: len as u8,\n\n bytes,\n\n }\n\n }\n\n}\n\n\n\nimpl core::convert::From<i32> for MsgToken {\n\n fn from(x: i32) -> Self {\n\n core::convert::Into::into(x as u32)\n\n }\n\n}\n\n\n", "file_path": "async-coap/src/message/token.rs", "rank": 83, "score": 42201.38365766956 }, { "content": "// Copyright 2019 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with 
the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// https://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\nuse super::util::encode_u32;\n\nuse core::convert::From;\n\nuse core::ops::Deref;\n\n\n\n/// Type for holding the value of a CoAP message token.\n", "file_path": "async-coap/src/message/token.rs", "rank": 84, "score": 42197.13681864258 }, { "content": "\n\n/// URI parse error type.\n\n///\n\n/// This type indicates the details of an error that occurs while parsing a URI.\n\n#[derive(Debug, Clone, Eq, PartialEq, Hash)]\n\npub struct ParseError {\n\n desc: &'static str,\n\n span: Option<Range<usize>>,\n\n}\n\n\n\nimpl ParseError {\n\n /// Constructor for URI parse errors.\n\n pub fn new(desc: &'static str, span: Option<Range<usize>>) -> ParseError {\n\n ParseError { desc, span }\n\n }\n\n\n\n /// The location in the input string of the error. 
Optional.\n\n pub fn span(&self) -> Option<Range<usize>> {\n\n self.span.clone()\n\n }\n\n\n\n /// A debugging description of the error.\n\n pub fn desc(&self) -> &'static str {\n\n self.desc\n\n }\n\n}\n", "file_path": "async-coap-uri/src/error.rs", "rank": 85, "score": 42178.443064551735 }, { "content": "/// and a few others.\n\n///\n\n/// [`AnyUriRef::write_resolved`]: async-coap-uri::AnyUriRef::write_resolved\n\n/// [`AnyUriRef::resolved`]: async-coap-uri::AnyUriRef::resolved\n\n#[derive(Debug, Eq, PartialEq, Hash, Clone, Copy)]\n\npub enum ResolveError {\n\n /// The URI-reference being given as a base cannot be used as a base for the given\n\n /// target URI-reference.\n\n CannotBeABase,\n\n\n\n /// Unable to write to the given [`core::fmt::Write`] instance.\n\n WriteFailure,\n\n}\n\n\n\n/// Transparent conversions from [`core::fmt::Error`] to [`ResolveError`].\n\nimpl From<core::fmt::Error> for ResolveError {\n\n fn from(_: core::fmt::Error) -> Self {\n\n ResolveError::WriteFailure\n\n }\n\n}\n", "file_path": "async-coap-uri/src/error.rs", "rank": 86, "score": 42177.05974346011 }, { "content": "// Copyright 2019 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// https://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\nuse std::ops::Range;\n\n\n\n/// Error type for resolving a target URI against a base URI.\n\n///\n\n/// Emitted by [`AnyUriRef::write_resolved`], [`AnyUriRef::resolved`],\n", "file_path": "async-coap-uri/src/error.rs", "rank": 87, "score": 
42169.89095603103 }, { "content": " 4 => Some(MsgCodeClass::ClientError),\n\n 5 => Some(MsgCodeClass::ServerError),\n\n 7 => Some(MsgCodeClass::Signal),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Returns true if the given message code is in this message code class.\n\n pub fn contains(self, code: MsgCode) -> bool {\n\n let code_u8 = code as u8;\n\n\n\n code_u8 != 0 && (code_u8 >> 5) == self as u8\n\n }\n\n}\n\n\n\n/// Helper function\n\nconst fn calc_code(class: u8, detail: u8) -> isize {\n\n (((class & 0x7) << 5) + detail) as isize\n\n}\n\n\n", "file_path": "async-coap/src/message/msg_code.rs", "rank": 88, "score": 40945.0105418045 }, { "content": " _ => None,\n\n }\n\n }\n\n\n\n /// Returns an approximation of this message code as an HTTP status code.\n\n pub fn to_http_code(self) -> u16 {\n\n ((self as u8) >> 5) as u16 * 100 + (self as u8 as u16) & 0b11111\n\n }\n\n\n\n /// Returns true if this is the empty code.\n\n pub fn is_empty(self) -> bool {\n\n self as u8 == 0\n\n }\n\n\n\n /// Returns true if message code is a method.\n\n pub fn is_method(self) -> bool {\n\n MsgCodeClass::Method.contains(self)\n\n }\n\n\n\n /// Returns true if message code is a client error.\n", "file_path": "async-coap/src/message/msg_code.rs", "rank": 89, "score": 40944.6574497293 }, { "content": " pub fn is_signal(self) -> bool {\n\n MsgCodeClass::Signal.contains(self)\n\n }\n\n}\n\n\n\nimpl Default for MsgCode {\n\n fn default() -> Self {\n\n MsgCode::Empty\n\n }\n\n}\n\n\n\nimpl core::convert::From<MsgCode> for u8 {\n\n fn from(code: MsgCode) -> Self {\n\n code as u8\n\n }\n\n}\n\n\n\nimpl core::convert::From<MsgCode> for u16 {\n\n fn from(code: MsgCode) -> Self {\n\n code as u16\n\n }\n\n}\n\n\n\nimpl core::convert::From<MsgCode> for u32 {\n\n fn from(code: MsgCode) -> Self {\n\n code as u32\n\n }\n\n}\n", "file_path": "async-coap/src/message/msg_code.rs", "rank": 90, "score": 40944.34622717227 }, { "content": "\n\n /// Class for successful responses\n\n Success = 2,\n\n\n\n /// Class 
for client error responses\n\n ClientError = 4,\n\n\n\n /// Class for server error responses\n\n ServerError = 5,\n\n\n\n /// Class for in-band signaling\n\n Signal = 7,\n\n}\n\n\n\nimpl MsgCodeClass {\n\n /// Tries to calculate the message code class from the given message code.\n\n pub fn try_from(x: u8) -> Option<MsgCodeClass> {\n\n match x {\n\n 0 => Some(MsgCodeClass::Method),\n\n 2 => Some(MsgCodeClass::Success),\n", "file_path": "async-coap/src/message/msg_code.rs", "rank": 91, "score": 40943.06087348138 }, { "content": " pub fn is_client_error(self) -> bool {\n\n MsgCodeClass::ClientError.contains(self)\n\n }\n\n\n\n /// Returns true if message code is a server error.\n\n pub fn is_server_error(self) -> bool {\n\n MsgCodeClass::ServerError.contains(self)\n\n }\n\n\n\n /// Returns true if message code is any sort of error.\n\n pub fn is_error(self) -> bool {\n\n self.is_client_error() || self.is_server_error()\n\n }\n\n\n\n /// Returns true if message code indicates success.\n\n pub fn is_success(self) -> bool {\n\n MsgCodeClass::Success.contains(self)\n\n }\n\n\n\n /// Returns true if message code is an in-band signal.\n", "file_path": "async-coap/src/message/msg_code.rs", "rank": 92, "score": 40941.80347186074 }, { "content": " SignalCsm = 0xE1,\n\n\n\n /// CoAP PING in-band signal.\n\n SignalPing = 0xE2,\n\n\n\n /// CoAP PONG in-band signal.\n\n SignalPong = 0xE3,\n\n\n\n /// CoAP RELEASE in-band signal.\n\n SignalRelease = 0xE4,\n\n\n\n /// CoAP ABORT in-band signal.\n\n SignalAbort = 0xE5,\n\n}\n\n\n\nimpl MsgCode {\n\n /// Tries to convert the given `u8` into a `MsgCode`. 
If the given code isn't recognized,\n\n /// this method will return `None`.\n\n pub fn try_from(x: u8) -> Option<MsgCode> {\n\n use MsgCode::*;\n", "file_path": "async-coap/src/message/msg_code.rs", "rank": 93, "score": 40940.99238650092 }, { "content": "/// Enum representing a CoAP message code.\n\n#[derive(Debug, Copy, Eq, PartialEq, Clone)]\n\npub enum MsgCode {\n\n /// Empty message code. Only used for ping requests, resets, and empty acknowledgements.\n\n Empty = 0x00,\n\n\n\n /// CoAP GET method.\n\n MethodGet = 0x01,\n\n\n\n /// CoAP POST method.\n\n MethodPost = 0x02,\n\n\n\n /// CoAP PUT method.\n\n MethodPut = 0x03,\n\n\n\n /// CoAP DELETE method.\n\n MethodDelete = 0x04,\n\n\n\n /// CoAP FETCH method.\n\n MethodFetch = 0x05,\n", "file_path": "async-coap/src/message/msg_code.rs", "rank": 94, "score": 40935.842157733 }, { "content": "// Copyright 2019 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// https://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n//\n\n\n\n/// Enum representing the *class* of a CoAP message code.\n\n#[derive(Debug, Copy, Eq, PartialEq, Clone)]\n\npub enum MsgCodeClass {\n\n /// Class for methods\n\n Method = 0,\n", "file_path": "async-coap/src/message/msg_code.rs", "rank": 95, "score": 40934.79014695095 }, { "content": " /// CoAP METHOD_NOT_ALLOWED client error.\n\n ClientErrorMethodNotAllowed = 0x85,\n\n\n\n /// CoAP NOT_ACCEPTABLE client error.\n\n ClientErrorNotAcceptable = 0x86,\n\n\n\n /// CoAP REQUEST_ENTITY_INCOMPLETE client 
error.\n\n ClientErrorRequestEntityIncomplete = 0x88,\n\n\n\n /// CoAP PRECONDITION_FAILED client error.\n\n ClientErrorPreconditionFailed = 0x8C,\n\n\n\n /// CoAP REQUEST_ENTITY_TOO_LARGE client error.\n\n ClientErrorRequestEntityTooLarge = 0x8D,\n\n\n\n /// CoAP UNSUPPORTED_MEDIA_TYPE client error.\n\n ClientErrorUnsupportedMediaType = 0x8F,\n\n\n\n /// RFC8516 \"Too Many Requests\" Response Code for the Constrained Application Protocol\n\n ClientErrorTooManyRequests = calc_code(4, 29),\n", "file_path": "async-coap/src/message/msg_code.rs", "rank": 96, "score": 40934.72473588424 }, { "content": " SuccessContent = 0x45,\n\n\n\n /// CoAP CONTINUE success code.\n\n SuccessContinue = 0x5F,\n\n\n\n /// CoAP BAD_REQUEST client error.\n\n ClientErrorBadRequest = 0x80,\n\n\n\n /// CoAP UNAUTHORIZED client error.\n\n ClientErrorUnauthorized = 0x81,\n\n\n\n /// CoAP BAD_OPTION client error.\n\n ClientErrorBadOption = 0x82,\n\n\n\n /// CoAP FORBIDDEN client error.\n\n ClientErrorForbidden = 0x83,\n\n\n\n /// CoAP NOT_FOUND client error.\n\n ClientErrorNotFound = 0x84,\n\n\n", "file_path": "async-coap/src/message/msg_code.rs", "rank": 97, "score": 40934.016471542825 }, { "content": "\n\n /// CoAP INTERNAL_SERVER_ERROR server error.\n\n ServerErrorInternalServerError = 0xA0,\n\n\n\n /// CoAP NOT_IMPLEMENTED server error.\n\n ServerErrorNotImplemented = 0xA1,\n\n\n\n /// CoAP BAD_GATEWAY server error.\n\n ServerErrorBadGateway = 0xA2,\n\n\n\n /// CoAP SERVICE_UNAVAILABLE server error.\n\n ServerErrorServiceUnavailable = 0xA3,\n\n\n\n /// CoAP GATEWAY_TIMEOUT server error.\n\n ServerErrorGatewayTimeout = 0xA4,\n\n\n\n /// CoAP PROXYING_NOT_SUPPORTED server error.\n\n ServerErrorProxyingNotSupported = 0xA5,\n\n\n\n /// CoAP CSM in-band signal.\n", "file_path": "async-coap/src/message/msg_code.rs", "rank": 98, "score": 40930.95587347584 }, { "content": " 0x86 => Some(ClientErrorNotAcceptable),\n\n 0x88 => Some(ClientErrorRequestEntityIncomplete),\n\n 0x8C => 
Some(ClientErrorPreconditionFailed),\n\n 0x8D => Some(ClientErrorRequestEntityTooLarge),\n\n 0x8F => Some(ClientErrorUnsupportedMediaType),\n\n 0x9D => Some(ClientErrorTooManyRequests),\n\n\n\n 0xA0 => Some(ServerErrorInternalServerError),\n\n 0xA1 => Some(ServerErrorNotImplemented),\n\n 0xA2 => Some(ServerErrorBadGateway),\n\n 0xA3 => Some(ServerErrorServiceUnavailable),\n\n 0xA4 => Some(ServerErrorGatewayTimeout),\n\n 0xA5 => Some(ServerErrorProxyingNotSupported),\n\n\n\n 0xE1 => Some(SignalCsm),\n\n 0xE2 => Some(SignalPing),\n\n 0xE3 => Some(SignalPong),\n\n 0xE4 => Some(SignalRelease),\n\n 0xE5 => Some(SignalAbort),\n\n\n", "file_path": "async-coap/src/message/msg_code.rs", "rank": 99, "score": 40930.857750541814 } ]
Rust
src/client.rs
shichaoyuan/mini-redis
cefca5377af54520904c55764d16fc7c0a291902
use crate::cmd::{Get, Publish, Set, Subscribe, Unsubscribe}; use crate::{Connection, Frame}; use async_stream::try_stream; use bytes::Bytes; use std::io::{Error, ErrorKind}; use std::time::Duration; use tokio::net::{TcpStream, ToSocketAddrs}; use tokio::stream::Stream; use tracing::{debug, instrument}; pub struct Client { connection: Connection, } pub struct Subscriber { client: Client, subscribed_channels: Vec<String>, } #[derive(Debug, Clone)] pub struct Message { pub channel: String, pub content: Bytes, } pub async fn connect<T: ToSocketAddrs>(addr: T) -> crate::Result<Client> { let socket = TcpStream::connect(addr).await?; let connection = Connection::new(socket); Ok(Client { connection }) } impl Client { #[instrument(skip(self))] pub async fn get(&mut self, key: &str) -> crate::Result<Option<Bytes>> { let frame = Get::new(key).into_frame(); debug!(request = ?frame); self.connection.write_frame(&frame).await?; match self.read_response().await? { Frame::Simple(value) => Ok(Some(value.into())), Frame::Bulk(value) => Ok(Some(value)), Frame::Null => Ok(None), frame => Err(frame.to_error()), } } #[instrument(skip(self))] pub async fn set(&mut self, key: &str, value: Bytes) -> crate::Result<()> { self.set_cmd(Set::new(key, value, None)).await } #[instrument(skip(self))] pub async fn set_expires( &mut self, key: &str, value: Bytes, expiration: Duration, ) -> crate::Result<()> { self.set_cmd(Set::new(key, value, Some(expiration))).await } async fn set_cmd(&mut self, cmd: Set) -> crate::Result<()> { let frame = cmd.into_frame(); debug!(request = ?frame); self.connection.write_frame(&frame).await?; match self.read_response().await? 
{ Frame::Simple(response) if response == "OK" => Ok(()), frame => Err(frame.to_error()), } } #[instrument(skip(self))] pub async fn publish(&mut self, channel: &str, message: Bytes) -> crate::Result<u64> { let frame = Publish::new(channel, message).into_frame(); debug!(request = ?frame); self.connection.write_frame(&frame).await?; match self.read_response().await? { Frame::Integer(response) => Ok(response), frame => Err(frame.to_error()), } } #[instrument(skip(self))] pub async fn subscribe(mut self, channels: Vec<String>) -> crate::Result<Subscriber> { self.subscribe_cmd(&channels).await?; Ok(Subscriber { client: self, subscribed_channels: channels, }) } async fn subscribe_cmd(&mut self, channels: &[String]) -> crate::Result<()> { let frame = Subscribe::new(&channels).into_frame(); debug!(request = ?frame); self.connection.write_frame(&frame).await?; for channel in channels { let response = self.read_response().await?; match response { Frame::Array(ref frame) => match frame.as_slice() { [subscribe, schannel, ..] if *subscribe == "subscribe" && *schannel == channel => {} _ => return Err(response.to_error()), }, frame => return Err(frame.to_error()), }; } Ok(()) } async fn read_response(&mut self) -> crate::Result<Frame> { let response = self.connection.read_frame().await?; debug!(?response); match response { Some(Frame::Error(msg)) => Err(msg.into()), Some(frame) => Ok(frame), None => { let err = Error::new(ErrorKind::ConnectionReset, "connection reset by server"); Err(err.into()) } } } } impl Subscriber { pub fn get_subscribed(&self) -> &[String] { &self.subscribed_channels } pub async fn next_message(&mut self) -> crate::Result<Option<Message>> { match self.client.connection.read_frame().await? 
{ Some(mframe) => { debug!(?mframe); match mframe { Frame::Array(ref frame) => match frame.as_slice() { [message, channel, content] if *message == "message" => Ok(Some(Message { channel: channel.to_string(), content: Bytes::from(content.to_string()), })), _ => Err(mframe.to_error()), }, frame => Err(frame.to_error()), } } None => Ok(None), } } pub fn into_stream(mut self) -> impl Stream<Item = crate::Result<Message>> { try_stream! { while let Some(message) = self.next_message().await? { yield message; } } } #[instrument(skip(self))] pub async fn subscribe(&mut self, channels: &[String]) -> crate::Result<()> { self.client.subscribe_cmd(channels).await?; self.subscribed_channels .extend(channels.iter().map(Clone::clone)); Ok(()) } #[instrument(skip(self))] pub async fn unsubscribe(&mut self, channels: &[String]) -> crate::Result<()> { let frame = Unsubscribe::new(&channels).into_frame(); debug!(request = ?frame); self.client.connection.write_frame(&frame).await?; let num = if channels.is_empty() { self.subscribed_channels.len() } else { channels.len() }; for _ in 0..num { let response = self.client.read_response().await?; match response { Frame::Array(ref frame) => match frame.as_slice() { [unsubscribe, channel, ..] if *unsubscribe == "unsubscribe" => { let len = self.subscribed_channels.len(); if len == 0 { return Err(response.to_error()); } self.subscribed_channels.retain(|c| *channel != &c[..]); if self.subscribed_channels.len() != len - 1 { return Err(response.to_error()); } } _ => return Err(response.to_error()), }, frame => return Err(frame.to_error()), }; } Ok(()) } }
use crate::cmd::{Get, Publish, Set, Subscribe, Unsubscribe}; use crate::{Connection, Frame}; use async_stream::try_stream; use bytes::Bytes; use std::io::{Error, ErrorKind}; use std::time::Duration; use tokio::net::{TcpStream, ToSocketAddrs}; use tokio::stream::Stream; use tracing::{debug, instrument}; pub struct Client { connection: Connection, } pub struct Subscriber { client: Client, subscribed_channels: Vec<String>, } #[derive(Debug, Clone)] pub struct Message { pub channel: String, pub content: Bytes, } pub async fn connect<T: ToSocketAddrs>(addr: T) -> crate::Result<Client> { let socket = TcpStream::connect(addr).await?; let connection = Connection::new(socket); Ok(Client { connection }) } impl Client { #[instrument(skip(self))] pub async fn get(&mut self, key: &str) -> crate::Result<Option<Bytes>> { let frame = Get::new(key).into_frame(); debug!(request = ?frame); self.connection.write_frame(&frame).await?; match self.read_response().await? { Frame::Simple(value) => Ok(Some(value.into())), Frame::Bulk(value) => Ok(Some(value)), Frame::Null => Ok(None), frame => Err(frame.to_error()), } } #[instrument(skip(self))] pub async fn set(&mut self, key: &str, value: Bytes) -> crate::Result<()> { self.set_cmd(Set::new(key, value, None)).await } #[instrument(skip(self))] pub async fn set_expires( &mut self, key: &str, value: Bytes, expiration: Duration, ) -> crate::Result<()> { self.set_cmd(Set::new(key, value, Some(expiration))).await } async fn set_cmd(&mut self, cmd: Set) -> crate::Result<()> { let frame = cmd.into_frame(); debug!(request = ?frame); self.connection.write_frame(&frame).await?; match self.read_response().await? 
{ Frame::Simple(response) if response == "OK" => Ok(()), frame => Err(frame.to_error()), } } #[instrument(skip(self))] pub async fn publish(&mut self, channel: &str, message: Bytes) -> crate::Result<u64> { let frame = Publish::new(channel, message).into_frame(); debug!(request = ?frame); self.connection.write_frame(&frame).await?; match self.read_response().await? { Frame::Integer(response) => Ok(response), frame => Err(frame.to_error()), } } #[instrument(skip(self))] pub async fn subscribe(mut self, channels: Vec<String>) -> crate::Result<Subscriber> { self.subscribe_cmd(&channels).await?; Ok(Subscriber { client: self, subscribed_channels: channels, }) } async fn subscribe_cmd(&mut self, channels: &[String]) -> crate::Result<()> { let frame = Subscribe::new(&channels).into_frame(); debug!(request = ?frame); self.connection.write_frame(&frame).await?; for channel in channels { let response = self.read_response().await?; match response { Frame::Array(ref frame) => match frame.as_slice() { [subscribe, schannel, ..] if *subscribe == "subscribe" && *schannel == channel => {} _ => return Err(response.to_error()), }, frame => return Err(frame.to_error()), }; } Ok(()) } async fn read_response(&mut self) -> crate::Result<Frame> { let response = self.connection.read_frame().await?; debug!(?response); match response { Some(Frame::Error(msg)) => Err(msg.into()), Some(frame) => Ok(frame), None => { let err = Error::new(ErrorKind::ConnectionReset, "connection reset by server"); Err(err.into()) } } } } impl Subscriber { pub fn get_subscribed(&self) -> &[String] { &self.subscribed_channels } pub async fn next_message(&mut self) -> crate::Result<Option<Message>> { match self.client.connection.read_frame().await? 
{ Some(mframe) => { debug!(?mframe); match mframe { Frame::Array(ref frame) => match frame.as_slice() { [message, channel, content] if *message == "message" => Ok(Some(Message { channel: channel.to_string(), content: Bytes::from(content.to_string()), })), _ => Err(mframe.to_error()), }, frame => Err(frame.to_error()), } } None => Ok(None), } } pub fn into_stream(mut self) -> impl Stream<Item = crate::Result<Message>> { try_stream! { while let Some(message) = self.next_message().await? { yield message; } } } #[instrument(skip(self))] pub async fn subscribe(&mut self, channels: &[String]) -> crate::Result<()> { self.client.subscribe_cmd(channels).awai
#[instrument(skip(self))] pub async fn unsubscribe(&mut self, channels: &[String]) -> crate::Result<()> { let frame = Unsubscribe::new(&channels).into_frame(); debug!(request = ?frame); self.client.connection.write_frame(&frame).await?; let num = if channels.is_empty() { self.subscribed_channels.len() } else { channels.len() }; for _ in 0..num { let response = self.client.read_response().await?; match response { Frame::Array(ref frame) => match frame.as_slice() { [unsubscribe, channel, ..] if *unsubscribe == "unsubscribe" => { let len = self.subscribed_channels.len(); if len == 0 { return Err(response.to_error()); } self.subscribed_channels.retain(|c| *channel != &c[..]); if self.subscribed_channels.len() != len - 1 { return Err(response.to_error()); } } _ => return Err(response.to_error()), }, frame => return Err(frame.to_error()), }; } Ok(()) } }
t?; self.subscribed_channels .extend(channels.iter().map(Clone::clone)); Ok(()) }
function_block-function_prefixed
[ { "content": "/// Creates a message informing the client about a new message on a channel that\n\n/// the client subscribes to.\n\nfn make_message_frame(channel_name: String, msg: Bytes) -> Frame {\n\n let mut response = Frame::array();\n\n response.push_bulk(Bytes::from_static(b\"message\"));\n\n response.push_bulk(Bytes::from(channel_name));\n\n response.push_bulk(msg);\n\n response\n\n}\n\n\n\nimpl Unsubscribe {\n\n /// Create a new `Unsubscribe` command with the given `channels`.\n\n pub(crate) fn new(channels: &[String]) -> Unsubscribe {\n\n Unsubscribe {\n\n channels: channels.to_vec(),\n\n }\n\n }\n\n\n\n /// Parse a `Unsubscribe` instance from a received frame.\n\n ///\n\n /// The `Parse` argument provides a cursor-like API to read fields from the\n\n /// `Frame`. At this point, the entire frame has already been received from\n", "file_path": "src/cmd/subscribe.rs", "rank": 0, "score": 189018.68146059584 }, { "content": "/// Creates the response to an unsubcribe request.\n\nfn make_unsubscribe_frame(channel_name: String, num_subs: usize) -> Frame {\n\n let mut response = Frame::array();\n\n response.push_bulk(Bytes::from_static(b\"unsubscribe\"));\n\n response.push_bulk(Bytes::from(channel_name));\n\n response.push_int(num_subs as u64);\n\n response\n\n}\n\n\n", "file_path": "src/cmd/subscribe.rs", "rank": 1, "score": 165247.90515542444 }, { "content": "/// Creates the response to a subcribe request.\n\n///\n\n/// All of these functions take the `channel_name` as a `String` instead of\n\n/// a `&str` since `Bytes::from` can reuse the allocation in the `String`, and\n\n/// taking a `&str` would require copying the data. 
This allows the caller to\n\n/// decide whether to clone the channel name or not.\n\nfn make_subscribe_frame(channel_name: String, num_subs: usize) -> Frame {\n\n let mut response = Frame::array();\n\n response.push_bulk(Bytes::from_static(b\"subscribe\"));\n\n response.push_bulk(Bytes::from(channel_name));\n\n response.push_int(num_subs as u64);\n\n response\n\n}\n\n\n", "file_path": "src/cmd/subscribe.rs", "rank": 2, "score": 155744.1120660308 }, { "content": "/// Create a new client request buffer\n\n///\n\n/// The `Client` performs Redis commands directly on the TCP connection. Only a\n\n/// single request may be in-flight at a given time and operations require\n\n/// mutable access to the `Client` handle. This prevents using a single Redis\n\n/// connection from multiple Tokio tasks.\n\n///\n\n/// The strategy for dealing with this class of problem is to spawn a dedicated\n\n/// Tokio task to manage the Redis connection and using \"message passing\" to\n\n/// operate on the connection. Commands are pushed into a channel. The\n\n/// connection task pops commands off of the channel and applies them to the\n\n/// Redis connection. When the response is received, it is forwarded to the\n\n/// original requester.\n\n///\n\n/// The returned `Buffer` handle may be cloned before passing the new handle to\n\n/// separate tasks.\n\npub fn buffer(client: Client) -> Buffer {\n\n // Setting the message limit to a hard coded value of 32. 
in a real-app, the\n\n // buffer size should be configurable, but we don't need to do that here.\n\n let (tx, rx) = channel(32);\n\n\n\n // Spawn a task to process requests for the connection.\n\n tokio::spawn(async move { run(client, rx).await });\n\n\n\n // Return the `Buffer` handle.\n\n Buffer { tx }\n\n}\n\n\n\n// Enum used to message pass the requested command from the `Buffer` handle\n", "file_path": "src/buffer.rs", "rank": 3, "score": 116969.35484944856 }, { "content": "fn skip(src: &mut Cursor<&[u8]>, n: usize) -> Result<(), Error> {\n\n if src.remaining() < n {\n\n return Err(Error::Incomplete);\n\n }\n\n\n\n src.advance(n);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/frame.rs", "rank": 4, "score": 76730.29993209182 }, { "content": "/// Read a new-line terminated decimal\n\nfn get_decimal(src: &mut Cursor<&[u8]>) -> Result<u64, Error> {\n\n use atoi::atoi;\n\n\n\n let line = get_line(src)?;\n\n\n\n atoi::<u64>(line).ok_or_else(|| \"protocol error; invalid frame format\".into())\n\n}\n\n\n", "file_path": "src/frame.rs", "rank": 5, "score": 75275.68205667596 }, { "content": "fn get_u8(src: &mut Cursor<&[u8]>) -> Result<u8, Error> {\n\n if !src.has_remaining() {\n\n return Err(Error::Incomplete);\n\n }\n\n\n\n Ok(src.get_u8())\n\n}\n\n\n", "file_path": "src/frame.rs", "rank": 6, "score": 75275.68205667596 }, { "content": "fn peek_u8(src: &mut Cursor<&[u8]>) -> Result<u8, Error> {\n\n if !src.has_remaining() {\n\n return Err(Error::Incomplete);\n\n }\n\n\n\n Ok(src.bytes()[0])\n\n}\n\n\n", "file_path": "src/frame.rs", "rank": 7, "score": 75275.68205667596 }, { "content": "/// Find a line\n\nfn get_line<'a>(src: &mut Cursor<&'a [u8]>) -> Result<&'a [u8], Error> {\n\n // Scan the bytes directly\n\n let start = src.position() as usize;\n\n // Scan to the second to last byte\n\n let end = src.get_ref().len() - 1;\n\n\n\n for i in start..end {\n\n if src.get_ref()[i] == b'\\r' && src.get_ref()[i + 1] == b'\\n' {\n\n // We found a line, update the position to be 
*after* the \\n\n\n src.set_position((i + 2) as u64);\n\n\n\n // Return the line\n\n return Ok(&src.get_ref()[start..i]);\n\n }\n\n }\n\n\n\n Err(Error::Incomplete)\n\n}\n\n\n\nimpl From<String> for Error {\n", "file_path": "src/frame.rs", "rank": 8, "score": 72699.31200795018 }, { "content": "#[derive(Debug)]\n\nstruct Handler {\n\n /// Shared database handle.\n\n ///\n\n /// When a command is received from `connection`, it is applied with `db`.\n\n /// The implementation of the command is in the `cmd` module. Each command\n\n /// will need to interact with `db` in order to complete the work.\n\n db: Db,\n\n\n\n /// The TCP connection decorated with the redis protocol encoder / decoder\n\n /// implemented using a buffered `TcpStream`.\n\n ///\n\n /// When `Listener` receives an inbound connection, the `TcpStream` is\n\n /// passed to `Connection::new`, which initializes the associated buffers.\n\n /// `Connection` allows the handler to operate at the \"frame\" level and keep\n\n /// the byte level protocol parsing details encapsulated in `Connection`.\n\n connection: Connection,\n\n\n\n /// Max connection semaphore.\n\n ///\n\n /// When the handler is dropped, a permit is returned to this semaphore. If\n", "file_path": "src/server.rs", "rank": 9, "score": 71994.04837304656 }, { "content": "#[derive(Debug)]\n\nstruct Listener {\n\n /// Shared database handle.\n\n ///\n\n /// Contains the key / value store as well as the broadcast channels for\n\n /// pub/sub.\n\n ///\n\n /// This is a wrapper around an `Arc`. This enables `db` to be cloned and\n\n /// passed into the per connection state (`Handler`).\n\n db: Db,\n\n\n\n /// TCP listener supplied by the `run` caller.\n\n listener: TcpListener,\n\n\n\n /// Limit the max number of connections.\n\n ///\n\n /// A `Semaphore` is used to limit the max number of connections. Before\n\n /// attempting to accept a new connection, a permit is acquired from the\n\n /// semaphore. 
If none are available, the listener waits for one.\n\n ///\n\n /// When handlers complete processing a connection, the permit is returned\n", "file_path": "src/server.rs", "rank": 10, "score": 71994.04837304656 }, { "content": "use crate::{Connection, Db, Frame, Parse};\n\n\n\nuse bytes::Bytes;\n\n\n\n/// Posts a message to the given channel.\n\n///\n\n/// Send a message into a channel without any knowledge of individual consumers.\n\n/// Consumers may subscribe to channels in order to receive the messages.\n\n///\n\n/// Channel names have no relation to the key-value namespace. Publishing on a\n\n/// channel named \"foo\" has no relation to setting the \"foo\" key.\n\n#[derive(Debug)]\n\npub struct Publish {\n\n /// Name of the channel on which the message should be published.\n\n channel: String,\n\n\n\n /// The message to publish.\n\n message: Bytes,\n\n}\n\n\n", "file_path": "src/cmd/publish.rs", "rank": 11, "score": 56207.253930398416 }, { "content": " /// `Err` is returned.\n\n ///\n\n /// # Format\n\n ///\n\n /// Expects an array frame containing three entries.\n\n ///\n\n /// ```text\n\n /// PUBLISH channel message\n\n /// ```\n\n pub(crate) fn parse_frames(parse: &mut Parse) -> crate::Result<Publish> {\n\n // The `PUBLISH` string has already been consumed. 
Extract the `channel`\n\n // and `message` values from the frame.\n\n //\n\n // The `channel` must be a valid string.\n\n let channel = parse.next_string()?;\n\n\n\n // The `message` is arbitrary bytes.\n\n let message = parse.next_bytes()?;\n\n\n\n Ok(Publish { channel, message })\n", "file_path": "src/cmd/publish.rs", "rank": 12, "score": 56203.58590188703 }, { "content": "impl Publish {\n\n /// Create a new `Publish` command which sends `message` on `channel`.\n\n pub(crate) fn new(channel: impl ToString, message: Bytes) -> Publish {\n\n Publish {\n\n channel: channel.to_string(),\n\n message,\n\n }\n\n }\n\n\n\n /// Parse a `Publish` instance from a received frame.\n\n ///\n\n /// The `Parse` argument provides a cursor-like API to read fields from the\n\n /// `Frame`. At this point, the entire frame has already been received from\n\n /// the socket.\n\n ///\n\n /// The `PUBLISH` string has already been consumed.\n\n ///\n\n /// # Returns\n\n ///\n\n /// On success, the `Publish` value is returned. If the frame is malformed,\n", "file_path": "src/cmd/publish.rs", "rank": 13, "score": 56203.5285351569 }, { "content": " let response = Frame::Integer(num_subscribers as u64);\n\n\n\n // Write the frame to the client.\n\n dst.write_frame(&response).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Converts the command into an equivalent `Frame`.\n\n ///\n\n /// This is called by the client when encoding a `Publish` command to send\n\n /// to the server.\n\n pub(crate) fn into_frame(self) -> Frame {\n\n let mut frame = Frame::array();\n\n frame.push_bulk(Bytes::from(\"publish\".as_bytes()));\n\n frame.push_bulk(Bytes::from(self.channel.into_bytes()));\n\n frame.push_bulk(self.message);\n\n\n\n frame\n\n }\n\n}\n", "file_path": "src/cmd/publish.rs", "rank": 14, "score": 56200.44176097143 }, { "content": " }\n\n\n\n /// Apply the `Publish` command to the specified `Db` instance.\n\n ///\n\n /// The response is written to `dst`. 
This is called by the server in order\n\n /// to execute a received command.\n\n pub(crate) async fn apply(self, db: &Db, dst: &mut Connection) -> crate::Result<()> {\n\n // The shared state contains the `tokio::sync::broadcast::Sender` for\n\n // all active channels. Calling `db.publish` dispatches the message into\n\n // the appropriate channel.\n\n //\n\n // The number of subscribers currently listening on the channel is\n\n // returned. This does not mean that `num_subscriber` channels will\n\n // receive the message. Subscribers may drop before receiving the\n\n // message. Given this, `num_subscribers` should only be used as a\n\n // \"hint\".\n\n let num_subscribers = db.publish(&self.channel, self.message);\n\n\n\n // The number of subscribers is returned as the response to the publish\n\n // request.\n", "file_path": "src/cmd/publish.rs", "rank": 15, "score": 56196.030826500326 }, { "content": "pub struct Set {\n\n /// the lookup key\n\n key: String,\n\n\n\n /// the value to be stored\n\n value: Bytes,\n\n\n\n /// When to expire the key\n\n expire: Option<Duration>,\n\n}\n\n\n\nimpl Set {\n\n /// Create a new `Set` command which sets `key` to `value`.\n\n ///\n\n /// If `expire` is `Some`, the value should expire after the specified\n\n /// duration.\n\n pub fn new(key: impl ToString, value: Bytes, expire: Option<Duration>) -> Set {\n\n Set {\n\n key: key.to_string(),\n\n value,\n", "file_path": "src/cmd/set.rs", "rank": 16, "score": 56117.7429676915 }, { "content": " use ParseError::EndOfStream;\n\n\n\n // Read the key to set. This is a required field\n\n let key = parse.next_string()?;\n\n\n\n // Read the value to set. This is a required field.\n\n let value = parse.next_bytes()?;\n\n\n\n // The expiration is optional. If nothing else follows, then it is\n\n // `None`.\n\n let mut expire = None;\n\n\n\n // Attempt to parse another string.\n\n match parse.next_string() {\n\n Ok(s) if s == \"EX\" => {\n\n // An expiration is specified in seconds. 
The next value is an\n\n // integer.\n\n let secs = parse.next_int()?;\n\n expire = Some(Duration::from_secs(secs));\n\n }\n", "file_path": "src/cmd/set.rs", "rank": 17, "score": 56116.91860676138 }, { "content": "use crate::cmd::{Parse, ParseError};\n\nuse crate::{Connection, Db, Frame};\n\n\n\nuse bytes::Bytes;\n\nuse std::time::Duration;\n\nuse tracing::{debug, instrument};\n\n\n\n/// Set `key` to hold the string `value`.\n\n///\n\n/// If `key` already holds a value, it is overwritten, regardless of its type.\n\n/// Any previous time to live associated with the key is discarded on successful\n\n/// SET operation.\n\n///\n\n/// # Options\n\n///\n\n/// Currently, the following options are supported:\n\n///\n\n/// * EX `seconds` -- Set the specified expire time, in seconds.\n\n/// * PX `milliseconds` -- Set the specified expire time, in milliseconds.\n\n#[derive(Debug)]\n", "file_path": "src/cmd/set.rs", "rank": 18, "score": 56115.875670033995 }, { "content": " }\n\n\n\n /// Apply the `Set` command to the specified `Db` instance.\n\n ///\n\n /// The response is written to `dst`. 
This is called by the server in order\n\n /// to execute a received command.\n\n #[instrument(skip(self, db, dst))]\n\n pub(crate) async fn apply(self, db: &Db, dst: &mut Connection) -> crate::Result<()> {\n\n // Set the value in the shared database state.\n\n db.set(self.key, self.value, self.expire);\n\n\n\n // Create a success response and write it to `dst`.\n\n let response = Frame::Simple(\"OK\".to_string());\n\n debug!(?response);\n\n dst.write_frame(&response).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Converts the command into an equivalent `Frame`.\n", "file_path": "src/cmd/set.rs", "rank": 19, "score": 56114.65577749028 }, { "content": " expire,\n\n }\n\n }\n\n\n\n /// Get the key\n\n pub fn key(&self) -> &str {\n\n &self.key\n\n }\n\n\n\n /// Get the value\n\n pub fn value(&self) -> &Bytes {\n\n &self.value\n\n }\n\n\n\n /// Get the expire\n\n pub fn expire(&self) -> Option<Duration> {\n\n self.expire\n\n }\n\n\n\n /// Parse a `Set` instance from a received frame.\n", "file_path": "src/cmd/set.rs", "rank": 20, "score": 56114.192173280666 }, { "content": " ///\n\n /// This is called by the client when encoding a `Set` command to send to\n\n /// the server.\n\n pub(crate) fn into_frame(self) -> Frame {\n\n let mut frame = Frame::array();\n\n frame.push_bulk(Bytes::from(\"set\".as_bytes()));\n\n frame.push_bulk(Bytes::from(self.key.into_bytes()));\n\n frame.push_bulk(self.value);\n\n frame\n\n }\n\n}\n", "file_path": "src/cmd/set.rs", "rank": 21, "score": 56111.148114310396 }, { "content": " ///\n\n /// The `Parse` argument provides a cursor-like API to read fields from the\n\n /// `Frame`. At this point, the entire frame has already been received from\n\n /// the socket.\n\n ///\n\n /// The `SET` string has already been consumed.\n\n ///\n\n /// # Returns\n\n ///\n\n /// Returns the `Set` value on success. 
If the frame is malformed, `Err` is\n\n /// returned.\n\n ///\n\n /// # Format\n\n ///\n\n /// Expects an array frame containing at least 3 entries.\n\n ///\n\n /// ```text\n\n /// SET key value [EX seconds|PX milliseconds]\n\n /// ```\n\n pub(crate) fn parse_frames(parse: &mut Parse) -> crate::Result<Set> {\n", "file_path": "src/cmd/set.rs", "rank": 22, "score": 56110.70267906671 }, { "content": " Ok(s) if s == \"PX\" => {\n\n // An expiration is specified in milliseconds. The next value is\n\n // an integer.\n\n let ms = parse.next_int()?;\n\n expire = Some(Duration::from_millis(ms));\n\n }\n\n // Currently, mini-redis does not support any of the other SET\n\n // options. An error here results in the connection being\n\n // terminated. Other connections will continue to operate normally.\n\n Ok(_) => return Err(\"currently `SET` only supports the expiration option\".into()),\n\n // The `EndOfStream` error indicates there is no further data to\n\n // parse. In this case, it is a normal run time situation and\n\n // indicates there are no specified `SET` options.\n\n Err(EndOfStream) => {}\n\n // All other errors are bubbled up, resulting in the connection\n\n // being terminated.\n\n Err(err) => return Err(err.into()),\n\n }\n\n\n\n Ok(Set { key, value, expire })\n", "file_path": "src/cmd/set.rs", "rank": 23, "score": 56108.15032884056 }, { "content": " /// This is called by the client when encoding a `Subscribe` command to send\n\n /// to the server.\n\n pub(crate) fn into_frame(self) -> Frame {\n\n let mut frame = Frame::array();\n\n frame.push_bulk(Bytes::from(\"subscribe\".as_bytes()));\n\n for channel in self.channels {\n\n frame.push_bulk(Bytes::from(channel.into_bytes()));\n\n }\n\n frame\n\n }\n\n}\n\n\n\nasync fn subscribe_to_channel(\n\n channel_name: String,\n\n subscriptions: &mut StreamMap<String, broadcast::Receiver<Bytes>>,\n\n db: &Db,\n\n dst: &mut Connection,\n\n) -> crate::Result<()> {\n\n // Subscribe to the channel.\n\n let rx = 
db.subscribe(channel_name.clone());\n", "file_path": "src/cmd/subscribe.rs", "rank": 24, "score": 55820.675801672776 }, { "content": "\n\n // Track subscription in this client's subscription set.\n\n subscriptions.insert(channel_name.clone(), rx);\n\n\n\n // Respond with the successful subscription\n\n let response = make_subscribe_frame(channel_name, subscriptions.len());\n\n dst.write_frame(&response).await?;\n\n\n\n Ok(())\n\n}\n\n\n\n/// Handle a command received while inside `Subscribe::apply`. Only subscribe\n\n/// and unsubscribe commands are permitted in this context.\n\n///\n\n/// Any new subscriptions are appended to `subscribe_to` instead of modifying\n\n/// `subscriptions`.\n\nasync fn handle_command(\n\n frame: Frame,\n\n subscribe_to: &mut Vec<String>,\n\n subscriptions: &mut StreamMap<String, broadcast::Receiver<Bytes>>,\n", "file_path": "src/cmd/subscribe.rs", "rank": 25, "score": 55816.60545705919 }, { "content": " // the channels to subscribe to.\n\n //\n\n // Extract the first string. If there is none, the the frame is\n\n // malformed and the error is bubbled up.\n\n let mut channels = vec![parse.next_string()?];\n\n\n\n // Now, the remainder of the frame is consumed. Each value must be a\n\n // string or the frame is malformed. 
Once all values in the frame have\n\n // been consumed, the command is fully parsed.\n\n loop {\n\n match parse.next_string() {\n\n // A string has been consumed from the `parse`, push it into the\n\n // list of channels to subscribe to.\n\n Ok(s) => channels.push(s),\n\n // The `EndOfStream` error indicates there is no further data to\n\n // parse.\n\n Err(EndOfStream) => break,\n\n // All other errors are bubbled up, resulting in the connection\n\n // being terminated.\n\n Err(err) => return Err(err.into()),\n", "file_path": "src/cmd/subscribe.rs", "rank": 26, "score": 55816.60465902025 }, { "content": "use crate::cmd::{Parse, ParseError, Unknown};\n\nuse crate::{Command, Connection, Db, Frame, Shutdown};\n\n\n\nuse bytes::Bytes;\n\nuse tokio::select;\n\nuse tokio::stream::{StreamExt, StreamMap};\n\nuse tokio::sync::broadcast;\n\n\n\n/// Subscribes the client to one or more channels.\n\n///\n\n/// Once the client enters the subscribed state, it is not supposed to issue any\n\n/// other commands, except for additional SUBSCRIBE, PSUBSCRIBE, UNSUBSCRIBE,\n\n/// PUNSUBSCRIBE, PING and QUIT commands.\n\n#[derive(Debug)]\n\npub struct Subscribe {\n\n channels: Vec<String>,\n\n}\n\n\n\n/// Unsubscribes the client from one or more channels.\n\n///\n", "file_path": "src/cmd/subscribe.rs", "rank": 27, "score": 55816.558168361335 }, { "content": " //\n\n // - Receive a message from one of the subscribed channels.\n\n // - Receive a subscribe or unsubscribe command from the client.\n\n // - A server shutdown signal.\n\n select! {\n\n // Receive messages from subscribed channels\n\n Some((channel_name, msg)) = subscriptions.next() => {\n\n use tokio::sync::broadcast::RecvError;\n\n\n\n let msg = match msg {\n\n Ok(msg) => msg,\n\n Err(RecvError::Lagged(_)) => continue,\n\n Err(RecvError::Closed) => unreachable!(),\n\n };\n\n\n\n dst.write_frame(&make_message_frame(channel_name, msg)).await?;\n\n }\n\n res = dst.read_frame() => {\n\n let frame = match res? 
{\n\n Some(frame) => frame,\n", "file_path": "src/cmd/subscribe.rs", "rank": 28, "score": 55816.46889501998 }, { "content": "/// When no channels are specified, the client is unsubscribed from all the\n\n/// previously subscribed channels.\n\n#[derive(Clone, Debug)]\n\npub struct Unsubscribe {\n\n channels: Vec<String>,\n\n}\n\n\n\nimpl Subscribe {\n\n /// Creates a new `Subscribe` command to listen on the specified channels.\n\n pub(crate) fn new(channels: &[String]) -> Subscribe {\n\n Subscribe {\n\n channels: channels.to_vec(),\n\n }\n\n }\n\n\n\n /// Parse a `Subscribe` instance from a received frame.\n\n ///\n\n /// The `Parse` argument provides a cursor-like API to read fields from the\n\n /// `Frame`. At this point, the entire frame has already been received from\n\n /// the socket.\n", "file_path": "src/cmd/subscribe.rs", "rank": 29, "score": 55815.83569578012 }, { "content": " let mut channels = vec![];\n\n\n\n // Each entry in the frame must be a string or the frame is malformed.\n\n // Once all values in the frame have been consumed, the command is fully\n\n // parsed.\n\n loop {\n\n match parse.next_string() {\n\n // A string has been consumed from the `parse`, push it into the\n\n // list of channels to unsubscribe from.\n\n Ok(s) => channels.push(s),\n\n // The `EndOfStream` error indicates there is no further data to\n\n // parse.\n\n Err(EndOfStream) => break,\n\n // All other errors are bubbled up, resulting in the connection\n\n // being terminated.\n\n Err(err) => return Err(err),\n\n }\n\n }\n\n\n\n Ok(Unsubscribe { channels })\n", "file_path": "src/cmd/subscribe.rs", "rank": 30, "score": 55815.75696720923 }, { "content": " /// the socket.\n\n ///\n\n /// The `UNSUBSCRIBE` string has already been consumed.\n\n ///\n\n /// # Returns\n\n ///\n\n /// On success, the `Unsubscribe` value is returned. 
If the frame is\n\n /// malformed, `Err` is returned.\n\n ///\n\n /// # Format\n\n ///\n\n /// Expects an array frame containing at least one entry.\n\n ///\n\n /// ```text\n\n /// UNSUBSCRIBE [channel [channel ...]]\n\n /// ```\n\n pub(crate) fn parse_frames(parse: &mut Parse) -> Result<Unsubscribe, ParseError> {\n\n use ParseError::EndOfStream;\n\n\n\n // There may be no channels listed, so start with an empty vec.\n", "file_path": "src/cmd/subscribe.rs", "rank": 31, "score": 55815.38206351577 }, { "content": " ///\n\n /// The `SUBSCRIBE` string has already been consumed.\n\n ///\n\n /// # Returns\n\n ///\n\n /// On success, the `Subscribe` value is returned. If the frame is\n\n /// malformed, `Err` is returned.\n\n ///\n\n /// # Format\n\n ///\n\n /// Expects an array frame containing two or more entries.\n\n ///\n\n /// ```text\n\n /// SUBSCRIBE channel [channel ...]\n\n /// ```\n\n pub(crate) fn parse_frames(parse: &mut Parse) -> crate::Result<Subscribe> {\n\n use ParseError::EndOfStream;\n\n\n\n // The `SUBSCRIBE` string has already been consumed. At this point,\n\n // there is one or more strings remaining in `parse`. These represent\n", "file_path": "src/cmd/subscribe.rs", "rank": 32, "score": 55813.689947329636 }, { "content": " dst: &mut Connection,\n\n) -> crate::Result<()> {\n\n // A command has been received from the client.\n\n //\n\n // Only `SUBSCRIBE` and `UNSUBSCRIBE` commands are permitted\n\n // in this context.\n\n match Command::from_frame(frame)? {\n\n Command::Subscribe(subscribe) => {\n\n // The `apply` method will subscribe to the channels we add to this\n\n // vector.\n\n subscribe_to.extend(subscribe.channels.into_iter());\n\n }\n\n Command::Unsubscribe(mut unsubscribe) => {\n\n // If no channels are specified, this requests unsubscribing from\n\n // **all** channels. 
To implement this, the `unsubscribe.channels`\n\n // vec is populated with the list of channels currently subscribed\n\n // to.\n\n if unsubscribe.channels.is_empty() {\n\n unsubscribe.channels = subscriptions\n\n .keys()\n", "file_path": "src/cmd/subscribe.rs", "rank": 33, "score": 55812.91851527835 }, { "content": " }\n\n\n\n /// Converts the command into an equivalent `Frame`.\n\n ///\n\n /// This is called by the client when encoding an `Unsubscribe` command to\n\n /// send to the server.\n\n pub(crate) fn into_frame(self) -> Frame {\n\n let mut frame = Frame::array();\n\n frame.push_bulk(Bytes::from(\"unsubscribe\".as_bytes()));\n\n\n\n for channel in self.channels {\n\n frame.push_bulk(Bytes::from(channel.into_bytes()));\n\n }\n\n\n\n frame\n\n }\n\n}\n", "file_path": "src/cmd/subscribe.rs", "rank": 34, "score": 55811.82013859579 }, { "content": " .map(|channel_name| channel_name.to_string())\n\n .collect();\n\n }\n\n\n\n for channel_name in unsubscribe.channels {\n\n subscriptions.remove(&channel_name);\n\n\n\n let response = make_unsubscribe_frame(channel_name, subscriptions.len());\n\n dst.write_frame(&response).await?;\n\n }\n\n }\n\n command => {\n\n let cmd = Unknown::new(command.get_name());\n\n cmd.apply(dst).await?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n/// Creates the response to a subcribe request.\n\n///\n\n/// All of these functions take the `channel_name` as a `String` instead of\n\n/// a `&str` since `Bytes::from` can reuse the allocation in the `String`, and\n\n/// taking a `&str` would require copying the data. This allows the caller to\n\n/// decide whether to clone the channel name or not.\n", "file_path": "src/cmd/subscribe.rs", "rank": 35, "score": 55811.18714648961 }, { "content": " }\n\n }\n\n\n\n Ok(Subscribe { channels })\n\n }\n\n\n\n /// Apply the `Subscribe` command to the specified `Db` instance.\n\n ///\n\n /// This function is the entry point and includes the initial list of\n\n /// channels to subscribe to. 
Additional `subscribe` and `unsubscribe`\n\n /// commands may be received from the client and the list of subscriptions\n\n /// are updated accordingly.\n\n ///\n\n /// [here]: https://redis.io/topics/pubsub\n\n pub(crate) async fn apply(\n\n mut self,\n\n db: &Db,\n\n dst: &mut Connection,\n\n shutdown: &mut Shutdown,\n\n ) -> crate::Result<()> {\n", "file_path": "src/cmd/subscribe.rs", "rank": 36, "score": 55810.03817145132 }, { "content": " // Each individual channel subscription is handled using a\n\n // `sync::broadcast` channel. Messages are then fanned out to all\n\n // clients currently subscribed to the channels.\n\n //\n\n // An individual client may subscribe to multiple channels and may\n\n // dynamically add and remove channels from its subscription set. To\n\n // handle this, a `StreamMap` is used to track active subscriptions. The\n\n // `StreamMap` merges messages from individual broadcast channels as\n\n // they are received.\n\n let mut subscriptions = StreamMap::new();\n\n\n\n loop {\n\n // `self.channels` is used to track additional channels to subscribe\n\n // to. When new `SUBSCRIBE` commands are received during the\n\n // execution of `apply`, the new channels are pushed onto this vec.\n\n for channel_name in self.channels.drain(..) 
{\n\n subscribe_to_channel(channel_name, &mut subscriptions, db, dst).await?;\n\n }\n\n\n\n // Wait for one of the following to happen:\n", "file_path": "src/cmd/subscribe.rs", "rank": 37, "score": 55806.70269187621 }, { "content": " // This happens if the remote client has disconnected.\n\n None => return Ok(())\n\n };\n\n\n\n handle_command(\n\n frame,\n\n &mut self.channels,\n\n &mut subscriptions,\n\n dst,\n\n ).await?;\n\n }\n\n _ = shutdown.recv() => {\n\n return Ok(());\n\n }\n\n };\n\n }\n\n }\n\n\n\n /// Converts the command into an equivalent `Frame`.\n\n ///\n", "file_path": "src/cmd/subscribe.rs", "rank": 38, "score": 55805.534374031726 }, { "content": "#[derive(Debug)]\n\nstruct Entry {\n\n /// Uniquely identifies this entry.\n\n id: u64,\n\n\n\n /// Stored data\n\n data: Bytes,\n\n\n\n /// Instant at which the entry expires and should be removed from the\n\n /// database.\n\n expires_at: Option<Instant>,\n\n}\n\n\n\nimpl Db {\n\n /// Create a new, empty, `Db` instance. Allocates shared state and spawns a\n\n /// background task to manage key expiration.\n\n pub(crate) fn new() -> Db {\n\n let shared = Arc::new(Shared {\n\n state: Mutex::new(State {\n\n entries: HashMap::new(),\n\n pub_sub: HashMap::new(),\n", "file_path": "src/db.rs", "rank": 39, "score": 44388.68344317523 }, { "content": "#[derive(Debug)]\n\nstruct State {\n\n /// The key-value data. We are not trying to do anything fancy so a\n\n /// `std::collections::HashMap` works fine.\n\n entries: HashMap<String, Entry>,\n\n\n\n /// The pub/sub key-space. Redis uses a **separate** key space for key-value\n\n /// and pub/sub. 
`mini-redis` handles this by using a separate `HashMap`.\n\n pub_sub: HashMap<String, broadcast::Sender<Bytes>>,\n\n\n\n /// Tracks key TTLs.\n\n ///\n\n /// A `BTreeMap` is used to maintain expirations sorted by when they expire.\n\n /// This allows the background task to iterate this map to find the value\n\n /// expiring next.\n\n ///\n\n /// While highly unlikely, it is possible for more than one expiration to be\n\n /// created for the same instant. Because of this, the `Instant` is\n\n /// insufficient for the key. A unique expiration identifier (`u64`) is used\n\n /// to break these ties.\n\n expirations: BTreeMap<(Instant, u64), String>,\n", "file_path": "src/db.rs", "rank": 40, "score": 44388.68344317523 }, { "content": "#[derive(Debug)]\n\nstruct Shared {\n\n /// The shared state is guarded by a mutex. This is a `std::sync::Mutex` and\n\n /// not a Tokio mutex. This is because there are no asynchronous operations\n\n /// being performed while holding the mutex. Additionally, the critical\n\n /// sections are very small.\n\n ///\n\n /// A Tokio mutex is mostly intended to be used when locks need to be held\n\n /// across `.await` yield points. All other cases are **usually** best\n\n /// served by a std mutex. If the critical section does not include any\n\n /// async operations but is long (CPU intensive or performing blocking\n\n /// operations), then the entire operation, including waiting for the mutex,\n\n /// is considered a \"blocking\" operation and `tokio::task::spawn_blocking`\n\n /// should be used.\n\n state: Mutex<State>,\n\n\n\n /// Notifies the background task handling entry expiration. 
The background\n\n /// task waits on this to be notified, then checks for expired values or the\n\n /// shutdown signal.\n\n background_task: Notify,\n\n}\n\n\n", "file_path": "src/db.rs", "rank": 41, "score": 44388.68344317523 }, { "content": "// Message type sent over the channel to the connection task.\n\n//\n\n// `Command` is the command to forward to the connection.\n\n//\n\n// `oneshot::Sender` is a channel type that sends a **single** value. It is used\n\n// here to send the response received from the connection back to the original\n\n// requester.\n\ntype Message = (Command, oneshot::Sender<Result<Option<Bytes>>>);\n\n\n\n/// Receive commands sent through the channel and forward them to client. The\n\n/// response is returned back to the caller via a `oneshot`.\n\nasync fn run(mut client: Client, mut rx: Receiver<Message>) {\n\n // Repeatedly pop messages from the channel. A return value of `None`\n\n // indicates that all `Buffer` handles have dropped and there will never be\n\n // another message sent on the channel.\n\n while let Some((cmd, tx)) = rx.recv().await {\n\n // The command is forwarded to the connection\n\n let response = match cmd {\n\n Command::Get(key) => client.get(&key).await,\n\n Command::Set(key, value) => client.set(&key, value).await.map(|_| None),\n\n };\n\n\n\n // Send the response back to the caller.\n\n //\n\n // Failing to send the message indicates the `rx` half dropped\n\n // before receiving the message. 
This is a normal runtime event.\n\n let _ = tx.send(response);\n", "file_path": "src/buffer.rs", "rank": 42, "score": 42727.02463765074 }, { "content": "\n\n#[tokio::main]\n\nasync fn main() -> Result<()> {\n\n // Open a connection to the mini-redis address.\n\n let mut client = client::connect(\"127.0.0.1:6379\").await?;\n\n\n\n // publish message `bar` on channel foo\n\n client.publish(\"foo\", \"bar\".into()).await?;\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/pub.rs", "rank": 43, "score": 30561.787101640275 }, { "content": "//! Publish to a redis channel example.\n\n//!\n\n//! A simple client that connects to a mini-redis server, and\n\n//! publishes a message on `foo` channel\n\n//!\n\n//! You can test this out by running:\n\n//!\n\n//! cargo run --bin server\n\n//!\n\n//! Then in another terminal run:\n\n//!\n\n//! cargo run --example sub\n\n//!\n\n//! And then in another terminal run:\n\n//!\n\n//! cargo run --example pub\n\n\n\n#![warn(rust_2018_idioms)]\n\n\n\nuse mini_redis::{client, Result};\n", "file_path": "examples/pub.rs", "rank": 44, "score": 30559.32758572351 }, { "content": " // We do not want to return `Err` from here as this \"error\" is an\n\n // expected runtime condition.\n\n Err(Incomplete) => Ok(None),\n\n // An error was encountered while parsing the frame. The connection\n\n // is now in an invalid state. Returning `Err` from here will result\n\n // in the connection being closed.\n\n Err(e) => Err(e.into()),\n\n }\n\n }\n\n\n\n /// Write a single `Frame` value to the underlying stream.\n\n ///\n\n /// The `Frame` value is written to the socket using the various `write_*`\n\n /// functions provided by `AsyncWrite`. Calling these functions directly on\n\n /// a `TcpStream` is **not** advised, as this will result in a large number of\n\n /// syscalls. However, it is fine to call these functions on a *buffered*\n\n /// write stream. The data will be written to the buffer. 
Once the buffer is\n\n /// full, it is flushed to the underlying socket.\n\n pub async fn write_frame(&mut self, frame: &Frame) -> io::Result<()> {\n\n // Arrays are encoded by encoding each entry. All other frame types are\n", "file_path": "src/connection.rs", "rank": 45, "score": 30286.949971799728 }, { "content": "use crate::frame::{self, Frame};\n\n\n\nuse bytes::{Buf, BytesMut};\n\nuse std::io::{self, Cursor};\n\nuse tokio::io::{AsyncReadExt, AsyncWriteExt, BufWriter};\n\nuse tokio::net::TcpStream;\n\n\n\n/// Send and receive `Frame` values from a remote peer.\n\n///\n\n/// When implementing networking protocols, a message on that protocol is\n\n/// often composed of several smaller messages known as frames. The purpose of\n\n/// `Connection` is to read and write frames on the underlying `TcpStream`.\n\n///\n\n/// To read frames, the `Connection` uses an internal buffer, which is filled\n\n/// up until there are enough bytes to create a full frame. Once this happens,\n\n/// the `Connection` creates the frame and returns it to the caller.\n\n///\n\n/// When sending frames, the frame is first encoded into the write buffer.\n\n/// The contents of the write buffer are then written to the socket.\n\n#[derive(Debug)]\n", "file_path": "src/connection.rs", "rank": 46, "score": 30284.531371324538 }, { "content": " // Attempt to parse a frame from the buffered data. If enough data\n\n // has been buffered, the frame is returned.\n\n if let Some(frame) = self.parse_frame()? {\n\n return Ok(Some(frame));\n\n }\n\n\n\n // There is not enough buffered data to read a frame. Attempt to\n\n // read more data from the socket.\n\n //\n\n // On success, the number of bytes is returned. `0` indicates \"end\n\n // of stream\".\n\n if 0 == self.stream.read_buf(&mut self.buffer).await? {\n\n // The remote closed the connection. For this to be a clean\n\n // shutdown, there should be no data in the read buffer. 
If\n\n // there is, this means that the peer closed the socket while\n\n // sending a frame.\n\n if self.buffer.is_empty() {\n\n return Ok(None);\n\n } else {\n\n return Err(\"connection reset by peer\".into());\n", "file_path": "src/connection.rs", "rank": 47, "score": 30282.131601391902 }, { "content": " // returned. This should terminate the **current** connection\n\n // but should not impact any other connected client.\n\n let frame = Frame::parse(&mut buf)?;\n\n\n\n // Discard the parsed data from the read buffer.\n\n //\n\n // When `advance` is called on the read buffer, all of the data\n\n // up to `len` is discarded. The details of how this works is\n\n // left to `BytesMut`. This is often done by moving an internal\n\n // cursor, but it may be done by reallocating and copying data.\n\n self.buffer.advance(len);\n\n\n\n // Return the parsed frame to the caller.\n\n Ok(Some(frame))\n\n }\n\n // There is not enough data present in the read buffer to parse a\n\n // single frame. We must wait for more data to be received from the\n\n // socket. Reading from the socket will be done in the statement\n\n // after this `match`.\n\n //\n", "file_path": "src/connection.rs", "rank": 48, "score": 30280.520673926945 }, { "content": " // this is fine. However, real applications will want to tune this\n\n // value to their specific use case. There is a high likelihood that\n\n // a larger read buffer will work better.\n\n buffer: BytesMut::with_capacity(4 * 1024),\n\n }\n\n }\n\n\n\n /// Read a single `Frame` value from the underlying stream.\n\n ///\n\n /// The function waits until it has retrieved enough data to parse a frame.\n\n /// Any data remaining in the read buffer after the frame has been parsed is\n\n /// kept there for the next call to `read_frame`.\n\n ///\n\n /// # Returns\n\n ///\n\n /// On success, the received frame is returned. If the `TcpStream`\n\n /// is closed in a way that doesn't break a frame in half, it returns\n\n /// `None`. 
Otherwise, an error is returned.\n\n pub async fn read_frame(&mut self) -> crate::Result<Option<Frame>> {\n\n loop {\n", "file_path": "src/connection.rs", "rank": 49, "score": 30279.18238647494 }, { "content": " // are to the buffered stream and writes. Calling `flush` writes the\n\n // remaining contents of the buffer to the socket.\n\n self.stream.flush().await\n\n }\n\n\n\n /// Write a frame literal to the stream\n\n async fn write_value(&mut self, frame: &Frame) -> io::Result<()> {\n\n match frame {\n\n Frame::Simple(val) => {\n\n self.stream.write_u8(b'+').await?;\n\n self.stream.write_all(val.as_bytes()).await?;\n\n self.stream.write_all(b\"\\r\\n\").await?;\n\n }\n\n Frame::Error(val) => {\n\n self.stream.write_u8(b'-').await?;\n\n self.stream.write_all(val.as_bytes()).await?;\n\n self.stream.write_all(b\"\\r\\n\").await?;\n\n }\n\n Frame::Integer(val) => {\n\n self.stream.write_u8(b':').await?;\n", "file_path": "src/connection.rs", "rank": 50, "score": 30277.63105266003 }, { "content": "pub struct Connection {\n\n // The `TcpStream`. It is decorated with a `BufWriter`, which provides write\n\n // level buffering. The `BufWriter` implementation provided by Tokio is\n\n // sufficient for our needs.\n\n stream: BufWriter<TcpStream>,\n\n\n\n // The buffer for reading frames. Unfortunately, Tokio's `BufReader`\n\n // currently requires you to empty its buffer before you can ask it to\n\n // retrieve more data from the underlying stream, so we have to manually\n\n // implement buffering. This should be fixed in Tokio v0.3.\n\n buffer: BytesMut,\n\n}\n\n\n\nimpl Connection {\n\n /// Create a new `Connection`, backed by `socket`. Read and write buffers\n\n /// are initialized.\n\n pub fn new(socket: TcpStream) -> Connection {\n\n Connection {\n\n stream: BufWriter::new(socket),\n\n // Default to a 4KB read buffer. 
For the use case of mini redis,\n", "file_path": "src/connection.rs", "rank": 51, "score": 30276.844505319063 }, { "content": " Ok(())\n\n }\n\n\n\n /// Write a decimal frame to the stream\n\n async fn write_decimal(&mut self, val: u64) -> io::Result<()> {\n\n use std::io::Write;\n\n\n\n // Convert the value to a string\n\n let mut buf = [0u8; 12];\n\n let mut buf = Cursor::new(&mut buf[..]);\n\n write!(&mut buf, \"{}\", val)?;\n\n\n\n let pos = buf.position() as usize;\n\n self.stream.write_all(&buf.get_ref()[..pos]).await?;\n\n self.stream.write_all(b\"\\r\\n\").await?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/connection.rs", "rank": 52, "score": 30276.800763406285 }, { "content": " }\n\n }\n\n }\n\n }\n\n\n\n /// Tries to parse a frame from the buffer. If the buffer contains enough\n\n /// data, the frame is returned and the data removed from the buffer. If not\n\n /// enough data has been buffered yet, `Ok(None)` is returned. If the\n\n /// buffered data does not represent a valid frame, `Err` is returned.\n\n fn parse_frame(&mut self) -> crate::Result<Option<Frame>> {\n\n use frame::Error::Incomplete;\n\n\n\n // Cursor is used to track the \"current\" location in the\n\n // buffer. Cursor also implements `Buf` from the `bytes` crate\n\n // which provides a number of helpful utilities for working\n\n // with bytes.\n\n let mut buf = Cursor::new(&self.buffer[..]);\n\n\n\n // The first step is to check if enough data has been buffered to parse\n\n // a single frame. This step is usually much faster than doing a full\n", "file_path": "src/connection.rs", "rank": 53, "score": 30275.938368539086 }, { "content": " // parse of the frame, and allows us to skip allocating data structures\n\n // to hold the frame data unless we know the full frame has been\n\n // received.\n\n match Frame::check(&mut buf) {\n\n Ok(_) => {\n\n // The `check` function will have advanced the cursor until the\n\n // end of the frame. 
Since the cursor had position set to zero\n\n // before `Frame::check` was called, we obtain the length of the\n\n // frame by checking the cursor position.\n\n let len = buf.position() as usize;\n\n\n\n // Reset the position to zero before passing the cursor to\n\n // `Frame::parse`.\n\n buf.set_position(0);\n\n\n\n // Parse the frame from the buffer. This allocates the necessary\n\n // structures to represent the frame and returns the frame\n\n // value.\n\n //\n\n // If the encoded frame representation is invalid, an error is\n", "file_path": "src/connection.rs", "rank": 54, "score": 30275.237448256332 }, { "content": " // considered literals. For now, mini-redis is not able to encode\n\n // recursive frame structures. See below for more details.\n\n match frame {\n\n Frame::Array(val) => {\n\n // Encode the frame type prefix. For an array, it is `*`.\n\n self.stream.write_u8(b'*').await?;\n\n\n\n // Encode the length of the array.\n\n self.write_decimal(val.len() as u64).await?;\n\n\n\n // Iterate and encode each entry in the array.\n\n for entry in &**val {\n\n self.write_value(entry).await?;\n\n }\n\n }\n\n // The frame type is a literal. Encode the value directly.\n\n _ => self.write_value(frame).await?,\n\n }\n\n\n\n // Ensure the encoded frame is written to the socket. The calls above\n", "file_path": "src/connection.rs", "rank": 55, "score": 30270.24797120111 }, { "content": " self.write_decimal(*val).await?;\n\n }\n\n Frame::Null => {\n\n self.stream.write_all(b\"$-1\\r\\n\").await?;\n\n }\n\n Frame::Bulk(val) => {\n\n let len = val.len();\n\n\n\n self.stream.write_u8(b'$').await?;\n\n self.write_decimal(len as u64).await?;\n\n self.stream.write_all(val).await?;\n\n self.stream.write_all(b\"\\r\\n\").await?;\n\n }\n\n // Encoding an `Array` from within a value cannot be done using a\n\n // recursive strategy. In general, async fns do not support\n\n // recursion. 
Mini-redis has not needed to encode nested arrays yet,\n\n // so for now it is skipped.\n\n Frame::Array(_val) => unreachable!(),\n\n }\n\n\n", "file_path": "src/connection.rs", "rank": 56, "score": 30268.307928934635 }, { "content": " _ => false,\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Frame {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n use std::str;\n\n\n\n match self {\n\n Frame::Simple(response) => response.fmt(fmt),\n\n Frame::Error(msg) => write!(fmt, \"error: {}\", msg),\n\n Frame::Integer(num) => num.fmt(fmt),\n\n Frame::Bulk(msg) => match str::from_utf8(msg) {\n\n Ok(string) => string.fmt(fmt),\n\n Err(_) => write!(fmt, \"{:?}\", msg),\n\n },\n\n Frame::Null => \"(nil)\".fmt(fmt),\n\n Frame::Array(parts) => {\n\n for (i, part) in parts.iter().enumerate() {\n", "file_path": "src/frame.rs", "rank": 57, "score": 29992.051891556526 }, { "content": " actual => Err(format!(\"protocol error; invalid frame type byte `{}`\", actual).into()),\n\n }\n\n }\n\n\n\n /// The message has already been validated with `scan`.\n\n pub fn parse(src: &mut Cursor<&[u8]>) -> Result<Frame, Error> {\n\n match get_u8(src)? {\n\n b'+' => {\n\n // Read the line and convert it to `Vec<u8>`\n\n let line = get_line(src)?.to_vec();\n\n\n\n // Convert the line to a String\n\n let string = String::from_utf8(line)?;\n\n\n\n Ok(Frame::Simple(string))\n\n }\n\n b'-' => {\n\n // Read the line and convert it to `Vec<u8>`\n\n let line = get_line(src)?.to_vec();\n\n\n", "file_path": "src/frame.rs", "rank": 58, "score": 29991.002345105924 }, { "content": " /// panics if `self` is not an array\n\n pub(crate) fn push_bulk(&mut self, bytes: Bytes) {\n\n match self {\n\n Frame::Array(vec) => {\n\n vec.push(Frame::Bulk(bytes));\n\n }\n\n _ => panic!(\"not an array frame\"),\n\n }\n\n }\n\n\n\n /// Push an \"integer\" frame into the array. 
`self` must be an Array frame.\n\n ///\n\n /// # Panics\n\n ///\n\n /// panics if `self` is not an array\n\n pub(crate) fn push_int(&mut self, value: u64) {\n\n match self {\n\n Frame::Array(vec) => {\n\n vec.push(Frame::Integer(value));\n\n }\n", "file_path": "src/frame.rs", "rank": 59, "score": 29989.01507491711 }, { "content": "//! Provides a type representing a Redis protocol frame as well as utilities for\n\n//! parsing frames from a byte array.\n\n\n\nuse bytes::{Buf, Bytes};\n\nuse std::convert::TryInto;\n\nuse std::fmt;\n\nuse std::io::Cursor;\n\nuse std::num::TryFromIntError;\n\nuse std::string::FromUtf8Error;\n\n\n\n/// A frame in the Redis protocol.\n\n#[derive(Clone, Debug)]\n\npub enum Frame {\n\n Simple(String),\n\n Error(String),\n\n Integer(u64),\n\n Bulk(Bytes),\n\n Null,\n\n Array(Vec<Frame>),\n\n}\n", "file_path": "src/frame.rs", "rank": 60, "score": 29988.33305489549 }, { "content": " out.push(Frame::parse(src)?);\n\n }\n\n\n\n Ok(Frame::Array(out))\n\n }\n\n _ => unimplemented!(),\n\n }\n\n }\n\n\n\n /// Converts the frame to an \"unexpected frame\" error\n\n pub(crate) fn to_error(&self) -> crate::Error {\n\n format!(\"unexpected frame: {}\", self).into()\n\n }\n\n}\n\n\n\nimpl PartialEq<&str> for Frame {\n\n fn eq(&self, other: &&str) -> bool {\n\n match self {\n\n Frame::Simple(s) => s.eq(other),\n\n Frame::Bulk(s) => s.eq(other),\n", "file_path": "src/frame.rs", "rank": 61, "score": 29987.044054443028 }, { "content": " _ => panic!(\"not an array frame\"),\n\n }\n\n }\n\n\n\n /// Checks if an entire message can be decoded from `src`\n\n pub fn check(src: &mut Cursor<&[u8]>) -> Result<(), Error> {\n\n match get_u8(src)? 
{\n\n b'+' => {\n\n get_line(src)?;\n\n Ok(())\n\n }\n\n b'-' => {\n\n get_line(src)?;\n\n Ok(())\n\n }\n\n b':' => {\n\n let _ = get_decimal(src)?;\n\n Ok(())\n\n }\n\n b'$' => {\n", "file_path": "src/frame.rs", "rank": 62, "score": 29985.663370111764 }, { "content": "\n\n#[derive(Debug)]\n\npub enum Error {\n\n /// Not enough data is available to parse a message\n\n Incomplete,\n\n\n\n /// Invalid message encoding\n\n Other(crate::Error),\n\n}\n\n\n\nimpl Frame {\n\n /// Returns an empty array\n\n pub(crate) fn array() -> Frame {\n\n Frame::Array(vec![])\n\n }\n\n\n\n /// Push a \"bulk\" frame into the array. `self` must be an Array frame.\n\n ///\n\n /// # Panics\n\n ///\n", "file_path": "src/frame.rs", "rank": 63, "score": 29985.075551304653 }, { "content": " // Convert the line to a String\n\n let string = String::from_utf8(line)?;\n\n\n\n Ok(Frame::Error(string))\n\n }\n\n b':' => {\n\n let len = get_decimal(src)?;\n\n Ok(Frame::Integer(len))\n\n }\n\n b'$' => {\n\n if b'-' == peek_u8(src)? 
{\n\n let line = get_line(src)?;\n\n\n\n if line != b\"-1\" {\n\n return Err(\"protocol error; invalid frame format\".into());\n\n }\n\n\n\n Ok(Frame::Null)\n\n } else {\n\n // Read the bulk string\n", "file_path": "src/frame.rs", "rank": 64, "score": 29983.25256447242 }, { "content": " let len = get_decimal(src)?.try_into()?;\n\n let n = len + 2;\n\n\n\n if src.remaining() < n {\n\n return Err(Error::Incomplete);\n\n }\n\n\n\n let data = Bytes::copy_from_slice(&src.bytes()[..len]);\n\n\n\n // skip that number of bytes + 2 (\\r\\n).\n\n skip(src, n)?;\n\n\n\n Ok(Frame::Bulk(data))\n\n }\n\n }\n\n b'*' => {\n\n let len = get_decimal(src)?.try_into()?;\n\n let mut out = Vec::with_capacity(len);\n\n\n\n for _ in 0..len {\n", "file_path": "src/frame.rs", "rank": 65, "score": 29982.87301213506 }, { "content": " fn from(src: String) -> Error {\n\n Error::Other(src.into())\n\n }\n\n}\n\n\n\nimpl From<&str> for Error {\n\n fn from(src: &str) -> Error {\n\n src.to_string().into()\n\n }\n\n}\n\n\n\nimpl From<FromUtf8Error> for Error {\n\n fn from(_src: FromUtf8Error) -> Error {\n\n \"protocol error; invalid frame format\".into()\n\n }\n\n}\n\n\n\nimpl From<TryFromIntError> for Error {\n\n fn from(_src: TryFromIntError) -> Error {\n\n \"protocol error; invalid frame format\".into()\n", "file_path": "src/frame.rs", "rank": 66, "score": 29982.73361455627 }, { "content": " }\n\n}\n\n\n\nimpl std::error::Error for Error {}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::Incomplete => \"stream ended early\".fmt(fmt),\n\n Error::Other(err) => err.fmt(fmt),\n\n }\n\n }\n\n}\n", "file_path": "src/frame.rs", "rank": 67, "score": 29981.591792027735 }, { "content": " if b'-' == peek_u8(src)? 
{\n\n // Skip '-1\\r\\n'\n\n skip(src, 4)\n\n } else {\n\n // Read the bulk string\n\n let len: usize = get_decimal(src)?.try_into()?;\n\n\n\n // skip that number of bytes + 2 (\\r\\n).\n\n skip(src, len + 2)\n\n }\n\n }\n\n b'*' => {\n\n let len = get_decimal(src)?;\n\n\n\n for _ in 0..len {\n\n Frame::check(src)?;\n\n }\n\n\n\n Ok(())\n\n }\n", "file_path": "src/frame.rs", "rank": 68, "score": 29979.9004876221 }, { "content": " if i > 0 {\n\n write!(fmt, \" \")?;\n\n part.fmt(fmt)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/frame.rs", "rank": 69, "score": 29972.583187962922 }, { "content": "/// a single channel subscription will be tested instead\n\n#[tokio::test]\n\nasync fn receive_message_subscribed_channel() {\n\n let (addr, _) = start_server().await;\n\n\n\n let client = client::connect(addr.clone()).await.unwrap();\n\n let mut subscriber = client.subscribe(vec![\"hello\".into()]).await.unwrap();\n\n\n\n tokio::spawn(async move {\n\n let mut client = client::connect(addr).await.unwrap();\n\n client.publish(\"hello\", \"world\".into()).await.unwrap()\n\n });\n\n\n\n let message = subscriber.next_message().await.unwrap().unwrap();\n\n assert_eq!(\"hello\", &message.channel);\n\n assert_eq!(b\"world\", &message.content[..])\n\n}\n\n\n\n/// test that a client gets messages from multiple subscribed channels\n\n#[tokio::test]\n", "file_path": "tests/client.rs", "rank": 85, "score": 29695.34624444363 }, { "content": "async fn receive_message_multiple_subscribed_channels() {\n\n let (addr, _) = start_server().await;\n\n\n\n let client = client::connect(addr.clone()).await.unwrap();\n\n let mut subscriber = client\n\n .subscribe(vec![\"hello\".into(), \"world\".into()])\n\n .await\n\n .unwrap();\n\n\n\n tokio::spawn(async move {\n\n let mut client = client::connect(addr).await.unwrap();\n\n client.publish(\"hello\", \"world\".into()).await.unwrap()\n\n });\n\n\n\n let message1 = subscriber.next_message().await.unwrap().unwrap();\n\n 
assert_eq!(\"hello\", &message1.channel);\n\n assert_eq!(b\"world\", &message1.content[..]);\n\n\n\n tokio::spawn(async move {\n\n let mut client = client::connect(addr).await.unwrap();\n", "file_path": "tests/client.rs", "rank": 86, "score": 29695.185397515615 }, { "content": " client.publish(\"world\", \"howdy?\".into()).await.unwrap()\n\n });\n\n\n\n let message2 = subscriber.next_message().await.unwrap().unwrap();\n\n assert_eq!(\"world\", &message2.channel);\n\n assert_eq!(b\"howdy?\", &message2.content[..])\n\n}\n\n\n\n/// test that a client accurately removes its own subscribed chanel list\n\n/// when unbscribing to all subscribed channels by submitting an empty vec\n\n#[tokio::test]\n\nasync fn unsubscribes_from_channels() {\n\n let (addr, _) = start_server().await;\n\n\n\n let client = client::connect(addr.clone()).await.unwrap();\n\n let mut subscriber = client\n\n .subscribe(vec![\"hello\".into(), \"world\".into()])\n\n .await\n\n .unwrap();\n\n\n", "file_path": "tests/client.rs", "rank": 87, "score": 29694.512835016667 }, { "content": "use mini_redis::{client, server};\n\nuse std::net::SocketAddr;\n\nuse tokio::net::TcpListener;\n\nuse tokio::task::JoinHandle;\n\n\n\n/// A basic \"hello world\" style test. A server instance is started in a\n\n/// background task. A client instance is then established and set and get\n\n/// commands are sent to the server. 
The response is then evaluated\n\n#[tokio::test]\n\nasync fn key_value_get_set() {\n\n let (addr, _) = start_server().await;\n\n\n\n let mut client = client::connect(addr).await.unwrap();\n\n client.set(\"hello\", \"world\".into()).await.unwrap();\n\n\n\n let value = client.get(\"hello\").await.unwrap().unwrap();\n\n assert_eq!(b\"world\", &value[..])\n\n}\n\n\n\n/// similar to the \"hello world\" style test, But this time\n", "file_path": "tests/client.rs", "rank": 91, "score": 29690.418556387936 }, { "content": " subscriber.unsubscribe(&[]).await.unwrap();\n\n assert_eq!(subscriber.get_subscribed().len(), 0);\n\n}\n\n\n\nasync fn start_server() -> (SocketAddr, JoinHandle<mini_redis::Result<()>>) {\n\n let listener = TcpListener::bind(\"127.0.0.1:0\").await.unwrap();\n\n let addr = listener.local_addr().unwrap();\n\n\n\n let handle = tokio::spawn(async move { server::run(listener, tokio::signal::ctrl_c()).await });\n\n\n\n (addr, handle)\n\n}\n", "file_path": "tests/client.rs", "rank": 98, "score": 29679.49214666423 }, { "content": "use mini_redis::server;\n\n\n\nuse std::net::{Shutdown, SocketAddr};\n\nuse tokio::io::{AsyncReadExt, AsyncWriteExt};\n\nuse tokio::net::{TcpListener, TcpStream};\n\nuse tokio::time::{self, Duration};\n\n\n\n/// A basic \"hello world\" style test. A server instance is started in a\n\n/// background task. A client TCP connection is then established and raw redis\n\n/// commands are sent to the server. The response is evaluated at the byte\n\n/// level.\n\n#[tokio::test]\n\nasync fn key_value_get_set() {\n\n let addr = start_server().await;\n\n\n\n // Establish a connection to the server\n\n let mut stream = TcpStream::connect(addr).await.unwrap();\n\n\n\n // Get a key, data is missing\n\n stream\n", "file_path": "tests/server.rs", "rank": 99, "score": 29335.477753037725 } ]
Rust
crates/nomination/src/lib.rs
gregdhill/interbtc
88e53a7d46c437fdd58ef232973388469186ecf9
#![deny(warnings)]
#![cfg_attr(test, feature(proc_macro_hygiene))]
#![cfg_attr(not(feature = "std"), no_std)]

// Test-only scaffolding.
#[cfg(test)]
mod mock;

#[cfg(test)]
mod tests;

// `ext` wraps calls into sibling pallets (security, fee, collateral,
// vault-registry) so they can be mocked in tests; `types` holds the
// pallet-local type aliases and the `Nominator` struct.
mod ext;
mod types;

mod default_weights;

use ext::vault_registry::{DefaultVault, SlashingError, TryDepositCollateral, TryWithdrawCollateral};
use frame_support::{
    dispatch::{DispatchError, DispatchResult},
    ensure, transactional,
    weights::Weight,
};
use frame_system::{ensure_root, ensure_signed};
use reward::RewardPool;
use sp_runtime::{
    traits::{CheckedAdd, CheckedDiv, CheckedSub, One, Zero},
    FixedPointNumber,
};
use sp_std::convert::TryInto;
pub use types::Nominator;
use types::{
    BalanceOf, Collateral, DefaultNominator, RichNominator, SignedFixedPoint, SignedInner, UnsignedFixedPoint,
};

/// Weight functions for this pallet's extrinsics; a default implementation
/// lives in `default_weights` — presumably benchmark-generated, TODO confirm.
pub trait WeightInfo {
    fn set_nomination_enabled() -> Weight;
    fn opt_in_to_nomination() -> Weight;
    fn opt_out_of_nomination() -> Weight;
    fn deposit_collateral() -> Weight;
    fn withdraw_collateral() -> Weight;
}

pub use pallet::*;

#[frame_support::pallet]
pub mod pallet {
    use super::*;
    use frame_support::pallet_prelude::*;
    use frame_system::pallet_prelude::*;

    /// Pallet configuration. Requires the `security`, `vault_registry` and
    /// `fee` pallets, sharing their fixed-point and balance types.
    #[pallet::config]
    pub trait Config:
        frame_system::Config
        + security::Config
        + vault_registry::Config
        + fee::Config<UnsignedFixedPoint = UnsignedFixedPoint<Self>, UnsignedInner = BalanceOf<Self>>
    {
        /// The overarching event type.
        type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;

        /// Weight information for the pallet's extrinsics.
        type WeightInfo: WeightInfo;

        /// Reward pool in which nominator/vault stake is tracked
        /// (see `deposit_pool_stake` / `withdraw_pool_stake` below).
        type VaultRewards: reward::Rewards<Self::AccountId, SignedFixedPoint = SignedFixedPoint<Self>>;
    }

    #[pallet::event]
    #[pallet::generate_deposit(pub(super) fn deposit_event)]
    #[pallet::metadata(T::AccountId = "AccountId", Collateral<T> = "Collateral")]
    pub enum Event<T: Config> {
        /// A vault opted in to nomination. \[vault_id\]
        NominationOptIn(T::AccountId),
        /// A vault opted out of nomination. \[vault_id\]
        NominationOptOut(T::AccountId),
        /// Collateral was nominated. \[nominator_id, vault_id, amount\]
        DepositCollateral(T::AccountId, T::AccountId, Collateral<T>),
        /// Nominated collateral was withdrawn. \[nominator_id, vault_id, amount\]
        WithdrawCollateral(T::AccountId, T::AccountId, Collateral<T>),
    }

    #[pallet::error]
    pub enum Error<T> {
        InsufficientFunds,
        ArithmeticOverflow,
        ArithmeticUnderflow,
// Remaining variants of `Error<T>` (the enum opens earlier in the file).
        /// No `Nominators` entry exists for this (nominator, vault) pair.
        NominatorNotFound,
        VaultAlreadyOptedInToNomination,
        VaultNotOptedInToNomination,
        VaultNotFound,
        TryIntoIntError,
        InsufficientCollateral,
        /// Nomination is globally disabled (see `NominationEnabled`).
        VaultNominationDisabled,
        /// The deposit would push total nominated collateral above the
        /// vault's maximum nominatable collateral.
        DepositViolatesMaxNominationRatio,
        /// The vault still holds nominated collateral, so it cannot opt out.
        HasNominatedCollateral,
    }

    // Translate slashing errors from the vault-registry into this pallet's
    // error space so they can be returned from dispatchables.
    impl<T: Config> From<SlashingError> for Error<T> {
        fn from(err: SlashingError) -> Self {
            match err {
                SlashingError::ArithmeticOverflow => Error::<T>::ArithmeticOverflow,
                SlashingError::ArithmeticUnderflow => Error::<T>::ArithmeticUnderflow,
                SlashingError::TryIntoIntError => Error::<T>::TryIntoIntError,
                SlashingError::InsufficientFunds => Error::<T>::InsufficientCollateral,
            }
        }
    }

    #[pallet::hooks]
    impl<T: Config> Hooks<T::BlockNumber> for Pallet<T> {}

    /// Global switch: opt-in, deposits and withdrawals are refused while false.
    #[pallet::storage]
    #[pallet::getter(fn is_nomination_enabled)]
    pub type NominationEnabled<T: Config> = StorageValue<_, bool, ValueQuery>;

    /// Vaults that have opted in to nomination (value is set to `true` on insert).
    #[pallet::storage]
    pub(super) type Vaults<T: Config> = StorageMap<_, Blake2_128Concat, T::AccountId, bool, ValueQuery>;

    /// Nominator records, keyed first by nominator account, then by vault account.
    #[pallet::storage]
    pub(super) type Nominators<T: Config> = StorageDoubleMap<
        _,
        Blake2_128Concat,
        T::AccountId,
        Blake2_128Concat,
        T::AccountId,
        Nominator<T::AccountId, Collateral<T>, SignedFixedPoint<T>>,
        ValueQuery,
    >;

    #[pallet::genesis_config]
    pub struct GenesisConfig {
        /// Initial value of the `NominationEnabled` flag.
        pub is_nomination_enabled: bool,
    }

    #[cfg(feature = "std")]
    impl Default for GenesisConfig {
        fn default() -> Self {
            Self {
                is_nomination_enabled: Default::default(),
            }
        }
    }

    #[pallet::genesis_build]
    impl<T: Config> GenesisBuild<T> for GenesisConfig {
        fn build(&self) {
            {
                NominationEnabled::<T>::put(self.is_nomination_enabled);
            }
        }
    }

    #[pallet::pallet]
    pub struct Pallet<T>(_);

    #[pallet::call]
    impl<T: Config> Pallet<T> {
        /// Enable or disable nomination globally. Root-only.
        #[pallet::weight(<T as Config>::WeightInfo::set_nomination_enabled())]
        #[transactional]
        pub fn set_nomination_enabled(origin: OriginFor<T>, enabled: bool) -> DispatchResultWithPostInfo {
            ensure_root(origin)?;
            <NominationEnabled<T>>::set(enabled);
            Ok(().into())
        }

        /// Opt the calling vault in to nomination.
        #[pallet::weight(<T as Config>::WeightInfo::opt_in_to_nomination())]
        #[transactional]
        pub fn
opt_in_to_nomination(origin: OriginFor<T>) -> DispatchResultWithPostInfo {
            // Only allowed while the parachain is in the RUNNING state.
            ext::security::ensure_parachain_status_running::<T>()?;
            Self::_opt_in_to_nomination(&ensure_signed(origin)?)?;
            Ok(().into())
        }

        /// Opt the calling vault out of nomination.
        // NOTE(review): unlike the other dispatchables, this one does not
        // check the parachain status — presumably deliberate so vaults can
        // always opt out; confirm.
        #[pallet::weight(<T as Config>::WeightInfo::opt_out_of_nomination())]
        #[transactional]
        pub fn opt_out_of_nomination(origin: OriginFor<T>) -> DispatchResultWithPostInfo {
            Self::_opt_out_of_nomination(&ensure_signed(origin)?)?;
            Ok(().into())
        }

        /// Nominate `amount` of the caller's collateral to `vault_id`.
        #[pallet::weight(<T as Config>::WeightInfo::deposit_collateral())]
        #[transactional]
        pub fn deposit_collateral(
            origin: OriginFor<T>,
            vault_id: T::AccountId,
            amount: Collateral<T>,
        ) -> DispatchResultWithPostInfo {
            let sender = ensure_signed(origin)?;
            ext::security::ensure_parachain_status_running::<T>()?;
            Self::_deposit_collateral(sender, vault_id, amount)?;
            Ok(().into())
        }

        /// Withdraw `amount` of the caller's previously nominated collateral
        /// from `vault_id`.
        #[pallet::weight(<T as Config>::WeightInfo::withdraw_collateral())]
        #[transactional]
        pub fn withdraw_collateral(
            origin: OriginFor<T>,
            vault_id: T::AccountId,
            amount: Collateral<T>,
        ) -> DispatchResultWithPostInfo {
            let sender = ensure_signed(origin)?;
            ext::security::ensure_parachain_status_running::<T>()?;
            Self::_withdraw_collateral(sender, vault_id, amount)?;
            Ok(().into())
        }
    }
}

// "Internal" helpers, also callable from other pallets / tests.
impl<T: Config> Pallet<T> {
    /// Withdraw `amount` of `nominator_id`'s nominated collateral from `vault_id`.
    ///
    /// # Errors
    /// * `VaultNominationDisabled` if nomination is globally disabled.
    /// * `VaultNotOptedInToNomination` if the vault never opted in.
    /// * `InsufficientCollateral` if the vault-registry reports the
    ///   withdrawal is not allowed.
    /// * `NominatorNotFound` (via `get_nominator`) if no record exists.
    pub fn _withdraw_collateral(
        nominator_id: T::AccountId,
        vault_id: T::AccountId,
        amount: Collateral<T>,
    ) -> DispatchResult {
        ensure!(Self::is_nomination_enabled(), Error::<T>::VaultNominationDisabled);
        ensure!(
            Self::is_nominatable(&vault_id)?,
            Error::<T>::VaultNotOptedInToNomination
        );
        ensure!(
            ext::vault_registry::is_allowed_to_withdraw_collateral::<T>(&vault_id, amount)?,
            Error::<T>::InsufficientCollateral
        );
        // Withdraw all accumulated vault rewards before the reward-pool
        // stake is reduced.
        ext::fee::withdraw_all_vault_rewards::<T>(&vault_id)?;
        Self::withdraw_pool_stake::<<T as pallet::Config>::VaultRewards>(&nominator_id, &vault_id, amount)?;
        // Update the nominator's own bookkeeping, then release the funds.
        let mut nominator: RichNominator<T> = Self::get_nominator(&nominator_id, &vault_id)?.into();
        nominator.try_withdraw_collateral(amount)?;
        // Unlock the collateral held by the vault and send it back to the
        // nominator (call continues on the next source line).
        ext::collateral::unlock_and_transfer::<T>(&vault_id,
&nominator_id, amount)?; Self::deposit_event(Event::<T>::WithdrawCollateral(nominator_id, vault_id, amount)); Ok(()) } pub fn _deposit_collateral( nominator_id: T::AccountId, vault_id: T::AccountId, amount: Collateral<T>, ) -> DispatchResult { ensure!(Self::is_nomination_enabled(), Error::<T>::VaultNominationDisabled); ensure!( Self::is_nominatable(&vault_id)?, Error::<T>::VaultNotOptedInToNomination ); let vault_backing_collateral = ext::vault_registry::get_backing_collateral::<T>(&vault_id)?; let total_nominated_collateral = Self::get_total_nominated_collateral(&vault_id)?; let new_nominated_collateral = total_nominated_collateral .checked_add(&amount) .ok_or(Error::<T>::ArithmeticOverflow)?; ensure!( new_nominated_collateral <= Self::get_max_nominatable_collateral(vault_backing_collateral)?, Error::<T>::DepositViolatesMaxNominationRatio ); ext::fee::withdraw_all_vault_rewards::<T>(&vault_id)?; Self::deposit_pool_stake::<<T as pallet::Config>::VaultRewards>(&nominator_id, &vault_id, amount)?; let mut nominator: RichNominator<T> = Self::register_or_get_nominator(&nominator_id, &vault_id)?.into(); nominator .try_deposit_collateral(amount) .map_err(|e| Error::<T>::from(e))?; ext::collateral::transfer_and_lock::<T>(&nominator_id, &vault_id, amount)?; Self::deposit_event(Event::<T>::DepositCollateral(nominator_id, vault_id, amount)); Ok(()) } pub fn _opt_in_to_nomination(vault_id: &T::AccountId) -> DispatchResult { ensure!(Self::is_nomination_enabled(), Error::<T>::VaultNominationDisabled); ensure!( ext::vault_registry::vault_exists::<T>(&vault_id), Error::<T>::VaultNotFound ); ensure!( !<Vaults<T>>::contains_key(vault_id), Error::<T>::VaultAlreadyOptedInToNomination ); <Vaults<T>>::insert(vault_id, true); Self::deposit_event(Event::<T>::NominationOptIn(vault_id.clone())); Ok(()) } pub fn _opt_out_of_nomination(vault_id: &T::AccountId) -> DispatchResult { ensure!( Self::get_total_nominated_collateral(vault_id)?.is_zero(), Error::<T>::HasNominatedCollateral ); 
<Vaults<T>>::remove(vault_id); Self::deposit_event(Event::<T>::NominationOptOut(vault_id.clone())); Ok(()) } pub fn is_nominatable(vault_id: &T::AccountId) -> Result<bool, DispatchError> { Ok(<Vaults<T>>::contains_key(&vault_id)) } pub fn is_nominator(nominator_id: &T::AccountId, vault_id: &T::AccountId) -> Result<bool, DispatchError> { Ok(<Nominators<T>>::contains_key(&nominator_id, &vault_id)) } pub fn get_total_nominated_collateral(vault_id: &T::AccountId) -> Result<Collateral<T>, DispatchError> { let vault: DefaultVault<T> = ext::vault_registry::get_vault_from_id::<T>(vault_id)?; let vault_actual_collateral = ext::vault_registry::compute_collateral::<T>(vault_id)?; Ok(vault .backing_collateral .checked_sub(&vault_actual_collateral) .ok_or(Error::<T>::ArithmeticUnderflow)?) } pub fn get_max_nomination_ratio() -> Result<UnsignedFixedPoint<T>, DispatchError> { let secure_collateral_threshold = ext::vault_registry::get_secure_collateral_threshold::<T>(); let premium_redeem_threshold = ext::vault_registry::get_premium_redeem_threshold::<T>(); Ok(secure_collateral_threshold .checked_div(&premium_redeem_threshold) .ok_or(Error::<T>::ArithmeticUnderflow)? .checked_sub(&UnsignedFixedPoint::<T>::one()) .ok_or(Error::<T>::ArithmeticUnderflow)?) } pub fn get_nominator( nominator_id: &T::AccountId, vault_id: &T::AccountId, ) -> Result<DefaultNominator<T>, DispatchError> { ensure!( Self::is_nominator(&nominator_id, &vault_id)?, Error::<T>::NominatorNotFound ); Ok(<Nominators<T>>::get(nominator_id, vault_id)) } pub fn get_rich_nominator( nominator_id: &T::AccountId, vault_id: &T::AccountId, ) -> Result<RichNominator<T>, DispatchError> { Ok(Self::get_nominator(&nominator_id, &vault_id)?.into()) } pub fn get_nominator_collateral( nominator_id: &T::AccountId, vault_id: &T::AccountId, ) -> Result<Collateral<T>, DispatchError> { let nominator = Self::get_rich_nominator(nominator_id, vault_id)?; Ok(nominator.compute_collateral()?) 
} pub fn register_or_get_nominator( nominator_id: &T::AccountId, vault_id: &T::AccountId, ) -> Result<DefaultNominator<T>, DispatchError> { if !Self::is_nominator(&nominator_id, &vault_id)? { let nominator = Nominator::new(nominator_id.clone(), vault_id.clone()); <Nominators<T>>::insert(nominator_id, vault_id, nominator.clone()); Ok(nominator) } else { Ok(<Nominators<T>>::get(&nominator_id, &vault_id)) } } pub fn get_max_nominatable_collateral(vault_collateral: Collateral<T>) -> Result<Collateral<T>, DispatchError> { ext::fee::collateral_for::<T>(vault_collateral, Self::get_max_nomination_ratio()?) } fn collateral_to_fixed(x: Collateral<T>) -> Result<SignedFixedPoint<T>, DispatchError> { let signed_inner = TryInto::<SignedInner<T>>::try_into(x).map_err(|_| Error::<T>::TryIntoIntError)?; let signed_fixed_point = SignedFixedPoint::<T>::checked_from_integer(signed_inner).ok_or(Error::<T>::TryIntoIntError)?; Ok(signed_fixed_point) } fn withdraw_pool_stake<R: reward::Rewards<T::AccountId, SignedFixedPoint = SignedFixedPoint<T>>>( account_id: &T::AccountId, vault_id: &T::AccountId, amount: Collateral<T>, ) -> Result<(), DispatchError> { let amount_fixed = Self::collateral_to_fixed(amount)?; if amount_fixed > SignedFixedPoint::<T>::zero() { R::withdraw_stake(RewardPool::Local(vault_id.clone()), account_id, amount_fixed)?; } Ok(()) } fn deposit_pool_stake<R: reward::Rewards<T::AccountId, SignedFixedPoint = SignedFixedPoint<T>>>( account_id: &T::AccountId, vault_id: &T::AccountId, amount: Collateral<T>, ) -> Result<(), DispatchError> { let amount_fixed = Self::collateral_to_fixed(amount)?; R::deposit_stake(RewardPool::Local(vault_id.clone()), account_id, amount_fixed)?; Ok(()) } }
#![deny(warnings)] #![cfg_attr(test, feature(proc_macro_hygiene))] #![cfg_attr(not(feature = "std"), no_std)] #[cfg(test)] mod mock; #[cfg(test)] mod tests; mod ext; mod types; mod default_weights; use ext::vault_registry::{DefaultVault, SlashingError, TryDepositCollateral, TryWithdrawCollateral}; use frame_support::{ dispatch::{DispatchError, DispatchResult}, ensure, transactional, weights::Weight, }; use frame_system::{ensure_root, ensure_signed}; use reward::RewardPool; use sp_runtime::{ traits::{CheckedAdd, CheckedDiv, CheckedSub, One, Zero}, FixedPointNumber, }; use sp_std::convert::TryInto; pub use types::Nominator; use types::{ BalanceOf, Collateral, DefaultNominator, RichNominator, SignedFixedPoint, SignedInner, UnsignedFixedPoint, }; pub trait WeightInfo { fn set_nomination_enabled() -> Weight; fn opt_in_to_nomination() -> Weight; fn opt_out_of_nomination() -> Weight; fn deposit_collateral() -> Weight; fn withdraw_collateral() -> Weight; } pub use pallet::*; #[frame_support::pallet] pub mod pallet { use super::*; use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; #[pallet::config] pub trait Config: frame_system::Config + security::Config + vault_registry::Config + fee::Config<UnsignedFixedPoint = UnsignedFixedPoint<Self>, UnsignedInner = BalanceOf<Self>> { type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>; type WeightInfo: WeightInfo; type VaultRewards: reward::Rewards<Self::AccountId, SignedFixedPoint = SignedFixedPoint<Self>>; } #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] #[pallet::metadata(T::AccountId = "AccountId", Collateral<T> = "Collateral")] pub enum Event<T: Config> { NominationOptIn(T::AccountId), NominationOptOut(T::AccountId), DepositCollateral(T::AccountId, T::AccountId, Collateral<T>), WithdrawCollateral(T::AccountId, T::AccountId, Collateral<T>), } #[pallet::error] pub enum Error<T> { InsufficientFunds, ArithmeticOverflow, ArithmeticUnderflow, 
NominatorNotFound, VaultAlreadyOptedInToNomination, VaultNotOptedInToNomination, VaultNotFound, TryIntoIntError, InsufficientCollateral, VaultNominationDisabled, DepositViolatesMaxNominationRatio, HasNominatedCollateral, } impl<T: Config> From<SlashingError> for Error<T> { fn from(err: SlashingError) -> Self { match err { SlashingError::ArithmeticOverflow => Error::<T>::ArithmeticOverflow, SlashingError::ArithmeticUnderflow => Error::<T>::ArithmeticUnderflow, SlashingError::TryIntoIntError => Error::<T>::TryIntoIntError, SlashingError::InsufficientFunds => Error::<T>::InsufficientCollateral, } } } #[pallet::hooks] impl<T: Config> Hooks<T::BlockNumber> for Pallet<T> {} #[pallet::storage] #[pallet::getter(fn is_nomination_enabled)] pub type NominationEnabled<T: Config> = StorageValue<_, bool, ValueQuery>; #[pallet::storage] pub(super) type Vaults<T: Config> = StorageMap<_, Blake2_128Concat, T::AccountId, bool, ValueQuery>; #[pallet::storage] pub(super) type Nominators<T: Config> = StorageDoubleMap< _, Blake2_128Concat, T::AccountId, Blake2_128Concat, T::AccountId, Nominator<T::AccountId, Collateral<T>, SignedFixedPoint<T>>, ValueQuery, >; #[pallet::genesis_config] pub struct GenesisConfig { pub is_nomination_enabled: bool, } #[cfg(feature = "std")] impl Default for GenesisConfig { fn default() -> Self { Self { is_nomination_enabled: Default::default(), } } } #[pallet::genesis_build] impl<T: Config> GenesisBuild<T> for GenesisConfig { fn build(&self) { { NominationEnabled::<T>::put(self.is_nomination_enabled); } } } #[pallet::pallet] pub struct Pallet<T>(_); #[pallet::call] impl<T: Config> Pallet<T> { #[pallet::weight(<T as Config>::WeightInfo::set_nomination_enabled())] #[transactional] pub fn set_nomination_enabled(origin: OriginFor<T>, enabled: bool) -> DispatchResultWithPostInfo { ensure_root(origin)?; <NominationEnabled<T>>::set(enabled); Ok(().into()) } #[pallet::weight(<T as Config>::WeightInfo::opt_in_to_nomination())] #[transactional] pub fn 
opt_in_to_nomination(origin: OriginFor<T>) -> DispatchResultWithPostInfo { ext::security::ensure_parachain_status_running::<T>()?; Self::_opt_in_to_nomination(&ensure_signed(origin)?)?; Ok(().into()) } #[pallet::weight(<T as Config>::WeightInfo::opt_out_of_nomination())] #[transactional] pub fn opt_out_of_nomination(origin: OriginFor<T>) -> DispatchResultWithPostInfo { Self::_opt_out_of_nomination(&ensure_signed(origin)?)?; Ok(().into()) } #[pallet::weight(<T as Config>::WeightInfo::deposit_collateral())] #[transactional] pub fn deposit_collateral( origin: OriginFor<T>, vault_id: T::AccountId, amount: Collateral<T>, ) -> DispatchResultWithPostInfo { let sender = ensure_signed(origin)?; ext::security::ensure_parachain_status_running::<T>()?; Self::_deposit_collateral(sender, vault_id, amount)?; Ok(().into()) } #[pallet::weight(<T as Config>::WeightInfo::withdraw_collateral())] #[transactional] pub fn withdraw_collateral( origin: OriginFor<T>, vault_id: T::AccountId, amount: Collateral<T>, ) -> DispatchResultWithPostInfo { let sender = ensure_signed(origin)?; ext::security::ensure_parachain_status_running::<T>()?; Self::_withdraw_collateral(sender, vault_id, amount)?; Ok(().into()) } } } impl<T: Config> Pallet<T> { pub fn _withdraw_collateral( nominator_id: T::AccountId, vault_id: T::AccountId, amount: Collateral<T>, ) -> DispatchResult { ensure!(Self::is_nomination_enabled(), Error::<T>::VaultNominationDisabled); ensure!( Self::is_nominatable(&vault_id)?, Error::<T>::VaultNotOptedInToNomination ); ensure!( ext::vault_registry::is_allowed_to_withdraw_collateral::<T>(&vault_id, amount)?, Error::<T>::InsufficientCollateral ); ext::fee::withdraw_all_vault_rewards::<T>(&vault_id)?; Self::withdraw_pool_stake::<<T as pallet::Config>::VaultRewards>(&nominator_id, &vault_id, amount)?; let mut nominator: RichNominator<T> = Self::get_nominator(&nominator_id, &vault_id)?.into(); nominator.try_withdraw_collateral(amount)?; ext::collateral::unlock_and_transfer::<T>(&vault_id, 
&nominator_id, amount)?; Self::deposit_event(Event::<T>::WithdrawCollateral(nominator_id, vault_id, amount)); Ok(()) } pub fn _deposit_collateral( nominator_id: T::AccountId, vault_id: T::AccountId, amount: Collateral<T>, ) -> DispatchResult { ensure!(Self::is_nomination_enabled(), Error::<T>::VaultNominationDisabled); ensure!( Self::is_nominatable(&vault_id)?, Error::<T>::VaultNotOptedInToNomination ); let vault_backing_collateral = ext::vault_registry::get_backing_collateral::<T>(&vault_id)?; let total_nominated_collateral = Self::get_total_nominated_collateral(&vault_id)?; let new_nominated_collateral = total_nominated_collateral .checked_add(&amount) .ok_or(Error::<T>::ArithmeticOverflow)?; ensur
pub fn _opt_in_to_nomination(vault_id: &T::AccountId) -> DispatchResult { ensure!(Self::is_nomination_enabled(), Error::<T>::VaultNominationDisabled); ensure!( ext::vault_registry::vault_exists::<T>(&vault_id), Error::<T>::VaultNotFound ); ensure!( !<Vaults<T>>::contains_key(vault_id), Error::<T>::VaultAlreadyOptedInToNomination ); <Vaults<T>>::insert(vault_id, true); Self::deposit_event(Event::<T>::NominationOptIn(vault_id.clone())); Ok(()) } pub fn _opt_out_of_nomination(vault_id: &T::AccountId) -> DispatchResult { ensure!( Self::get_total_nominated_collateral(vault_id)?.is_zero(), Error::<T>::HasNominatedCollateral ); <Vaults<T>>::remove(vault_id); Self::deposit_event(Event::<T>::NominationOptOut(vault_id.clone())); Ok(()) } pub fn is_nominatable(vault_id: &T::AccountId) -> Result<bool, DispatchError> { Ok(<Vaults<T>>::contains_key(&vault_id)) } pub fn is_nominator(nominator_id: &T::AccountId, vault_id: &T::AccountId) -> Result<bool, DispatchError> { Ok(<Nominators<T>>::contains_key(&nominator_id, &vault_id)) } pub fn get_total_nominated_collateral(vault_id: &T::AccountId) -> Result<Collateral<T>, DispatchError> { let vault: DefaultVault<T> = ext::vault_registry::get_vault_from_id::<T>(vault_id)?; let vault_actual_collateral = ext::vault_registry::compute_collateral::<T>(vault_id)?; Ok(vault .backing_collateral .checked_sub(&vault_actual_collateral) .ok_or(Error::<T>::ArithmeticUnderflow)?) } pub fn get_max_nomination_ratio() -> Result<UnsignedFixedPoint<T>, DispatchError> { let secure_collateral_threshold = ext::vault_registry::get_secure_collateral_threshold::<T>(); let premium_redeem_threshold = ext::vault_registry::get_premium_redeem_threshold::<T>(); Ok(secure_collateral_threshold .checked_div(&premium_redeem_threshold) .ok_or(Error::<T>::ArithmeticUnderflow)? .checked_sub(&UnsignedFixedPoint::<T>::one()) .ok_or(Error::<T>::ArithmeticUnderflow)?) 
} pub fn get_nominator( nominator_id: &T::AccountId, vault_id: &T::AccountId, ) -> Result<DefaultNominator<T>, DispatchError> { ensure!( Self::is_nominator(&nominator_id, &vault_id)?, Error::<T>::NominatorNotFound ); Ok(<Nominators<T>>::get(nominator_id, vault_id)) } pub fn get_rich_nominator( nominator_id: &T::AccountId, vault_id: &T::AccountId, ) -> Result<RichNominator<T>, DispatchError> { Ok(Self::get_nominator(&nominator_id, &vault_id)?.into()) } pub fn get_nominator_collateral( nominator_id: &T::AccountId, vault_id: &T::AccountId, ) -> Result<Collateral<T>, DispatchError> { let nominator = Self::get_rich_nominator(nominator_id, vault_id)?; Ok(nominator.compute_collateral()?) } pub fn register_or_get_nominator( nominator_id: &T::AccountId, vault_id: &T::AccountId, ) -> Result<DefaultNominator<T>, DispatchError> { if !Self::is_nominator(&nominator_id, &vault_id)? { let nominator = Nominator::new(nominator_id.clone(), vault_id.clone()); <Nominators<T>>::insert(nominator_id, vault_id, nominator.clone()); Ok(nominator) } else { Ok(<Nominators<T>>::get(&nominator_id, &vault_id)) } } pub fn get_max_nominatable_collateral(vault_collateral: Collateral<T>) -> Result<Collateral<T>, DispatchError> { ext::fee::collateral_for::<T>(vault_collateral, Self::get_max_nomination_ratio()?) 
} fn collateral_to_fixed(x: Collateral<T>) -> Result<SignedFixedPoint<T>, DispatchError> { let signed_inner = TryInto::<SignedInner<T>>::try_into(x).map_err(|_| Error::<T>::TryIntoIntError)?; let signed_fixed_point = SignedFixedPoint::<T>::checked_from_integer(signed_inner).ok_or(Error::<T>::TryIntoIntError)?; Ok(signed_fixed_point) } fn withdraw_pool_stake<R: reward::Rewards<T::AccountId, SignedFixedPoint = SignedFixedPoint<T>>>( account_id: &T::AccountId, vault_id: &T::AccountId, amount: Collateral<T>, ) -> Result<(), DispatchError> { let amount_fixed = Self::collateral_to_fixed(amount)?; if amount_fixed > SignedFixedPoint::<T>::zero() { R::withdraw_stake(RewardPool::Local(vault_id.clone()), account_id, amount_fixed)?; } Ok(()) } fn deposit_pool_stake<R: reward::Rewards<T::AccountId, SignedFixedPoint = SignedFixedPoint<T>>>( account_id: &T::AccountId, vault_id: &T::AccountId, amount: Collateral<T>, ) -> Result<(), DispatchError> { let amount_fixed = Self::collateral_to_fixed(amount)?; R::deposit_stake(RewardPool::Local(vault_id.clone()), account_id, amount_fixed)?; Ok(()) } }
e!( new_nominated_collateral <= Self::get_max_nominatable_collateral(vault_backing_collateral)?, Error::<T>::DepositViolatesMaxNominationRatio ); ext::fee::withdraw_all_vault_rewards::<T>(&vault_id)?; Self::deposit_pool_stake::<<T as pallet::Config>::VaultRewards>(&nominator_id, &vault_id, amount)?; let mut nominator: RichNominator<T> = Self::register_or_get_nominator(&nominator_id, &vault_id)?.into(); nominator .try_deposit_collateral(amount) .map_err(|e| Error::<T>::from(e))?; ext::collateral::transfer_and_lock::<T>(&nominator_id, &vault_id, amount)?; Self::deposit_event(Event::<T>::DepositCollateral(nominator_id, vault_id, amount)); Ok(()) }
function_block-function_prefixed
[ { "content": "pub fn origin_of(account_id: AccountId) -> <Runtime as frame_system::Config>::Origin {\n\n <Runtime as frame_system::Config>::Origin::signed(account_id)\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 0, "score": 354341.9938808502 }, { "content": "pub fn root() -> <Runtime as frame_system::Config>::Origin {\n\n <Runtime as frame_system::Config>::Origin::root()\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 1, "score": 352294.77245113545 }, { "content": "pub fn enable_nomination() {\n\n assert_ok!(Call::Nomination(NominationCall::set_nomination_enabled(true))\n\n .dispatch(<Runtime as frame_system::Config>::Origin::root()));\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 2, "score": 317757.39550190524 }, { "content": "#[allow(dead_code)]\n\npub fn set_default_thresholds() {\n\n let secure = FixedU128::checked_from_rational(150, 100).unwrap();\n\n let premium = FixedU128::checked_from_rational(135, 100).unwrap();\n\n let liquidation = FixedU128::checked_from_rational(110, 100).unwrap();\n\n\n\n VaultRegistryPallet::set_secure_collateral_threshold(secure);\n\n VaultRegistryPallet::set_premium_redeem_threshold(premium);\n\n VaultRegistryPallet::set_liquidation_collateral_threshold(liquidation);\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 3, "score": 299964.48176477116 }, { "content": "#[allow(dead_code)]\n\npub fn generate_transaction_and_mine(\n\n address: BtcAddress,\n\n amount: u128,\n\n return_data: Option<H256>,\n\n) -> (H256Le, u32, Vec<u8>, Vec<u8>) {\n\n let (tx_id, height, proof, raw_tx, _) = TransactionGenerator::new()\n\n .with_address(address)\n\n .with_amount(amount)\n\n .with_op_return(return_data)\n\n .mine();\n\n (tx_id, height, proof, raw_tx)\n\n}\n\n\n\npub struct ExtBuilder {\n\n test_externalities: sp_io::TestExternalities,\n\n}\n\n\n\nimpl ExtBuilder {\n\n pub fn build() -> Self {\n\n let mut storage = 
frame_system::GenesisConfig::default()\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 4, "score": 299911.58529601805 }, { "content": "pub fn execute_refund_with_amount(vault_id: [u8; 32], amount: u128) -> DispatchResultWithPostInfo {\n\n let refund_address_script = bitcoin::Script::try_from(\"a914d7ff6d60ebf40a9b1886acce06653ba2224d8fea87\").unwrap();\n\n let refund_address = BtcAddress::from_script_pub_key(&refund_address_script).unwrap();\n\n\n\n let refund_id = assert_refund_request_event();\n\n\n\n let (_tx_id, _height, proof, raw_tx) = generate_transaction_and_mine(refund_address, amount, Some(refund_id));\n\n\n\n SecurityPallet::set_active_block_number((1 + CONFIRMATIONS) * 2);\n\n\n\n Call::Refund(RefundCall::execute_refund(refund_id, proof, raw_tx)).dispatch(origin_of(account_of(vault_id)))\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/issue_testing_utils.rs", "rank": 5, "score": 292298.9195271662 }, { "content": "/// Weight functions needed for issue.\n\npub trait WeightInfo {\n\n fn request_issue() -> Weight;\n\n fn execute_issue() -> Weight;\n\n fn cancel_issue() -> Weight;\n\n fn set_issue_period() -> Weight;\n\n}\n\n\n\n// For backwards compatibility and tests\n\nimpl WeightInfo for () {\n\n fn request_issue() -> Weight {\n\n (11_798_074_000 as Weight)\n\n .saturating_add(RocksDbWeight::get().reads(16 as Weight))\n\n .saturating_add(RocksDbWeight::get().writes(5 as Weight))\n\n }\n\n fn execute_issue() -> Weight {\n\n (16_894_787_000 as Weight)\n\n .saturating_add(RocksDbWeight::get().reads(24 as Weight))\n\n .saturating_add(RocksDbWeight::get().writes(9 as Weight))\n\n }\n\n fn cancel_issue() -> Weight {\n\n (6_492_133_000 as Weight)\n\n .saturating_add(RocksDbWeight::get().reads(9 as Weight))\n\n .saturating_add(RocksDbWeight::get().writes(4 as Weight))\n\n }\n\n fn set_issue_period() -> Weight {\n\n (204_239_000 as Weight).saturating_add(RocksDbWeight::get().writes(1 as Weight))\n\n }\n\n}\n", "file_path": 
"crates/issue/src/default_weights.rs", "rank": 6, "score": 287061.2541913743 }, { "content": "pub trait WeightInfo {\n\n fn request_redeem() -> Weight;\n\n fn liquidation_redeem() -> Weight;\n\n fn execute_redeem() -> Weight;\n\n fn cancel_redeem_reimburse() -> Weight;\n\n fn cancel_redeem_retry() -> Weight;\n\n fn set_redeem_period() -> Weight;\n\n fn mint_tokens_for_reimbursed_redeem() -> Weight;\n\n}\n\n\n\nimpl crate::WeightInfo for () {\n\n fn request_redeem() -> Weight {\n\n 179_175_000_u64\n\n .saturating_add(DbWeight::get().reads(12_u64))\n\n .saturating_add(DbWeight::get().writes(5_u64))\n\n }\n\n fn liquidation_redeem() -> Weight {\n\n 179_175_000_u64\n\n .saturating_add(DbWeight::get().reads(12_u64))\n\n .saturating_add(DbWeight::get().writes(5_u64))\n", "file_path": "crates/redeem/src/default_weights.rs", "rank": 7, "score": 287055.8355388465 }, { "content": "pub trait WeightInfo {\n\n fn initialize() -> Weight;\n\n fn report_vault_theft() -> Weight;\n\n fn store_block_header() -> Weight;\n\n}\n\n\n\nimpl crate::WeightInfo for () {\n\n // WARNING! 
Some components were not used: [\"u\"]\n\n fn initialize() -> Weight {\n\n (52_558_000 as Weight)\n\n .saturating_add(DbWeight::get().reads(3 as Weight))\n\n .saturating_add(DbWeight::get().writes(7 as Weight))\n\n }\n\n fn report_vault_theft() -> Weight {\n\n (251_206_000 as Weight)\n\n .saturating_add(DbWeight::get().reads(16 as Weight))\n\n .saturating_add(DbWeight::get().writes(5 as Weight))\n\n }\n\n fn store_block_header() -> Weight {\n\n (123_623_000 as Weight)\n\n .saturating_add(DbWeight::get().reads(13 as Weight))\n\n .saturating_add(DbWeight::get().writes(8 as Weight))\n\n }\n\n}\n", "file_path": "crates/relay/src/default_weights.rs", "rank": 8, "score": 287055.8355388465 }, { "content": "pub trait WeightInfo {\n\n fn request_replace() -> Weight;\n\n fn withdraw_replace() -> Weight;\n\n fn accept_replace() -> Weight;\n\n fn execute_replace() -> Weight;\n\n fn cancel_replace() -> Weight;\n\n fn set_replace_period() -> Weight;\n\n}\n\n\n\nimpl crate::WeightInfo for () {\n\n fn request_replace() -> Weight {\n\n 142_819_000_u64\n\n .saturating_add(DbWeight::get().reads(6_u64))\n\n .saturating_add(DbWeight::get().writes(5_u64))\n\n }\n\n fn withdraw_replace() -> Weight {\n\n 132_256_000_u64\n\n .saturating_add(DbWeight::get().reads(10_u64))\n\n .saturating_add(DbWeight::get().writes(3_u64))\n\n }\n", "file_path": "crates/replace/src/default_weights.rs", "rank": 9, "score": 287055.8355388465 }, { "content": "pub trait WeightInfo {\n\n fn execute_refund() -> Weight;\n\n}\n\n\n\nimpl crate::WeightInfo for () {\n\n fn execute_refund() -> Weight {\n\n 100_000_000_u64\n\n }\n\n}\n", "file_path": "crates/refund/src/default_weights.rs", "rank": 10, "score": 287055.83553884644 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: FnOnce(),\n\n{\n\n clear_mocks();\n\n ExtBuilder::build().execute_with(|| {\n\n assert_ok!(<exchange_rate_oracle::Pallet<Test>>::_set_exchange_rate(\n\n UnsignedFixedPoint::one()\n\n ));\n\n Security::set_active_block_number(1);\n\n 
System::set_block_number(1);\n\n test();\n\n });\n\n}\n", "file_path": "crates/nomination/src/mock.rs", "rank": 12, "score": 283199.3910873043 }, { "content": "/// Weight functions needed for btc_relay.\n\npub trait WeightInfo {\n\n fn verify_and_validate_transaction() -> Weight;\n\n fn verify_transaction_inclusion() -> Weight;\n\n fn validate_transaction() -> Weight;\n\n}\n\n\n\n// For backwards compatibility and tests\n\nimpl WeightInfo for () {\n\n fn verify_and_validate_transaction() -> Weight {\n\n 99_474_000_u64.saturating_add(RocksDbWeight::get().reads(9_u64))\n\n }\n\n fn verify_transaction_inclusion() -> Weight {\n\n 55_622_000_u64.saturating_add(RocksDbWeight::get().reads(8_u64))\n\n }\n\n fn validate_transaction() -> Weight {\n\n 15_739_000_u64.saturating_add(RocksDbWeight::get().reads(1_u64))\n\n }\n\n}\n", "file_path": "crates/btc-relay/src/default_weights.rs", "rank": 13, "score": 282463.32987634203 }, { "content": "pub trait WeightInfo {\n\n fn register_vault() -> Weight;\n\n fn deposit_collateral() -> Weight;\n\n fn withdraw_collateral() -> Weight;\n\n fn update_public_key() -> Weight;\n\n fn register_address() -> Weight;\n\n fn accept_new_issues() -> Weight;\n\n fn report_undercollateralized_vault() -> Weight;\n\n}\n\n\n\nimpl crate::WeightInfo for () {\n\n fn register_vault() -> Weight {\n\n (91_914_000 as Weight)\n\n .saturating_add(DbWeight::get().reads(6 as Weight))\n\n .saturating_add(DbWeight::get().writes(4 as Weight))\n\n }\n\n fn deposit_collateral() -> Weight {\n\n (109_095_000 as Weight)\n\n .saturating_add(DbWeight::get().reads(9 as Weight))\n\n .saturating_add(DbWeight::get().writes(2 as Weight))\n", "file_path": "crates/vault-registry/src/default_weights.rs", "rank": 14, "score": 282457.9793681727 }, { "content": "pub fn default_user_state() -> UserData {\n\n UserData {\n\n free_balance: DEFAULT_USER_FREE_BALANCE,\n\n locked_balance: DEFAULT_USER_LOCKED_BALANCE,\n\n locked_tokens: DEFAULT_USER_LOCKED_TOKENS,\n\n free_tokens: 
DEFAULT_USER_FREE_TOKENS,\n\n }\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 15, "score": 282214.74047290784 }, { "content": "pub fn disable_nomination() {\n\n assert_ok!(Call::Nomination(NominationCall::set_nomination_enabled(false))\n\n .dispatch(<Runtime as frame_system::Config>::Origin::root()));\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 17, "score": 278780.126085051 }, { "content": "pub fn nominate_collateral(\n\n nominator: [u8; 32],\n\n vault: [u8; 32],\n\n amount_collateral: u128,\n\n) -> DispatchResultWithPostInfo {\n\n Call::Nomination(NominationCall::deposit_collateral(account_of(vault), amount_collateral))\n\n .dispatch(origin_of(account_of(nominator)))\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 18, "score": 278780.1260850509 }, { "content": "pub trait WeightInfo {\n\n fn set_exchange_rate() -> Weight;\n\n fn set_btc_tx_fees_per_byte() -> Weight;\n\n fn insert_authorized_oracle() -> Weight;\n\n fn remove_authorized_oracle() -> Weight;\n\n}\n\n\n\nimpl crate::WeightInfo for () {\n\n // WARNING! 
Some components were not used: [\"u\"]\n\n fn set_exchange_rate() -> Weight {\n\n 42_788_000_u64\n\n .saturating_add(DbWeight::get().reads(5_u64))\n\n .saturating_add(DbWeight::get().writes(2_u64))\n\n }\n\n fn set_btc_tx_fees_per_byte() -> Weight {\n\n 30_015_705_u64\n\n .saturating_add(DbWeight::get().reads(2_u64))\n\n .saturating_add(DbWeight::get().writes(1_u64))\n\n }\n\n fn insert_authorized_oracle() -> Weight {\n\n 6_788_000_u64.saturating_add(DbWeight::get().writes(1_u64))\n\n }\n\n fn remove_authorized_oracle() -> Weight {\n\n 6_021_000_u64.saturating_add(DbWeight::get().writes(1_u64))\n\n }\n\n}\n", "file_path": "crates/exchange-rate-oracle/src/default_weights.rs", "rank": 19, "score": 278099.89369947877 }, { "content": "pub fn default_vault_state() -> CoreVaultData {\n\n CoreVaultData {\n\n to_be_issued: DEFAULT_VAULT_TO_BE_ISSUED,\n\n issued: DEFAULT_VAULT_ISSUED,\n\n to_be_redeemed: DEFAULT_VAULT_TO_BE_REDEEMED,\n\n backing_collateral: DEFAULT_VAULT_BACKING_COLLATERAL,\n\n griefing_collateral: DEFAULT_VAULT_GRIEFING_COLLATERAL,\n\n free_balance: DEFAULT_VAULT_FREE_BALANCE,\n\n free_tokens: 0,\n\n liquidated_collateral: 0,\n\n replace_collateral: DEFAULT_VAULT_REPLACE_COLLATERAL,\n\n to_be_replaced: DEFAULT_VAULT_TO_BE_REPLACED,\n\n }\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 20, "score": 276273.1531915595 }, { "content": "pub fn withdraw_nominator_collateral(\n\n nominator: [u8; 32],\n\n vault: [u8; 32],\n\n amount_collateral: u128,\n\n) -> DispatchResultWithPostInfo {\n\n Call::Nomination(NominationCall::withdraw_collateral(\n\n account_of(vault),\n\n amount_collateral,\n\n ))\n\n .dispatch(origin_of(account_of(nominator)))\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 21, "score": 273808.58545754536 }, { "content": "pub fn vault_rewards(amount: u128) -> u128 {\n\n (amount as f64 * VAULT_REWARDS) as u128\n\n}\n\n\n", "file_path": 
"parachain/runtime/tests/mock/reward_testing_utils.rs", "rank": 22, "score": 269216.42910015216 }, { "content": "pub fn nomination_opt_out(vault: [u8; 32]) -> DispatchResultWithPostInfo {\n\n Call::Nomination(NominationCall::opt_out_of_nomination()).dispatch(origin_of(account_of(vault)))\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 23, "score": 264534.9033232387 }, { "content": "pub fn nomination_opt_in(vault: [u8; 32]) -> DispatchResultWithPostInfo {\n\n Call::Nomination(NominationCall::opt_in_to_nomination()).dispatch(origin_of(account_of(vault)))\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 24, "score": 264534.9033232387 }, { "content": "fn test_with_nomination_enabled<R>(execute: impl FnOnce() -> R) -> R {\n\n test_with(|| {\n\n enable_nomination();\n\n execute()\n\n })\n\n}\n\n\n", "file_path": "parachain/runtime/tests/test_nomination.rs", "rank": 25, "score": 262513.4061554393 }, { "content": "pub fn assert_total_nominated_collateral_is(vault: [u8; 32], amount_collateral: u128) {\n\n let nominated_collateral = NominationPallet::get_total_nominated_collateral(&account_of(vault)).unwrap();\n\n assert_eq!(nominated_collateral, amount_collateral);\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 26, "score": 262283.51443103986 }, { "content": "pub fn account_of(address: [u8; 32]) -> AccountId {\n\n AccountId::from(address)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Default, Clone)]\n\npub struct UserData {\n\n pub free_balance: u128,\n\n pub locked_balance: u128,\n\n pub locked_tokens: u128,\n\n pub free_tokens: u128,\n\n}\n\n\n\nimpl UserData {\n\n #[allow(dead_code)]\n\n pub fn get(id: [u8; 32]) -> Self {\n\n let account_id = account_of(id);\n\n Self {\n\n free_balance: CollateralPallet::get_free_balance(&account_id),\n\n locked_balance: CollateralPallet::get_reserved_balance(&account_id),\n\n locked_tokens: 
TreasuryPallet::get_reserved_balance(&account_id),\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 27, "score": 261368.92908286466 }, { "content": "pub fn withdraw_vault_collateral(vault: [u8; 32], amount_collateral: u128) -> DispatchResultWithPostInfo {\n\n Call::Nomination(NominationCall::withdraw_collateral(\n\n account_of(vault),\n\n amount_collateral,\n\n ))\n\n .dispatch(origin_of(account_of(vault)))\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 28, "score": 259213.4666499914 }, { "content": "pub fn assert_nominate_collateral(nominator: [u8; 32], vault: [u8; 32], amount_collateral: u128) {\n\n assert_ok!(nominate_collateral(nominator, vault, amount_collateral));\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 29, "score": 258270.4571477926 }, { "content": "fn test_with_nomination_enabled_and_vault_opted_in<R>(execute: impl FnOnce() -> R) -> R {\n\n test_with_nomination_enabled(|| {\n\n assert_nomination_opt_in(VAULT);\n\n execute()\n\n })\n\n}\n\n\n", "file_path": "parachain/runtime/tests/test_nomination.rs", "rank": 30, "score": 254644.29855823703 }, { "content": "pub fn assert_issue_amount_change_event(issue_id: H256, amount: u128, fee: u128, confiscated_collateral: u128) {\n\n let expected_event = IssueEvent::IssueAmountChange(issue_id, amount, fee, confiscated_collateral);\n\n let events = SystemModule::events();\n\n let records: Vec<_> = events\n\n .iter()\n\n .rev()\n\n .filter(|record| matches!(&record.event, Event::Issue(x) if x == &expected_event))\n\n .collect();\n\n assert_eq!(records.len(), 1);\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/issue_testing_utils.rs", "rank": 31, "score": 254522.37881688052 }, { "content": "pub fn assert_withdraw_nominator_collateral(nominator: [u8; 32], vault: [u8; 32], amount_dot: u128) {\n\n assert_ok!(withdraw_nominator_collateral(nominator, vault, amount_dot));\n\n}\n\n\n", "file_path": 
"parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 32, "score": 254136.9224727046 }, { "content": " #[pallet::config]\n\n pub trait Config: frame_system::Config + pallet_timestamp::Config + security::Config {\n\n /// The overarching event type.\n\n type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;\n\n\n\n /// The primitive balance type.\n\n type Balance: AtLeast32BitUnsigned\n\n + FixedPointOperand\n\n + MaybeSerializeDeserialize\n\n + FullCodec\n\n + Copy\n\n + Default\n\n + Debug;\n\n\n\n /// The unsigned fixed point type.\n\n type UnsignedFixedPoint: FixedPointNumber<Inner = <Self as Config>::Balance> + Encode + EncodeLike + Decode;\n\n\n\n /// Weight information for the extrinsics in this module.\n\n type WeightInfo: WeightInfo;\n\n }\n\n\n", "file_path": "crates/exchange-rate-oracle/src/lib.rs", "rank": 33, "score": 252141.0104674257 }, { "content": "pub fn assert_nomination_opt_in(vault: [u8; 32]) {\n\n assert_ok!(nomination_opt_in(vault));\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 34, "score": 251944.78062929527 }, { "content": "fn request_issue_ok(origin: AccountId, amount: Balance, vault: AccountId, collateral: Balance) -> H256 {\n\n ext::vault_registry::ensure_not_banned::<Test>.mock_safe(|_| MockResult::Return(Ok(())));\n\n\n\n // Default: Parachain status is \"RUNNING\". 
Set manually for failure testing\n\n ext::security::ensure_parachain_status_not_shutdown::<Test>.mock_safe(|| MockResult::Return(Ok(())));\n\n\n\n ext::security::get_secure_id::<Test>.mock_safe(|_| MockResult::Return(get_dummy_request_id()));\n\n\n\n ext::vault_registry::try_increase_to_be_issued_tokens::<Test>.mock_safe(|_, _| MockResult::Return(Ok(())));\n\n ext::vault_registry::register_deposit_address::<Test>\n\n .mock_safe(|_, _| MockResult::Return(Ok(BtcAddress::default())));\n\n\n\n Issue::_request_issue(origin, amount, vault, collateral).unwrap()\n\n}\n\n\n", "file_path": "crates/issue/src/tests.rs", "rank": 35, "score": 251611.6706639838 }, { "content": "pub fn register_vault(vault: [u8; 32]) -> DispatchResultWithPostInfo {\n\n Call::VaultRegistry(VaultRegistryCall::register_vault(\n\n DEFAULT_BACKING_COLLATERAL,\n\n dummy_public_key(),\n\n ))\n\n .dispatch(origin_of(account_of(vault)))\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 36, "score": 251507.4422093919 }, { "content": "type Event = crate::Event<Test>;\n\n\n", "file_path": "crates/relay/src/tests.rs", "rank": 37, "score": 251026.21292826353 }, { "content": "type Event = crate::Event<Test>;\n\n\n\nmacro_rules! assert_emitted {\n\n ($event:expr) => {\n\n let test_event = TestEvent::Redeem($event);\n\n assert!(System::events().iter().any(|a| a.event == test_event));\n\n };\n\n ($event:expr, $times:expr) => {\n\n let test_event = TestEvent::Redeem($event);\n\n assert_eq!(\n\n System::events().iter().filter(|a| a.event == test_event).count(),\n\n $times\n\n );\n\n };\n\n}\n\n\n", "file_path": "crates/redeem/src/tests.rs", "rank": 38, "score": 251026.21292826353 }, { "content": "type Event = crate::Event<Test>;\n\n\n\nmacro_rules! 
assert_emitted {\n\n ($event:expr) => {\n\n let test_event = TestEvent::Security($event);\n\n assert!(System::events().iter().any(|a| a.event == test_event));\n\n };\n\n ($event:expr, $times:expr) => {\n\n let test_event = TestEvent::Security($event);\n\n assert_eq!(\n\n System::events().iter().filter(|a| a.event == test_event).count(),\n\n $times\n\n );\n\n };\n\n}\n\n\n", "file_path": "crates/security/src/tests.rs", "rank": 39, "score": 251026.21292826353 }, { "content": "type Event = crate::Event<Test>;\n\n\n", "file_path": "crates/replace/src/tests.rs", "rank": 40, "score": 251026.21292826353 }, { "content": "pub fn assert_withdraw_vault_collateral(vault: [u8; 32], amount_dot: u128) {\n\n assert_ok!(withdraw_vault_collateral(vault, amount_dot));\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 41, "score": 249238.59919882307 }, { "content": "type Event = crate::Event<Test>;\n\n\n\nuse crate::{Chains, ChainsIndex};\n\nuse bitcoin::{formatter::TryFormattable, merkle::*, parser::*, types::*};\n\nuse frame_support::{assert_err, assert_ok};\n\nuse mocktopus::mocking::*;\n\nuse sp_std::{\n\n convert::{TryFrom, TryInto},\n\n str::FromStr,\n\n};\n\n/// # Getters and setters\n\n///\n\n/// get_chain_position_from_chain_id\n\n/// set_chain_from_position_and_id\n", "file_path": "crates/btc-relay/src/tests.rs", "rank": 42, "score": 247863.04204486957 }, { "content": "type Event = crate::Event<Test>;\n\n\n\n// use macro to avoid messing up stack trace\n\nmacro_rules! assert_emitted {\n\n ($event:expr) => {\n\n let test_event = TestEvent::VaultRegistry($event);\n\n assert!(System::events().iter().any(|a| a.event == test_event));\n\n };\n\n ($event:expr, $times:expr) => {\n\n let test_event = TestEvent::VaultRegistry($event);\n\n assert_eq!(\n\n System::events().iter().filter(|a| a.event == test_event).count(),\n\n $times\n\n );\n\n };\n\n}\n\n\n\nmacro_rules! 
assert_not_emitted {\n\n ($event:expr) => {\n\n let test_event = TestEvent::VaultRegistry($event);\n\n assert!(!System::events().iter().any(|a| a.event == test_event));\n\n };\n\n}\n\n\n", "file_path": "crates/vault-registry/src/tests.rs", "rank": 43, "score": 247863.04204486957 }, { "content": "// asserts redeem event happen and extracts its id for further testing\n\npub fn assert_redeem_request_event() -> H256 {\n\n let events = SystemModule::events();\n\n let ids = events\n\n .iter()\n\n .filter_map(|r| match r.event {\n\n Event::Redeem(RedeemEvent::RequestRedeem(id, _, _, _, _, _, _, _)) => Some(id),\n\n _ => None,\n\n })\n\n .collect::<Vec<H256>>();\n\n assert_eq!(ids.len(), 1);\n\n ids[0]\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/redeem_testing_utils.rs", "rank": 44, "score": 247031.91251378867 }, { "content": "pub fn assert_refund_request_event() -> H256 {\n\n SystemModule::events()\n\n .iter()\n\n .find_map(|record| match record.event {\n\n Event::Refund(RefundEvent::RequestRefund(id, _, _, _, _, _, _)) => Some(id),\n\n _ => None,\n\n })\n\n .expect(\"request refund event not found\")\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/issue_testing_utils.rs", "rank": 45, "score": 247021.78035532325 }, { "content": "pub fn assert_issue_request_event() -> H256 {\n\n let events = SystemModule::events();\n\n let record = events.iter().rev().find(|record| {\n\n matches!(\n\n record.event,\n\n Event::Issue(IssueEvent::RequestIssue(_, _, _, _, _, _, _, _))\n\n )\n\n });\n\n if let Event::Issue(IssueEvent::RequestIssue(id, _, _, _, _, _, _, _)) = record.unwrap().event {\n\n id\n\n } else {\n\n panic!(\"request issue event not found\")\n\n }\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/issue_testing_utils.rs", "rank": 46, "score": 247021.78035532325 }, { "content": "pub fn assert_store_main_chain_header_event(height: u32, hash: H256Le, relayer: AccountId) {\n\n let store_event = 
Event::BTCRelay(BTCRelayEvent::StoreMainChainHeader(height, hash, relayer));\n\n let events = SystemModule::events();\n\n\n\n // store only main chain header\n\n assert!(events.iter().any(|a| a.event == store_event));\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 47, "score": 246813.65173443378 }, { "content": "pub fn mine_blocks(blocks: u32) {\n\n let start_height = BTCRelayPallet::get_best_block_height();\n\n TransactionGenerator::new().with_confirmations(blocks).mine();\n\n let end_height = BTCRelayPallet::get_best_block_height();\n\n // sanity check\n\n assert_eq!(end_height, start_height + blocks);\n\n}\n\n\n\n#[derive(Default, Clone, Debug)]\n\npub struct TransactionGenerator {\n\n address: BtcAddress,\n\n amount: u128,\n\n return_data: Option<H256>,\n\n script: Vec<u8>,\n\n confirmations: u32,\n\n relayer: Option<[u8; 32]>,\n\n}\n\n\n\nimpl TransactionGenerator {\n\n pub fn new() -> Self {\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 48, "score": 245412.47922787716 }, { "content": "type Event = crate::Event<Test>;\n\n\n\n// use macro to avoid messing up stack trace\n\nmacro_rules! assert_emitted {\n\n ($event:expr) => {\n\n let test_event = TestEvent::ExchangeRateOracle($event);\n\n assert!(System::events().iter().any(|a| a.event == test_event));\n\n };\n\n}\n\n\n\nmacro_rules! 
assert_not_emitted {\n\n ($event:expr) => {\n\n let test_event = TestEvent::ExchangeRateOracle($event);\n\n assert!(!System::events().iter().any(|a| a.event == test_event));\n\n };\n\n}\n\n\n", "file_path": "crates/exchange-rate-oracle/src/tests.rs", "rank": 49, "score": 244840.5268074653 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: FnOnce(),\n\n{\n\n clear_mocks();\n\n ExtBuilder::build().execute_with(|| {\n\n assert_ok!(<exchange_rate_oracle::Pallet<Test>>::_set_exchange_rate(\n\n UnsignedFixedPoint::one()\n\n ));\n\n Security::set_active_block_number(1);\n\n System::set_block_number(1);\n\n\n\n ext::btc_relay::is_fully_initialized::<Test>.mock_safe(|| MockResult::Return(Ok(true)));\n\n test();\n\n });\n\n}\n", "file_path": "crates/issue/src/mock.rs", "rank": 50, "score": 244474.2074476941 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: FnOnce(),\n\n{\n\n clear_mocks();\n\n ExtBuilder::build().execute_with(|| {\n\n assert_ok!(<exchange_rate_oracle::Pallet<Test>>::_set_exchange_rate(\n\n UnsignedFixedPoint::one()\n\n ));\n\n System::set_block_number(1);\n\n Security::set_active_block_number(1);\n\n test();\n\n });\n\n}\n", "file_path": "crates/replace/src/mock.rs", "rank": 51, "score": 244474.2074476941 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: FnOnce(),\n\n{\n\n clear_mocks();\n\n ExtBuilder::build().execute_with(|| {\n\n System::set_block_number(1);\n\n Security::set_active_block_number(1);\n\n test();\n\n });\n\n}\n", "file_path": "crates/sla/src/mock.rs", "rank": 52, "score": 244474.2074476941 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: FnOnce(),\n\n{\n\n clear_mocks();\n\n ExtBuilder::build().execute_with(|| {\n\n System::set_block_number(1);\n\n Security::set_active_block_number(1);\n\n test();\n\n });\n\n}\n", "file_path": "crates/security/src/mock.rs", "rank": 53, "score": 244474.2074476941 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: FnOnce(),\n\n{\n\n 
ExtBuilder::build().execute_with(|| {\n\n System::set_block_number(1);\n\n test();\n\n });\n\n}\n", "file_path": "crates/reward/src/mock.rs", "rank": 54, "score": 244474.2074476941 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: FnOnce(),\n\n{\n\n clear_mocks();\n\n ExtBuilder::build().execute_with(|| {\n\n assert_ok!(<exchange_rate_oracle::Pallet<Test>>::_set_exchange_rate(\n\n UnsignedFixedPoint::one()\n\n ));\n\n System::set_block_number(1);\n\n Security::set_active_block_number(1);\n\n test();\n\n });\n\n}\n", "file_path": "crates/refund/src/mock.rs", "rank": 55, "score": 244474.2074476941 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: FnOnce(),\n\n{\n\n clear_mocks();\n\n ExtBuilder::build().execute_with(|| {\n\n System::set_block_number(1);\n\n Security::set_active_block_number(1);\n\n test();\n\n });\n\n}\n", "file_path": "crates/fee/src/mock.rs", "rank": 56, "score": 244474.2074476941 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: FnOnce(),\n\n{\n\n ExtBuilder::build().execute_with(|| {\n\n System::set_block_number(1);\n\n test();\n\n });\n\n}\n", "file_path": "crates/staking/src/mock.rs", "rank": 57, "score": 244474.2074476941 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: FnOnce(),\n\n{\n\n clear_mocks();\n\n ExtBuilder::build().execute_with(|| {\n\n assert_ok!(<exchange_rate_oracle::Pallet<Test>>::_set_exchange_rate(\n\n UnsignedFixedPoint::one()\n\n ));\n\n Security::set_active_block_number(1);\n\n System::set_block_number(1);\n\n test();\n\n });\n\n}\n", "file_path": "crates/redeem/src/mock.rs", "rank": 58, "score": 244474.2074476941 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: FnOnce(),\n\n{\n\n clear_mocks();\n\n ExtBuilder::build().execute_with(|| {\n\n System::set_block_number(1);\n\n Security::set_active_block_number(1);\n\n test();\n\n });\n\n}\n", "file_path": "crates/relay/src/mock.rs", "rank": 59, "score": 244474.2074476941 }, { "content": " #[pallet::config]\n\n pub trait 
Config:\n\n frame_system::Config\n\n + security::Config\n\n + vault_registry::Config\n\n + btc_relay::Config\n\n + redeem::Config\n\n + replace::Config\n\n + refund::Config\n\n + sla::Config\n\n + fee::Config\n\n {\n\n /// The overarching event type.\n\n type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;\n\n\n\n /// Weight information for the extrinsics in this module.\n\n type WeightInfo: WeightInfo;\n\n }\n\n\n\n #[pallet::event]\n\n #[pallet::generate_deposit(pub(super) fn deposit_event)]\n", "file_path": "crates/relay/src/lib.rs", "rank": 60, "score": 241049.0562377111 }, { "content": " #[pallet::config]\n\n pub trait Config:\n\n frame_system::Config\n\n + vault_registry::Config\n\n + btc_relay::Config\n\n + fee::Config<UnsignedInner = BalanceOf<Self>>\n\n + sla::Config<Balance = BalanceOf<Self>>\n\n {\n\n /// The overarching event type.\n\n type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;\n\n\n\n /// Weight information for the extrinsics in this module.\n\n type WeightInfo: WeightInfo;\n\n }\n\n\n\n #[pallet::event]\n\n #[pallet::generate_deposit(pub(super) fn deposit_event)]\n\n #[pallet::metadata(T::AccountId = \"AccountId\", Wrapped<T> = \"Wrapped\", Collateral<T> = \"Collateral\")]\n\n pub enum Event<T: Config> {\n\n RequestRedeem(\n\n H256, // redeem_id\n", "file_path": "crates/redeem/src/lib.rs", "rank": 61, "score": 241049.0562377111 }, { "content": " #[pallet::config]\n\n pub trait Config:\n\n frame_system::Config\n\n + btc_relay::Config\n\n + fee::Config<UnsignedInner = BalanceOf<Self>>\n\n + sla::Config\n\n + vault_registry::Config\n\n {\n\n /// The overarching event type.\n\n type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;\n\n\n\n /// Weight information for the extrinsics in this module.\n\n type WeightInfo: WeightInfo;\n\n }\n\n\n\n #[pallet::event]\n\n #[pallet::generate_deposit(pub(super) fn deposit_event)]\n\n #[pallet::metadata(T::AccountId = \"AccountId\", 
Wrapped<T> = \"Wrapped\")]\n\n pub enum Event<T: Config> {\n\n /// refund_id, issuer, amount_without_fee, vault, btc_address, issue_id, fee\n\n RequestRefund(\n", "file_path": "crates/refund/src/lib.rs", "rank": 62, "score": 241049.0562377111 }, { "content": " #[pallet::config]\n\n pub trait Config:\n\n frame_system::Config\n\n + vault_registry::Config\n\n + btc_relay::Config\n\n + exchange_rate_oracle::Config<Balance = BalanceOf<Self>>\n\n + fee::Config\n\n + sla::Config<Balance = BalanceOf<Self>>\n\n + nomination::Config\n\n {\n\n /// The overarching event type.\n\n type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;\n\n\n\n /// Weight information for the extrinsics in this module.\n\n type WeightInfo: WeightInfo;\n\n }\n\n\n\n #[pallet::event]\n\n #[pallet::generate_deposit(pub(super) fn deposit_event)]\n\n #[pallet::metadata(T::AccountId = \"AccountId\", Wrapped<T> = \"Wrapped\", Collateral<T> = \"Collateral\")]\n\n pub enum Event<T: Config> {\n", "file_path": "crates/replace/src/lib.rs", "rank": 63, "score": 241049.0562377111 }, { "content": " #[pallet::config]\n\n pub trait Config:\n\n frame_system::Config\n\n + vault_registry::Config\n\n + btc_relay::Config\n\n + exchange_rate_oracle::Config<Balance = BalanceOf<Self>>\n\n + fee::Config<UnsignedInner = BalanceOf<Self>>\n\n + sla::Config<Balance = BalanceOf<Self>>\n\n + refund::Config\n\n {\n\n /// The overarching event type.\n\n type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;\n\n\n\n /// Weight information for the extrinsics in this module.\n\n type WeightInfo: WeightInfo;\n\n }\n\n\n\n #[pallet::event]\n\n #[pallet::generate_deposit(pub(super) fn deposit_event)]\n\n #[pallet::metadata(T::AccountId = \"AccountId\", Wrapped<T> = \"Wrapped\", Collateral<T> = \"Collateral\")]\n\n pub enum Event<T: Config> {\n", "file_path": "crates/issue/src/lib.rs", "rank": 64, "score": 241049.0562377111 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: 
FnOnce(),\n\n{\n\n clear_mocks();\n\n ExtBuilder::build().execute_with(|| {\n\n System::set_block_number(1);\n\n Security::set_active_block_number(1);\n\n test();\n\n });\n\n}\n", "file_path": "crates/btc-relay/src/mock.rs", "rank": 65, "score": 240443.5787830648 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: FnOnce(),\n\n{\n\n clear_mocks();\n\n ext::oracle::collateral_to_wrapped::<Test>.mock_safe(|v| MockResult::Return(Ok(v)));\n\n ext::oracle::wrapped_to_collateral::<Test>.mock_safe(|v| MockResult::Return(Ok(v)));\n\n ExtBuilder::build().execute_with(|| {\n\n System::set_block_number(1);\n\n Security::set_active_block_number(1);\n\n set_default_thresholds();\n\n test()\n\n })\n\n}\n", "file_path": "crates/vault-registry/src/mock.rs", "rank": 66, "score": 240443.5787830648 }, { "content": "pub fn dummy_public_key() -> BtcPublicKey {\n\n BtcPublicKey([\n\n 2, 205, 114, 218, 156, 16, 235, 172, 106, 37, 18, 153, 202, 140, 176, 91, 207, 51, 187, 55, 18, 45, 222, 180,\n\n 119, 54, 243, 97, 173, 150, 161, 169, 230,\n\n ])\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 67, "score": 239428.64546634397 }, { "content": "pub fn assert_register_vault(vault: [u8; 32]) {\n\n assert_ok!(register_vault(vault));\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 68, "score": 239051.05545300734 }, { "content": " #[pallet::config]\n\n pub trait Config: frame_system::Config {\n\n /// The overarching event type.\n\n type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;\n\n\n\n /// Signed fixed point type.\n\n type SignedFixedPoint: FixedPointNumber<Inner = Self::SignedInner>\n\n + Encode\n\n + EncodeLike\n\n + Decode\n\n + MaybeSerializeDeserialize;\n\n\n\n /// The `Inner` type of the `SignedFixedPoint`.\n\n type SignedInner: Debug\n\n + One\n\n + CheckedMul\n\n + CheckedDiv\n\n + FixedPointOperand\n\n + TryFrom<<Self as Config>::Balance>\n\n + TryInto<<Self as 
Config>::Balance>;\n\n\n", "file_path": "crates/sla/src/lib.rs", "rank": 69, "score": 237401.55591667996 }, { "content": " #[pallet::config]\n\n pub trait Config: frame_system::Config {\n\n /// The overarching event type.\n\n type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;\n\n }\n\n\n\n #[pallet::event]\n\n #[pallet::generate_deposit(pub(super) fn deposit_event)]\n\n #[pallet::metadata(T::BlockNumber = \"BlockNumber\")]\n\n pub enum Event<T: Config> {\n\n RecoverFromErrors(StatusCode, Vec<ErrorCode>),\n\n UpdateActiveBlock(T::BlockNumber),\n\n }\n\n\n\n #[pallet::error]\n\n pub enum Error<T> {\n\n NoDataBTCRelay,\n\n InvalidBTCRelay,\n\n ParachainNotRunning,\n\n ParachainShutdown,\n\n ParachainNotRunningOrLiquidation,\n", "file_path": "crates/security/src/lib.rs", "rank": 70, "score": 237401.55591667994 }, { "content": " #[pallet::config]\n\n pub trait Config: frame_system::Config {\n\n /// The overarching event type.\n\n type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;\n\n\n\n /// The `Inner` type of the `SignedFixedPoint`.\n\n type SignedInner: CheckedDiv + Ord + FixedPointOperand;\n\n\n\n /// Signed fixed point type.\n\n type SignedFixedPoint: FixedPointNumber<Inner = Self::SignedInner> + Encode + EncodeLike + Decode;\n\n\n\n /// The currency ID type.\n\n type CurrencyId: Parameter + Member + Copy + MaybeSerializeDeserialize + Ord;\n\n }\n\n\n\n // The pallet's events\n\n #[pallet::event]\n\n #[pallet::generate_deposit(pub(crate) fn deposit_event)]\n\n #[pallet::metadata(\n\n T::CurrencyId = \"CurrencyId\",\n\n T::AccountId = \"AccountId\",\n", "file_path": "crates/staking/src/lib.rs", "rank": 71, "score": 237401.55591667996 }, { "content": "pub trait WeightInfo {\n\n fn withdraw_vault_rewards() -> Weight;\n\n}\n\n\n\npub use pallet::*;\n\n\n\n#[frame_support::pallet]\n\npub mod pallet {\n\n use super::*;\n\n use frame_support::pallet_prelude::*;\n\n use frame_system::pallet_prelude::*;\n\n\n\n /// ## 
Configuration\n\n /// The pallet's configuration trait.\n", "file_path": "crates/fee/src/lib.rs", "rank": 72, "score": 237269.27637680926 }, { "content": " #[pallet::config]\n\n pub trait Config:\n\n frame_system::Config\n\n + SendTransactionTypes<Call<Self>>\n\n + exchange_rate_oracle::Config<Balance = BalanceOf<Self>>\n\n + sla::Config<Balance = BalanceOf<Self>>\n\n + security::Config\n\n {\n\n /// The vault module id, used for deriving its sovereign account ID.\n\n #[pallet::constant] // put the constant in metadata\n\n type PalletId: Get<PalletId>;\n\n\n\n /// The overarching event type.\n\n type Event: From<Event<Self>>\n\n + Into<<Self as frame_system::Config>::Event>\n\n + IsType<<Self as frame_system::Config>::Event>;\n\n\n\n /// The source of (pseudo) randomness. Set to collective flip\n\n type RandomnessSource: Randomness<H256, Self::BlockNumber>;\n\n\n\n /// The `Inner` type of the `SignedFixedPoint`.\n", "file_path": "crates/vault-registry/src/lib.rs", "rank": 73, "score": 237010.43692910892 }, { "content": "pub fn run_test<T>(test: T)\n\nwhere\n\n T: FnOnce(),\n\n{\n\n clear_mocks();\n\n ExtBuilder::build().execute_with(|| {\n\n Security::set_active_block_number(1);\n\n System::set_block_number(1);\n\n test();\n\n });\n\n}\n", "file_path": "crates/exchange-rate-oracle/src/mock.rs", "rank": 74, "score": 236612.58253517078 }, { "content": "#[allow(dead_code)]\n\npub fn try_register_operator(operator: [u8; 32]) {\n\n let _ = Call::Nomination(NominationCall::opt_in_to_nomination()).dispatch(origin_of(account_of(operator)));\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 75, "score": 236588.3362152265 }, { "content": "pub fn cancel_redeem(redeem_id: H256, redeemer: [u8; 32], reimburse: bool) {\n\n assert_ok!(Call::Redeem(RedeemCall::cancel_redeem(redeem_id, reimburse)).dispatch(origin_of(account_of(redeemer))));\n\n}\n", "file_path": "parachain/runtime/tests/mock/redeem_testing_utils.rs", "rank": 76, "score": 236024.80312742386 }, 
{ "content": "pub fn get_nominator_collateral(nominator: [u8; 32], vault: [u8; 32]) -> u128 {\n\n NominationPallet::get_nominator_collateral(&account_of(nominator), &account_of(vault)).unwrap()\n\n}\n", "file_path": "parachain/runtime/tests/mock/nomination_testing_utils.rs", "rank": 77, "score": 235788.15157279454 }, { "content": " #[pallet::config]\n\n pub trait Config: frame_system::Config {}\n\n\n\n #[pallet::hooks]\n\n impl<T: Config> Hooks<BlockNumberFor<T>> for Pallet<T> {}\n\n\n\n #[pallet::call]\n\n impl<T: Config> Pallet<T> {}\n\n\n\n #[pallet::genesis_config]\n\n pub struct GenesisConfig {\n\n pub parachain_id: ParaId,\n\n }\n\n\n\n #[cfg(feature = \"std\")]\n\n impl Default for GenesisConfig {\n\n fn default() -> Self {\n\n Self {\n\n parachain_id: 21.into(),\n\n }\n\n }\n", "file_path": "crates/parachain-info/src/lib.rs", "rank": 78, "score": 234172.99789491922 }, { "content": " #[pallet::config]\n\n pub trait Config: frame_system::Config + security::Config {\n\n /// The overarching event type.\n\n type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>;\n\n\n\n /// The fee module id, used for deriving its sovereign account ID.\n\n #[pallet::constant]\n\n type PalletId: Get<PalletId>;\n\n\n\n /// Weight information for the extrinsics in this module.\n\n type WeightInfo: WeightInfo;\n\n\n\n /// Signed fixed point type.\n\n type SignedFixedPoint: FixedPointNumber<Inner = Self::SignedInner> + Encode + EncodeLike + Decode;\n\n\n\n /// The `Inner` type of the `SignedFixedPoint`.\n\n type SignedInner: Debug\n\n + CheckedDiv\n\n + TryFrom<Collateral<Self>>\n\n + TryFrom<Wrapped<Self>>\n\n + TryInto<Collateral<Self>>\n", "file_path": "crates/fee/src/lib.rs", "rank": 79, "score": 232026.50983658526 }, { "content": "#[allow(dead_code)]\n\npub fn drop_exchange_rate_and_liquidate(vault: [u8; 32]) {\n\n assert_ok!(ExchangeRateOraclePallet::_set_exchange_rate(\n\n FixedU128::checked_from_integer(10_000_000_000).unwrap()\n\n ));\n\n 
assert_ok!(VaultRegistryPallet::liquidate_vault(&account_of(vault)));\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 80, "score": 231656.67277237336 }, { "content": " #[pallet::config]\n\n pub trait Config: frame_system::Config + security::Config {\n\n /// The overarching event type.\n\n type Event: From<Event<Self>>\n\n + Into<<Self as frame_system::Config>::Event>\n\n + IsType<<Self as frame_system::Config>::Event>;\n\n\n\n /// Weight information for the extrinsics in this module.\n\n type WeightInfo: WeightInfo;\n\n\n\n #[pallet::constant]\n\n type ParachainBlocksPerBitcoinBlock: Get<<Self as frame_system::Config>::BlockNumber>;\n\n }\n\n\n\n #[pallet::hooks]\n\n impl<T: Config> Hooks<T::BlockNumber> for Pallet<T> {}\n\n\n\n #[pallet::call]\n\n impl<T: Config> Pallet<T> {\n\n /// Verifies the inclusion of `tx_id` into the relay, and validates the given raw Bitcoin transaction, according\n\n /// to the supported transaction format (see <https://interlay.gitlab.io/polkabtc-spec/btcrelay-spec/intro/accepted-format.html>)\n", "file_path": "crates/btc-relay/src/lib.rs", "rank": 81, "score": 229240.86899219005 }, { "content": "#[allow(dead_code)]\n\npub fn required_collateral_for_issue(issued_tokens: u128) -> u128 {\n\n let fee_amount_btc = FeePallet::get_issue_fee(issued_tokens).unwrap();\n\n let total_amount_btc = issued_tokens + fee_amount_btc;\n\n VaultRegistryPallet::get_required_collateral_for_wrapped(total_amount_btc).unwrap()\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 82, "score": 226981.31639664277 }, { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// Configure a mock runtime to test the pallet.\n\nframe_support::construct_runtime!(\n\n pub enum Test where\n\n Block = Block,\n\n NodeBlock = Block,\n\n UncheckedExtrinsic = UncheckedExtrinsic,\n\n {\n\n System: frame_system::{Pallet, Call, Storage, Config, Event<T>},\n\n Timestamp: pallet_timestamp::{Pallet, Call, Storage, 
Inherent},\n\n RandomnessCollectiveFlip: pallet_randomness_collective_flip::{Pallet, Call, Storage},\n\n\n\n // Tokens & Balances\n\n Tokens: orml_tokens::{Pallet, Storage, Config<T>, Event<T>},\n\n\n\n Rewards: reward::{Pallet, Call, Storage, Event<T>},\n\n\n\n // Operational\n\n Security: security::{Pallet, Call, Storage, Event<T>},\n", "file_path": "crates/nomination/src/mock.rs", "rank": 83, "score": 226132.49327094146 }, { "content": "pub fn assert_accept_event() -> H256 {\n\n SystemModule::events()\n\n .iter()\n\n .rev()\n\n .find_map(|record| match record.event {\n\n Event::Replace(ReplaceEvent::AcceptReplace(id, _, _, _, _, _)) => Some(id),\n\n _ => None,\n\n })\n\n .unwrap()\n\n}\n\n\n", "file_path": "parachain/runtime/tests/test_replace.rs", "rank": 84, "score": 223275.6865790395 }, { "content": " #[pallet::config]\n\n pub trait Config<I: 'static = ()>: frame_system::Config {\n\n /// The overarching event type.\n\n type Event: From<Event<Self, I>> + IsType<<Self as frame_system::Config>::Event>;\n\n\n\n /// Signed fixed point type.\n\n type SignedFixedPoint: FixedPointNumber + Encode + EncodeLike + Decode;\n\n\n\n /// The currency ID type.\n\n type CurrencyId: Parameter + Member + Copy + MaybeSerializeDeserialize + Ord;\n\n }\n\n\n\n #[derive(Encode, Decode, Clone, Copy, PartialEq, Debug)]\n\n pub enum RewardPool<AccountId> {\n\n Global,\n\n Local(AccountId),\n\n }\n\n\n\n // The pallet's events\n\n #[pallet::event]\n\n #[pallet::generate_deposit(pub(crate) fn deposit_event)]\n", "file_path": "crates/reward/src/lib.rs", "rank": 85, "score": 222875.83154236275 }, { "content": "pub fn execute_refund(vault_id: [u8; 32]) -> (H256, RefundRequest<AccountId, u128>) {\n\n let refund_id = assert_refund_request_event();\n\n let refund = RefundPallet::get_open_refund_request_from_id(&refund_id).unwrap();\n\n assert_ok!(execute_refund_with_amount(vault_id, refund.amount_wrapped));\n\n (refund_id, refund)\n\n}\n\n\n", "file_path": 
"parachain/runtime/tests/mock/issue_testing_utils.rs", "rank": 86, "score": 221402.5465759522 }, { "content": "fn test_with<R>(execute: impl FnOnce() -> R) -> R {\n\n ExtBuilder::build().execute_with(|| {\n\n SecurityPallet::set_active_block_number(1);\n\n assert_ok!(ExchangeRateOraclePallet::_set_exchange_rate(FixedU128::one()));\n\n UserData::force_to(USER, default_user_state());\n\n CoreVaultData::force_to(VAULT, default_vault_state());\n\n execute()\n\n })\n\n}\n\n\n", "file_path": "parachain/runtime/tests/test_nomination.rs", "rank": 87, "score": 221329.3387970639 }, { "content": "#[allow(dead_code)]\n\npub fn try_register_vault(collateral: u128, vault: [u8; 32]) {\n\n if VaultRegistryPallet::get_vault_from_id(&account_of(vault)).is_err() {\n\n assert_ok!(\n\n Call::VaultRegistry(VaultRegistryCall::register_vault(collateral, dummy_public_key()))\n\n .dispatch(origin_of(account_of(vault)))\n\n );\n\n };\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/mod.rs", "rank": 88, "score": 221004.89383874641 }, { "content": "fn create_sample_vault() -> <Test as frame_system::Config>::AccountId {\n\n create_vault(DEFAULT_ID)\n\n}\n\n\n", "file_path": "crates/vault-registry/src/tests.rs", "rank": 89, "score": 219111.9064711287 }, { "content": "type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "crates/nomination/src/mock.rs", "rank": 90, "score": 218872.8960885087 }, { "content": "/// Builds a new service for a light client.\n\npub fn new_light(mut config: Configuration) -> Result<(TaskManager, RpcHandlers), ServiceError> {\n\n let telemetry = config\n\n .telemetry_endpoints\n\n .clone()\n\n .filter(|x| !x.is_empty())\n\n .map(|endpoints| -> Result<_, sc_telemetry::Error> {\n\n let worker = TelemetryWorker::new(16)?;\n\n let telemetry = worker.handle().new_telemetry(endpoints);\n\n Ok((worker, telemetry))\n\n })\n\n .transpose()?;\n\n\n\n let (client, backend, keystore_container, mut task_manager, on_demand) =\n\n 
sc_service::new_light_parts::<Block, RuntimeApi, Executor>(\n\n &config,\n\n telemetry.as_ref().map(|(_, telemetry)| telemetry.handle()),\n\n )?;\n\n\n\n let mut telemetry = telemetry.map(|(worker, telemetry)| {\n\n task_manager.spawn_handle().spawn(\"telemetry\", worker.run());\n", "file_path": "parachain/service/src/grandpa.rs", "rank": 91, "score": 217232.1808303888 }, { "content": "/// Builds a new service for a full client.\n\npub fn new_full(mut config: Configuration) -> Result<(TaskManager, RpcHandlers), ServiceError> {\n\n let sc_service::PartialComponents {\n\n client,\n\n backend,\n\n mut task_manager,\n\n import_queue,\n\n mut keystore_container,\n\n select_chain,\n\n transaction_pool,\n\n other: (block_import, grandpa_link, mut telemetry),\n\n } = new_partial(&config)?;\n\n\n\n if let Some(url) = &config.keystore_remote {\n\n match remote_keystore(url) {\n\n Ok(k) => keystore_container.set_remote_keystore(k),\n\n Err(e) => {\n\n return Err(ServiceError::Other(format!(\n\n \"Error hooking up remote keystore for {}: {}\",\n\n url, e\n\n )))\n", "file_path": "parachain/service/src/grandpa.rs", "rank": 92, "score": 217232.1808303888 }, { "content": "fn init_zero_vault<T: Config>(\n\n id: T::AccountId,\n\n) -> Vault<T::AccountId, T::BlockNumber, Wrapped<T>, Collateral<T>, <T as vault_registry::Config>::SignedFixedPoint> {\n\n let mut vault = Vault::default();\n\n vault.id = id;\n\n vault\n\n}\n\n\n", "file_path": "crates/issue/src/tests.rs", "rank": 93, "score": 217095.46373075788 }, { "content": "pub fn execute_redeem(redeem_id: H256) {\n\n ExecuteRedeemBuilder::new(redeem_id).assert_execute();\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/redeem_testing_utils.rs", "rank": 94, "score": 206959.29587806654 }, { "content": "pub fn execute_issue(issue_id: H256) {\n\n ExecuteIssueBuilder::new(issue_id).assert_execute()\n\n}\n\n\n", "file_path": "parachain/runtime/tests/mock/issue_testing_utils.rs", "rank": 95, "score": 206959.29587806654 }, { 
"content": "#[test]\n\nfn integration_test_vaults_with_zero_nomination_cannot_request_replacement() {\n\n test_with_nomination_enabled_and_vault_opted_in(|| {\n\n let amount = DEFAULT_VAULT_ISSUED - DEFAULT_VAULT_TO_BE_REDEEMED - DEFAULT_VAULT_TO_BE_REPLACED;\n\n let griefing_collateral = 200;\n\n assert_noop!(\n\n Call::Replace(ReplaceCall::request_replace(amount, griefing_collateral))\n\n .dispatch(origin_of(account_of(VAULT))),\n\n ReplaceError::VaultHasEnabledNomination\n\n );\n\n });\n\n}\n\n\n", "file_path": "parachain/runtime/tests/test_nomination.rs", "rank": 96, "score": 205890.7301408787 }, { "content": "fn create_vault(id: u64) -> <Test as frame_system::Config>::AccountId {\n\n create_vault_with_collateral(id, DEFAULT_COLLATERAL)\n\n}\n\n\n", "file_path": "crates/vault-registry/src/tests.rs", "rank": 97, "score": 205156.21352380747 }, { "content": "type TestExtrinsic = TestXt<Call, ()>;\n", "file_path": "crates/nomination/src/mock.rs", "rank": 98, "score": 202424.45066594676 }, { "content": "fn execute_replace_with_amount(replace_id: H256, amount: u128) -> DispatchResultWithPostInfo {\n\n let replace = ReplacePallet::get_open_replace_request(&replace_id).unwrap();\n\n\n\n // send the btc from the old_vault to the new_vault\n\n let (_tx_id, _tx_block_height, merkle_proof, raw_tx) =\n\n generate_transaction_and_mine(replace.btc_address, amount, Some(replace_id));\n\n\n\n SecurityPallet::set_active_block_number(SecurityPallet::active_block_number() + CONFIRMATIONS);\n\n\n\n Call::Replace(ReplaceCall::execute_replace(replace_id, merkle_proof, raw_tx))\n\n .dispatch(origin_of(account_of(OLD_VAULT)))\n\n}\n\n\n", "file_path": "parachain/runtime/tests/test_replace.rs", "rank": 99, "score": 197740.77954129674 } ]
Rust
adafruit-feather-nrf52840-express/examples/feather-express-listener.rs
blueluna/nrf52840-dk-experiments
bbe34a24b9002f9f446e949d7b8013d2e8dae484
#![no_main] #![no_std] use panic_itm as _; use cortex_m::{iprintln, peripheral::ITM}; use rtic::app; use bbqueue::{self, BBBuffer}; use nrf52840_hal::{clocks, gpio, uarte}; use nrf52840_pac as pac; use psila_nrf52::radio::{Radio, MAX_PACKET_LENGHT}; const PACKET_BUFFER_SIZE: usize = 2048; static PKT_BUFFER: BBBuffer<PACKET_BUFFER_SIZE> = BBBuffer::new(); #[app(device = nrf52840_pac, peripherals = true)] const APP: () = { struct Resources { radio: Radio, itm: ITM, uart: uarte::Uarte<pac::UARTE0>, rx_producer: bbqueue::Producer<'static, PACKET_BUFFER_SIZE>, rx_consumer: bbqueue::Consumer<'static, PACKET_BUFFER_SIZE>, } #[init] fn init(cx: init::Context) -> init::LateResources { let port0 = gpio::p0::Parts::new(cx.device.P0); let _clocks = clocks::Clocks::new(cx.device.CLOCK) .enable_ext_hfosc() .set_lfclk_src_external(clocks::LfOscConfiguration::NoExternalNoBypass) .start_lfclk(); let uarte0 = uarte::Uarte::new( cx.device.UARTE0, uarte::Pins { txd: port0 .p0_06 .into_push_pull_output(gpio::Level::High) .degrade(), rxd: port0.p0_08.into_floating_input().degrade(), cts: Some(port0.p0_07.into_floating_input().degrade()), rts: Some( port0 .p0_05 .into_push_pull_output(gpio::Level::High) .degrade(), ), }, uarte::Parity::EXCLUDED, uarte::Baudrate::BAUD115200, ); let (q_producer, q_consumer) = PKT_BUFFER.try_split().unwrap(); let mut radio = Radio::new(cx.device.RADIO); radio.set_channel(15); radio.set_transmission_power(8); radio.receive_prepare(); init::LateResources { radio, itm: cx.core.ITM, uart: uarte0, rx_producer: q_producer, rx_consumer: q_consumer, } } #[task(binds = RADIO, resources = [radio, rx_producer],)] fn radio(cx: radio::Context) { let radio = cx.resources.radio; let queue = cx.resources.rx_producer; match queue.grant_exact(MAX_PACKET_LENGHT) { Ok(mut grant) => { if grant.buf().len() < MAX_PACKET_LENGHT { grant.commit(0); } else { if let Ok(packet_len) = radio.receive_slice(grant.buf()) { grant.commit(packet_len); } else { grant.commit(0); } } } Err(_) => 
{ let mut buffer = [0u8; MAX_PACKET_LENGHT]; let _ = radio.receive(&mut buffer); } } } #[idle(resources = [rx_consumer, uart, itm])] fn idle(cx: idle::Context) -> ! { let mut host_packet = [0u8; MAX_PACKET_LENGHT * 2]; let queue = cx.resources.rx_consumer; let uarte = cx.resources.uart; let itm_port = &mut cx.resources.itm.stim[0]; iprintln!(itm_port, "~ listening ~"); loop { if let Ok(grant) = queue.read() { let packet_length = grant[0] as usize; match esercom::com_encode( esercom::MessageType::RadioReceive, &grant[1..packet_length], &mut host_packet, ) { Ok(written) => { uarte.write(&host_packet[..written]).unwrap(); } Err(_) => { iprintln!(itm_port, "Failed to encode packet"); } } grant.release(packet_length); } } } };
#![no_main] #![no_std] use panic_itm as _; use cortex_m::{iprintln, peripheral::ITM}; use rtic::app; use bbqueue::{self, BBBuffer}; use nrf52840_hal::{clocks, gpio, uarte}; use nrf52840_pac as pac; use psila_nrf52::radio::{Radio, MAX_PACKET_LENGHT}; const PACKET_BUFFER_SIZE: usize = 2048; static PKT_BUFFER: BBBuffer<PACKET_BUFFER_SIZE> = BBBuffer::new(); #[app(device = nrf52840_pac, peripherals = true)] const APP: () = { struct Resources { radio: Radio, itm: ITM, uart: uarte::Uarte<pac::UARTE0>, rx_producer: bbqueue::Producer<'static, PACKET_BUFFER_SIZE>, rx_consumer: bbqueue::Consumer<'static, PACKET_BUFFER_SIZE>, } #[init] fn init(cx: init::Context) -> init::LateResources { let port0 = gpio::p0::Parts::new(cx.device.P0); let _clocks = clocks::Clocks::new(cx.device.CLOCK) .enable_ext_hfosc() .set_lfclk_src_external(clocks::LfOscConfiguration::NoExternalNoBypass) .start_lfclk(); let uarte0 = uarte::Uarte::new( cx.device.UARTE0, uarte::Pins { txd: port0 .p0_06 .into_push_pull_output(gpio::Level::High) .degrade(), rxd: port0.p0_08.into_floating_input().degrade(), cts: Some(port0.p0_07.into_floating_input().degrade()), rts: Some( port0 .p0_05 .into_push_pull_output(gpio::Level::High) .degrade(), ), }, uarte::Parity::EXCLUDED, uarte::Baudrate::BAUD115200, ); let (q_producer, q_consumer) = PKT_BUFFER.try_split().unwrap(); let mut radio = Radio::new(cx.device.RADIO); radio.set_channel(15); radio.set_transmission_power(8); radio.receive_prepare(); init::LateResources { radio, itm: cx.core.ITM, uart: uarte0, rx_producer: q_producer, rx_consumer: q_consumer, } } #[task(binds = RADIO, resources = [radio, rx_producer],)] fn radio(cx: radio::Context) { let radio = cx.resources.radio; let queue
let mut buffer = [0u8; MAX_PACKET_LENGHT]; let _ = radio.receive(&mut buffer); } } } #[idle(resources = [rx_consumer, uart, itm])] fn idle(cx: idle::Context) -> ! { let mut host_packet = [0u8; MAX_PACKET_LENGHT * 2]; let queue = cx.resources.rx_consumer; let uarte = cx.resources.uart; let itm_port = &mut cx.resources.itm.stim[0]; iprintln!(itm_port, "~ listening ~"); loop { if let Ok(grant) = queue.read() { let packet_length = grant[0] as usize; match esercom::com_encode( esercom::MessageType::RadioReceive, &grant[1..packet_length], &mut host_packet, ) { Ok(written) => { uarte.write(&host_packet[..written]).unwrap(); } Err(_) => { iprintln!(itm_port, "Failed to encode packet"); } } grant.release(packet_length); } } } };
= cx.resources.rx_producer; match queue.grant_exact(MAX_PACKET_LENGHT) { Ok(mut grant) => { if grant.buf().len() < MAX_PACKET_LENGHT { grant.commit(0); } else { if let Ok(packet_len) = radio.receive_slice(grant.buf()) { grant.commit(packet_len); } else { grant.commit(0); } } } Err(_) => {
function_block-random_span
[ { "content": "fn clear(slice: &mut [u8]) {\n\n for v in slice.iter_mut() {\n\n *v = 0;\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum EncryptDecrypt {\n\n /// Encryp operation\n\n Encrypt = 0,\n\n /// Decryp operation\n\n Decrypt = 1,\n\n}\n\n\n\n/// Block cipher key type\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum KeyType {\n\n /// 128-bit AES key\n\n Aes128 = 0,\n\n /// 192-bit AES key\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 0, "score": 58724.10068552845 }, { "content": "fn main() {\n\n let crate_path = PathBuf::from(env::var(\"CARGO_MANIFEST_DIR\").unwrap());\n\n println!(\n\n \"cargo:rustc-link-search={}\",\n\n crate_path\n\n .join(\"nrf_cc310/lib/cortex-m4/hard-float/no-interrupts\")\n\n .to_string_lossy()\n\n );\n\n println!(\"cargo:rustc-link-lib=static=nrf_cc310_0.9.13\");\n\n}\n", "file_path": "nrf52-cryptocell/build.rs", "rank": 1, "score": 35361.30912793017 }, { "content": "#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\nstruct KeyData {\n\n /// Key data\n\n pub key: *const u8,\n\n /// Key length\n\n pub size: usize,\n\n}\n\n\n\n/// CryptoCell AES context\n\npub struct AesContext {\n\n context: CryptoCellAesContext,\n\n}\n\n\n\nimpl AesContext {\n\n pub fn new(encrypt: EncryptDecrypt, mode: AesOperationMode, padding_type: PaddingType) -> Self {\n\n let mut context = CryptoCellAesContext { buff: [0u32; 19] };\n\n let ctx_ptr = &mut context as *mut CryptoCellAesContext;\n\n let result =\n\n unsafe { SaSi_AesInit(ctx_ptr, encrypt as u32, mode as u32, padding_type as u32) };\n\n if result != 0 {\n\n panic!(\"Failed to initialize AES context {:08x}\", result);\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 2, "score": 34967.02513803529 }, { "content": "#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\nstruct CryptoCellAesContext {\n\n buff: [u32; 19usize],\n\n}\n\n\n\nextern \"C\" {\n\n /// Initialize CryptoCell runtime library\n\n fn SaSi_LibInit() -> u32;\n\n /// Finalize library operations\n\n fn 
SaSi_LibFini();\n\n /// Initialize AES context\n\n fn SaSi_AesInit(\n\n // The context to initalize\n\n context: *mut CryptoCellAesContext,\n\n // Encrypt (0) or decrypt (1)\n\n decrypt: u32,\n\n // AES operation mode,\n\n // 0 - ECB\n\n // 1 - CBC\n\n // 2 - CBC-MAC\n\n // 3 - CTR\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 3, "score": 33102.88066948175 }, { "content": "/// Terminates the application and makes `probe-run` exit with exit-code = 0\n\npub fn exit() -> ! {\n\n loop {\n\n cortex_m::asm::bkpt();\n\n }\n\n}\n", "file_path": "nrf52840-dk/src/lib.rs", "rank": 4, "score": 31959.53850338287 }, { "content": "/// Terminates the application and makes `probe-run` exit with exit-code = 0\n\npub fn exit() -> ! {\n\n loop {\n\n cortex_m::asm::bkpt();\n\n }\n\n}\n", "file_path": "adafruit-feather-nrf52840-express/src/lib.rs", "rank": 5, "score": 30102.016251821366 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\nuse nrf52840_dk as _;\n\n\n\nuse rtic::app;\n\n\n\nuse nrf52840_hal::{clocks, gpio, uarte};\n\n\n\nuse psila_nrf52::{\n\n pac::{self, radio::state::STATE_A},\n\n radio::{Radio, MAX_PACKET_LENGHT},\n\n};\n\n\n\n#[app(device = nrf52840_pac, peripherals = true)]\n\nconst APP: () = {\n\n struct Resources {\n\n radio: Radio,\n\n uart: uarte::Uarte<pac::UARTE0>,\n\n #[init(11)]\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-energy-detect.rs", "rank": 6, "score": 27.130683920748645 }, { "content": "#[app(device = nrf52840_pac, peripherals = true)]\n\nconst APP: () = {\n\n struct Resources {\n\n radio: Radio,\n\n uart: uarte::Uarte<pac::UARTE0>,\n\n rx_producer: bbqueue::Producer<'static, PACKET_BUFFER_SIZE>,\n\n rx_consumer: bbqueue::Consumer<'static, PACKET_BUFFER_SIZE>,\n\n }\n\n\n\n #[init]\n\n fn init(cx: init::Context) -> init::LateResources {\n\n let port0 = gpio::p0::Parts::new(cx.device.P0);\n\n // Configure to use external clocks, and start them\n\n let _clocks = clocks::Clocks::new(cx.device.CLOCK)\n\n .enable_ext_hfosc()\n\n 
.set_lfclk_src_external(clocks::LfOscConfiguration::NoExternalNoBypass)\n\n .start_lfclk();\n\n let uarte0 = uarte::Uarte::new(\n\n cx.device.UARTE0,\n\n uarte::Pins {\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-listener.rs", "rank": 8, "score": 25.620668752034653 }, { "content": "static PKT_BUFFER: BBBuffer<PACKET_BUFFER_SIZE> = BBBuffer::new();\n\n\n\n#[app(device = nrf52840_pac, peripherals = true)]\n\nconst APP: () = {\n\n struct Resources {\n\n radio: Radio,\n\n uart: uarte::Uarte<pac::UARTE0>,\n\n rx_producer: bbqueue::Producer<'static, PACKET_BUFFER_SIZE>,\n\n rx_consumer: bbqueue::Consumer<'static, PACKET_BUFFER_SIZE>,\n\n }\n\n\n\n #[init]\n\n fn init(cx: init::Context) -> init::LateResources {\n\n rtt_init_print!();\n\n\n\n let port0 = gpio::p0::Parts::new(cx.device.P0);\n\n // Configure to use external clocks, and start them\n\n let _clocks = clocks::Clocks::new(cx.device.CLOCK)\n\n .enable_ext_hfosc()\n\n .set_lfclk_src_external(clocks::LfOscConfiguration::NoExternalNoBypass)\n", "file_path": "nrf52840-mdk/examples/nrf52840-mdk-listener.rs", "rank": 9, "score": 23.989435104516335 }, { "content": " channel: u8,\n\n }\n\n\n\n #[init]\n\n fn init(cx: init::Context) -> init::LateResources {\n\n let port0 = gpio::p0::Parts::new(cx.device.P0);\n\n // Configure to use external clocks, and start them\n\n let _clocks = clocks::Clocks::new(cx.device.CLOCK)\n\n .enable_ext_hfosc()\n\n .set_lfclk_src_external(clocks::LfOscConfiguration::NoExternalNoBypass)\n\n .start_lfclk();\n\n let uarte0 = uarte::Uarte::new(\n\n cx.device.UARTE0,\n\n uarte::Pins {\n\n txd: port0\n\n .p0_06\n\n .into_push_pull_output(gpio::Level::High)\n\n .degrade(),\n\n rxd: port0.p0_08.into_floating_input().degrade(),\n\n cts: Some(port0.p0_07.into_floating_input().degrade()),\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-energy-detect.rs", "rank": 10, "score": 22.29105132198954 }, { "content": " rts: Some(\n\n port0\n\n .p0_05\n\n .into_push_pull_output(gpio::Level::High)\n\n 
.degrade(),\n\n ),\n\n },\n\n uarte::Parity::EXCLUDED,\n\n uarte::Baudrate::BAUD115200,\n\n );\n\n\n\n let mut radio = Radio::new(cx.device.RADIO);\n\n radio.set_channel(15);\n\n radio.start_energy_detect(65536);\n\n\n\n init::LateResources {\n\n radio,\n\n uart: uarte0,\n\n }\n\n }\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-energy-detect.rs", "rank": 11, "score": 21.96106697297344 }, { "content": " .start_lfclk();\n\n let uarte0 = uarte::Uarte::new(\n\n cx.device.UARTE0,\n\n uarte::Pins {\n\n txd: port0\n\n .p0_20\n\n .into_push_pull_output(gpio::Level::High)\n\n .degrade(),\n\n rxd: port0.p0_19.into_floating_input().degrade(),\n\n cts: None,\n\n rts: None,\n\n },\n\n uarte::Parity::EXCLUDED,\n\n uarte::Baudrate::BAUD115200,\n\n );\n\n\n\n let (q_producer, q_consumer) = PKT_BUFFER.try_split().unwrap();\n\n\n\n let mut radio = Radio::new(cx.device.RADIO);\n\n radio.set_channel(15);\n", "file_path": "nrf52840-mdk/examples/nrf52840-mdk-listener.rs", "rank": 12, "score": 21.892879650229386 }, { "content": " txd: port0\n\n .p0_06\n\n .into_push_pull_output(gpio::Level::High)\n\n .degrade(),\n\n rxd: port0.p0_08.into_floating_input().degrade(),\n\n cts: Some(port0.p0_07.into_floating_input().degrade()),\n\n rts: Some(\n\n port0\n\n .p0_05\n\n .into_push_pull_output(gpio::Level::High)\n\n .degrade(),\n\n ),\n\n },\n\n uarte::Parity::EXCLUDED,\n\n uarte::Baudrate::BAUD115200,\n\n );\n\n\n\n let (q_producer, q_consumer) = PKT_BUFFER.try_split().unwrap();\n\n\n\n let mut radio = Radio::new(cx.device.RADIO);\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-listener.rs", "rank": 13, "score": 20.582804563444427 }, { "content": " txd: port0\n\n .p0_06\n\n .into_push_pull_output(gpio::Level::High)\n\n .degrade(),\n\n rxd: port0.p0_08.into_floating_input().degrade(),\n\n cts: Some(port0.p0_07.into_floating_input().degrade()),\n\n rts: Some(\n\n port0\n\n .p0_05\n\n .into_push_pull_output(gpio::Level::High)\n\n .degrade(),\n\n ),\n\n },\n\n uarte::Parity::EXCLUDED,\n\n 
uarte::Baudrate::BAUD115200,\n\n );\n\n\n\n init::LateResources {\n\n timer_0: cx.device.TIMER0,\n\n button_1,\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-blinky.rs", "rank": 14, "score": 20.337433228022114 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\nuse nrf52840_dk as _;\n\n\n\nuse core::fmt::Write;\n\n\n\nuse rtic::app;\n\n\n\nuse embedded_hal::digital::v2::{InputPin, OutputPin};\n\n\n\nuse crate::hal::pac;\n\nuse nrf52840_hal as hal;\n\n\n\nuse hal::{clocks, gpio, timer::Instance, uarte};\n\nuse pac::{RTC0, TIMER0, UARTE0};\n\n\n\n#[app(device = crate::hal::pac, peripherals = true)]\n\nconst APP: () = {\n\n struct Resources {\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-blinky.rs", "rank": 17, "score": 19.52429217982299 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\nuse panic_rtt_target as _;\n\n\n\nuse rtt_target::{rprintln, rtt_init_print};\n\n\n\nuse rtic::app;\n\n\n\nuse bbqueue::{self, BBBuffer};\n\n\n\nuse nrf52840_hal::{clocks, gpio, uarte};\n\n\n\nuse nrf52840_pac as pac;\n\n\n\nuse psila_nrf52::radio::{Radio, MAX_PACKET_LENGHT};\n\n\n\n// Use a packet buffer that can hold 16 packages\n\nconst PACKET_BUFFER_SIZE: usize = 2048;\n\n\n", "file_path": "nrf52840-mdk/examples/nrf52840-mdk-listener.rs", "rank": 18, "score": 18.866307623961006 }, { "content": " }\n\n (_, _, _) => Err(ClusterLibraryStatus::UnsupportedClusterCommand),\n\n }\n\n }\n\n}\n\n\n\n#[app(device = nrf52840_pac, peripherals = true)]\n\nconst APP: () = {\n\n struct Resources {\n\n timer: pac::TIMER1,\n\n radio: Radio,\n\n service: PsilaService<'static, CryptoCellBackend, ClusterHandler, TX_BUFFER_SIZE>,\n\n rx_producer: bbqueue::Producer<'static, RX_BUFFER_SIZE>,\n\n rx_consumer: bbqueue::Consumer<'static, RX_BUFFER_SIZE>,\n\n tx_consumer: bbqueue::Consumer<'static, TX_BUFFER_SIZE>,\n\n }\n\n\n\n #[init]\n\n fn init(cx: init::Context) -> init::LateResources {\n\n let mut timer0 = cx.device.TIMER0;\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 
19, "score": 18.755241682603653 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\nuse nrf52840_dk as _;\n\n\n\nuse rtic::app;\n\n\n\nuse bbqueue::{self, BBBuffer};\n\n\n\nuse nrf52840_hal::{clocks, gpio, uarte};\n\n\n\nuse nrf52840_pac as pac;\n\n\n\nuse psila_nrf52::radio::{Radio, MAX_PACKET_LENGHT};\n\n\n\n// Use a packet buffer that can hold 16 packages\n\nconst PACKET_BUFFER_SIZE: usize = 2048;\n\n\n\nstatic PKT_BUFFER: BBBuffer<PACKET_BUFFER_SIZE> = BBBuffer::new();\n\n\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-listener.rs", "rank": 20, "score": 18.41452679740816 }, { "content": " radio.set_transmission_power(8);\n\n radio.receive_prepare();\n\n\n\n rprintln!(\"Initialise late resources\");\n\n\n\n init::LateResources {\n\n radio,\n\n uart: uarte0,\n\n rx_producer: q_producer,\n\n rx_consumer: q_consumer,\n\n }\n\n }\n\n\n\n #[task(binds = RADIO, resources = [radio, rx_producer])]\n\n fn radio(cx: radio::Context) {\n\n let radio = cx.resources.radio;\n\n let queue = cx.resources.rx_producer;\n\n\n\n match queue.grant_exact(MAX_PACKET_LENGHT) {\n\n Ok(mut grant) => {\n", "file_path": "nrf52840-mdk/examples/nrf52840-mdk-listener.rs", "rank": 22, "score": 16.947570631450073 }, { "content": " radio.set_channel(15);\n\n radio.set_transmission_power(8);\n\n radio.receive_prepare();\n\n\n\n init::LateResources {\n\n radio,\n\n uart: uarte0,\n\n rx_producer: q_producer,\n\n rx_consumer: q_consumer,\n\n }\n\n }\n\n\n\n #[task(binds = RADIO, resources = [radio, rx_producer],)]\n\n fn radio(cx: radio::Context) {\n\n let radio = cx.resources.radio;\n\n let queue = cx.resources.rx_producer;\n\n\n\n match queue.grant_exact(MAX_PACKET_LENGHT) {\n\n Ok(mut grant) => {\n\n if grant.buf().len() < MAX_PACKET_LENGHT {\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-listener.rs", "rank": 24, "score": 16.31374268483256 }, { "content": " let led_1 = port0\n\n .p0_13\n\n .into_push_pull_output(gpio::Level::Low)\n\n .degrade();\n\n let led_2 = port0\n\n .p0_14\n\n 
.into_push_pull_output(gpio::Level::High)\n\n .degrade();\n\n let led_3 = port0\n\n .p0_15\n\n .into_push_pull_output(gpio::Level::High)\n\n .degrade();\n\n let led_4 = port0\n\n .p0_16\n\n .into_push_pull_output(gpio::Level::High)\n\n .degrade();\n\n\n\n let uart = uarte::Uarte::new(\n\n cx.device.UARTE0,\n\n uarte::Pins {\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-blinky.rs", "rank": 25, "score": 15.588792557439781 }, { "content": " (_, _, _) => {\n\n defmt::info!(\"Operation {=u16} {=u16} {=u8}\", profile, cluster, command);\n\n Err(ClusterLibraryStatus::UnsupportedClusterCommand)\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[app(device = nrf52840_pac, peripherals = true)]\n\nconst APP: () = {\n\n struct Resources {\n\n timer: pac::TIMER1,\n\n radio: Radio,\n\n service: PsilaService<'static, CryptoCellBackend, ClusterHandler, TX_BUFFER_SIZE>,\n\n rx_producer: bbqueue::Producer<'static, RX_BUFFER_SIZE>,\n\n rx_consumer: bbqueue::Consumer<'static, RX_BUFFER_SIZE>,\n\n tx_consumer: bbqueue::Consumer<'static, TX_BUFFER_SIZE>,\n\n }\n\n\n\n #[init]\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 26, "score": 15.312019955789205 }, { "content": " let mut host_packet = [0u8; MAX_PACKET_LENGHT * 2];\n\n let queue = cx.resources.rx_consumer;\n\n let uart = cx.resources.uart;\n\n\n\n loop {\n\n if let Ok(grant) = queue.read() {\n\n let packet_length = grant[0] as usize;\n\n match esercom::com_encode(\n\n esercom::MessageType::RadioReceive,\n\n &grant[1..packet_length],\n\n &mut host_packet,\n\n ) {\n\n Ok(written) => {\n\n uart.write(&host_packet[..written]).unwrap();\n\n }\n\n Err(_) => {}\n\n }\n\n\n\n grant.release(packet_length);\n\n }\n\n }\n\n }\n\n};\n", "file_path": "nrf52840-mdk/examples/nrf52840-mdk-listener.rs", "rank": 27, "score": 14.10370902397062 }, { "content": " fn init(cx: init::Context) -> init::LateResources {\n\n let mut timer0 = cx.device.TIMER0;\n\n timer0.init();\n\n\n\n // Configure to use external clocks, 
and start them\n\n let _clocks = clocks::Clocks::new(cx.device.CLOCK)\n\n .enable_ext_hfosc()\n\n .set_lfclk_src_external(clocks::LfOscConfiguration::NoExternalNoBypass)\n\n .start_lfclk();\n\n\n\n let port0 = gpio::p0::Parts::new(cx.device.P0);\n\n let led = port0\n\n .p0_16\n\n .into_push_pull_output(gpio::Level::Low)\n\n .degrade();\n\n\n\n let handler = ClusterHandler::new(led, cx.device.PWM0);\n\n\n\n // MAC (EUI-48) address to EUI-64\n\n // Add FF FE in the middle\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 28, "score": 13.567700558903931 }, { "content": " button_1: gpio::Pin<gpio::Input<gpio::PullUp>>,\n\n button_2: gpio::Pin<gpio::Input<gpio::PullUp>>,\n\n button_3: gpio::Pin<gpio::Input<gpio::PullUp>>,\n\n button_4: gpio::Pin<gpio::Input<gpio::PullUp>>,\n\n led_1: gpio::Pin<gpio::Output<gpio::PushPull>>,\n\n led_2: gpio::Pin<gpio::Output<gpio::PushPull>>,\n\n led_3: gpio::Pin<gpio::Output<gpio::PushPull>>,\n\n led_4: gpio::Pin<gpio::Output<gpio::PushPull>>,\n\n #[init(false)]\n\n on_off: bool,\n\n rtc_0: hal::rtc::Rtc<RTC0>,\n\n timer_0: TIMER0,\n\n uart: uarte::Uarte<UARTE0>,\n\n }\n\n\n\n #[init]\n\n fn init(cx: init::Context) -> init::LateResources {\n\n // Configure to use external clocks, and start them\n\n let _clocks = clocks::Clocks::new(cx.device.CLOCK)\n\n .enable_ext_hfosc()\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-blinky.rs", "rank": 29, "score": 13.544490400195194 }, { "content": "\n\n #[task(binds = RADIO, resources = [channel, radio, uart],)]\n\n fn radio(cx: radio::Context) {\n\n let uarte = cx.resources.uart;\n\n let radio = cx.resources.radio;\n\n let mut host_packet = [0u8; (MAX_PACKET_LENGHT as usize) * 2];\n\n\n\n let energy_level = radio.report_energy_detect();\n\n if let Some(energy_level) = energy_level {\n\n let mut packet = [0u8; 2];\n\n packet[0] = radio.get_channel();\n\n packet[1] = energy_level;\n\n match esercom::com_encode(\n\n 
esercom::MessageType::EnergyDetect,\n\n &packet,\n\n &mut host_packet,\n\n ) {\n\n Ok(written) => {\n\n uarte.write(&host_packet[..written]).unwrap();\n\n }\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-energy-detect.rs", "rank": 30, "score": 13.543362412328607 }, { "content": " let mut host_packet = [0u8; MAX_PACKET_LENGHT * 2];\n\n let queue = cx.resources.rx_consumer;\n\n let uarte = cx.resources.uart;\n\n\n\n defmt::info!(\"~ listening ~\");\n\n\n\n loop {\n\n if let Ok(grant) = queue.read() {\n\n let packet_length = grant[0] as usize;\n\n match esercom::com_encode(\n\n esercom::MessageType::RadioReceive,\n\n &grant[1..packet_length],\n\n &mut host_packet,\n\n ) {\n\n Ok(written) => {\n\n uarte.write(&host_packet[..written]).unwrap();\n\n }\n\n Err(_) => {\n\n defmt::info!(\"Failed to encode packet\");\n\n }\n\n }\n\n grant.release(packet_length);\n\n }\n\n }\n\n }\n\n};\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-listener.rs", "rank": 31, "score": 13.368207428775392 }, { "content": "\n\n #[task(binds = TIMER1, resources = [service, timer], spawn = [radio_tx])]\n\n fn timer(cx: timer::Context) {\n\n let timer = cx.resources.timer;\n\n let service = cx.resources.service;\n\n\n\n if timer.is_compare_event(1) {\n\n timer.ack_compare_event(1);\n\n let _ = service.update(timer.now());\n\n timer.fire_in(1, TIMER_SECOND);\n\n }\n\n let _ = cx.spawn.radio_tx();\n\n }\n\n\n\n #[task(binds = RADIO, resources = [radio, service, rx_producer], spawn = [radio_tx])]\n\n fn radio(cx: radio::Context) {\n\n let mut packet = [0u8; MAX_PACKET_LENGHT as usize];\n\n let radio = cx.resources.radio;\n\n let service = cx.resources.service;\n\n let queue = cx.resources.rx_producer;\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 32, "score": 11.447152439420742 }, { "content": " let queue = cx.resources.rx_consumer;\n\n let service = cx.resources.service;\n\n\n\n if let Ok(grant) = queue.read() {\n\n let timestamp = cx.resources.timer.now();\n\n let 
packet_length = grant[0] as usize;\n\n if let Err(_) = service.receive(timestamp, &grant[1..packet_length - 1]) {\n\n defmt::warn!(\"service receive failed\");\n\n }\n\n grant.release(packet_length);\n\n let _ = cx.spawn.radio_tx();\n\n }\n\n }\n\n\n\n #[task(resources = [radio, tx_consumer], spawn = [radio_rx])]\n\n fn radio_tx(cx: radio_tx::Context) {\n\n let queue = cx.resources.tx_consumer;\n\n let radio = cx.resources.radio;\n\n\n\n if !radio.is_tx_busy() {\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 33, "score": 11.417247574143875 }, { "content": " let timer = cx.resources.timer;\n\n\n\n if let Ok(grant) = queue.read() {\n\n let timestamp = timer.now();\n\n let packet_length = grant[0] as usize;\n\n if let Err(_) = service.receive(timestamp, &grant[1..packet_length - 1]) {\n\n defmt::warn!(\"service receive failed\");\n\n }\n\n grant.release(packet_length);\n\n let _ = cx.spawn.radio_tx();\n\n }\n\n }\n\n\n\n #[task(resources = [radio, tx_consumer], spawn = [radio_rx])]\n\n fn radio_tx(cx: radio_tx::Context) {\n\n let queue = cx.resources.tx_consumer;\n\n let radio = cx.resources.radio;\n\n\n\n if !radio.is_tx_busy() {\n\n if let Ok(grant) = queue.read() {\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 34, "score": 11.19796420060341 }, { "content": "use psila_nrf52::{\n\n radio::{Radio, MAX_PACKET_LENGHT},\n\n timer::Timer,\n\n};\n\nuse psila_service::{self, ClusterLibraryHandler, PsilaService};\n\n\n\nconst TX_BUFFER_SIZE: usize = 1024;\n\nconst RX_BUFFER_SIZE: usize = 1024;\n\n\n\nstatic RX_BUFFER: BBBuffer<RX_BUFFER_SIZE> = BBBuffer::new();\n\nstatic TX_BUFFER: BBBuffer<TX_BUFFER_SIZE> = BBBuffer::new();\n\n\n\nuse nrf_smartled::pwm::Pwm;\n\nuse smart_leds::{gamma, RGB8};\n\nuse smart_leds_trait::SmartLedsWrite;\n\n\n\nuse palette::{Pixel, Srgb, Yxy};\n\n\n\nuse byteorder::{ByteOrder, LittleEndian};\n\n\n", "file_path": 
"adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 35, "score": 10.235944364176104 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\nuse adafruit_feather_nrf52840_express as _;\n\n\n\nuse rtic::app;\n\n\n\nuse nrf52840_hal::{clocks, gpio};\n\n\n\nuse nrf52840_pac as pac;\n\n\n\nuse bbqueue::{self, BBBuffer};\n\n\n\nuse nrf52_cryptocell::CryptoCellBackend;\n\nuse psila_data::{\n\n cluster_library::{AttributeDataType, ClusterLibraryStatus},\n\n device_profile::SimpleDescriptor,\n\n security::DEFAULT_LINK_KEY,\n\n ExtendedAddress, Key,\n\n};\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 36, "score": 10.189601741633508 }, { "content": " ExtendedAddress, Key,\n\n};\n\nuse psila_nrf52::{\n\n radio::{Radio, MAX_PACKET_LENGHT},\n\n timer::Timer,\n\n};\n\nuse psila_service::{self, ClusterLibraryHandler, PsilaService};\n\n\n\nconst TX_BUFFER_SIZE: usize = 1024;\n\nconst RX_BUFFER_SIZE: usize = 1024;\n\n\n\nstatic RX_BUFFER: BBBuffer<RX_BUFFER_SIZE> = BBBuffer::new();\n\nstatic TX_BUFFER: BBBuffer<TX_BUFFER_SIZE> = BBBuffer::new();\n\n\n\nconst TIMER_SECOND: u32 = 1_000_000;\n\n\n\nconst MANUFACTURER_NAME: &'static str = \"ERIK of Sweden\";\n\nconst MODEL_IDENTIFIER: &'static str = \"Lampan\";\n\n\n\npub struct ClusterHandler {\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 37, "score": 10.17093949235614 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\nuse nrf52840_dk as _;\n\n\n\nuse rtic::app;\n\n\n\nuse nrf52840_hal::{clocks, gpio};\n\n\n\nuse nrf52840_pac as pac;\n\n\n\nuse bbqueue::{self, BBBuffer};\n\n\n\nuse embedded_hal::digital::v2::OutputPin;\n\n\n\nuse nrf52_cryptocell::CryptoCellBackend;\n\nuse psila_data::{\n\n cluster_library::{AttributeDataType, ClusterLibraryStatus},\n\n device_profile::SimpleDescriptor,\n\n security::DEFAULT_LINK_KEY,\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 38, "score": 10.150450271201088 }, { "content": " 
timer0.init();\n\n\n\n // Configure to use external clocks, and start them\n\n let _clocks = clocks::Clocks::new(cx.device.CLOCK)\n\n .enable_ext_hfosc()\n\n .set_lfclk_src_external(clocks::LfOscConfiguration::NoExternalNoBypass)\n\n .start_lfclk();\n\n\n\n let port0 = gpio::p0::Parts::new(cx.device.P0);\n\n let led_1 = port0\n\n .p0_13\n\n .into_push_pull_output(gpio::Level::Low)\n\n .degrade();\n\n\n\n let handler = ClusterHandler::new(led_1);\n\n\n\n // MAC (EUI-48) address to EUI-64\n\n // Add FF FE in the middle\n\n //\n\n // 01 23 45 67 89 AB\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 39, "score": 10.130953186591778 }, { "content": " }\n\n }\n\n\n\n #[task(binds = TIMER1, resources = [service, timer], spawn = [radio_tx])]\n\n fn timer(cx: timer::Context) {\n\n let timer = cx.resources.timer;\n\n let service = cx.resources.service;\n\n\n\n if timer.is_compare_event(1) {\n\n timer.ack_compare_event(1);\n\n let _ = service.update(timer.now());\n\n timer.fire_in(1, TIMER_SECOND);\n\n }\n\n let _ = cx.spawn.radio_tx();\n\n }\n\n\n\n #[task(binds = RADIO, resources = [radio, service, rx_producer], spawn = [radio_tx])]\n\n fn radio(cx: radio::Context) {\n\n let mut packet = [0u8; MAX_PACKET_LENGHT as usize];\n\n let radio = cx.resources.radio;\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 40, "score": 9.996421920747414 }, { "content": " grant.commit(0);\n\n } else {\n\n match radio.receive_slice(grant.buf()) {\n\n Ok(packet_len) => {\n\n grant.commit(packet_len);\n\n }\n\n Err(_) => {}\n\n }\n\n }\n\n }\n\n Err(_) => {\n\n // Drop package\n\n let mut buffer = [0u8; MAX_PACKET_LENGHT];\n\n let _ = radio.receive(&mut buffer);\n\n }\n\n }\n\n }\n\n\n\n #[idle(resources = [rx_consumer, uart])]\n\n fn idle(cx: idle::Context) -> ! 
{\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-listener.rs", "rank": 42, "score": 9.591392810293144 }, { "content": " if grant.buf().len() < MAX_PACKET_LENGHT {\n\n grant.commit(0);\n\n } else {\n\n if let Ok(packet_len) = radio.receive_slice(grant.buf()) {\n\n grant.commit(packet_len);\n\n } else {\n\n grant.commit(0);\n\n }\n\n }\n\n }\n\n Err(_) => {\n\n // Drop package\n\n let mut buffer = [0u8; MAX_PACKET_LENGHT];\n\n let _ = radio.receive(&mut buffer);\n\n }\n\n }\n\n }\n\n\n\n #[idle(resources = [rx_consumer, uart])]\n\n fn idle(cx: idle::Context) -> ! {\n", "file_path": "nrf52840-mdk/examples/nrf52840-mdk-listener.rs", "rank": 43, "score": 9.111272498172937 }, { "content": " defmt::warn!(\"service handle acknowledge failed, queue full\");\n\n }\n\n _ => {\n\n defmt::warn!(\"service handle acknowledge failed\");\n\n }\n\n },\n\n }\n\n }\n\n }\n\n Err(psila_nrf52::radio::Error::CcaBusy) => {\n\n defmt::warn!(\"CCA Busy\");\n\n }\n\n }\n\n let _ = cx.spawn.radio_tx();\n\n }\n\n\n\n #[task(resources = [rx_consumer, service, timer], spawn = [radio_tx])]\n\n fn radio_rx(cx: radio_rx::Context) {\n\n let queue = cx.resources.rx_consumer;\n\n let service = cx.resources.service;\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 44, "score": 8.964392510192644 }, { "content": " let service = cx.resources.service;\n\n let queue = cx.resources.rx_producer;\n\n\n\n match radio.receive(&mut packet) {\n\n Ok(packet_len) => {\n\n if packet_len > 0 {\n\n match service.handle_acknowledge(&packet[1..packet_len - 1]) {\n\n Ok(to_me) => {\n\n if to_me {\n\n if let Ok(mut grant) = queue.grant_exact(packet_len) {\n\n grant.copy_from_slice(&packet[..packet_len]);\n\n grant.commit(packet_len);\n\n }\n\n }\n\n }\n\n Err(e) => match e {\n\n psila_service::Error::MalformedPacket => {\n\n defmt::warn!(\n\n \"service handle acknowledge failed, malformed package\"\n\n );\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", 
"rank": 45, "score": 8.905486634166412 }, { "content": " *cx.resources.on_off = !*cx.resources.on_off;\n\n }\n\n\n\n #[task(binds = RTC0, resources = [rtc_0, button_4, led_4, uart])]\n\n fn rtc(cx: rtc::Context) {\n\n let _ = cx\n\n .resources\n\n .rtc_0\n\n .is_event_triggered(hal::rtc::RtcInterrupt::Tick);\n\n let button_4 = cx.resources.button_4;\n\n let led_4 = cx.resources.led_4;\n\n let uart = cx.resources.uart;\n\n\n\n match button_4.is_low() {\n\n Ok(true) => {\n\n let _ = write!(uart, \"Button 4\\r\\n\");\n\n let _ = led_4.set_low();\n\n }\n\n Ok(false) => {\n\n let _ = led_4.set_high();\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-blinky.rs", "rank": 46, "score": 8.781182091323435 }, { "content": " .set_lfclk_src_external(clocks::LfOscConfiguration::NoExternalNoBypass)\n\n .start_lfclk();\n\n\n\n cx.device.TIMER0.set_periodic();\n\n cx.device.TIMER0.enable_interrupt();\n\n cx.device.TIMER0.timer_start(1_000_000u32);\n\n\n\n let mut rtc_0 = match hal::rtc::Rtc::new(cx.device.RTC0, 4095) {\n\n Ok(r) => r,\n\n Err(_) => unreachable!(),\n\n };\n\n rtc_0.enable_event(hal::rtc::RtcInterrupt::Tick);\n\n rtc_0.enable_interrupt(hal::rtc::RtcInterrupt::Tick, None);\n\n rtc_0.enable_counter();\n\n\n\n let port0 = gpio::p0::Parts::new(cx.device.P0);\n\n let button_1 = port0.p0_11.into_pullup_input().degrade();\n\n let button_2 = port0.p0_12.into_pullup_input().degrade();\n\n let button_3 = port0.p0_24.into_pullup_input().degrade();\n\n let button_4 = port0.p0_25.into_pullup_input().degrade();\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-blinky.rs", "rank": 47, "score": 8.618945810009583 }, { "content": " if let Ok(grant) = queue.read() {\n\n let packet_length = grant[0] as usize;\n\n let data = &grant[1..=packet_length];\n\n let _ = radio.queue_transmission(data);\n\n grant.release(packet_length + 1);\n\n }\n\n let _ = cx.spawn.radio_rx();\n\n }\n\n }\n\n\n\n extern \"C\" {\n\n fn QDEC();\n\n }\n\n};\n", "file_path": 
"adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 48, "score": 8.560689927898338 }, { "content": "const TIMER_SECOND: u32 = 1_000_000;\n\n\n\nconst MANUFACTURER_NAME: &'static str = \"ERIK of Sweden\";\n\nconst MODEL_IDENTIFIER: &'static str = \"Lampan\";\n\n\n\npub struct ClusterHandler {\n\n on_off: bool,\n\n neopixel: Pwm<pac::PWM0>,\n\n colour: Yxy,\n\n}\n\n\n\nimpl ClusterHandler {\n\n pub fn new(pin: gpio::Pin<gpio::Output<gpio::PushPull>>, pwm: pac::PWM0) -> Self {\n\n let colour = Yxy::new(0.0, 0.0, 0.0);\n\n let neopixel = Pwm::new(pwm, pin);\n\n Self {\n\n on_off: false,\n\n neopixel,\n\n colour,\n\n }\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 49, "score": 8.379840500173305 }, { "content": " let packet_length = grant[0] as usize;\n\n let data = &grant[1..=packet_length];\n\n let _ = radio.queue_transmission(data);\n\n grant.release(packet_length + 1);\n\n }\n\n let _ = cx.spawn.radio_rx();\n\n }\n\n }\n\n\n\n extern \"C\" {\n\n fn QDEC();\n\n }\n\n};\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 50, "score": 8.133001539230646 }, { "content": "#![no_std]\n\n\n\nuse core::sync::atomic::{AtomicUsize, Ordering};\n\n\n\nuse defmt_rtt as _; // global logger\n\nuse panic_probe as _;\n\n// TODO(5) adjust HAL import\n\n// use some_hal as _; // memory layout\n\n\n\ndefmt::timestamp! 
{\n\n \"{=u64}\", {\n\n static COUNT: AtomicUsize = AtomicUsize::new(0);\n\n // NOTE(no-CAS) `timestamps` runs with interrupts disabled\n\n let n = COUNT.load(Ordering::Relaxed);\n\n COUNT.store(n + 1, Ordering::Relaxed);\n\n n as u64\n\n }\n\n}\n\n\n\n/// Terminates the application and makes `probe-run` exit with exit-code = 0\n", "file_path": "adafruit-feather-nrf52840-express/src/lib.rs", "rank": 51, "score": 8.040901910522852 }, { "content": "#![no_std]\n\n\n\nuse core::sync::atomic::{AtomicUsize, Ordering};\n\n\n\nuse defmt_rtt as _; // global logger\n\nuse panic_probe as _;\n\n// TODO(5) adjust HAL import\n\n// use some_hal as _; // memory layout\n\n\n\ndefmt::timestamp! {\n\n \"{=u64}\", {\n\n static COUNT: AtomicUsize = AtomicUsize::new(0);\n\n // NOTE(no-CAS) `timestamps` runs with interrupts disabled\n\n let n = COUNT.load(Ordering::Relaxed);\n\n COUNT.store(n + 1, Ordering::Relaxed);\n\n n as u64\n\n }\n\n}\n\n\n\n/// Terminates the application and makes `probe-run` exit with exit-code = 0\n", "file_path": "nrf52840-dk/src/lib.rs", "rank": 52, "score": 8.040901910522852 }, { "content": " }\n\n psila_service::Error::NotEnoughSpace => {\n\n defmt::warn!(\"service handle acknowledge failed, queue full\");\n\n }\n\n _ => {\n\n defmt::warn!(\"service handle acknowledge failed\");\n\n }\n\n },\n\n }\n\n }\n\n }\n\n Err(psila_nrf52::radio::Error::CcaBusy) => {\n\n defmt::warn!(\"CCA Busy\");\n\n }\n\n }\n\n let _ = cx.spawn.radio_tx();\n\n }\n\n\n\n #[task(resources = [rx_consumer, service, timer], spawn = [radio_tx])]\n\n fn radio_rx(cx: radio_rx::Context) {\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 53, "score": 7.157208665173293 }, { "content": " //\n\n // 01 23 45 67 89 AB\n\n // / / / \\ \\ \\\n\n // 01 23 45 FF FE 67 89 AB\n\n let devaddr_lo = cx.device.FICR.deviceaddr[0].read().bits();\n\n let devaddr_hi = cx.device.FICR.deviceaddr[1].read().bits() as u16;\n\n let extended_address = 
u64::from(devaddr_hi) << 48\n\n | u64::from(devaddr_lo & 0xff00_0000) << 40\n\n | u64::from(devaddr_lo & 0x00ff_ffff)\n\n | 0x0000_00ff_fe00_0000u64;\n\n let extended_address = ExtendedAddress::new(extended_address);\n\n\n\n let mut timer1 = cx.device.TIMER1;\n\n timer1.init();\n\n timer1.fire_in(1, TIMER_SECOND);\n\n\n\n let mut radio = Radio::new(cx.device.RADIO);\n\n radio.set_channel(15);\n\n radio.set_transmission_power(8);\n\n radio.receive_prepare();\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 54, "score": 6.884079407347516 }, { "content": " // / / / \\ \\ \\\n\n // 01 23 45 FF FE 67 89 AB\n\n let devaddr_lo = cx.device.FICR.deviceaddr[0].read().bits();\n\n let devaddr_hi = cx.device.FICR.deviceaddr[1].read().bits() as u16;\n\n let extended_address = u64::from(devaddr_hi) << 48\n\n | u64::from(devaddr_lo & 0xff00_0000) << 40\n\n | u64::from(devaddr_lo & 0x00ff_ffff)\n\n | 0x0000_00ff_fe00_0000u64;\n\n let extended_address = ExtendedAddress::new(extended_address);\n\n\n\n let mut timer1 = cx.device.TIMER1;\n\n timer1.init();\n\n timer1.fire_in(1, TIMER_SECOND);\n\n\n\n let mut radio = Radio::new(cx.device.RADIO);\n\n radio.set_channel(15);\n\n radio.set_transmission_power(8);\n\n radio.receive_prepare();\n\n\n\n let (rx_producer, rx_consumer) = RX_BUFFER.try_split().unwrap();\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 55, "score": 6.660706058980898 }, { "content": " on_off: bool,\n\n led: gpio::Pin<gpio::Output<gpio::PushPull>>,\n\n}\n\n\n\nimpl ClusterHandler {\n\n pub fn new(mut led: gpio::Pin<gpio::Output<gpio::PushPull>>) -> Self {\n\n let _ = led.set_high();\n\n Self { on_off: false, led }\n\n }\n\n\n\n pub fn set_on_off(&mut self, enable: bool) {\n\n self.on_off = enable;\n\n if self.on_off {\n\n let _ = self.led.set_low();\n\n } else {\n\n let _ = self.led.set_high();\n\n }\n\n }\n\n}\n\n\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 56, "score": 
6.33731428799031 }, { "content": "\n\n match radio.receive(&mut packet) {\n\n Ok(packet_len) => {\n\n if packet_len > 0 {\n\n match service.handle_acknowledge(&packet[1..packet_len - 1]) {\n\n Ok(to_me) => {\n\n if to_me {\n\n if let Ok(mut grant) = queue.grant_exact(packet_len) {\n\n grant.copy_from_slice(&packet[..packet_len]);\n\n grant.commit(packet_len);\n\n }\n\n }\n\n }\n\n Err(e) => match e {\n\n psila_service::Error::MalformedPacket => {\n\n defmt::warn!(\n\n \"service handle acknowledge failed, malformed package\"\n\n );\n\n }\n\n psila_service::Error::NotEnoughSpace => {\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 57, "score": 5.850106411713368 }, { "content": " // AES context\n\n context: *mut CryptoCellAesContext,\n\n // Size of data to process\n\n dataInSize: usize,\n\n // Data to process\n\n dataIn: *const u8,\n\n // Size of data buffer provided\n\n dataInBufferSize: usize,\n\n // Pointer to output data\n\n dataOut: *mut u8,\n\n // Size of output buffer provided\n\n DataOutBufferSize: *mut usize,\n\n ) -> u32;\n\n /// Clean up a AES context\n\n fn SaSi_AesFree(\n\n // AES context\n\n context: *mut CryptoCellAesContext,\n\n ) -> u32;\n\n}\n\n\n\n/// CryptoCell Key Data\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 58, "score": 5.797527106265164 }, { "content": " button_2,\n\n button_3,\n\n button_4,\n\n led_1,\n\n led_2,\n\n led_3,\n\n led_4,\n\n rtc_0,\n\n uart,\n\n }\n\n }\n\n\n\n #[task(binds = TIMER0, resources = [timer_0, led_3, on_off])]\n\n fn timer(cx: timer::Context) {\n\n cx.resources.timer_0.timer_reset_event();\n\n if *cx.resources.on_off {\n\n let _ = cx.resources.led_3.set_low();\n\n } else {\n\n let _ = cx.resources.led_3.set_high();\n\n }\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-blinky.rs", "rank": 59, "score": 5.738155380018945 }, { "content": " Err(_) => {\n\n defmt::info!(\"Failed to encode packet\");\n\n }\n\n }\n\n let channel = cx.resources.channel.wrapping_add(1);\n\n let channel = 
if channel > 26 { 11 } else { channel };\n\n radio.set_channel(channel);\n\n *cx.resources.channel = channel;\n\n radio.start_energy_detect(65536);\n\n } else {\n\n match radio.state() {\n\n STATE_A::DISABLED => {\n\n defmt::info!(\"DISABLED\");\n\n }\n\n STATE_A::RXRU => {\n\n defmt::info!(\"RXRU\");\n\n }\n\n STATE_A::RXIDLE => {\n\n defmt::info!(\"RX IDLE\");\n\n }\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-energy-detect.rs", "rank": 60, "score": 5.662698742019388 }, { "content": " }\n\n Err(_) => {}\n\n }\n\n }\n\n\n\n #[idle(resources = [button_2, led_2])]\n\n fn idle(cx: idle::Context) -> ! {\n\n let button_2 = cx.resources.button_2;\n\n let led_2 = cx.resources.led_2;\n\n\n\n loop {\n\n match button_2.is_low() {\n\n Ok(true) => {\n\n let _ = led_2.set_low();\n\n }\n\n Ok(false) => {\n\n let _ = led_2.set_high();\n\n }\n\n Err(_) => {}\n\n }\n\n }\n\n }\n\n};\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-blinky.rs", "rank": 61, "score": 5.623726381079125 }, { "content": " let (tx_producer, tx_consumer) = TX_BUFFER.try_split().unwrap();\n\n\n\n let cryptocell = CryptoCellBackend::new(cx.device.CRYPTOCELL);\n\n let default_link_key = Key::from(DEFAULT_LINK_KEY);\n\n\n\n init::LateResources {\n\n timer: timer1,\n\n radio,\n\n service: PsilaService::new(\n\n cryptocell,\n\n tx_producer,\n\n extended_address,\n\n default_link_key,\n\n handler,\n\n ),\n\n rx_producer,\n\n rx_consumer,\n\n tx_consumer,\n\n }\n\n }\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 62, "score": 5.586683164679361 }, { "content": " }\n\n }\n\n\n\n if valid {\n\n Ok(encrypted.len())\n\n } else {\n\n clear(decrypted);\n\n Ok(0)\n\n }\n\n }\n\n\n\n fn ccmstar_encrypt(\n\n &mut self,\n\n key: &[u8],\n\n nonce: &[u8],\n\n message: &[u8],\n\n mic: &mut [u8],\n\n aad: &[u8],\n\n output: &mut [u8],\n\n ) -> Result<usize, psila_crypto::Error> {\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 64, "score": 5.46137565737215 }, { "content": " 
output.as_mut_ptr(),\n\n (&mut output_length) as *mut usize,\n\n )\n\n };\n\n if result != 0 {\n\n return Err(Error::Other(result));\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl core::ops::Drop for AesContext {\n\n fn drop(&mut self) {\n\n unsafe { SaSi_AesFree(self.context()) };\n\n }\n\n}\n\n\n\npub struct CryptoCellBackend {\n\n cryptocell: CRYPTOCELL,\n\n cipher: AesContext,\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 65, "score": 5.384616858833958 }, { "content": "# IEEE 802.15.4 with nRF52840\n\n\n\nWork in progress 802.15.4 radio for nRF52840-DK.\n\n\n\n## Running\n\n\n\nThese examples use `probe-run` to flash an run them. For example,\n\n\n\n```\n\ncargo run --example nrf52840-dk-psila\n\n```\n\n\n\n## Examples\n\n\n\n### Blinky\n\n\n\nSimple led and button example\n\n\n\n### Energy Detect\n\n\n\nExploring energy detect feature of the nRF52 radio.\n\n\n\n### Listener\n\n\n\nListen for 802.15.4 messages and sending them to the host using serial.\n\n\n\n### Psila\n\n\n\nA Zigbee on/off light\n", "file_path": "nrf52840-dk/README.md", "rank": 66, "score": 5.333220084576846 }, { "content": " EncryptDecrypt::Encrypt,\n\n AesOperationMode::Ecb,\n\n PaddingType::None,\n\n );\n\n\n\n Self { cryptocell, cipher }\n\n }\n\n\n\n /// Create\n\n fn make_flag(a_length: usize, big_m: usize, big_l: usize) -> u8 {\n\n let mut flag = if a_length > 0 { 0x40 } else { 0 };\n\n flag = if big_m > 0 {\n\n flag | ((((big_m - 2) / 2) as u8) & 0x07) << 3\n\n } else {\n\n flag\n\n };\n\n flag |= 0x07 & ((big_l - 1) as u8);\n\n flag\n\n }\n\n}\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 67, "score": 5.188904615024052 }, { "content": "\n\n let (rx_producer, rx_consumer) = RX_BUFFER.try_split().unwrap();\n\n let (tx_producer, tx_consumer) = TX_BUFFER.try_split().unwrap();\n\n\n\n let cryptocell = CryptoCellBackend::new(cx.device.CRYPTOCELL);\n\n let default_link_key = Key::from(DEFAULT_LINK_KEY);\n\n\n\n init::LateResources {\n\n timer: timer1,\n\n radio,\n\n service: 
PsilaService::new(\n\n cryptocell,\n\n tx_producer,\n\n extended_address,\n\n default_link_key,\n\n handler,\n\n ),\n\n rx_producer,\n\n rx_consumer,\n\n tx_consumer,\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 68, "score": 5.188818488263028 }, { "content": "# Experiments with nRF52840-MDK\n\n\n\nWork in progress 802.15.4 radio for Makerdiary nRF52840-MDK.\n\n\n\n## Running\n\n\n\nThese examples use cargo embed to run them. For example,\n\n\n\n```\n\ncargo embed --target thumbv7em-none-eabihf --example nrf52840-mdk-listener\n\n```\n\n\n\n## Examples\n\n\n\n### Listener\n\n\n\nListen for 802.15.4 messages and sending them to the host using serial.\n", "file_path": "nrf52840-mdk/README.md", "rank": 69, "score": 5.035075605525288 }, { "content": "}\n\n\n\nimpl core::ops::Drop for CryptoCellBackend {\n\n fn drop(&mut self) {\n\n unsafe { SaSi_LibFini() }\n\n self.cryptocell.enable.write(|w| w.enable().clear_bit());\n\n }\n\n}\n\n\n\nconst AAD_B0_LEN: usize = BLOCK_SIZE - LENGTH_FIELD_LENGTH;\n\n\n\nimpl CryptoCellBackend {\n\n pub fn new(cryptocell: CRYPTOCELL) -> Self {\n\n cryptocell.enable.write(|w| w.enable().set_bit());\n\n\n\n if unsafe { SaSi_LibInit() } != 0 {\n\n panic!(\"Failed to initialize SaSi library\");\n\n }\n\n\n\n let cipher = AesContext::new(\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 70, "score": 5.01172997240783 }, { "content": " // Size of the key struct\n\n keyDataSize: usize,\n\n ) -> u32;\n\n /// Set IV (or counter) for the AES context\n\n fn SaSi_AesSetIv(context: *mut CryptoCellAesContext, iv: *const u8) -> u32;\n\n /// Get IV (or counter) for the AES context\n\n fn SaSi_AesGetIv(context: *mut CryptoCellAesContext, iv: *mut u8) -> u32;\n\n /// Process a block of data\n\n fn SaSi_AesBlock(\n\n // AES context\n\n context: *mut CryptoCellAesContext,\n\n // Data to process\n\n dataIn: *const u8,\n\n // Size of data to process\n\n dataInSize: usize,\n\n // Pointer to output data\n\n 
dataOut: *mut u8,\n\n ) -> u32;\n\n /// Finalize a cipher operation\n\n fn SaSi_AesFinish(\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 71, "score": 5.005934765175564 }, { "content": "# IEEE 802.15.4 with nRF52840\n\n\n\nWork in progress 802.15.4 radio for Adafruit feather nRF52840 express.\n\n\n\n## Running\n\n\n\nThese examples use `probe-run` to flash an run them. For example,\n\n\n\n```\n\ncargo run --example feather-express-psila\n\n```\n\n\n\n## Examples\n\n\n\n### Listener\n\n\n\nListen for 802.15.4 messages and sending them to the host using serial.\n\n\n\n### Psila\n\n\n\nA Zigbee colour light\n", "file_path": "adafruit-feather-nrf52840-express/README.md", "rank": 72, "score": 4.990301476801718 }, { "content": " }\n\n\n\n Self { context }\n\n }\n\n\n\n fn context(&mut self) -> *mut CryptoCellAesContext {\n\n &mut self.context as *mut CryptoCellAesContext\n\n }\n\n}\n\n\n\nimpl BlockCipher for AesContext {\n\n /// Set the key to be used in the cipher operation\n\n fn set_key(&mut self, key: &[u8]) -> Result<(), Error> {\n\n assert!(key.len() == KEY_SIZE);\n\n let user_key = KeyData {\n\n key: key.as_ptr(),\n\n size: key.len(),\n\n };\n\n let result = unsafe {\n\n SaSi_AesSetKey(\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 73, "score": 4.929784233694747 }, { "content": "//! # nRF52840 CryptoCell for Psila\n\n//!\n\n//! Functions for using the CryptoCell in the nRF52840 for the security in\n\n//! Psila. This uses the nrf_cc310 library provided by Nordic in their SDK.\n\n//!\n\n//! Note that there is a CCM* implementation in the CC310 library provided by\n\n//! Nordic. But that implementation requires the last byte of the nonce to\n\n//! only indicate the MIC length and encryption. For the usage in Psila this\n\n//! 
byte contains more bits of information.\n\n\n\n#![no_std]\n\n\n\nuse nrf52840_pac::CRYPTOCELL;\n\npub use psila_crypto::{\n\n BlockCipher, CryptoBackend, Error, BLOCK_SIZE, KEY_SIZE, LENGTH_FIELD_LENGTH,\n\n};\n\n\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 74, "score": 4.905926620697567 }, { "content": " }\n\n None => {\n\n let mut block = [0u8; BLOCK_SIZE];\n\n let input = iter.remainder();\n\n block[..input.len()].copy_from_slice(input);\n\n // Must feed a full block into finish here, otherwise the\n\n // result will be wrong\n\n cipher.finish(&block, &mut output)?;\n\n break;\n\n }\n\n }\n\n block_index += 1;\n\n }\n\n }\n\n\n\n let mut valid = true;\n\n for (a, b) in tag[..mic.len()].iter().zip(output[..mic.len()].iter()) {\n\n if a != b {\n\n valid = false;\n\n break;\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 75, "score": 4.816964602123809 }, { "content": " cipher.process_block(&block, &mut tag)?;\n\n\n\n if enc_full_block_length > 0 {\n\n cipher.process_block(&encrypted[..enc_full_block_length], decrypted)?;\n\n }\n\n\n\n cipher.finish(\n\n &encrypted[enc_full_block_length..],\n\n &mut decrypted[enc_full_block_length..],\n\n )?;\n\n }\n\n let mut output = [0u8; BLOCK_SIZE];\n\n {\n\n // Validate MIC using AES128-CBC-MAC\n\n let mut cipher = AesContext::new(\n\n EncryptDecrypt::Encrypt,\n\n AesOperationMode::CbcMac,\n\n PaddingType::None,\n\n );\n\n cipher.set_key(key)?;\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 76, "score": 4.537277168303753 }, { "content": "\n\nimpl CryptoBackend for CryptoCellBackend {\n\n fn ccmstar_decrypt(\n\n &mut self,\n\n key: &[u8],\n\n nonce: &[u8],\n\n encrypted: &[u8],\n\n mic: &[u8],\n\n aad: &[u8],\n\n decrypted: &mut [u8],\n\n ) -> Result<usize, Error> {\n\n assert!(key.len() == KEY_SIZE);\n\n assert!(nonce.len() == 13);\n\n assert!(decrypted.len() >= encrypted.len());\n\n\n\n let enc_full_block_length = (encrypted.len() / BLOCK_SIZE) * BLOCK_SIZE;\n\n\n\n let mut tag = [0; 
BLOCK_SIZE];\n\n {\n\n // Decrypt data\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 77, "score": 4.537250223471913 }, { "content": " fn run(\n\n &mut self,\n\n profile: u16,\n\n cluster: u16,\n\n _endpoint: u8,\n\n command: u8,\n\n _arguments: &[u8],\n\n ) -> Result<(), ClusterLibraryStatus> {\n\n match (profile, cluster, command) {\n\n (0x0104, 0x0006, 0x00) => {\n\n self.set_on_off(false);\n\n Ok(())\n\n }\n\n (0x0104, 0x0006, 0x01) => {\n\n self.set_on_off(true);\n\n Ok(())\n\n }\n\n (0x0104, 0x0006, 0x02) => {\n\n self.set_on_off(!self.on_off);\n\n Ok(())\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 78, "score": 4.1135066874144215 }, { "content": "# Work in progress IEEE 802.15.4 for nRF52840\n\n\n\nThis is some experiments with using the nRF52840 radio in 802.14.5 mode. The\n\nexamples in this repository assumes that one of the nRF52840-DK or\n\nnRF52840-MDK, ... boards is used.\n\n\n\nThe host program has been tested with Fedora and Ubuntu Linux.\n\n\n\nThe code is split into following parts.\n\n\n\n## Parts\n\n\n\n### nRF52840 crypto cell\n\n\n\n`nrf52-cryptocell` is a crate for using the nRF52480 crypto cell for AES\n\ncrypto.\n\n\n\n### Target examples\n\n\n\n#### Adafruit Feather nRF52840 Express\n\n\n\n#### Nordic nRF52840-DK\n\n\n\n#### Makerdiary nRF52840-MDK\n\n\n\n### Host\n\n\n\nThe host tool, psila-host, is found in the psila repository.\n\n\n\n## Usage\n\n\n\n 1. Start the host application listening to the nrf52840 USB-to-serial device\n\n 2. Start the target application on the nRF52840\n\n\n\n## License\n\n\n\nLicensed under the MIT license. 
See LICENSE.\n", "file_path": "README.md", "rank": 79, "score": 4.009387431110561 }, { "content": "use std::env;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "nrf52-cryptocell/build.rs", "rank": 80, "score": 3.6486787883281746 }, { "content": " // 4 - XCBC-MAC\n\n // 5 - CMAC\n\n // 6 - XTS\n\n // 7 - CBC-CTS\n\n // 8 - OFB\n\n operationMode: u32,\n\n // Padding type, None (0) or PKCS7 (1)\n\n paddingType: u32,\n\n ) -> u32;\n\n /// Set a key for the AES context\n\n fn SaSi_AesSetKey(\n\n // AES context\n\n context: *mut CryptoCellAesContext,\n\n // Key type used\n\n // 0 - User key\n\n // 1 - Platform key\n\n // 2 - Customer key\n\n keyType: u32,\n\n // Pointer to key struct\n\n keyData: *const cty::c_void,\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 81, "score": 3.509111133862452 }, { "content": " cluster: u16,\n\n _endpoint: u8,\n\n attribute: u16,\n\n value: &mut [u8],\n\n ) -> Result<(AttributeDataType, usize), ClusterLibraryStatus> {\n\n defmt::info!(\n\n \"Read attribute: {=u16} {=u16} {=u16}\",\n\n profile,\n\n cluster,\n\n attribute\n\n );\n\n match (profile, cluster, attribute) {\n\n (0x0104, 0x0000, 0x0000) => {\n\n value[0] = 0x02;\n\n Ok((AttributeDataType::Unsigned8, 1))\n\n }\n\n (0x0104, 0x0000, 0x0004) => {\n\n value[0] = MANUFACTURER_NAME.len() as u8;\n\n let end = MANUFACTURER_NAME.len() + 1;\n\n value[1..end].copy_from_slice(MANUFACTURER_NAME.as_bytes());\n", "file_path": "nrf52840-dk/examples/nrf52840-dk-psila.rs", "rank": 82, "score": 3.3063060451413464 }, { "content": " cipher.finish(&input, &mut new_mic)?;\n\n break;\n\n }\n\n }\n\n None => {\n\n let mut block = [0u8; BLOCK_SIZE];\n\n block[..iter.remainder().len()].copy_from_slice(iter.remainder());\n\n cipher.finish(&block, &mut new_mic)?;\n\n break;\n\n }\n\n }\n\n block_index += 1;\n\n }\n\n }\n\n clear(&mut buffer[..]);\n\n {\n\n let message_blocks = (message.len() + (BLOCK_SIZE - 1)) / BLOCK_SIZE;\n\n let mut offset = 0;\n\n\n\n 
buffer[..message.len()].copy_from_slice(message);\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 83, "score": 3.2336851721812767 }, { "content": " block[..input.len()].copy_from_slice(input);\n\n cipher.process_block(&block, &mut output)?;\n\n }\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n\n\n let block_last = ((encrypted.len() + (BLOCK_SIZE - 1)) / BLOCK_SIZE) - 1;\n\n let mut block_index = 0;\n\n let mut iter = decrypted[..encrypted.len()].chunks_exact(BLOCK_SIZE);\n\n loop {\n\n match iter.next() {\n\n Some(input) => {\n\n if block_index < block_last {\n\n cipher.process_block(input, &mut output)?;\n\n } else {\n\n cipher.finish(input, &mut output)?;\n\n }\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 84, "score": 3.1879254763485227 }, { "content": " input.len(),\n\n output.as_mut_ptr(),\n\n )\n\n };\n\n if result != 0 {\n\n return Err(Error::Other(result));\n\n }\n\n Ok(())\n\n }\n\n\n\n /// Finish the cipher operation\n\n fn finish(&mut self, input: &[u8], output: &mut [u8]) -> Result<(), Error> {\n\n assert!(input.len() <= output.len());\n\n let mut output_length = output.len();\n\n let result = unsafe {\n\n SaSi_AesFinish(\n\n self.context(),\n\n 16,\n\n input.as_ptr(),\n\n 16,\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 85, "score": 3.1710977172358326 }, { "content": "# nRF52840 CryptoCell for Psila\n\n\n\nFunctions for using the CryptoCell in the nRF52840 for the security in Psila.\n\nThis uses the nrf_cc310 library provided by Nordic in their SDK.\n\n\n\nCopy the directory `external/nrf_cc310` from the SDK into this directory before\n\nbuilding.\n", "file_path": "nrf52-cryptocell/README.md", "rank": 86, "score": 3.1260563624137943 }, { "content": " }\n\n\n\n /// Get the IV\n\n fn get_iv(&mut self, iv: &mut [u8]) -> Result<(), Error> {\n\n assert!(iv.len() == 16);\n\n let result = unsafe { SaSi_AesGetIv(self.context(), iv.as_mut_ptr()) };\n\n if result != 0 {\n\n return Err(Error::Other(result));\n\n }\n\n Ok(())\n\n }\n\n\n\n /// 
Process a block of data\n\n fn process_block(&mut self, input: &[u8], output: &mut [u8]) -> Result<(), Error> {\n\n assert!(input.len() <= output.len());\n\n assert!(input.len() <= 65535);\n\n let result = unsafe {\n\n SaSi_AesBlock(\n\n self.context(),\n\n input.as_ptr(),\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 87, "score": 3.121663745893242 }, { "content": " let mut cipher = AesContext::new(\n\n EncryptDecrypt::Decrypt,\n\n AesOperationMode::Ctr,\n\n PaddingType::None,\n\n );\n\n cipher.set_key(key)?;\n\n\n\n let mut block = [0u8; BLOCK_SIZE];\n\n {\n\n let (flag, other) = block.split_at_mut(1);\n\n let (_nonce, _counter) = other.split_at_mut(nonce.len());\n\n flag[0] = Self::make_flag(0, 0, LENGTH_FIELD_LENGTH);\n\n _nonce.copy_from_slice(&nonce);\n\n }\n\n\n\n cipher.set_iv(&block)?;\n\n\n\n let mut block = [0u8; BLOCK_SIZE];\n\n block[..mic.len()].copy_from_slice(&mic);\n\n\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 88, "score": 3.121663745893242 }, { "content": " output: &mut [u8],\n\n ) -> Result<(), Error> {\n\n self.cipher.process_block(input, output)\n\n }\n\n /// Process the last bits and bobs and finish\n\n fn aes128_ecb_encrypt_finish(&mut self, input: &[u8], output: &mut [u8]) -> Result<(), Error> {\n\n self.cipher.finish(input, output)\n\n }\n\n}\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 89, "score": 3.107311268900629 }, { "content": " }\n\n\n\n fn update_led(&mut self) {\n\n let mut pixel = RGB8::default();\n\n if self.on_off {\n\n let raw: [u8; 3] = Srgb::from(self.colour).into_format().into_raw();\n\n pixel.r = raw[0];\n\n pixel.g = raw[1];\n\n pixel.b = raw[2];\n\n } else {\n\n pixel.r = 0;\n\n pixel.g = 0;\n\n pixel.b = 0;\n\n }\n\n let pixels = [pixel; 1];\n\n let _ = self.neopixel.write(gamma(pixels.iter().cloned()));\n\n }\n\n\n\n pub fn set_on_off(&mut self, enable: bool) {\n\n self.on_off = enable;\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 90, 
"score": 3.0205148647413935 }, { "content": " offset += message_blocks * BLOCK_SIZE;\n\n\n\n let mut block = [0u8; BLOCK_SIZE];\n\n block[0] = Self::make_flag(0, 0, LENGTH_FIELD_LENGTH);\n\n block[1..=nonce.len()].copy_from_slice(nonce);\n\n\n\n let mut cipher = AesContext::new(\n\n EncryptDecrypt::Encrypt,\n\n AesOperationMode::Ctr,\n\n PaddingType::None,\n\n );\n\n cipher.set_key(key)?;\n\n cipher.set_iv(&block)?;\n\n\n\n let mut block = [0u8; BLOCK_SIZE];\n\n let mut tag = [0u8; 16];\n\n block[..mic.len()].copy_from_slice(&new_mic[..mic.len()]);\n\n cipher.process_block(&block, &mut tag)?;\n\n\n\n for (o, i) in output[..offset]\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 91, "score": 3.0070754215167375 }, { "content": "\n\n let length_field = encrypted.len() as u16;\n\n\n\n let mut block = [0u8; BLOCK_SIZE];\n\n {\n\n let (flag, other) = block.split_at_mut(1);\n\n let (_nonce, length) = other.split_at_mut(nonce.len());\n\n flag[0] = Self::make_flag(aad.len(), mic.len(), LENGTH_FIELD_LENGTH);\n\n _nonce.copy_from_slice(&nonce);\n\n length[0] = (length_field >> 8) as u8;\n\n length[1] = (length_field & 0x00ff) as u8;\n\n }\n\n\n\n cipher.process_block(&block, &mut output)?;\n\n\n\n // Feed the additional data\n\n let mut block = [0u8; BLOCK_SIZE];\n\n let aad_length = aad.len() as u16;\n\n block[0] = (aad_length >> 8) as u8;\n\n block[1] = (aad_length & 0x00ff) as u8;\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 92, "score": 2.997074062021671 }, { "content": " let len = if aad.len() < AAD_B0_LEN {\n\n aad.len()\n\n } else {\n\n AAD_B0_LEN\n\n };\n\n block[2..2 + len].copy_from_slice(&aad[..len]);\n\n\n\n cipher.process_block(&block, &mut output)?;\n\n\n\n if aad.len() > AAD_B0_LEN {\n\n let mut iter = aad[AAD_B0_LEN..].chunks_exact(BLOCK_SIZE);\n\n loop {\n\n match iter.next() {\n\n Some(input) => {\n\n cipher.process_block(input, &mut output)?;\n\n }\n\n None => {\n\n let input = iter.remainder();\n\n if !input.is_empty() {\n\n let mut 
block = [0u8; BLOCK_SIZE];\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 93, "score": 2.9265069596141244 }, { "content": " attribute: u16,\n\n value: &mut [u8],\n\n ) -> Result<(AttributeDataType, usize), ClusterLibraryStatus> {\n\n match (profile, cluster, attribute) {\n\n (0x0104, 0x0000, 0x0000) => {\n\n value[0] = 0x02;\n\n Ok((AttributeDataType::Unsigned8, 1))\n\n }\n\n (0x0104, 0x0000, 0x0004) => {\n\n value[0] = MANUFACTURER_NAME.len() as u8;\n\n let end = MANUFACTURER_NAME.len() + 1;\n\n value[1..end].copy_from_slice(MANUFACTURER_NAME.as_bytes());\n\n Ok((AttributeDataType::CharacterString, end))\n\n }\n\n (0x0104, 0x0000, 0x0005) => {\n\n value[0] = MODEL_IDENTIFIER.len() as u8;\n\n let end = MODEL_IDENTIFIER.len() + 1;\n\n value[1..end].copy_from_slice(MODEL_IDENTIFIER.as_bytes());\n\n Ok((AttributeDataType::CharacterString, end))\n\n }\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 94, "score": 2.9228427151636134 }, { "content": " .chunks_mut(BLOCK_SIZE)\n\n .zip(buffer.chunks(BLOCK_SIZE))\n\n {\n\n cipher.process_block(i, o)?;\n\n }\n\n\n\n mic.copy_from_slice(&tag[..mic.len()]);\n\n }\n\n\n\n Ok(message.len())\n\n }\n\n\n\n /// Set the key\n\n fn aes128_ecb_encrypt_set_key(&mut self, key: &[u8]) -> Result<(), Error> {\n\n self.cipher.set_key(key)\n\n }\n\n /// Process blocks of data\n\n fn aes128_ecb_encrypt_process_block(\n\n &mut self,\n\n input: &[u8],\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 95, "score": 2.8307442973230863 }, { "content": " offset += (aad_blocks * BLOCK_SIZE) - 2;\n\n buffer[offset..offset + message.len()].copy_from_slice(message);\n\n offset += message.len();\n\n\n\n let mut cipher = AesContext::new(\n\n EncryptDecrypt::Encrypt,\n\n AesOperationMode::CbcMac,\n\n PaddingType::None,\n\n );\n\n cipher.set_key(key)?;\n\n\n\n let block_last = ((offset + (BLOCK_SIZE - 1)) / BLOCK_SIZE) - 1;\n\n let mut block_index = 0;\n\n let mut iter = 
buffer[..offset].chunks_exact(BLOCK_SIZE);\n\n loop {\n\n match iter.next() {\n\n Some(input) => {\n\n if block_index < block_last {\n\n cipher.process_block(&input, &mut new_mic)?;\n\n } else {\n", "file_path": "nrf52-cryptocell/src/lib.rs", "rank": 96, "score": 2.812338937782624 }, { "content": " }\n\n\n\n pub fn get_y(&self) -> u16 {\n\n (self.colour.y * 65536.0) as u16\n\n }\n\n\n\n pub fn set_y(&mut self, y: u16) {\n\n self.colour.y = (y as f32) / 65536.0;\n\n self.update_led();\n\n }\n\n\n\n pub fn set_color(&mut self, x: u16, y: u16) {\n\n self.colour.x = (x as f32) / 65536.0;\n\n self.colour.y = (y as f32) / 65536.0;\n\n self.update_led();\n\n }\n\n}\n\n\n\nimpl ClusterLibraryHandler for ClusterHandler {\n\n fn active_endpoints(&self) -> &[u8] {\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 97, "score": 2.6966192558040376 }, { "content": " self.update_led();\n\n }\n\n\n\n pub fn get_level(&self) -> u8 {\n\n (self.colour.luma * 254.0) as u8\n\n }\n\n\n\n pub fn set_level(&mut self, level: u8) {\n\n let level_float = (level as f32) / 254.0;\n\n self.colour.luma = level_float;\n\n self.update_led();\n\n }\n\n\n\n pub fn get_x(&self) -> u16 {\n\n (self.colour.x * 65536.0) as u16\n\n }\n\n\n\n pub fn set_x(&mut self, x: u16) {\n\n self.colour.x = (x as f32) / 65536.0;\n\n self.update_led();\n", "file_path": "adafruit-feather-nrf52840-express/examples/feather-express-psila.rs", "rank": 98, "score": 2.618267122463484 }, { "content": "#![allow(non_upper_case_globals)]\n\n#![allow(non_camel_case_types)]\n\n#![allow(non_snake_case)]\n\n#![allow(dead_code)]\n\n\n\nuse cty;\n", "file_path": "nrf52-cryptocell/bindings_header.rs", "rank": 99, "score": 2.6073523199778768 } ]
Rust
src/elastic/src/types/string/macros.rs
reinfer/elastic
78191a70d3774295ba66e1cf35f72e216e9fbf2a
macro_rules! impl_string_type { ($wrapper_ty:ident, $mapping_ty:ident, $field_type:ident) => { impl<TMapping> $field_type<TMapping> for $wrapper_ty<TMapping> where TMapping: $mapping_ty {} impl_mapping_type!(String, $wrapper_ty, $mapping_ty); impl<'a, TMapping> From<$wrapper_ty<TMapping>> for String where TMapping: $mapping_ty, { fn from(wrapper: $wrapper_ty<TMapping>) -> Self { wrapper.value } } impl<'a, TMapping> From<&'a $wrapper_ty<TMapping>> for std::borrow::Cow<'a, str> where TMapping: $mapping_ty, { fn from(wrapper: &'a $wrapper_ty<TMapping>) -> Self { wrapper.as_ref().into() } } impl<'a, TMapping> From<&'a $wrapper_ty<TMapping>> for &'a str where TMapping: $mapping_ty, { fn from(wrapper: &'a $wrapper_ty<TMapping>) -> Self { wrapper.as_ref() } } impl<'a, TMapping> From<$wrapper_ty<TMapping>> for std::borrow::Cow<'a, str> where TMapping: $mapping_ty, { fn from(wrapper: $wrapper_ty<TMapping>) -> Self { String::from(wrapper).into() } } impl<TMapping> AsRef<str> for $wrapper_ty<TMapping> where TMapping: $mapping_ty, { fn as_ref(&self) -> &str { &self.value } } impl<'a, TMapping> PartialEq<&'a str> for $wrapper_ty<TMapping> where TMapping: $mapping_ty, { fn eq(&self, other: &&'a str) -> bool { PartialEq::eq(&self.value, *other) } } impl<'a, TMapping> PartialEq<$wrapper_ty<TMapping>> for &'a str where TMapping: $mapping_ty, { fn eq(&self, other: &$wrapper_ty<TMapping>) -> bool { PartialEq::eq(*self, &other.value) } } impl<TMapping> Serialize for $wrapper_ty<TMapping> where TMapping: $mapping_ty, { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_str(&self.value) } } impl<'de, TMapping> Deserialize<'de> for $wrapper_ty<TMapping> where TMapping: $mapping_ty, { fn deserialize<D>(deserializer: D) -> Result<$wrapper_ty<TMapping>, D::Error> where D: Deserializer<'de>, { struct StringVisitor<TMapping> { _m: PhantomData<TMapping>, } impl<'de, TMapping> Visitor<'de> for StringVisitor<TMapping> where TMapping: 
$mapping_ty, { type Value = $wrapper_ty<TMapping>; fn expecting( &self, formatter: &mut ::std::fmt::Formatter, ) -> ::std::fmt::Result { write!(formatter, "a json string") } fn visit_str<E>(self, v: &str) -> Result<$wrapper_ty<TMapping>, E> where E: Error, { Ok($wrapper_ty::new(v)) } } deserializer.deserialize_any(StringVisitor { _m: PhantomData }) } } }; }
macro_rules! impl_string_type { ($wrapper_ty:ident, $mapping_ty:ident, $field_type:ident) => { impl<TMapping> $field_type<TMapping> for $wrapper_ty<TMapping> where TMapping: $mapping_ty {} impl_mapping_type!(String, $wrapper_ty, $mapping_ty); impl<'a, TMapping> From<$wrapper_ty<TMapping>> for String where TMapping: $mapping_ty, { fn from(wrapper: $wrapper
orrow::Cow<'a, str> where TMapping: $mapping_ty, { fn from(wrapper: &'a $wrapper_ty<TMapping>) -> Self { wrapper.as_ref().into() } } impl<'a, TMapping> From<&'a $wrapper_ty<TMapping>> for &'a str where TMapping: $mapping_ty, { fn from(wrapper: &'a $wrapper_ty<TMapping>) -> Self { wrapper.as_ref() } } impl<'a, TMapping> From<$wrapper_ty<TMapping>> for std::borrow::Cow<'a, str> where TMapping: $mapping_ty, { fn from(wrapper: $wrapper_ty<TMapping>) -> Self { String::from(wrapper).into() } } impl<TMapping> AsRef<str> for $wrapper_ty<TMapping> where TMapping: $mapping_ty, { fn as_ref(&self) -> &str { &self.value } } impl<'a, TMapping> PartialEq<&'a str> for $wrapper_ty<TMapping> where TMapping: $mapping_ty, { fn eq(&self, other: &&'a str) -> bool { PartialEq::eq(&self.value, *other) } } impl<'a, TMapping> PartialEq<$wrapper_ty<TMapping>> for &'a str where TMapping: $mapping_ty, { fn eq(&self, other: &$wrapper_ty<TMapping>) -> bool { PartialEq::eq(*self, &other.value) } } impl<TMapping> Serialize for $wrapper_ty<TMapping> where TMapping: $mapping_ty, { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { serializer.serialize_str(&self.value) } } impl<'de, TMapping> Deserialize<'de> for $wrapper_ty<TMapping> where TMapping: $mapping_ty, { fn deserialize<D>(deserializer: D) -> Result<$wrapper_ty<TMapping>, D::Error> where D: Deserializer<'de>, { struct StringVisitor<TMapping> { _m: PhantomData<TMapping>, } impl<'de, TMapping> Visitor<'de> for StringVisitor<TMapping> where TMapping: $mapping_ty, { type Value = $wrapper_ty<TMapping>; fn expecting( &self, formatter: &mut ::std::fmt::Formatter, ) -> ::std::fmt::Result { write!(formatter, "a json string") } fn visit_str<E>(self, v: &str) -> Result<$wrapper_ty<TMapping>, E> where E: Error, { Ok($wrapper_ty::new(v)) } } deserializer.deserialize_any(StringVisitor { _m: PhantomData }) } } }; }
_ty<TMapping>) -> Self { wrapper.value } } impl<'a, TMapping> From<&'a $wrapper_ty<TMapping>> for std::b
random
[ { "content": "fn dedup_urls(endpoint: (String, Endpoint)) -> (String, Endpoint) {\n\n let (name, mut endpoint) = endpoint;\n\n\n\n let mut deduped_paths = BTreeMap::new();\n\n\n\n for path in endpoint.url.paths {\n\n let key = path.params().join(\"\");\n\n\n\n deduped_paths.insert(key, path);\n\n }\n\n\n\n endpoint.url.paths = deduped_paths.into_iter().map(|(_, p)| p).collect();\n\n\n\n (name, endpoint)\n\n}\n\n\n", "file_path": "tools/generate_requests/src/main.rs", "rank": 0, "score": 154857.69803643157 }, { "content": "fn strip_methods(endpoint: (String, Endpoint)) -> (String, Endpoint) {\n\n let (name, mut endpoint) = endpoint;\n\n\n\n let preferred_method = endpoint\n\n .preferred_method()\n\n .expect(\"there should always be at least 1 method\");\n\n\n\n endpoint.methods = vec![preferred_method];\n\n\n\n (name, endpoint)\n\n}\n\n\n", "file_path": "tools/generate_requests/src/main.rs", "rank": 1, "score": 154857.69803643157 }, { "content": "fn from_reader<R>(name: String, rdr: &mut R) -> Result<(String, Endpoint), String>\n\nwhere\n\n R: Read,\n\n{\n\n let endpoint: BTreeMap<String, Endpoint> = serde_json::from_reader(rdr)\n\n .map_err(|e| format!(\"Failed to parse {} because: {}\", name, e))?;\n\n\n\n Ok(endpoint.endpoint())\n\n}\n\n\n", "file_path": "tools/generate_requests/src/main.rs", "rank": 2, "score": 149297.7289025095 }, { "content": "fn from_dir(path: &str) -> Result<Vec<(String, Endpoint)>, String> {\n\n let mut all_parsed: Vec<(String, Endpoint)> = Vec::new();\n\n\n\n let paths = read_dir(path).unwrap();\n\n\n\n for path in paths {\n\n let path = path.unwrap().path();\n\n let name = path.file_name().map(|path| path.to_string_lossy());\n\n let display = path.to_string_lossy().into_owned();\n\n\n\n if name.map(|name| !name.starts_with('_')).unwrap_or(true) {\n\n let mut f = File::open(&path).unwrap();\n\n let parsed = from_reader(display, &mut f)?;\n\n\n\n all_parsed.push(parsed);\n\n }\n\n }\n\n\n\n // Sort the endpoints parsed from disk so we 
have a stable ordering\n\n all_parsed.sort_by(|&(ref a_name, _), &(ref b_name, _)| a_name.cmp(b_name));\n\n\n\n Ok(all_parsed)\n\n}\n\n\n", "file_path": "tools/generate_requests/src/main.rs", "rank": 3, "score": 147819.14544536272 }, { "content": "fn get_string_from_lit<'a>(lit: &'a Lit) -> Result<String, &'static str> {\n\n match *lit {\n\n Lit::Str(ref s) => Ok(s.value()),\n\n _ => Err(\"Unable to get String from Lit\"),\n\n }\n\n}\n", "file_path": "src/elastic_derive/src/lib.rs", "rank": 4, "score": 134043.88824389648 }, { "content": "// Get the format string supplied by an #[elastic()] attribute\n\nfn get_format_from_attr(item: &DeriveInput) -> Option<String> {\n\n let val = get_elastic_meta_items(&item.attrs);\n\n\n\n let val = val\n\n .iter()\n\n .filter_map(|meta| expect_name_value(\"date_format\", &meta))\n\n .next();\n\n\n\n val.and_then(|v| get_string_from_lit(v).ok().map(Into::into))\n\n}\n\n\n", "file_path": "src/elastic_derive/src/date_format/mod.rs", "rank": 5, "score": 131274.71425702336 }, { "content": "// Get the name string supplied by an #[elastic()] attribute\n\nfn get_name_from_attr(item: &DeriveInput) -> Option<String> {\n\n let val = get_elastic_meta_items(&item.attrs);\n\n\n\n let val = val\n\n .iter()\n\n .filter_map(|meta| expect_name_value(\"date_format_name\", &meta))\n\n .next();\n\n\n\n val.and_then(|v| get_string_from_lit(v).ok().map(Into::into))\n\n}\n\n\n\nimpl<'a> parse::DateFormatToken<'a> {\n\n fn into_tokens(self, crate_root: &proc_macro2::TokenStream) -> proc_macro2::TokenStream {\n\n use self::parse::DateFormatToken::*;\n\n\n\n match self {\n\n Year => {\n\n quote!(#crate_root::__derive::Item::Numeric(#crate_root::__derive::Numeric::Year, #crate_root::__derive::Pad::Zero))\n\n }\n\n Month => {\n", "file_path": "src/elastic_derive/src/date_format/mod.rs", "rank": 6, "score": 131274.71425702336 }, { "content": "// Ensure all deserialized paths have a leading `/`\n\nfn rooted_path_string<'de, D>(deserializer: D) -> Result<String, 
D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n\n\n if !s.starts_with('/') {\n\n Ok(format!(\"/{}\", s))\n\n } else {\n\n Ok(s)\n\n }\n\n}\n\n\n\nimpl fmt::Display for Path {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n\nimpl Path {\n", "file_path": "tools/generate_requests/src/parse/mod.rs", "rank": 7, "score": 130048.89417475907 }, { "content": "fn doc() -> Doc {\n\n Doc { id: \"1\".to_owned() }\n\n}\n\n\n\ntest! {\n\n const description: &'static str = \"raw query string\";\n\n\n\n type Response = SearchResponse<Doc>;\n\n\n\n // Ensure the index doesn't exist\n\n fn prepare(&self, client: AsyncClient) -> Box<dyn Future<Item = (), Error = Error>> {\n\n let delete_res = client.index(Doc::static_index()).delete().send();\n\n\n\n let index_reqs = future::join_all((0..10).map(move |_| {\n\n client\n\n .document()\n\n .index(doc())\n\n .params_fluent(|p| p.url_param(\"refresh\", true))\n\n .send()\n\n }));\n", "file_path": "tests/integration/src/tests/search/raw_query_string.rs", "rank": 8, "score": 119719.90569861015 }, { "content": "fn params_mod(tokens: &mut Tokens, params_to_emit: BTreeMap<String, bool>) {\n\n let header = quote!(\n\n #![allow(missing_docs)]\n\n #![allow(clippy::all)]\n\n );\n\n\n\n tokens.append(header);\n\n\n\n tokens.append(\"\\n\\n\");\n\n\n\n tokens.append(r#\"include!(\"genned.params.rs\");\"#);\n\n tokens.append(\"\\n\\n\");\n\n\n\n let params_to_emit = params_to_emit.iter().filter(|&(_, is_emitted)| *is_emitted);\n\n\n\n for (ty, _) in params_to_emit {\n\n let ty_item = gen::params::tokens(ty);\n\n\n\n tokens.append(quote!(#ty_item));\n\n\n\n tokens.append(\"\\n\\n\");\n\n }\n\n}\n\n\n", "file_path": "tools/generate_requests/src/main.rs", "rank": 9, "score": 119688.06491361938 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap();\n\n}\n", "file_path": "src/elastic/examples/update.rs", "rank": 10, "score": 
82410.58556863356 }, { "content": "fn main() {\n\n env_logger::init_from_env(\"ELASTIC_LOG\");\n\n\n\n let matches = App::new(\"elastic_integration_tests\")\n\n .arg(\n\n Arg::with_name(\"runs\")\n\n .default_value(\"default\")\n\n .takes_value(true)\n\n .multiple(true)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"filter\")\n\n .takes_value(true)\n\n .short(\"f\")\n\n .long(\"filter\"),\n\n )\n\n .get_matches();\n\n\n\n let mut failed = Vec::<run_tests::TestResult>::new();\n", "file_path": "tests/integration/src/main.rs", "rank": 11, "score": 82410.58556863356 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap()\n\n}\n", "file_path": "src/elastic/examples/bulk.rs", "rank": 12, "score": 82410.58556863356 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap();\n\n}\n", "file_path": "src/elastic/examples/index.rs", "rank": 13, "score": 82410.58556863356 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap()\n\n}\n", "file_path": "src/elastic/examples/get.rs", "rank": 14, "score": 82410.58556863356 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap();\n\n}\n", "file_path": "src/elastic/examples/basic.rs", "rank": 15, "score": 82410.58556863356 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap()\n\n}\n", "file_path": "src/elastic/examples/raw.rs", "rank": 16, "score": 82410.58556863356 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap()\n\n}\n", "file_path": "src/elastic/examples/ping.rs", "rank": 17, "score": 82410.58556863356 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap()\n\n}\n", "file_path": "src/elastic/examples/typed.rs", "rank": 18, "score": 82410.58556863356 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap()\n\n}\n", "file_path": "src/elastic/examples/custom_response.rs", "rank": 19, "score": 81353.24338760754 }, { "content": "fn main() {\n\n run().unwrap()\n\n}\n", "file_path": 
"examples/account_sample/src/main.rs", "rank": 20, "score": 81353.24338760754 }, { "content": "#[test]\n\nfn aggregation_search() {\n\n let j = include_str!(\"complex.json\");\n\n\n\n let _s: Query = serde_json::from_str(j).unwrap();\n\n}\n\n\n", "file_path": "src/queries/tests/mod.rs", "rank": 21, "score": 81353.24338760754 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap()\n\n}\n", "file_path": "src/elastic/examples/pre_send.rs", "rank": 22, "score": 81353.24338760754 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap()\n\n}\n", "file_path": "src/elastic/examples/bulk_with_source.rs", "rank": 23, "score": 81353.24338760754 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap();\n\n}\n", "file_path": "src/elastic/examples/basic_async.rs", "rank": 24, "score": 81353.24338760754 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap()\n\n}\n", "file_path": "src/elastic/examples/bulk_async.rs", "rank": 25, "score": 81353.24338760754 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap()\n\n}\n", "file_path": "src/elastic/examples/update_with_source.rs", "rank": 26, "score": 81353.24338760754 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap()\n\n}\n", "file_path": "src/elastic/examples/typed_async.rs", "rank": 27, "score": 81353.24338760754 }, { "content": "fn main() {\n\n start_comment_block_for_logging();\n\n\n\n println!(\"This code is automatically generated\");\n\n println!(\"run the `tools/generate_requests.sh` script to update it\");\n\n\n\n let dir = \"./tools/generate_requests/spec\";\n\n\n\n // BTreeMap<String, bool> : <url param type name, is emitted>\n\n let mut params_to_emit = BTreeMap::new();\n\n params_to_emit.insert(String::from(\"vertices\"), false);\n\n\n\n let mut tokens = quote::Tokens::new();\n\n\n\n let mut endpoints = from_dir(dir)\n\n .expect(\"Couldn't parse the REST API spec\")\n\n .add_simple_search()\n\n 
.add_get_ping_req();\n\n\n\n endpoints = endpoints\n", "file_path": "tools/generate_requests/src/main.rs", "rank": 28, "score": 81353.24338760754 }, { "content": "fn search(\n\n client: AsyncClient,\n\n query: &'static str,\n\n) -> Box<dyn Future<Item = SearchResponse<MyType>, Error = Error>> {\n\n let search = client\n\n .search()\n\n .index(MyType::static_index())\n\n .body(json!({\n\n \"query\": {\n\n \"query_string\": {\n\n \"query\": query\n\n }\n\n }\n\n }))\n\n .send();\n\n\n\n Box::new(search)\n\n}\n\n\n", "file_path": "src/elastic/examples/typed_async.rs", "rank": 29, "score": 81353.24338760754 }, { "content": "#[test]\n\nfn nested_aggregation_search() {\n\n let j = include_str!(\"nested.json\");\n\n\n\n let _s: Query = serde_json::from_str(j).unwrap();\n\n}\n", "file_path": "src/queries/tests/mod.rs", "rank": 30, "score": 80339.8207880749 }, { "content": "fn call_future(\n\n client: AsyncClient,\n\n cases: impl IntoIterator<Item = Test>,\n\n max_concurrent_tests: usize,\n\n) -> Box<dyn Future<Item = Vec<TestResult>, Error = ()>> {\n\n let all_tests = cases\n\n .into_iter()\n\n .map(move |t| t(client.clone()))\n\n .collect::<Vec<_>>();\n\n\n\n println!(\"Running {} tests\", all_tests.len());\n\n\n\n let test_stream = stream::futures_unordered(all_tests)\n\n .map(Ok)\n\n .buffer_unordered(max_concurrent_tests);\n\n\n\n Box::new(test_stream.collect())\n\n}\n", "file_path": "tests/integration/src/run_tests.rs", "rank": 31, "score": 80339.8207880749 }, { "content": "#[test]\n\nfn simple_aggregation_search() {\n\n let j = include_str!(\"simpleagg.json\");\n\n\n\n let _s: Query = serde_json::from_str(j).unwrap();\n\n}\n\n\n", "file_path": "src/queries/tests/mod.rs", "rank": 32, "score": 80339.8207880749 }, { "content": "fn main() {}\n", "file_path": "tests/derive_compile_test/src/main.rs", "rank": 33, "score": 80339.8207880749 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap();\n\n}\n", "file_path": 
"src/elastic/examples/load_balanced_async.rs", "rank": 34, "score": 80339.8207880749 }, { "content": "fn main() {\n\n env_logger::init();\n\n run().unwrap()\n\n}\n", "file_path": "src/elastic/examples/bulk_async_stream.rs", "rank": 35, "score": 80339.8207880749 }, { "content": "fn endpoints_mod(\n\n tokens: &mut Tokens,\n\n http_mod: &'static str,\n\n endpoints: Vec<(String, Endpoint)>,\n\n params_to_emit: &mut BTreeMap<String, bool>,\n\n) {\n\n let mut http_mod_tokens = Tokens::new();\n\n http_mod_tokens.append(http_mod);\n\n\n\n let header = quote!(\n\n #![allow(missing_docs)]\n\n #![allow(clippy::all)]\n\n\n\n use super:: #http_mod_tokens ::*;\n\n use super::params::*;\n\n );\n\n\n\n tokens.append(header);\n\n tokens.append(\"\\n\\n\");\n\n\n", "file_path": "tools/generate_requests/src/main.rs", "rank": 36, "score": 80339.8207880749 }, { "content": "fn build_req(\n\n endpoint: Endpoint<impl Into<SyncBody>>,\n\n params: RequestParams,\n\n) -> Result<SyncHttpRequest, Error> {\n\n let endpoint = SyncHttpRequest {\n\n url: Url::parse(&build_url(&endpoint.url, &params)).map_err(error::request)?,\n\n method: endpoint.method,\n\n headers: params.get_headers(),\n\n body: endpoint.body.map(|body| body.into()),\n\n };\n\n\n\n Ok(endpoint)\n\n}\n\n\n\n/** Build a synchronous `reqwest::RequestBuilder` from an Elasticsearch request. 
*/\n", "file_path": "src/elastic/src/http/sender/synchronous.rs", "rank": 37, "score": 79367.63696843102 }, { "content": "#[macro_use] extern crate serde_derive;\n\n# use elastic::types::prelude::*;\n\n#[derive(Serialize)]\n\nstruct MyIpField(String);\n\n\n\nimpl IpFieldType<DefaultIpMapping> for MyIpField {}\n\n```\n\n\n\n# Links\n\n\n\n- [Elasticsearch Doc](https://www.elastic.co/guide/en/elasticsearch/reference/current/ip.html)\n\n*/\n\n\n\npub mod mapping;\n\n\n\nmod impls;\n\npub use self::impls::*;\n\n\n\npub mod prelude {\n\n /*!\n\n Includes all types for the `ip` type.\n\n\n\n This is a convenience module to make it easy to build mappings for multiple types without too many `use` statements.\n\n */\n\n\n\n pub use super::{\n\n impls::*,\n\n mapping::*,\n\n };\n\n}\n", "file_path": "src/elastic/src/types/ip/mod.rs", "rank": 38, "score": 79273.78457529591 }, { "content": "fn end_comment_block_for_logging() {\n\n stdout().write_all(b\"*/\").unwrap();\n\n}\n\n\n", "file_path": "tools/generate_requests/src/main.rs", "rank": 39, "score": 78434.22495395335 }, { "content": "fn get_mapping(\n\n crate_root: &proc_macro2::TokenStream,\n\n input: &DeriveInput,\n\n) -> ElasticDocumentMapping {\n\n // Define a struct for the mapping with a few defaults\n\n fn define_mapping(vis: &Visibility, name: &Ident) -> proc_macro2::TokenStream {\n\n quote!(\n\n #[derive(Default, Clone, Copy, Debug)]\n\n #vis struct #name;\n\n )\n\n }\n\n\n\n // Get the default mapping name\n\n fn get_default_mapping(item: &DeriveInput) -> Ident {\n\n quote::format_ident!(\"{}Mapping\", &item.ident)\n\n }\n\n\n\n // Get the mapping ident supplied by an #[elastic()] attribute or create a default one\n\n fn get_mapping_from_attr(item: &DeriveInput) -> Option<Ident> {\n\n let val = get_elastic_meta_items(&item.attrs);\n", "file_path": "src/elastic_derive/src/elastic_type/mod.rs", "rank": 40, "score": 78434.22495395335 }, { "content": "fn start_comment_block_for_logging() {\n\n 
stdout().write_all(b\"/*\\n\").unwrap();\n\n}\n\n\n", "file_path": "tools/generate_requests/src/main.rs", "rank": 41, "score": 78434.22495395335 }, { "content": "// Implement DateFormat for the type being derived with the mapping\n\nfn impl_date_format(\n\n crate_root: proc_macro2::TokenStream,\n\n item: &DeriveInput,\n\n name: &str,\n\n format: &[proc_macro2::TokenStream],\n\n) -> proc_macro2::TokenStream {\n\n let ty = &item.ident;\n\n\n\n let parse_fn = quote!(\n\n fn parse(date: &str) -> ::std::result::Result<#crate_root::__derive::DateValue, #crate_root::__derive::ParseError> {\n\n let fmt = vec![ #(#format),* ];\n\n\n\n #crate_root::__derive::parse_from_tokens(date, fmt)\n\n }\n\n );\n\n\n\n let format_fn = quote!(\n\n fn format<'a>(date: &'a #crate_root::__derive::DateValue) -> #crate_root::__derive::FormattedDate<'a> {\n\n let fmt = vec![ #(#format),* ];\n\n\n", "file_path": "src/elastic_derive/src/date_format/mod.rs", "rank": 42, "score": 77537.31069445338 }, { "content": "#[macro_use] extern crate serde_derive;\n\n# fn main() {\n\n# use elastic::types::prelude::*;\n\n#[derive(Serialize)]\n\nstruct MyGeoShapeField(String);\n\n\n\nimpl GeoShapeFieldType<DefaultGeoShapeMapping> for MyGeoShapeField {}\n\n# }\n\n```\n\n\n\n# Links\n\n\n\n- [Elasticsearch Doc](https://www.elastic.co/guide/en/elasticsearch/reference/current/geo-shape.html)\n\n*/\n\n\n\npub mod mapping;\n\n\n\nmod impls;\n\npub use self::impls::*;\n\n\n\npub mod prelude {\n\n /*!\n\n Includes all types for the `geo_shape` types.\n\n\n\n This is a convenience module to make it easy to build mappings for multiple types without too many `use` statements.\n\n */\n\n\n\n pub use super::{\n\n impls::*,\n\n mapping::*,\n\n };\n\n}\n", "file_path": "src/elastic/src/types/geo/shape/mod.rs", "rank": 43, "score": 77511.37824173871 }, { "content": "#[derive(Debug)]\n\nstruct BulkRequestError(String);\n\n\n\nimpl fmt::Display for BulkRequestError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result 
{\n\n writeln!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl StdError for BulkRequestError {\n\n fn description(&self) -> &str {\n\n \"error building bulk request body\"\n\n }\n\n}\n\n\n\n/**\n\nA bulk request body.\n\n\n\nThe body can receive a bulk operation for any type of document.\n\n*/\n", "file_path": "src/elastic/src/client/requests/bulk/mod.rs", "rank": 44, "score": 77506.19221046861 }, { "content": "fn expect_list<'a>(\n\n name: &str,\n\n meta_item: &'a NestedMeta,\n\n) -> Option<impl Iterator<Item = &'a NestedMeta>> {\n\n match *meta_item {\n\n NestedMeta::Meta(Meta::List(MetaList {\n\n ref path,\n\n ref nested,\n\n ..\n\n })) if path.get_ident() == Some(&quote::format_ident!(\"{}\", name)) => Some(nested.iter()),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/elastic_derive/src/lib.rs", "rank": 45, "score": 77334.3950023987 }, { "content": "pub fn call(\n\n client: AsyncClient,\n\n cases: impl IntoIterator<Item = Test>,\n\n max_concurrent_tests: usize,\n\n) -> Result<Vec<TestResult>, ()> {\n\n tokio::runtime::current_thread::block_on_all(call_future(client, cases, max_concurrent_tests))\n\n}\n\n\n", "file_path": "tests/integration/src/run_tests.rs", "rank": 46, "score": 77334.3950023987 }, { "content": "#[test]\n\nfn success_parse_with_errors() {\n\n let f = include_bytes!(\"bulk_error.json\");\n\n let deserialized = parse::<BulkResponse>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert!(deserialized.is_err());\n\n\n\n assert_eq!(1, deserialized.iter().filter(Result::is_err).count());\n\n assert_eq!(1, deserialized.iter().filter(Result::is_ok).count());\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/bulk/mod.rs", "rank": 47, "score": 76674.79456684637 }, { "content": "#[test]\n\nfn success_into_document() {\n\n let f = include_bytes!(\"get_found.json\");\n\n let deserialized = parse::<GetResponse<Value>>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n match deserialized.into_document() 
{\n\n Some(doc) => {\n\n let id = doc\n\n .as_object()\n\n .and_then(|src| src.get(\"id\"))\n\n .and_then(|id| id.as_u64());\n\n\n\n assert_eq!(Some(1), id);\n\n }\n\n _ => panic!(\"expected deserialised doc to be Some\"),\n\n }\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/document_get/mod.rs", "rank": 48, "score": 76674.79456684637 }, { "content": "#[test]\n\nfn success_parse_empty() {\n\n let f = include_bytes!(\"search_empty.json\");\n\n let deserialized = parse::<SearchResponse<Value>>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert_eq!(deserialized.hits().count(), 0);\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/search/mod.rs", "rank": 49, "score": 76674.79456684637 }, { "content": "#[test]\n\nfn deserialise_nodes() {\n\n let f = include_bytes!(\"nodes_info.json\");\n\n let deserialized = parse::<NodesInfoResponse>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n let expected = vec![\"1.1.1.1:9200\", \"1.1.1.2:9200\"];\n\n\n\n assert_eq!(expected, deserialized.iter_addrs().collect::<Vec<_>>());\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/nodes_info/mod.rs", "rank": 50, "score": 76674.79456684637 }, { "content": "#[test]\n\nfn success_aggs_when_not_present() {\n\n let f = include_bytes!(\"search_hits_only.json\");\n\n let deserialized = parse::<SearchResponse<Value>>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert_eq!(deserialized.aggs().count(), 0);\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/search/mod.rs", "rank": 51, "score": 76674.79456684637 }, { "content": "// Implement PropertiesMapping for the mapping\n\nfn get_props_impl_block(\n\n crate_root: &proc_macro2::TokenStream,\n\n props_ty: &Ident,\n\n fields: &[(Ident, &Field)],\n\n) -> proc_macro2::TokenStream {\n\n // Get the serde serialisation statements for each of the fields on the type being derived\n\n fn get_field_ser_stmts(\n\n crate_root: 
&proc_macro2::TokenStream,\n\n fields: &[(Ident, &Field)],\n\n ) -> Vec<proc_macro2::TokenStream> {\n\n let fields: Vec<proc_macro2::TokenStream> = fields\n\n .iter()\n\n .cloned()\n\n .map(|(name, field)| {\n\n let lit = Lit::Str(LitStr::new(\n\n &name.to_string(),\n\n proc_macro2::Span::call_site(),\n\n ));\n\n let ty = &field.ty;\n\n\n", "file_path": "src/elastic_derive/src/elastic_type/mod.rs", "rank": 52, "score": 76674.79456684637 }, { "content": "fn doc() -> Doc {\n\n Doc { id: ID.to_owned() }\n\n}\n\n\n\ntest! {\n\n const description: &'static str = \"delete existing document\";\n\n\n\n type Response = (GetResponse<Doc>, DeleteResponse, GetResponse<Doc>);\n\n\n\n // Ensure the index doesn't exist\n\n fn prepare(&self, client: AsyncClient) -> Box<dyn Future<Item = (), Error = Error>> {\n\n let delete_res = client\n\n .index(Doc::static_index())\n\n .delete()\n\n .send()\n\n .map(|_| ());\n\n\n\n Box::new(delete_res)\n\n }\n\n\n", "file_path": "tests/integration/src/tests/document/delete.rs", "rank": 53, "score": 76362.21118275482 }, { "content": "fn insert_value<'a>(\n\n fieldname: &str,\n\n json_object: &'a Object,\n\n keyname: &str,\n\n rowdata: &mut RowData<'a>,\n\n) {\n\n if let Some(v) = json_object.get(fieldname) {\n\n let field_name = format!(\"{}_{}\", keyname, fieldname);\n\n rowdata.insert(Cow::Owned(field_name), v);\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for Aggs<'a> {\n\n type Item = RowData<'a>;\n\n\n\n fn next(&mut self) -> Option<RowData<'a>> {\n\n if self.current_row.is_none() {\n\n // New row\n\n self.current_row = Some(BTreeMap::new())\n\n }\n", "file_path": "src/elastic/src/client/responses/search.rs", "rank": 54, "score": 76362.21118275482 }, { "content": "fn doc() -> Doc {\n\n Doc { id: ID.to_owned() }\n\n}\n\n\n\ntest! 
{\n\n const description: &'static str = \"index and delete a document\";\n\n\n\n type Response = BulkResponse;\n\n\n\n // Ensure the index doesn't exist\n\n fn prepare(&self, client: AsyncClient) -> Box<dyn Future<Item = (), Error = Error>> {\n\n let delete_res = client\n\n .index(Doc::static_index())\n\n .delete()\n\n .send()\n\n .map(|_| ());\n\n\n\n Box::new(delete_res)\n\n }\n\n\n", "file_path": "tests/integration/src/tests/bulk/delete.rs", "rank": 55, "score": 76362.21118275482 }, { "content": "#[test]\n\nfn success_parse_3level_aggs() {\n\n let f = include_bytes!(\"search_aggregation_3level.json\");\n\n let deserialized = parse::<SearchResponse<Value>>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert_eq!(deserialized.aggs().count(), 201);\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/search/mod.rs", "rank": 56, "score": 75844.73496589893 }, { "content": "#[test]\n\nfn success_parse_hits_no_score() {\n\n let f = include_bytes!(\"search_null_score.json\");\n\n let deserialized = parse::<SearchResponse<Value>>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert_eq!(deserialized.hits().count(), 1);\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/search/mod.rs", "rank": 57, "score": 75844.73496589893 }, { "content": "#[test]\n\nfn success_parse_with_errors_errors_only() {\n\n let f = include_bytes!(\"bulk_error.json\");\n\n let deserialized = parse::<BulkErrorsResponse>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert!(deserialized.is_err());\n\n\n\n assert_eq!(1, deserialized.iter().count());\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/bulk/mod.rs", "rank": 58, "score": 75844.73496589893 }, { "content": "// Implement DocumentType for the type being derived with the mapping\n\nfn get_doc_ty_impl_block(\n\n crate_root: &proc_macro2::TokenStream,\n\n item: &DeriveInput,\n\n fields: &[(Ident, &Field)],\n\n mapping: &Ident,\n\n) -> 
proc_macro2::TokenStream {\n\n struct MetadataBlock {\n\n instance_methods: proc_macro2::TokenStream,\n\n static_impls: proc_macro2::TokenStream,\n\n }\n\n\n\n // Implement DocumentMetadata for the type being derived with the mapping\n\n fn get_doc_ty_methods(\n\n crate_root: &proc_macro2::TokenStream,\n\n item: &DeriveInput,\n\n fields: &[(Ident, &Field)],\n\n ) -> MetadataBlock {\n\n struct ElasticMetadataMethods {\n\n index: proc_macro2::TokenStream,\n\n index_is_static: bool,\n", "file_path": "src/elastic_derive/src/elastic_type/mod.rs", "rank": 59, "score": 75844.73496589893 }, { "content": "#[test]\n\nfn success_parse_response() {\n\n let f = include_bytes!(\"index_success.json\");\n\n let deserialized = parse::<IndexResponse>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert!(deserialized.created());\n\n assert_eq!(\"testindex\", deserialized.index());\n\n assert_eq!(\"testtype\", deserialized.ty());\n\n assert_eq!(\"1\", deserialized.id());\n\n assert_eq!(Some(1), deserialized.version());\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/document_index/mod.rs", "rank": 60, "score": 75844.73496589893 }, { "content": "#[test]\n\nfn success_parse_index_ops() {\n\n let f = include_bytes!(\"bulk_index.json\");\n\n let deserialized = parse::<BulkResponse>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert!(deserialized.is_ok());\n\n\n\n assert_eq!(0, deserialized.iter().filter(Result::is_err).count());\n\n assert_eq!(5, deserialized.iter().filter(Result::is_ok).count());\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/bulk/mod.rs", "rank": 61, "score": 75844.73496589893 }, { "content": "#[test]\n\nfn deserialise_nodes_empty() {\n\n let f = include_bytes!(\"nodes_info_empty.json\");\n\n let deserialized = parse::<NodesInfoResponse>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert_eq!(0, deserialized.iter_addrs().count());\n\n}\n", "file_path": 
"src/elastic/src/client/responses/tests/nodes_info/mod.rs", "rank": 62, "score": 75844.73496589893 }, { "content": "#[test]\n\nfn success_parse_simple_aggs() {\n\n let f = include_bytes!(\"search_aggregation_simple.json\");\n\n let deserialized = parse::<SearchResponse<Value>>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n let _agg = deserialized\n\n .aggs()\n\n .filter_map(|agg| agg.get(\"myagg\").and_then(|val| val.as_f64()))\n\n .nth(0);\n\n\n\n // TODO: Enable once we support simple values\n\n // assert_eq!(Some(10f64), agg);\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/search/mod.rs", "rank": 63, "score": 75844.73496589893 }, { "content": "#[test]\n\nfn error_parse_index_not_found() {\n\n let f = include_bytes!(\"../error/error_index_not_found.json\");\n\n let deserialized = parse::<SearchResponse<Value>>()\n\n .from_slice(StatusCode::NOT_FOUND, f as &[_])\n\n .unwrap_err();\n\n\n\n let valid = match deserialized {\n\n ResponseError::Api(ApiError::IndexNotFound { ref index }) if index == \"carrots\" => true,\n\n _ => false,\n\n };\n\n\n\n assert!(valid);\n\n}\n", "file_path": "src/elastic/src/client/responses/tests/search/mod.rs", "rank": 64, "score": 75844.73496589893 }, { "content": "#[test]\n\nfn success_parse_ping_response() {\n\n let f = include_bytes!(\"ping_success.json\");\n\n let deserialized = parse::<PingResponse>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert_eq!(\"Scorcher\", deserialized.name());\n\n}\n", "file_path": "src/elastic/src/client/responses/tests/ping/mod.rs", "rank": 65, "score": 75844.73496589893 }, { "content": "#[test]\n\nfn success_parse_hits_simple() {\n\n let f = include_bytes!(\"search_hits_only.json\");\n\n let deserialized = parse::<SearchResponse<Value>>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert_eq!(deserialized.hits().count(), 5);\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/search/mod.rs", "rank": 66, 
"score": 75844.73496589893 }, { "content": "#[test]\n\nfn success_parse_hits_simple_of_t() {\n\n #[allow(dead_code)]\n\n #[derive(Deserialize)]\n\n struct Event {\n\n #[serde(rename = \"@version\")]\n\n version: String,\n\n #[serde(rename = \"@timestamp\")]\n\n timestamp: String,\n\n port: u16,\n\n #[serde(rename = \"type\")]\n\n ty: String,\n\n tags: Vec<String>,\n\n #[serde(rename = \"destinationAddress\")]\n\n destination_address: String,\n\n #[serde(rename = \"countryCode\", default)]\n\n country_code: String,\n\n #[serde(rename = \"countryName\", default)]\n\n country_name: String,\n\n #[serde(rename = \"cityName\", default)]\n\n city_name: String,\n", "file_path": "src/elastic/src/client/responses/tests/search/mod.rs", "rank": 67, "score": 75844.73496589893 }, { "content": "#[test]\n\nfn success_parse_command_response() {\n\n let f = include_bytes!(\"acknowledged.json\");\n\n let deserialized = parse::<CommandResponse>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert!(deserialized.acknowledged());\n\n}\n", "file_path": "src/elastic/src/client/responses/tests/command/mod.rs", "rank": 68, "score": 75844.73496589893 }, { "content": "#[test]\n\nfn success_parse_multi_ops() {\n\n let f = include_bytes!(\"bulk_multiple_ops.json\");\n\n let deserialized = parse::<BulkResponse>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert!(deserialized.is_ok());\n\n\n\n let mut index_count = 0;\n\n let mut create_count = 0;\n\n let mut update_count = 0;\n\n let mut delete_count = 0;\n\n\n\n for item in deserialized.into_iter().filter_map(Result::ok) {\n\n match item.action() {\n\n bulk::Action::Index => index_count += 1,\n\n bulk::Action::Create => create_count += 1,\n\n bulk::Action::Update => update_count += 1,\n\n bulk::Action::Delete => delete_count += 1,\n\n }\n\n }\n\n\n\n assert_eq!(\n\n (1, 1, 1, 1),\n\n (index_count, create_count, update_count, delete_count)\n\n );\n\n}\n\n\n", "file_path": 
"src/elastic/src/client/responses/tests/bulk/mod.rs", "rank": 69, "score": 75844.73496589893 }, { "content": "fn doc() -> Doc {\n\n Doc {\n\n id: ID.to_owned(),\n\n title: \"Not edited title\".to_owned(),\n\n }\n\n}\n\n\n\ntest! {\n\n const description: &'static str = \"update with script\";\n\n\n\n type Response = (UpdateResponse, GetResponse<Doc>);\n\n\n\n // Ensure the index doesn't exist\n\n fn prepare(&self, client: AsyncClient) -> Box<dyn Future<Item = (), Error = Error>> {\n\n let delete_res = client.index(Doc::static_index()).delete().send();\n\n\n\n let index_res = client\n\n .document()\n\n .index(doc())\n\n .params_fluent(|p| p.url_param(\"refresh\", true))\n", "file_path": "tests/integration/src/tests/document/update_with_script.rs", "rank": 70, "score": 75428.79916827715 }, { "content": "fn doc() -> Doc {\n\n Doc {\n\n id: ID.to_owned(),\n\n title: \"A document title\".to_owned(),\n\n timestamp: Date::build(2017, 3, 24, 13, 44, 0, 0),\n\n }\n\n}\n\n\n\ntest! {\n\n const description: &'static str = \"raw upsert then get\";\n\n\n\n type Response = BulkResponse;\n\n\n\n // Ensure the index doesn't exist\n\n fn prepare(&self, client: AsyncClient) -> Box<dyn Future<Item = (), Error = Error>> {\n\n let delete_res = client.index(INDEX).delete().send().map(|_| ());\n\n\n\n Box::new(delete_res)\n\n }\n\n\n", "file_path": "tests/integration/src/tests/bulk/raw_upsert.rs", "rank": 71, "score": 75428.79916827715 }, { "content": "fn doc() -> Doc {\n\n Doc { id: \"1\".to_owned() }\n\n}\n\n\n\ntest! 
{\n\n const description: &'static str = \"empty query\";\n\n\n\n type Response = SearchResponse<Doc>;\n\n\n\n // Ensure the index doesn't exist\n\n fn prepare(&self, client: AsyncClient) -> Box<dyn Future<Item = (), Error = Error>> {\n\n let delete_res = client.index(Doc::static_index()).delete().send();\n\n\n\n let index_reqs = future::join_all((0..10).map(move |_| {\n\n client\n\n .document()\n\n .index(doc())\n\n .params_fluent(|p| p.url_param(\"refresh\", true))\n\n .send()\n\n }));\n", "file_path": "tests/integration/src/tests/search/empty_query.rs", "rank": 72, "score": 75428.79916827715 }, { "content": "fn doc() -> Doc {\n\n Doc {\n\n id: ID.to_owned(),\n\n title: \"Not edited title\".to_owned(),\n\n }\n\n}\n\n\n\ntest! {\n\n const description: &'static str = \"update and return source\";\n\n\n\n type Response = UpdateResponse;\n\n\n\n // Ensure the index doesn't exist\n\n fn prepare(&self, client: AsyncClient) -> Box<dyn Future<Item = (), Error = Error>> {\n\n let delete_res = client\n\n .index(Doc::static_index())\n\n .delete()\n\n .send()\n\n .map(|_| ());\n\n\n", "file_path": "tests/integration/src/tests/document/update_with_source.rs", "rank": 73, "score": 75428.79916827715 }, { "content": "fn doc() -> Doc {\n\n Doc {\n\n id: ID.to_owned(),\n\n title: \"Not edited title\".to_owned(),\n\n }\n\n}\n\n\n\ntest! 
{\n\n const description: &'static str = \"update with new document\";\n\n\n\n type Response = (UpdateResponse, GetResponse<Doc>);\n\n\n\n // Ensure the index doesn't exist\n\n fn prepare(&self, client: AsyncClient) -> Box<dyn Future<Item = (), Error = Error>> {\n\n let delete_res = client\n\n .index(Doc::static_index())\n\n .delete()\n\n .send()\n\n .map(|_| ());\n\n\n", "file_path": "tests/integration/src/tests/document/update_with_doc.rs", "rank": 74, "score": 75428.79916827715 }, { "content": "fn doc() -> Doc {\n\n Doc {\n\n id: ID.to_owned(),\n\n title: \"A document title\".to_owned(),\n\n timestamp: Date::build(2017, 3, 24, 13, 44, 0, 0),\n\n }\n\n}\n\n\n\ntest! {\n\n const description: &'static str = \"create a doc along with its index\";\n\n\n\n type Response = BulkResponse;\n\n\n\n // Ensure the index doesn't exist\n\n fn prepare(&self, client: AsyncClient) -> Box<dyn Future<Item = (), Error = Error>> {\n\n let delete_res = client.index(INDEX).delete().send().map(|_| ());\n\n\n\n Box::new(delete_res)\n\n }\n\n\n", "file_path": "tests/integration/src/tests/bulk/index_create.rs", "rank": 75, "score": 75428.79916827715 }, { "content": "#[test]\n\nfn success_parse_hits_bank_sample() {\n\n let f = include_bytes!(\"search_bank_sample.json\");\n\n let deserialized = parse::<SearchResponse<Value>>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert_eq!(deserialized.hits().count(), 10);\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/search/mod.rs", "rank": 76, "score": 75045.33371320958 }, { "content": "#[test]\n\nfn success_parse_simple_nested_aggs() {\n\n let f = include_bytes!(\"search_aggregation_simple_nested.json\");\n\n let deserialized = parse::<SearchResponse<Value>>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert_eq!(deserialized.aggs().count(), 124);\n\n\n\n let doc_count = deserialized\n\n .aggs_raw()\n\n .and_then(|aggs| 
aggs[\"timechart\"][\"buckets\"][0][\"doc_count\"].as_u64());\n\n\n\n assert_eq!(Some(101), doc_count);\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/search/mod.rs", "rank": 77, "score": 75045.33371320958 }, { "content": "#[test]\n\nfn success_parse_response_exists() {\n\n let deserialized = parse::<IndicesExistsResponse>()\n\n .from_slice(StatusCode::OK, b\"\")\n\n .unwrap();\n\n\n\n assert!(deserialized.exists());\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/index_exists/mod.rs", "rank": 78, "score": 75045.33371320958 }, { "content": "#[test]\n\nfn error_parse_index_not_found() {\n\n let f = include_bytes!(\"../error/error_index_not_found.json\");\n\n let deserialized = parse::<GetResponse<Value>>()\n\n .from_slice(StatusCode::NOT_FOUND, f as &[_])\n\n .unwrap_err();\n\n\n\n let valid = match deserialized {\n\n ResponseError::Api(ApiError::IndexNotFound { ref index }) if index == \"carrots\" => true,\n\n _ => false,\n\n };\n\n\n\n assert!(valid);\n\n}\n", "file_path": "src/elastic/src/client/responses/tests/document_get/mod.rs", "rank": 79, "score": 75045.33371320958 }, { "content": "#[test]\n\nfn success_parse_hits_simple_as_value() {\n\n let f = include_bytes!(\"search_hits_only.json\");\n\n let deserialized = parse::<Value>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert_eq!(deserialized[\"_shards\"][\"total\"].as_u64().unwrap(), 5);\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/search/mod.rs", "rank": 80, "score": 75045.33371320958 }, { "content": "#[test]\n\nfn success_parse_3level_multichild_aggs() {\n\n let f = include_bytes!(\"search_aggregation_3level_multichild.json\");\n\n let deserialized = parse::<SearchResponse<Value>>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n let mut first = true;\n\n let mut count = 0;\n\n\n\n for i in deserialized.aggs().take(500_000) {\n\n count += 1;\n\n if first {\n\n assert_eq!(&json!(12), 
i[\"max_ack_pkts_sent\"]);\n\n assert_eq!(&json!(7), i[\"avg_ack_pkts_sent\"]);\n\n assert_eq!(&json!(2), i[\"min_ack_pkts_sent\"]);\n\n\n\n first = false;\n\n }\n\n }\n\n assert_eq!(count, 201);\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/search/mod.rs", "rank": 81, "score": 75045.33371320958 }, { "content": "#[test]\n\nfn success_parse_multi_ops_errors_only() {\n\n let f = include_bytes!(\"bulk_multiple_ops.json\");\n\n let deserialized = parse::<BulkErrorsResponse>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert!(deserialized.is_ok());\n\n assert_eq!(0, deserialized.iter().count());\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/bulk/mod.rs", "rank": 82, "score": 75045.33371320958 }, { "content": "#[test]\n\nfn success_parse_index_ops_errors_only() {\n\n let f = include_bytes!(\"bulk_index.json\");\n\n let deserialized = parse::<BulkErrorsResponse>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert!(deserialized.is_ok());\n\n assert_eq!(0, deserialized.iter().count());\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/bulk/mod.rs", "rank": 83, "score": 75045.33371320958 }, { "content": "#[test]\n\nfn success_parse_found_response() {\n\n let f = include_bytes!(\"delete_found.json\");\n\n let deserialized = parse::<DeleteResponse>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert_eq!(\"testindex\", deserialized.index());\n\n assert_eq!(\"testtype\", deserialized.ty());\n\n assert_eq!(\"1\", deserialized.id());\n\n assert_eq!(Some(2), deserialized.version());\n\n\n\n assert!(deserialized.deleted());\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/document_delete/mod.rs", "rank": 84, "score": 75045.33371320958 }, { "content": "#[test]\n\nfn success_parse_response_not_exists() {\n\n let deserialized = parse::<IndicesExistsResponse>()\n\n .from_slice(StatusCode::NOT_FOUND, b\"\")\n\n .unwrap();\n\n\n\n 
assert!(!deserialized.exists());\n\n}\n", "file_path": "src/elastic/src/client/responses/tests/index_exists/mod.rs", "rank": 85, "score": 75045.33371320958 }, { "content": "#[test]\n\nfn success_parse_not_found_response() {\n\n let f = include_bytes!(\"delete_not_found.json\");\n\n let deserialized = parse::<DeleteResponse>()\n\n .from_slice(StatusCode::NOT_FOUND, f as &[_])\n\n .unwrap();\n\n\n\n assert!(!deserialized.deleted());\n\n}\n", "file_path": "src/elastic/src/client/responses/tests/document_delete/mod.rs", "rank": 86, "score": 75045.33371320958 }, { "content": "#[test]\n\nfn error_parse_document_missing() {\n\n let f = include_bytes!(\"../error/error_document_missing.json\");\n\n let deserialized = parse::<UpdateResponse>()\n\n .from_slice(StatusCode::NOT_FOUND, f as &[_])\n\n .unwrap_err();\n\n\n\n let valid = match deserialized {\n\n ResponseError::Api(ApiError::DocumentMissing { ref index }) if index == \"carrots\" => true,\n\n _ => false,\n\n };\n\n\n\n assert!(valid);\n\n}\n", "file_path": "src/elastic/src/client/responses/tests/document_update/mod.rs", "rank": 87, "score": 75045.33371320958 }, { "content": "#[test]\n\nfn success_parse_3level_multistats_aggs() {\n\n let f = include_bytes!(\"search_aggregation_3level_multistats.json\");\n\n let deserialized = parse::<SearchResponse<Value>>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n let mut first = true;\n\n let mut count = 0;\n\n for i in deserialized.aggs().take(500_000) {\n\n count += 1;\n\n if first {\n\n assert_eq!(&json!(2), i[\"extstats_ack_pkts_sent_min\"]);\n\n assert_eq!(&json!(7), i[\"stats_ack_pkts_sent_avg\"]);\n\n assert_eq!(&json!(12), i[\"extstats_ack_pkts_sent_max\"]);\n\n assert_eq!(\n\n &json!(17),\n\n i[\"extstats_ack_pkts_sent_std_deviation_bounds_upper\"]\n\n );\n\n\n\n first = false;\n\n }\n\n }\n\n assert_eq!(count, 61);\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/search/mod.rs", "rank": 88, "score": 75045.33371320958 }, { 
"content": "#[test]\n\nfn error_parse_action_request_validation() {\n\n let f = include_bytes!(\"../error/error_action_request_validation.json\");\n\n let deserialized = parse::<BulkResponse>()\n\n .from_slice(StatusCode::BAD_REQUEST, f as &[_])\n\n .unwrap_err();\n\n\n\n let valid = match deserialized {\n\n ResponseError::Api(ApiError::ActionRequestValidation { ref reason })\n\n if reason == \"Validation Failed: 1: index is missing;2: type is missing;\" =>\n\n {\n\n true\n\n }\n\n _ => false,\n\n };\n\n\n\n assert!(valid);\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/bulk/mod.rs", "rank": 89, "score": 75045.33371320958 }, { "content": "fn doc() -> Doc {\n\n Doc {\n\n id: ID.to_owned(),\n\n title: \"Not edited title\".to_owned(),\n\n }\n\n}\n\n\n\ntest! {\n\n const description: &'static str = \"update with inline script\";\n\n\n\n type Response = (UpdateResponse, GetResponse<Doc>);\n\n\n\n // Ensure the index doesn't exist\n\n fn prepare(&self, client: AsyncClient) -> Box<dyn Future<Item = (), Error = Error>> {\n\n let delete_res = client.index(Doc::static_index()).delete().send();\n\n\n\n let index_res = client\n\n .document()\n\n .index(doc())\n\n .params_fluent(|p| p.url_param(\"refresh\", true))\n", "file_path": "tests/integration/src/tests/document/update_with_inline_script.rs", "rank": 90, "score": 74531.88490877718 }, { "content": "pub fn expand_derive(\n\n crate_root: proc_macro2::TokenStream,\n\n input: &DeriveInput,\n\n) -> Result<Vec<proc_macro2::TokenStream>, DeriveElasticTypeError> {\n\n // Annotatable item for a struct with struct fields\n\n let fields = match &input.data {\n\n Data::Struct(DataStruct {\n\n fields: Fields::Named(fields),\n\n ..\n\n }) => Ok(&fields.named),\n\n _ => Err(DeriveElasticTypeError::InvalidInput),\n\n }?;\n\n\n\n // Get the serializable fields\n\n let fields: Vec<(Ident, &Field)> = fields\n\n .iter()\n\n .map(|f| get_ser_field(f))\n\n .filter(|f| f.is_some())\n\n .map(|f| f.unwrap())\n\n .collect();\n", 
"file_path": "src/elastic_derive/src/elastic_type/mod.rs", "rank": 91, "score": 74531.88490877718 }, { "content": "fn doc() -> Doc {\n\n Doc {\n\n id: ID.to_owned(),\n\n title: Some(\"A document title\".to_owned()),\n\n timestamp: Some(Date::build(2017, 3, 24, 13, 44, 0, 0)),\n\n }\n\n}\n\n\n\ntest! {\n\n const description: &'static str = \"raw upsert then update, then get\";\n\n\n\n type Response = BulkResponse;\n\n\n\n // Ensure the index doesn't exist\n\n fn prepare(&self, client: AsyncClient) -> Box<dyn Future<Item = (), Error = Error>> {\n\n let delete_res = client.index(INDEX).delete().send().map(|_| ());\n\n\n\n Box::new(delete_res)\n\n }\n\n\n", "file_path": "tests/integration/src/tests/bulk/raw_upsert_update.rs", "rank": 92, "score": 74531.88490877718 }, { "content": "pub fn expand_derive(\n\n crate_root: proc_macro2::TokenStream,\n\n input: &DeriveInput,\n\n) -> Result<Vec<proc_macro2::TokenStream>, DeriveDateFormatError> {\n\n // Annotatable item for a unit struct\n\n match input.data {\n\n Data::Struct(ref data) => match data.fields {\n\n Fields::Unit => Ok(()),\n\n _ => Err(DeriveDateFormatError::InvalidInput),\n\n },\n\n _ => Err(DeriveDateFormatError::InvalidInput),\n\n }?;\n\n\n\n let format = get_format_from_attr(input).ok_or(DeriveDateFormatError::MissingFormat)?;\n\n\n\n let name = get_name_from_attr(input).unwrap_or_else(|| format.clone());\n\n\n\n let tokens: Vec<proc_macro2::TokenStream> = parse::to_tokens(&format)?\n\n .into_iter()\n\n .map(|t| t.into_tokens(&crate_root))\n\n .collect();\n\n\n\n let derived = impl_date_format(crate_root, input, &name, &tokens);\n\n\n\n Ok(vec![derived])\n\n}\n\n\n", "file_path": "src/elastic_derive/src/date_format/mod.rs", "rank": 93, "score": 74531.88490877718 }, { "content": "fn doc() -> Doc {\n\n Doc {\n\n id: ID.to_owned(),\n\n title: \"A document title\".to_owned(),\n\n timestamp: Date::build(2017, 3, 24, 13, 44, 0, 0),\n\n }\n\n}\n\n\n\ntest! 
{\n\n const description: &'static str = \"simple index then get\";\n\n\n\n type Response = GetResponse<Doc>;\n\n\n\n // Ensure the index doesn't exist\n\n fn prepare(&self, client: AsyncClient) -> Box<dyn Future<Item = (), Error = Error>> {\n\n let delete_res = client\n\n .index(Doc::static_index())\n\n .delete()\n\n .send()\n\n .map(|_| ());\n", "file_path": "tests/integration/src/tests/document/simple_index_get.rs", "rank": 94, "score": 74531.88490877718 }, { "content": "fn doc() -> Doc {\n\n Doc {\n\n id: ID.to_owned(),\n\n title: \"A document title\".to_owned(),\n\n timestamp: Date::build(2017, 3, 24, 13, 44, 0, 0),\n\n }\n\n}\n\n\n\ntest! {\n\n const description: &'static str = \"raw create then get\";\n\n\n\n type Response = BulkResponse;\n\n\n\n // Ensure the index doesn't exist\n\n fn prepare(&self, client: AsyncClient) -> Box<dyn Future<Item = (), Error = Error>> {\n\n let delete_res = client.index(INDEX).delete().send().map(|_| ());\n\n\n\n Box::new(delete_res)\n\n }\n\n\n", "file_path": "tests/integration/src/tests/bulk/raw_index_create.rs", "rank": 95, "score": 74531.88490877718 }, { "content": "#[test]\n\nfn success_parse_found_doc_response() {\n\n let f = include_bytes!(\"get_found.json\");\n\n let deserialized = parse::<GetResponse<Value>>()\n\n .from_slice(StatusCode::OK, f as &[_])\n\n .unwrap();\n\n\n\n assert_eq!(\"testindex\", deserialized.index());\n\n assert_eq!(\"testtype\", deserialized.ty());\n\n assert_eq!(\"1\", deserialized.id());\n\n assert_eq!(Some(8), deserialized.version());\n\n\n\n assert!(deserialized.found());\n\n assert!(deserialized.into_document().is_some());\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/document_get/mod.rs", "rank": 96, "score": 74274.92305363603 }, { "content": "#[test]\n\nfn error_parse_action_request_validation_errors_only() {\n\n let f = include_bytes!(\"../error/error_action_request_validation.json\");\n\n let deserialized = parse::<BulkErrorsResponse>()\n\n 
.from_slice(StatusCode::BAD_REQUEST, f as &[_])\n\n .unwrap_err();\n\n\n\n let valid = match deserialized {\n\n ResponseError::Api(ApiError::ActionRequestValidation { ref reason })\n\n if reason == \"Validation Failed: 1: index is missing;2: type is missing;\" =>\n\n {\n\n true\n\n }\n\n _ => false,\n\n };\n\n\n\n assert!(valid);\n\n}\n", "file_path": "src/elastic/src/client/responses/tests/bulk/mod.rs", "rank": 97, "score": 74274.92305363603 }, { "content": "#[test]\n\nfn success_parse_not_found_doc_response() {\n\n let f = include_bytes!(\"get_not_found.json\");\n\n let deserialized = parse::<GetResponse<Value>>()\n\n .from_slice(StatusCode::NOT_FOUND, f as &[_])\n\n .unwrap();\n\n\n\n assert!(!deserialized.found());\n\n assert!(deserialized.into_document().is_none());\n\n}\n\n\n", "file_path": "src/elastic/src/client/responses/tests/document_get/mod.rs", "rank": 98, "score": 74274.92305363603 }, { "content": "#[test]\n\nfn error_parse_index_already_exists() {\n\n let f = include_bytes!(\"../error/error_index_already_exists.json\");\n\n let deserialized = parse::<IndexResponse>()\n\n .from_slice(StatusCode::BAD_REQUEST, f as &[_])\n\n .unwrap_err();\n\n\n\n let valid = match deserialized {\n\n ResponseError::Api(ApiError::IndexAlreadyExists { ref index }) if index == \"carrots\" => {\n\n true\n\n }\n\n _ => false,\n\n };\n\n\n\n assert!(valid);\n\n}\n", "file_path": "src/elastic/src/client/responses/tests/document_index/mod.rs", "rank": 99, "score": 74274.92305363603 } ]
Rust
examples/src/bin/orientable_subclass.rs
elmarco/gtk4-rs
a1f7bcc611584c542308bd062ceda0103f96a69a
use std::cell::RefCell; use std::env; use gtk::glib; use gtk::prelude::*; use gtk::subclass::prelude::ObjectSubclass; mod imp { use super::*; use gtk::{glib::translate::ToGlib, subclass::prelude::*}; #[derive(Debug)] pub struct CustomOrientable { first_label: RefCell<Option<gtk::Widget>>, second_label: RefCell<Option<gtk::Widget>>, orientation: RefCell<gtk::Orientation>, } #[glib::object_subclass] impl ObjectSubclass for CustomOrientable { const NAME: &'static str = "ExCustomOrientable"; type Type = super::CustomOrientable; type ParentType = gtk::Widget; type Interfaces = (gtk::Orientable,); fn class_init(klass: &mut Self::Class) { klass.set_layout_manager_type::<gtk::BoxLayout>(); } fn new() -> Self { Self { first_label: RefCell::new(None), second_label: RefCell::new(None), orientation: RefCell::new(gtk::Orientation::Horizontal), } } } impl ObjectImpl for CustomOrientable { fn constructed(&self, obj: &Self::Type) { self.parent_constructed(obj); let first_label = gtk::Label::new(Some("Hello")); let second_label = gtk::Label::new(Some("World!")); let layout_manager = obj .get_layout_manager() .unwrap() .downcast::<gtk::BoxLayout>() .unwrap(); layout_manager.set_spacing(6); first_label.set_parent(obj); second_label.set_parent(obj); self.first_label .replace(Some(first_label.upcast::<gtk::Widget>())); self.second_label .replace(Some(second_label.upcast::<gtk::Widget>())); } fn dispose(&self, _obj: &Self::Type) { if let Some(child) = self.first_label.borrow_mut().take() { child.unparent(); } if let Some(child) = self.second_label.borrow_mut().take() { child.unparent(); } } fn properties() -> &'static [glib::ParamSpec] { use once_cell::sync::Lazy; static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| { vec![glib::ParamSpec::enum_( "orientation", "orientation", "Orientation", gtk::Orientation::static_type(), gtk::Orientation::Horizontal.to_glib(), glib::ParamFlags::READWRITE | glib::ParamFlags::CONSTRUCT, )] }); PROPERTIES.as_ref() } fn set_property( &self, obj: 
&Self::Type, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec, ) { match pspec.get_name() { "orientation" => { let orientation = value.get().unwrap().unwrap(); self.orientation.replace(orientation); let layout_manager = obj .get_layout_manager() .unwrap() .downcast::<gtk::BoxLayout>() .unwrap(); layout_manager.set_orientation(orientation); } _ => unimplemented!(), } } fn get_property( &self, _obj: &Self::Type, _id: usize, pspec: &glib::ParamSpec, ) -> glib::Value { match pspec.get_name() { "orientation" => self.orientation.borrow().to_value(), _ => unimplemented!(), } } } impl WidgetImpl for CustomOrientable {} impl OrientableImpl for CustomOrientable {} } glib::wrapper! { pub struct CustomOrientable(ObjectSubclass<imp::CustomOrientable>) @extends gtk::Widget, @implements gtk::Orientable; } impl CustomOrientable { pub fn new() -> Self { glib::Object::new(&[]).expect("Failed to create CustomOrientable") } } fn main() { let application = gtk::Application::new( Some("com.github.gtk-rs.examples.orientable_subclass"), Default::default(), ) .expect("Initialization failed..."); application.connect_activate(|app| { let window = gtk::ApplicationWindow::new(app); let bx = gtk::Box::new(gtk::Orientation::Vertical, 6); let orientable = CustomOrientable::new(); let button = gtk::Button::with_label("Switch orientation"); button.connect_clicked(glib::clone!(@weak orientable => move |_| { match orientable.get_orientation() { gtk::Orientation::Horizontal => orientable.set_orientation(gtk::Orientation::Vertical), gtk::Orientation::Vertical => orientable.set_orientation(gtk::Orientation::Horizontal), _ => unreachable!(), }; })); orientable.set_halign(gtk::Align::Center); bx.append(&orientable); bx.append(&button); bx.set_margin_top(18); bx.set_margin_bottom(18); bx.set_margin_start(18); bx.set_margin_end(18); window.set_child(Some(&bx)); window.show(); }); application.run(&env::args().collect::<Vec<_>>()); }
use std::cell::RefCell; use std::env; use gtk::glib; use gtk::prelude::*; use gtk::subclass::prelude::ObjectSubclass; mod imp { use super::*; use gtk::{glib::translate::ToGlib, subclass::prelude::*}; #[derive(Debug)] pub struct CustomOrientable { first_label: RefCell<Option<gtk::Widget>>, second_label: RefCell<Option<gtk::Widget>>, orientation: RefCell<gtk::Orientation>, } #[glib::object_subclass] impl ObjectSubclass for CustomOrientable { const NAME: &'static str = "ExCustomOrientable"; type Type = super::CustomOrientable; type ParentType = gtk::Widget; type Interfaces = (gtk::Orientable,); fn class_init(klass: &mut Self::Class) { klass.set_layout_manager_type::<gtk::BoxLayout>(); } fn new() -> Self { Self { first_label: RefCell::new(None), second_label: RefCell::new(None), orientation: RefCell::new(gtk::Orientation::Horizontal), } } } impl ObjectImpl for CustomOrientable { fn constructed(&self, obj: &Self::Type) { self.parent_constructed(obj); let first_label = gtk::Label::new(Some("Hello")); let second_label = gtk::Label::new(Some("World!")); let layout_manager = obj .get_layout_manager() .unwrap() .downcast::<gtk::BoxLayout>() .unwrap(); layout_manager.set_spacing(6); first_label.set_parent(obj); second_label.set_parent(obj); self.first_label .replace(Some(first_label.upcast::<gtk::Widget>())); self.second_label .replace(Some(second_label.upcast::<gtk::Widget>())); } fn dispose(&self, _obj: &Self::Type) { if let Some(child) = self.first_label.borrow_mut().take() { child.unparent(); } if let Some(child) = self.second_label.borrow_mut().take() { child.unparent(); } } fn properties() -> &'static [glib::ParamSpec] { use once_cell::sync::Lazy; static PROPERTIES: Lazy<Vec<glib::ParamSpec>> = Lazy::new(|| { vec![glib::ParamSpec::enum_( "orientation", "orientation", "Orientation", gtk::Orientation::static_type(), gtk::Orientation::Horizontal.to_glib(), glib::ParamFlags::READWRITE | glib::ParamFlags::CONSTRUCT, )] }); PROPERTIES.as_ref() } fn set_property( &self, obj: 
&Self::Type, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec, ) { match pspec.get_name() { "orientation" => { let orientation = value.get().unwrap().unwrap(); self.orientation.replace(orientation); let layout_manager = obj .get_layout_manager() .unwrap() .downcast::<gtk::BoxLayout>() .unwrap(); layout_manager.set_orientation(orientation); } _ => unimplemented!(), } } fn get_property( &self, _obj: &Self::Type, _id: usize, pspec: &glib::ParamSpec, ) -> glib::Value { match pspec.get_name() { "orientation" => self.orientation.borrow().to_value(), _ => unimplemented!(), } } } impl WidgetImpl for CustomOrientable {} impl OrientableImpl for CustomOrientable {} } glib::wrapper! { pub struct CustomOrientable(ObjectSubclass<imp::CustomOrientable>) @extends gtk::Widget, @implements gtk::Orientable; } impl CustomOrientable { pub fn new() -> Self { glib::Object::new(&[]).expect("Failed to create CustomOrientable") } } fn main() { let application = gtk::Application::new( Some("com.github.gtk-rs.examples.orientable_subclass"), Default::default(), ) .expect("Initialization failed..."); application.connect_activate(|app| { let window = gtk::ApplicationWindow::new(app); let bx = gtk::Box::new(gtk::Orientation::Vertical, 6); let orientable = CustomOrientable::new(); let button = gtk::Button::with_label("Switch orientation");
bx.append(&orientable); bx.append(&button); bx.set_margin_top(18); bx.set_margin_bottom(18); bx.set_margin_start(18); bx.set_margin_end(18); window.set_child(Some(&bx)); window.show(); }); application.run(&env::args().collect::<Vec<_>>()); }
button.connect_clicked(glib::clone!(@weak orientable => move |_| { match orientable.get_orientation() { gtk::Orientation::Horizontal => orientable.set_orientation(gtk::Orientation::Vertical), gtk::Orientation::Vertical => orientable.set_orientation(gtk::Orientation::Horizontal), _ => unreachable!(), }; })); orientable.set_halign(gtk::Align::Center);
function_block-random_span
[ { "content": "pub trait ApplicationWindowImpl: WindowImpl + 'static {}\n\n\n\nunsafe impl<T: ApplicationWindowImpl> IsSubclassable<T> for ApplicationWindow {\n\n fn class_init(class: &mut glib::Class<Self>) {\n\n <Window as IsSubclassable<T>>::class_init(class);\n\n }\n\n\n\n fn instance_init(instance: &mut glib::subclass::InitializingObject<T>) {\n\n <Window as IsSubclassable<T>>::instance_init(instance);\n\n }\n\n}\n", "file_path": "gtk4/src/subclass/application_window.rs", "rank": 0, "score": 349692.06715673266 }, { "content": "#[doc(alias = \"gtk_show_about_dialog\")]\n\npub fn show_about_dialog<P: IsA<Window>>(parent: Option<&P>, properties: &[(&str, &dyn ToValue)]) {\n\n assert_initialized_main_thread!();\n\n\n\n let about_dialog =\n\n glib::Object::new::<AboutDialog>(properties).expect(\"Failed to crate an about dialog\");\n\n about_dialog.set_transient_for(parent);\n\n about_dialog.show();\n\n}\n", "file_path": "gtk4/src/functions.rs", "rank": 1, "score": 332174.74790913553 }, { "content": "pub trait ApplicationWindowExt: 'static {\n\n #[doc(alias = \"gtk_application_window_get_help_overlay\")]\n\n fn get_help_overlay(&self) -> Option<ShortcutsWindow>;\n\n\n\n #[doc(alias = \"gtk_application_window_get_id\")]\n\n fn get_id(&self) -> u32;\n\n\n\n #[doc(alias = \"gtk_application_window_get_show_menubar\")]\n\n fn get_show_menubar(&self) -> bool;\n\n\n\n #[doc(alias = \"gtk_application_window_set_help_overlay\")]\n\n fn set_help_overlay(&self, help_overlay: Option<&ShortcutsWindow>);\n\n\n\n #[doc(alias = \"gtk_application_window_set_show_menubar\")]\n\n fn set_show_menubar(&self, show_menubar: bool);\n\n\n\n fn connect_property_show_menubar_notify<F: Fn(&Self) + 'static>(&self, f: F)\n\n -> SignalHandlerId;\n\n}\n\n\n", "file_path": "gtk4/src/auto/application_window.rs", "rank": 2, "score": 291537.5226463793 }, { "content": "pub trait OrientableImpl: ObjectImpl {}\n\n\n\nunsafe impl<T: OrientableImpl> IsImplementable<T> for Orientable {\n\n fn 
interface_init(_iface: &mut glib::Interface<Self>) {}\n\n\n\n fn instance_init(_instance: &mut glib::subclass::InitializingObject<T>) {}\n\n}\n", "file_path": "gtk4/src/subclass/orientable.rs", "rank": 3, "score": 269392.6229961822 }, { "content": "pub trait EventKind: StaticType + FromGlibPtrFull<*mut ffi::GdkEvent> + 'static {\n\n fn event_types() -> &'static [EventType];\n\n}\n\n\n\nmacro_rules! define_event {\n\n ($rust_type:ident, $ffi_type:path, $ffi_type_path:path, $event_event_types:expr) => {\n\n // Can't use get_type here as this is not a boxed type but another fundamental type\n\n glib::wrapper! {\n\n pub struct $rust_type(Shared<$ffi_type>);\n\n\n\n match fn {\n\n ref => |ptr| ffi::gdk_event_ref(ptr as *mut ffi::GdkEvent) as *mut $ffi_type,\n\n unref => |ptr| ffi::gdk_event_unref(ptr as *mut ffi::GdkEvent),\n\n }\n\n }\n\n\n\n impl StaticType for $rust_type {\n\n fn static_type() -> Type {\n\n unsafe { from_glib($ffi_type_path()) }\n\n }\n", "file_path": "gdk4/src/event.rs", "rank": 4, "score": 248771.25376822296 }, { "content": "#[doc(alias = \"gdk_intern_mime_type\")]\n\npub fn intern_mime_type(string: &str) -> Option<glib::GString> {\n\n assert_initialized_main_thread!();\n\n unsafe { from_glib_none(ffi::gdk_intern_mime_type(string.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "gdk4/src/auto/functions.rs", "rank": 5, "score": 244906.2113214663 }, { "content": "#[doc(alias = \"gtk_accelerator_get_default_mod_mask\")]\n\npub fn accelerator_get_default_mod_mask() -> gdk::ModifierType {\n\n assert_initialized_main_thread!();\n\n unsafe { from_glib(ffi::gtk_accelerator_get_default_mod_mask()) }\n\n}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 6, "score": 243360.5526942778 }, { "content": "pub trait WindowImpl: WindowImplExt + WidgetImpl {\n\n fn activate_focus(&self, window: &Self::Type) {\n\n self.parent_activate_focus(window)\n\n }\n\n\n\n fn activate_default(&self, window: &Self::Type) {\n\n self.parent_activate_default(window)\n\n 
}\n\n\n\n fn keys_changed(&self, window: &Self::Type) {\n\n self.parent_keys_changed(window)\n\n }\n\n\n\n fn enable_debugging(&self, window: &Self::Type, toggle: bool) -> bool {\n\n self.parent_enable_debugging(window, toggle)\n\n }\n\n\n\n fn close_request(&self, window: &Self::Type) -> glib::signal::Inhibit {\n\n self.parent_close_request(window)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/window.rs", "rank": 7, "score": 241554.77445989006 }, { "content": "pub trait ButtonImpl: ButtonImplExt + WidgetImpl {\n\n fn activate(&self, button: &Self::Type) {\n\n self.parent_activate(button)\n\n }\n\n\n\n fn clicked(&self, button: &Self::Type) {\n\n self.parent_clicked(button)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/button.rs", "rank": 8, "score": 241471.12367516165 }, { "content": "#[doc(alias = \"gtk_accelerator_parse\")]\n\npub fn accelerator_parse(accelerator: &str) -> Option<(u32, gdk::ModifierType)> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut accelerator_key = mem::MaybeUninit::uninit();\n\n let mut accelerator_mods = mem::MaybeUninit::uninit();\n\n let ret = from_glib(ffi::gtk_accelerator_parse(\n\n accelerator.to_glib_none().0,\n\n accelerator_key.as_mut_ptr(),\n\n accelerator_mods.as_mut_ptr(),\n\n ));\n\n let accelerator_key = accelerator_key.assume_init();\n\n let accelerator_mods = accelerator_mods.assume_init();\n\n if ret {\n\n Some((accelerator_key, from_glib(accelerator_mods)))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n//#[doc(alias = \"gtk_accelerator_parse_with_keycode\")]\n\n//pub fn accelerator_parse_with_keycode(accelerator: &str, display: Option<&gdk::Display>, accelerator_codes: Vec<u32>) -> Option<(u32, gdk::ModifierType)> {\n\n// unsafe { TODO: call ffi:gtk_accelerator_parse_with_keycode() }\n\n//}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 9, "score": 240768.29173417998 }, { "content": "fn parse_field_attr_value_str(name_value: &MetaNameValue) -> Result<String, Error> {\n\n match 
&name_value.lit {\n\n Lit::Str(s) => Ok(s.value()),\n\n _ => Err(Error::new(\n\n name_value.lit.span(),\n\n \"invalid value type: Expected str literal\",\n\n )),\n\n }\n\n}\n\n\n", "file_path": "gtk4-macros/src/attribute_parser.rs", "rank": 10, "score": 236930.55633902512 }, { "content": "pub trait GtkApplicationImpl: ObjectImpl + GtkApplicationImplExt + ApplicationImpl {\n\n fn window_added(&self, application: &Self::Type, window: &Window) {\n\n self.parent_window_added(application, window)\n\n }\n\n\n\n fn window_removed(&self, application: &Self::Type, window: &Window) {\n\n self.parent_window_removed(application, window)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/application.rs", "rank": 11, "score": 236575.12122699118 }, { "content": "pub trait WindowGroupImpl: ObjectImpl {}\n\n\n\nunsafe impl<T: WindowGroupImpl> IsSubclassable<T> for WindowGroup {\n\n fn class_init(class: &mut glib::Class<Self>) {\n\n <Object as IsSubclassable<T>>::class_init(class);\n\n }\n\n\n\n fn instance_init(instance: &mut glib::subclass::InitializingObject<T>) {\n\n <Object as IsSubclassable<T>>::instance_init(instance);\n\n }\n\n}\n", "file_path": "gtk4/src/subclass/window_group.rs", "rank": 12, "score": 236247.15517317766 }, { "content": "pub trait CheckButtonImpl: CheckButtonImplExt + WidgetImpl {\n\n fn toggled(&self, check_button: &Self::Type) {\n\n self.parent_toggled(check_button)\n\n }\n\n\n\n #[cfg(any(feature = \"v4_2\", feature = \"dox\"))]\n\n fn activate(&self, check_button: &Self::Type) {\n\n self.parent_activate(check_button)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/check_button.rs", "rank": 13, "score": 234201.4054754778 }, { "content": "pub trait ScaleButtonImpl: ScaleButtonImplExt + WidgetImpl {\n\n fn value_changed(&self, scale_button: &Self::Type, new_value: f64) {\n\n self.parent_value_changed(scale_button, new_value)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/scale_button.rs", "rank": 14, "score": 234201.4054754778 }, { "content": 
"#[doc(alias = \"gtk_show_uri\")]\n\npub fn show_uri<P: IsA<Window>>(parent: Option<&P>, uri: &str, timestamp: u32) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gtk_show_uri(\n\n parent.map(|p| p.as_ref()).to_glib_none().0,\n\n uri.to_glib_none().0,\n\n timestamp,\n\n );\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 15, "score": 233835.80338313844 }, { "content": "pub trait OrientableExt: 'static {\n\n #[doc(alias = \"gtk_orientable_get_orientation\")]\n\n fn get_orientation(&self) -> Orientation;\n\n\n\n #[doc(alias = \"gtk_orientable_set_orientation\")]\n\n fn set_orientation(&self, orientation: Orientation);\n\n\n\n fn connect_property_orientation_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n\nimpl<O: IsA<Orientable>> OrientableExt for O {\n\n fn get_orientation(&self) -> Orientation {\n\n unsafe {\n\n from_glib(ffi::gtk_orientable_get_orientation(\n\n self.as_ref().to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n fn set_orientation(&self, orientation: Orientation) {\n", "file_path": "gtk4/src/auto/orientable.rs", "rank": 16, "score": 231434.50126442977 }, { "content": "pub trait ButtonExt: 'static {\n\n #[doc(alias = \"gtk_button_get_child\")]\n\n fn get_child(&self) -> Option<Widget>;\n\n\n\n #[doc(alias = \"gtk_button_get_has_frame\")]\n\n fn get_has_frame(&self) -> bool;\n\n\n\n #[doc(alias = \"gtk_button_get_icon_name\")]\n\n fn get_icon_name(&self) -> Option<glib::GString>;\n\n\n\n #[doc(alias = \"gtk_button_get_label\")]\n\n fn get_label(&self) -> Option<glib::GString>;\n\n\n\n #[doc(alias = \"gtk_button_get_use_underline\")]\n\n fn get_use_underline(&self) -> bool;\n\n\n\n #[doc(alias = \"gtk_button_set_child\")]\n\n fn set_child<P: IsA<Widget>>(&self, child: Option<&P>);\n\n\n\n #[doc(alias = \"gtk_button_set_has_frame\")]\n", "file_path": "gtk4/src/auto/button.rs", "rank": 17, "score": 231147.78094660857 }, { "content": "#[doc(alias = \"gtk_accelerator_name\")]\n\npub fn 
accelerator_name(\n\n accelerator_key: u32,\n\n accelerator_mods: gdk::ModifierType,\n\n) -> Option<glib::GString> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(ffi::gtk_accelerator_name(\n\n accelerator_key,\n\n accelerator_mods.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 18, "score": 229193.3002042148 }, { "content": "pub trait GtkApplicationExt: 'static {\n\n #[doc(alias = \"gtk_application_add_window\")]\n\n fn add_window<P: IsA<Window>>(&self, window: &P);\n\n\n\n #[doc(alias = \"gtk_application_get_accels_for_action\")]\n\n fn get_accels_for_action(&self, detailed_action_name: &str) -> Vec<glib::GString>;\n\n\n\n #[doc(alias = \"gtk_application_get_actions_for_accel\")]\n\n fn get_actions_for_accel(&self, accel: &str) -> Vec<glib::GString>;\n\n\n\n #[doc(alias = \"gtk_application_get_active_window\")]\n\n fn get_active_window(&self) -> Option<Window>;\n\n\n\n #[doc(alias = \"gtk_application_get_menu_by_id\")]\n\n fn get_menu_by_id(&self, id: &str) -> Option<gio::Menu>;\n\n\n\n #[doc(alias = \"gtk_application_get_menubar\")]\n\n fn get_menubar(&self) -> Option<gio::MenuModel>;\n\n\n\n #[doc(alias = \"gtk_application_get_window_by_id\")]\n", "file_path": "gtk4/src/auto/application.rs", "rank": 19, "score": 227923.9067365201 }, { "content": "pub trait GtkWindowExt: 'static {\n\n #[doc(alias = \"gtk_window_close\")]\n\n fn close(&self);\n\n\n\n #[doc(alias = \"gtk_window_destroy\")]\n\n fn destroy(&self);\n\n\n\n #[doc(alias = \"gtk_window_fullscreen\")]\n\n fn fullscreen(&self);\n\n\n\n #[doc(alias = \"gtk_window_fullscreen_on_monitor\")]\n\n fn fullscreen_on_monitor(&self, monitor: &gdk::Monitor);\n\n\n\n #[doc(alias = \"gtk_window_get_application\")]\n\n fn get_application(&self) -> Option<Application>;\n\n\n\n #[doc(alias = \"gtk_window_get_child\")]\n\n fn get_child(&self) -> Option<Widget>;\n\n\n\n #[doc(alias = \"gtk_window_get_decorated\")]\n", "file_path": "gtk4/src/auto/window.rs", 
"rank": 20, "score": 227763.1131602207 }, { "content": "#[doc(alias = \"gdk_set_allowed_backends\")]\n\npub fn set_allowed_backends(backends: &str) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gdk_set_allowed_backends(backends.to_glib_none().0);\n\n }\n\n}\n\n\n", "file_path": "gdk4/src/auto/functions.rs", "rank": 21, "score": 227568.18336984492 }, { "content": "#[doc(alias = \"gtk_test_register_all_types\")]\n\npub fn test_register_all_types() {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n ffi::gtk_test_register_all_types();\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 22, "score": 225298.14660025536 }, { "content": "#[doc(alias = \"gtk_accelerator_name_with_keycode\")]\n\npub fn accelerator_name_with_keycode(\n\n display: Option<&gdk::Display>,\n\n accelerator_key: u32,\n\n keycode: u32,\n\n accelerator_mods: gdk::ModifierType,\n\n) -> Option<glib::GString> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(ffi::gtk_accelerator_name_with_keycode(\n\n display.to_glib_none().0,\n\n accelerator_key,\n\n keycode,\n\n accelerator_mods.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 23, "score": 225297.1444529586 }, { "content": "pub trait WindowImplExt: ObjectSubclass {\n\n fn parent_activate_focus(&self, window: &Self::Type);\n\n fn parent_activate_default(&self, window: &Self::Type);\n\n fn parent_keys_changed(&self, window: &Self::Type);\n\n fn parent_enable_debugging(&self, window: &Self::Type, toggle: bool) -> bool;\n\n fn parent_close_request(&self, window: &Self::Type) -> glib::signal::Inhibit;\n\n}\n\n\n\nimpl<T: WindowImpl> WindowImplExt for T {\n\n fn parent_activate_focus(&self, window: &Self::Type) {\n\n unsafe {\n\n let data = T::type_data();\n\n let parent_class = data.as_ref().get_parent_class() as *mut ffi::GtkWindowClass;\n\n let f = (*parent_class)\n\n .activate_focus\n\n .expect(\"No parent class impl for \\\"activate_focus\\\"\");\n\n 
f(window.unsafe_cast_ref::<Window>().to_glib_none().0)\n\n }\n\n }\n\n\n", "file_path": "gtk4/src/subclass/window.rs", "rank": 24, "score": 224629.59861258115 }, { "content": "pub trait ButtonImplExt: ObjectSubclass {\n\n fn parent_activate(&self, button: &Self::Type);\n\n fn parent_clicked(&self, button: &Self::Type);\n\n}\n\n\n\nimpl<T: ButtonImpl> ButtonImplExt for T {\n\n fn parent_activate(&self, button: &Self::Type) {\n\n unsafe {\n\n let data = T::type_data();\n\n let parent_class = data.as_ref().get_parent_class() as *mut ffi::GtkButtonClass;\n\n if let Some(f) = (*parent_class).activate {\n\n f(button.unsafe_cast_ref::<Button>().to_glib_none().0)\n\n }\n\n }\n\n }\n\n\n\n fn parent_clicked(&self, button: &Self::Type) {\n\n unsafe {\n\n let data = T::type_data();\n\n let parent_class = data.as_ref().get_parent_class() as *mut ffi::GtkButtonClass;\n", "file_path": "gtk4/src/subclass/button.rs", "rank": 25, "score": 224552.11666691292 }, { "content": "pub trait WindowGroupExt: 'static {\n\n #[doc(alias = \"gtk_window_group_add_window\")]\n\n fn add_window<P: IsA<Window>>(&self, window: &P);\n\n\n\n #[doc(alias = \"gtk_window_group_list_windows\")]\n\n fn list_windows(&self) -> Vec<Window>;\n\n\n\n #[doc(alias = \"gtk_window_group_remove_window\")]\n\n fn remove_window<P: IsA<Window>>(&self, window: &P);\n\n}\n\n\n\nimpl<O: IsA<WindowGroup>> WindowGroupExt for O {\n\n fn add_window<P: IsA<Window>>(&self, window: &P) {\n\n unsafe {\n\n ffi::gtk_window_group_add_window(\n\n self.as_ref().to_glib_none().0,\n\n window.as_ref().to_glib_none().0,\n\n );\n\n }\n\n }\n", "file_path": "gtk4/src/auto/window_group.rs", "rank": 26, "score": 224436.02792522224 }, { "content": "pub trait CheckButtonExt: 'static {\n\n #[doc(alias = \"gtk_check_button_get_active\")]\n\n fn get_active(&self) -> bool;\n\n\n\n #[doc(alias = \"gtk_check_button_get_inconsistent\")]\n\n fn get_inconsistent(&self) -> bool;\n\n\n\n #[doc(alias = \"gtk_check_button_get_label\")]\n\n fn 
get_label(&self) -> Option<glib::GString>;\n\n\n\n #[doc(alias = \"gtk_check_button_get_use_underline\")]\n\n fn get_use_underline(&self) -> bool;\n\n\n\n #[doc(alias = \"gtk_check_button_set_active\")]\n\n fn set_active(&self, setting: bool);\n\n\n\n #[doc(alias = \"gtk_check_button_set_group\")]\n\n fn set_group<P: IsA<CheckButton>>(&self, group: Option<&P>);\n\n\n\n #[doc(alias = \"gtk_check_button_set_inconsistent\")]\n", "file_path": "gtk4/src/auto/check_button.rs", "rank": 27, "score": 224358.54597955404 }, { "content": "pub trait ScaleButtonExt: 'static {\n\n #[doc(alias = \"gtk_scale_button_get_adjustment\")]\n\n fn get_adjustment(&self) -> Adjustment;\n\n\n\n #[doc(alias = \"gtk_scale_button_get_minus_button\")]\n\n fn get_minus_button(&self) -> Button;\n\n\n\n #[doc(alias = \"gtk_scale_button_get_plus_button\")]\n\n fn get_plus_button(&self) -> Button;\n\n\n\n #[doc(alias = \"gtk_scale_button_get_popup\")]\n\n fn get_popup(&self) -> Widget;\n\n\n\n #[doc(alias = \"gtk_scale_button_get_value\")]\n\n fn get_value(&self) -> f64;\n\n\n\n #[doc(alias = \"gtk_scale_button_set_adjustment\")]\n\n fn set_adjustment<P: IsA<Adjustment>>(&self, adjustment: &P);\n\n\n\n #[doc(alias = \"gtk_scale_button_set_icons\")]\n", "file_path": "gtk4/src/auto/scale_button.rs", "rank": 28, "score": 224358.54597955404 }, { "content": "pub trait SpinButtonExtManual: 'static {\n\n fn connect_input<F>(&self, input_func: F) -> SignalHandlerId\n\n where\n\n F: Fn(&Self) -> Option<Result<f64, ()>> + 'static;\n\n}\n\n\n\nimpl<T: IsA<SpinButton>> SpinButtonExtManual for T {\n\n fn connect_input<F>(&self, f: F) -> SignalHandlerId\n\n where\n\n F: Fn(&Self) -> Option<Result<f64, ()>> + 'static,\n\n {\n\n unsafe {\n\n let f: Box_<F> = Box_::new(f);\n\n connect_raw(\n\n self.to_glib_none().0 as *mut _,\n\n b\"input\\0\".as_ptr() as *mut _,\n\n Some(transmute(input_trampoline::<Self, F> as usize)),\n\n Box_::into_raw(f),\n\n )\n\n }\n", "file_path": "gtk4/src/spin_button.rs", "rank": 29, 
"score": 224358.54597955404 }, { "content": "pub trait ToggleButtonExt: 'static {\n\n #[doc(alias = \"gtk_toggle_button_get_active\")]\n\n fn get_active(&self) -> bool;\n\n\n\n #[doc(alias = \"gtk_toggle_button_set_active\")]\n\n fn set_active(&self, is_active: bool);\n\n\n\n #[doc(alias = \"gtk_toggle_button_set_group\")]\n\n fn set_group<P: IsA<ToggleButton>>(&self, group: Option<&P>);\n\n\n\n #[doc(alias = \"gtk_toggle_button_toggled\")]\n\n fn toggled(&self);\n\n\n\n fn connect_toggled<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_property_active_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n\n\n fn connect_property_group_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;\n\n}\n\n\n", "file_path": "gtk4/src/auto/toggle_button.rs", "rank": 30, "score": 224358.54597955404 }, { "content": "pub trait GtkApplicationImplExt: ObjectSubclass {\n\n fn parent_window_added(&self, application: &Self::Type, window: &Window);\n\n fn parent_window_removed(&self, application: &Self::Type, window: &Window);\n\n}\n\n\n\nimpl<T: GtkApplicationImpl> GtkApplicationImplExt for T {\n\n fn parent_window_added(&self, application: &Self::Type, window: &Window) {\n\n unsafe {\n\n let data = T::type_data();\n\n let parent_class = data.as_ref().get_parent_class() as *mut ffi::GtkApplicationClass;\n\n if let Some(f) = (*parent_class).window_added {\n\n f(\n\n application\n\n .unsafe_cast_ref::<Application>()\n\n .to_glib_none()\n\n .0,\n\n window.to_glib_none().0,\n\n )\n\n }\n\n }\n", "file_path": "gtk4/src/subclass/application.rs", "rank": 31, "score": 221587.70550853308 }, { "content": "pub trait ToggleButtonImplExt: ObjectSubclass {\n\n fn parent_toggled(&self, toggle_button: &Self::Type);\n\n}\n\n\n\nimpl<T: ToggleButtonImpl> ToggleButtonImplExt for T {\n\n fn parent_toggled(&self, toggle_button: &Self::Type) {\n\n unsafe {\n\n let data = T::type_data();\n\n let parent_class = data.as_ref().get_parent_class() as *mut 
ffi::GtkToggleButtonClass;\n\n if let Some(f) = (*parent_class).toggled {\n\n f(toggle_button\n\n .unsafe_cast_ref::<ToggleButton>()\n\n .to_glib_none()\n\n .0)\n\n }\n\n }\n\n }\n\n}\n\n\n\nunsafe impl<T: ToggleButtonImpl> IsSubclassable<T> for ToggleButton {\n", "file_path": "gtk4/src/subclass/toggle_button.rs", "rank": 32, "score": 218279.28613753832 }, { "content": "pub trait ScaleButtonImplExt: ObjectSubclass {\n\n fn parent_value_changed(&self, scale_button: &Self::Type, new_value: f64);\n\n}\n\n\n\nimpl<T: ScaleButtonImpl> ScaleButtonImplExt for T {\n\n fn parent_value_changed(&self, scale_button: &Self::Type, new_value: f64) {\n\n unsafe {\n\n let data = T::type_data();\n\n let parent_class = data.as_ref().get_parent_class() as *mut ffi::GtkScaleButtonClass;\n\n if let Some(f) = (*parent_class).value_changed {\n\n f(\n\n scale_button\n\n .unsafe_cast_ref::<ScaleButton>()\n\n .to_glib_none()\n\n .0,\n\n new_value,\n\n )\n\n }\n\n }\n\n }\n", "file_path": "gtk4/src/subclass/scale_button.rs", "rank": 33, "score": 218279.28613753832 }, { "content": "pub trait CheckButtonImplExt: ObjectSubclass {\n\n fn parent_toggled(&self, check_button: &Self::Type);\n\n #[cfg(any(feature = \"v4_2\", feature = \"dox\"))]\n\n fn parent_activate(&self, check_button: &Self::Type);\n\n}\n\n\n\nimpl<T: CheckButtonImpl> CheckButtonImplExt for T {\n\n fn parent_toggled(&self, check_button: &Self::Type) {\n\n unsafe {\n\n let data = T::type_data();\n\n let parent_class = data.as_ref().get_parent_class() as *mut ffi::GtkCheckButtonClass;\n\n if let Some(f) = (*parent_class).toggled {\n\n f(check_button\n\n .unsafe_cast_ref::<CheckButton>()\n\n .to_glib_none()\n\n .0)\n\n }\n\n }\n\n }\n\n\n", "file_path": "gtk4/src/subclass/check_button.rs", "rank": 34, "score": 218279.28613753832 }, { "content": "#[inline]\n\npub fn is_initialized_main_thread() -> bool {\n\n skip_assert_initialized!();\n\n IS_MAIN_THREAD.with(|c| c.get())\n\n}\n\n\n\n/// Informs this crate that GTK has been 
initialized and the current thread is the main one.\n\n///\n\n/// # Panics\n\n///\n\n/// This function will panic if you attempt to initialize GTK from more than\n\n/// one thread.\n\n///\n\n/// # Safety\n\n///\n\n/// You must only call this if:\n\n///\n\n/// 1. You have initialized the underlying GTK library yourself.\n\n/// 2. You did 1 on the thread with which you are calling this function\n\n/// 3. You ensure that this thread is the main thread for the process.\n\npub unsafe fn set_initialized() {\n", "file_path": "gtk4/src/rt.rs", "rank": 35, "score": 217292.06724304488 }, { "content": "pub trait ToggleButtonImpl: ToggleButtonImplExt + ButtonImpl {\n\n fn toggled(&self, toggle_button: &Self::Type) {\n\n self.parent_toggled(toggle_button)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/toggle_button.rs", "rank": 36, "score": 215066.88786304602 }, { "content": "#[doc(alias = \"gtk_get_interface_age\")]\n\npub fn get_interface_age() -> u32 {\n\n skip_assert_initialized!();\n\n unsafe { ffi::gtk_get_interface_age() }\n\n}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 37, "score": 213592.35211356744 }, { "content": "pub fn crate_ident_new() -> Ident {\n\n let crate_name = match crate_name(\"gtk4\") {\n\n Ok(x) => x,\n\n Err(_) => \"gtk4\".to_owned(),\n\n };\n\n\n\n Ident::new(&crate_name, Span::call_site())\n\n}\n", "file_path": "gtk4-macros/src/util.rs", "rank": 38, "score": 213585.54933047158 }, { "content": "#[inline]\n\npub fn is_initialized_main_thread() -> bool {\n\n skip_assert_initialized!();\n\n IS_MAIN_THREAD.with(|c| c.get())\n\n}\n", "file_path": "gdk4-x11/src/rt.rs", "rank": 39, "score": 213572.1018151241 }, { "content": "#[doc(alias = \"gtk_print_run_page_setup_dialog\")]\n\npub fn print_run_page_setup_dialog<P: IsA<Window>>(\n\n parent: Option<&P>,\n\n page_setup: Option<&PageSetup>,\n\n settings: &PrintSettings,\n\n) -> Option<PageSetup> {\n\n skip_assert_initialized!();\n\n unsafe {\n\n 
from_glib_full(ffi::gtk_print_run_page_setup_dialog(\n\n parent.map(|p| p.as_ref()).to_glib_none().0,\n\n page_setup.to_glib_none().0,\n\n settings.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 40, "score": 213471.7545942758 }, { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "gdk4/sys/tests/abi.rs", "rank": 41, "score": 202765.53603714367 }, { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "gtk4/sys/tests/abi.rs", "rank": 42, "score": 202765.53603714367 }, { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "gsk4/sys/tests/abi.rs", "rank": 43, "score": 202765.53603714367 }, { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "gdk4-x11/sys/tests/abi.rs", "rank": 44, "score": 200196.45744783775 }, { "content": "#[doc(alias = \"gtk_test_accessible_has_property\")]\n\npub fn test_accessible_has_property<P: 
IsA<Accessible>>(\n\n accessible: &P,\n\n property: AccessibleProperty,\n\n) -> bool {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib(ffi::gtk_test_accessible_has_property(\n\n accessible.as_ref().to_glib_none().0,\n\n property.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 45, "score": 199055.9337107421 }, { "content": "fn main() {\n\n manage_docs();\n\n}\n\n\n\n#[cfg(all(\n\n any(feature = \"embed-lgpl-docs\", feature = \"purge-lgpl-docs\"),\n\n not(all(feature = \"embed-lgpl-docs\", feature = \"purge-lgpl-docs\"))\n\n))]\n", "file_path": "gtk4/build.rs", "rank": 46, "score": 196677.88982990858 }, { "content": "fn main() {\n\n manage_docs();\n\n}\n\n\n\n#[cfg(all(\n\n any(feature = \"embed-lgpl-docs\", feature = \"purge-lgpl-docs\"),\n\n not(all(feature = \"embed-lgpl-docs\", feature = \"purge-lgpl-docs\"))\n\n))]\n", "file_path": "gdk4/build.rs", "rank": 47, "score": 196677.88982990858 }, { "content": "fn main() {\n\n manage_docs();\n\n}\n\n\n\n#[cfg(all(\n\n any(feature = \"embed-lgpl-docs\", feature = \"purge-lgpl-docs\"),\n\n not(all(feature = \"embed-lgpl-docs\", feature = \"purge-lgpl-docs\"))\n\n))]\n", "file_path": "gsk4/build.rs", "rank": 48, "score": 196677.88982990858 }, { "content": "fn get_c_output(name: &str) -> Result<String, Box<dyn Error>> {\n\n let tmpdir = Builder::new().prefix(\"abi\").tempdir()?;\n\n let exe = tmpdir.path().join(name);\n\n let c_file = Path::new(\"tests\").join(name).with_extension(\"c\");\n\n\n\n let cc = Compiler::new().expect(\"configured compiler\");\n\n cc.compile(&c_file, &exe)?;\n\n\n\n let mut abi_cmd = Command::new(exe);\n\n let output = abi_cmd.output()?;\n\n if !output.status.success() {\n\n return Err(format!(\"command {:?} failed, {:?}\", &abi_cmd, &output).into());\n\n }\n\n\n\n Ok(String::from_utf8(output.stdout)?)\n\n}\n\n\n\nconst RUST_LAYOUTS: &[(&str, Layout)] = &[\n\n (\n\n \"GtkAccessibleAutocomplete\",\n", "file_path": "gtk4/sys/tests/abi.rs", "rank": 
49, "score": 196092.55725494152 }, { "content": "fn get_c_output(name: &str) -> Result<String, Box<dyn Error>> {\n\n let tmpdir = Builder::new().prefix(\"abi\").tempdir()?;\n\n let exe = tmpdir.path().join(name);\n\n let c_file = Path::new(\"tests\").join(name).with_extension(\"c\");\n\n\n\n let cc = Compiler::new().expect(\"configured compiler\");\n\n cc.compile(&c_file, &exe)?;\n\n\n\n let mut abi_cmd = Command::new(exe);\n\n let output = abi_cmd.output()?;\n\n if !output.status.success() {\n\n return Err(format!(\"command {:?} failed, {:?}\", &abi_cmd, &output).into());\n\n }\n\n\n\n Ok(String::from_utf8(output.stdout)?)\n\n}\n\n\n\nconst RUST_LAYOUTS: &[(&str, Layout)] = &[\n\n (\n\n \"GskBlendMode\",\n", "file_path": "gsk4/sys/tests/abi.rs", "rank": 50, "score": 196092.55725494152 }, { "content": "fn get_c_output(name: &str) -> Result<String, Box<dyn Error>> {\n\n let tmpdir = Builder::new().prefix(\"abi\").tempdir()?;\n\n let exe = tmpdir.path().join(name);\n\n let c_file = Path::new(\"tests\").join(name).with_extension(\"c\");\n\n\n\n let cc = Compiler::new().expect(\"configured compiler\");\n\n cc.compile(&c_file, &exe)?;\n\n\n\n let mut abi_cmd = Command::new(exe);\n\n let output = abi_cmd.output()?;\n\n if !output.status.success() {\n\n return Err(format!(\"command {:?} failed, {:?}\", &abi_cmd, &output).into());\n\n }\n\n\n\n Ok(String::from_utf8(output.stdout)?)\n\n}\n\n\n\nconst RUST_LAYOUTS: &[(&str, Layout)] = &[\n\n (\n\n \"GdkAnchorHints\",\n", "file_path": "gdk4/sys/tests/abi.rs", "rank": 51, "score": 196092.55725494152 }, { "content": "pub trait DialogImpl: DialogImplExt + WindowImpl {\n\n fn response(&self, dialog: &Self::Type, response: ResponseType) {\n\n self.parent_response(dialog, response)\n\n }\n\n\n\n fn close(&self, dialog: &Self::Type) {\n\n self.parent_close(dialog)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/dialog.rs", "rank": 52, "score": 195789.43098013895 }, { "content": "fn main() {\n\n 
manage_docs();\n\n}\n\n\n\n#[cfg(all(\n\n any(feature = \"embed-lgpl-docs\", feature = \"purge-lgpl-docs\"),\n\n not(all(feature = \"embed-lgpl-docs\", feature = \"purge-lgpl-docs\"))\n\n))]\n", "file_path": "gdk4-x11/build.rs", "rank": 53, "score": 193514.4366126932 }, { "content": "#[cfg(not(feature = \"dox\"))]\n\nfn main() {\n\n if let Err(s) = system_deps::Config::new().probe() {\n\n println!(\"cargo:warning={}\", s);\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "gsk4/sys/build.rs", "rank": 54, "score": 193514.4366126932 }, { "content": "fn main() {\n\n manage_docs();\n\n}\n\n\n\n#[cfg(all(\n\n any(feature = \"embed-lgpl-docs\", feature = \"purge-lgpl-docs\"),\n\n not(all(feature = \"embed-lgpl-docs\", feature = \"purge-lgpl-docs\"))\n\n))]\n", "file_path": "gdk4-wayland/build.rs", "rank": 55, "score": 193514.4366126932 }, { "content": "#[cfg(not(feature = \"dox\"))]\n\nfn main() {\n\n if let Err(s) = system_deps::Config::new().probe() {\n\n println!(\"cargo:warning={}\", s);\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "gtk4/sys/build.rs", "rank": 56, "score": 193514.4366126932 }, { "content": "#[cfg(not(feature = \"dox\"))]\n\nfn main() {\n\n if let Err(s) = system_deps::Config::new().probe() {\n\n println!(\"cargo:warning={}\", s);\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "gdk4/sys/build.rs", "rank": 57, "score": 193514.4366126932 }, { "content": "fn get_c_output(name: &str) -> Result<String, Box<dyn Error>> {\n\n let tmpdir = Builder::new().prefix(\"abi\").tempdir()?;\n\n let exe = tmpdir.path().join(name);\n\n let c_file = Path::new(\"tests\").join(name).with_extension(\"c\");\n\n\n\n let cc = Compiler::new().expect(\"configured compiler\");\n\n cc.compile(&c_file, &exe)?;\n\n\n\n let mut abi_cmd = Command::new(exe);\n\n let output = abi_cmd.output()?;\n\n if !output.status.success() {\n\n return Err(format!(\"command {:?} failed, {:?}\", &abi_cmd, &output).into());\n\n }\n\n\n\n Ok(String::from_utf8(output.stdout)?)\n\n}\n\n\n\nconst 
RUST_LAYOUTS: &[(&str, Layout)] = &[(\n\n \"GdkX11DeviceType\",\n\n Layout {\n", "file_path": "gdk4-x11/sys/tests/abi.rs", "rank": 58, "score": 193093.22866918152 }, { "content": "pub trait WidgetImpl: WidgetImplExt + ObjectImpl {\n\n fn compute_expand(&self, widget: &Self::Type, hexpand: &mut bool, vexpand: &mut bool) {\n\n self.parent_compute_expand(widget, hexpand, vexpand)\n\n }\n\n\n\n fn contains(&self, widget: &Self::Type, x: f64, y: f64) -> bool {\n\n self.parent_contains(widget, x, y)\n\n }\n\n\n\n fn direction_changed(&self, widget: &Self::Type, previous_direction: TextDirection) {\n\n self.parent_direction_changed(widget, previous_direction)\n\n }\n\n\n\n fn focus(&self, widget: &Self::Type, direction_type: DirectionType) -> bool {\n\n self.parent_focus(widget, direction_type)\n\n }\n\n\n\n fn get_request_mode(&self, widget: &Self::Type) -> SizeRequestMode {\n\n self.parent_get_request_mode(widget)\n\n }\n", "file_path": "gtk4/src/subclass/widget.rs", "rank": 59, "score": 192938.02512189865 }, { "content": "pub fn content_serialize_async_future<P: IsA<gio::OutputStream> + Clone + 'static>(\n\n stream: &P,\n\n mime_type: &str,\n\n value: &glib::Value,\n\n io_priority: i32,\n\n) -> Pin<Box<dyn future::Future<Output = Result<(), glib::Error>> + 'static>> {\n\n assert_initialized_main_thread!();\n\n\n\n let stream = stream.clone();\n\n let mime_type = String::from(mime_type);\n\n let value = value.clone();\n\n Box::pin(gio::GioFuture::new(&(), move |_obj, send| {\n\n let cancellable = gio::Cancellable::new();\n\n content_serialize_async(\n\n &stream,\n\n &mime_type,\n\n &value,\n\n io_priority,\n\n Some(&cancellable),\n\n move |res| {\n\n send.resolve(res);\n\n },\n\n );\n\n\n\n cancellable\n\n }))\n\n}\n", "file_path": "gdk4/src/functions.rs", "rank": 60, "score": 192287.34478306907 }, { "content": "pub fn content_deserialize_async_future<P: IsA<gio::InputStream> + Clone + 'static>(\n\n stream: &P,\n\n mime_type: &str,\n\n type_: glib::types::Type,\n\n 
io_priority: i32,\n\n) -> Pin<Box<dyn future::Future<Output = Result<glib::Value, glib::Error>> + 'static>> {\n\n assert_initialized_main_thread!();\n\n\n\n let stream = stream.clone();\n\n let mime_type = String::from(mime_type);\n\n Box::pin(gio::GioFuture::new(&(), move |_obj, send| {\n\n let cancellable = gio::Cancellable::new();\n\n content_deserialize_async(\n\n &stream,\n\n &mime_type,\n\n type_,\n\n io_priority,\n\n Some(&cancellable),\n\n move |res| {\n\n send.resolve(res);\n\n },\n\n );\n\n\n\n cancellable\n\n }))\n\n}\n\n\n", "file_path": "gdk4/src/functions.rs", "rank": 61, "score": 192287.34478306907 }, { "content": "#[doc(alias = \"gtk_enumerate_printers\")]\n\npub fn enumerate_printers<P: Fn(&Printer) -> bool + Send + Sync + 'static>(func: P, wait: bool) {\n\n assert_initialized_main_thread!();\n\n let func_data: Box_<P> = Box_::new(func);\n\n unsafe extern \"C\" fn func_func<P: Fn(&Printer) -> bool + Send + Sync + 'static>(\n\n printer: *mut ffi::GtkPrinter,\n\n data: glib::ffi::gpointer,\n\n ) -> glib::ffi::gboolean {\n\n let printer = from_glib_borrow(printer);\n\n let callback: &P = &*(data as *mut _);\n\n let res = (*callback)(&printer);\n\n res.to_glib()\n\n }\n\n let func = Some(func_func::<P> as _);\n\n unsafe extern \"C\" fn destroy_func<P: Fn(&Printer) -> bool + Send + Sync + 'static>(\n\n data: glib::ffi::gpointer,\n\n ) {\n\n let _callback: Box_<P> = Box_::from_raw(data as *mut _);\n\n }\n\n let destroy_call2 = Some(destroy_func::<P> as _);\n\n let super_callback0: Box_<P> = func_data;\n\n unsafe {\n\n ffi::gtk_enumerate_printers(\n\n func,\n\n Box_::into_raw(super_callback0) as *mut _,\n\n destroy_call2,\n\n wait.to_glib(),\n\n );\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 62, "score": 191863.76525032974 }, { "content": "#[cfg(not(feature = \"dox\"))]\n\nfn main() {\n\n if let Err(s) = system_deps::Config::new().probe() {\n\n println!(\"cargo:warning={}\", s);\n\n process::exit(1);\n\n }\n\n}\n", "file_path": 
"gdk4-wayland/sys/build.rs", "rank": 63, "score": 190508.08488071783 }, { "content": "#[cfg(not(feature = \"dox\"))]\n\nfn main() {\n\n if let Err(s) = system_deps::Config::new().probe() {\n\n println!(\"cargo:warning={}\", s);\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "gdk4-x11/sys/build.rs", "rank": 64, "score": 190508.08488071783 }, { "content": "#[doc(alias = \"gtk_tree_create_row_drag_content\")]\n\npub fn tree_create_row_drag_content<P: IsA<TreeModel>>(\n\n tree_model: &P,\n\n path: &mut TreePath,\n\n) -> Option<gdk::ContentProvider> {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib_full(ffi::gtk_tree_create_row_drag_content(\n\n tree_model.as_ref().to_glib_none().0,\n\n path.to_glib_none_mut().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 65, "score": 189272.81533360138 }, { "content": "pub fn impl_composite_template(input: &syn::DeriveInput) -> TokenStream {\n\n let name = &input.ident;\n\n let crate_ident = crate_ident_new();\n\n\n\n let source = match parse_template_source(&input) {\n\n Ok(v) => v,\n\n Err(e) => abort_call_site!(\n\n \"{}: derive(CompositeTemplate) requires #[template(...)] to specify 'file', 'resource', or 'string'\",\n\n e\n\n ),\n\n };\n\n\n\n let set_template = gen_set_template(source);\n\n\n\n let fields = match input.data {\n\n Data::Struct(ref s) => &s.fields,\n\n _ => abort_call_site!(\"derive(CompositeTemplate) only supports structs\"),\n\n };\n\n\n\n let template_children = gen_template_child_bindings(&fields);\n", "file_path": "gtk4-macros/src/composite_template_derive.rs", "rank": 66, "score": 176644.44033602852 }, { "content": "pub trait ActionableImpl: WidgetImpl {\n\n fn get_action_name(&self, actionable: &Self::Type) -> Option<GString>;\n\n fn get_action_target_value(&self, actionable: &Self::Type) -> Option<Variant>;\n\n fn set_action_name(&self, actionable: &Self::Type, name: Option<&str>);\n\n fn set_action_target_value(&self, actionable: &Self::Type, value: 
Option<&Variant>);\n\n}\n\n\n", "file_path": "gtk4/src/subclass/actionable.rs", "rank": 67, "score": 174460.58736302293 }, { "content": "pub trait BoxImpl: WidgetImpl {}\n\n\n\nunsafe impl<T: BoxImpl> IsSubclassable<T> for Box {\n\n fn class_init(class: &mut glib::Class<Self>) {\n\n <Widget as IsSubclassable<T>>::class_init(class);\n\n }\n\n\n\n fn instance_init(instance: &mut glib::subclass::InitializingObject<T>) {\n\n <Widget as IsSubclassable<T>>::instance_init(instance);\n\n }\n\n}\n", "file_path": "gtk4/src/subclass/box_.rs", "rank": 68, "score": 174460.58736302293 }, { "content": "pub trait FixedImpl: WidgetImpl {}\n\n\n\nunsafe impl<T: FixedImpl> IsSubclassable<T> for Fixed {\n\n fn class_init(class: &mut glib::Class<Self>) {\n\n <Widget as IsSubclassable<T>>::class_init(class);\n\n }\n\n\n\n fn instance_init(instance: &mut glib::subclass::InitializingObject<T>) {\n\n <Widget as IsSubclassable<T>>::instance_init(instance);\n\n }\n\n}\n", "file_path": "gtk4/src/subclass/fixed.rs", "rank": 69, "score": 174460.58736302293 }, { "content": "pub trait NativeImpl: WidgetImpl {}\n\n\n\nunsafe impl<T: NativeImpl> IsImplementable<T> for Native {\n\n fn interface_init(_iface: &mut glib::Interface<Self>) {}\n\n\n\n fn instance_init(_instance: &mut glib::subclass::InitializingObject<T>) {}\n\n}\n", "file_path": "gtk4/src/subclass/native.rs", "rank": 70, "score": 174460.58736302293 }, { "content": "pub trait EditableImpl: WidgetImpl {\n\n fn insert_text(&self, editable: &Self::Type, text: &str, length: i32, position: &mut i32) {\n\n self.parent_insert_text(editable, text, length, position);\n\n }\n\n\n\n fn delete_text(&self, editable: &Self::Type, start_position: i32, end_position: i32) {\n\n self.parent_delete_text(editable, start_position, end_position)\n\n }\n\n\n\n fn changed(&self, editable: &Self::Type) {\n\n self.parent_changed(editable)\n\n }\n\n\n\n fn get_text(&self, editable: &Self::Type) -> GString {\n\n self.parent_get_text(editable)\n\n }\n\n\n\n fn 
get_delegate(&self, editable: &Self::Type) -> Option<Editable> {\n\n self.parent_get_delegate(editable)\n\n }\n", "file_path": "gtk4/src/subclass/editable.rs", "rank": 71, "score": 174460.58736302293 }, { "content": "pub trait GridImpl: WidgetImpl {}\n\n\n\nunsafe impl<T: GridImpl> IsSubclassable<T> for Grid {\n\n fn class_init(class: &mut glib::Class<Self>) {\n\n <Widget as IsSubclassable<T>>::class_init(class);\n\n }\n\n\n\n fn instance_init(instance: &mut glib::subclass::InitializingObject<T>) {\n\n <Widget as IsSubclassable<T>>::instance_init(instance);\n\n }\n\n}\n", "file_path": "gtk4/src/subclass/grid.rs", "rank": 72, "score": 174460.58736302293 }, { "content": "pub trait AccessibleImpl: ObjectImpl {}\n\n\n\nunsafe impl<T: AccessibleImpl> IsImplementable<T> for Accessible {\n\n fn interface_init(_iface: &mut glib::Interface<Self>) {}\n\n\n\n fn instance_init(_instance: &mut glib::subclass::InitializingObject<T>) {}\n\n}\n", "file_path": "gtk4/src/subclass/accessible.rs", "rank": 73, "score": 174444.57409683793 }, { "content": "pub trait ScrollableImpl: ObjectImpl {\n\n fn get_border(&self, scrollable: &Self::Type) -> Option<Border> {\n\n self.parent_get_border(scrollable)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/scrollable.rs", "rank": 74, "score": 174444.57409683793 }, { "content": "pub trait PaintableImpl: ObjectImpl {\n\n fn get_current_image(&self, paintable: &Self::Type) -> Paintable {\n\n self.parent_get_current_image(paintable)\n\n }\n\n\n\n fn get_flags(&self, paintable: &Self::Type) -> PaintableFlags {\n\n self.parent_get_flags(paintable)\n\n }\n\n\n\n fn get_intrinsic_width(&self, paintable: &Self::Type) -> i32 {\n\n self.parent_get_intrinsic_width(paintable)\n\n }\n\n\n\n fn get_intrinsic_height(&self, paintable: &Self::Type) -> i32 {\n\n self.parent_get_intrinsic_height(paintable)\n\n }\n\n\n\n fn get_intrinsic_aspect_ratio(&self, paintable: &Self::Type) -> f64 {\n\n self.parent_get_intrinsic_aspect_ratio(paintable)\n\n }\n\n\n\n fn 
snapshot(&self, paintable: &Self::Type, snapshot: &Snapshot, width: f64, height: f64);\n\n}\n\n\n", "file_path": "gdk4/src/subclass/paintable.rs", "rank": 75, "score": 174444.57409683793 }, { "content": "#[doc(alias = \"gdk_content_deserialize_async\")]\n\npub fn content_deserialize_async<\n\n P: IsA<gio::InputStream>,\n\n Q: IsA<gio::Cancellable>,\n\n R: FnOnce(Result<glib::Value, glib::Error>) + Send + 'static,\n\n>(\n\n stream: &P,\n\n mime_type: &str,\n\n type_: glib::types::Type,\n\n io_priority: i32,\n\n cancellable: Option<&Q>,\n\n callback: R,\n\n) {\n\n assert_initialized_main_thread!();\n\n let user_data: Box<R> = Box::new(callback);\n\n unsafe extern \"C\" fn content_deserialize_async_trampoline<\n\n R: FnOnce(Result<glib::Value, glib::Error>) + Send + 'static,\n\n >(\n\n _source_object: *mut glib::gobject_ffi::GObject,\n\n res: *mut gio::ffi::GAsyncResult,\n\n user_data: glib::ffi::gpointer,\n", "file_path": "gdk4/src/functions.rs", "rank": 76, "score": 172910.4713691228 }, { "content": "#[doc(alias = \"gtk_disable_setlocale\")]\n\npub fn disable_setlocale() {\n\n assert_not_initialized!();\n\n unsafe {\n\n ffi::gtk_disable_setlocale();\n\n }\n\n}\n\n\n\n//#[doc(alias = \"gtk_distribute_natural_allocation\")]\n\n//pub fn distribute_natural_allocation(extra_space: i32, sizes: /*Ignored*/&[&RequestedSize]) -> i32 {\n\n// unsafe { TODO: call ffi:gtk_distribute_natural_allocation() }\n\n//}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 77, "score": 172910.4713691228 }, { "content": "#[doc(alias = \"gtk_check_version\")]\n\npub fn check_version(\n\n required_major: u32,\n\n required_minor: u32,\n\n required_micro: u32,\n\n) -> Option<glib::GString> {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib_none(ffi::gtk_check_version(\n\n required_major,\n\n required_minor,\n\n required_micro,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 78, "score": 172910.4713691228 }, { "content": "#[doc(alias = 
\"gdk_content_register_deserializer\")]\n\npub fn content_register_deserializer<\n\n T: 'static,\n\n P: Fn(&ContentDeserializer, &mut Option<T>) + 'static,\n\n>(\n\n mime_type: &str,\n\n type_: glib::types::Type,\n\n deserialize: P,\n\n) {\n\n assert_initialized_main_thread!();\n\n let deserialize_data: Box<P> = Box::new(deserialize);\n\n unsafe extern \"C\" fn deserialize_func<\n\n T: 'static,\n\n P: Fn(&ContentDeserializer, &mut Option<T>) + 'static,\n\n >(\n\n deserializer: *mut ffi::GdkContentDeserializer,\n\n ) {\n\n let deserializer: ContentDeserializer = from_glib_full(deserializer);\n\n let callback: &P =\n\n &*(ffi::gdk_content_deserializer_get_user_data(deserializer.to_glib_none().0)\n\n as *mut _);\n", "file_path": "gdk4/src/functions.rs", "rank": 79, "score": 172910.4713691228 }, { "content": "#[doc(alias = \"gdk_content_register_serializer\")]\n\npub fn content_register_serializer<\n\n T: 'static,\n\n P: Fn(&ContentSerializer, &mut Option<T>) + 'static,\n\n>(\n\n type_: glib::types::Type,\n\n mime_type: &str,\n\n serialize: P,\n\n) {\n\n assert_initialized_main_thread!();\n\n let serialize_data: Box<P> = Box::new(serialize);\n\n unsafe extern \"C\" fn serialize_func<\n\n T: 'static,\n\n P: Fn(&ContentSerializer, &mut Option<T>) + 'static,\n\n >(\n\n serializer: *mut ffi::GdkContentSerializer,\n\n ) {\n\n let serializer: ContentSerializer = from_glib_full(serializer);\n\n let callback: &P =\n\n &*(ffi::gdk_content_serializer_get_user_data(serializer.to_glib_none().0) as *mut _);\n\n\n", "file_path": "gdk4/src/functions.rs", "rank": 80, "score": 172910.4713691228 }, { "content": "#[doc(alias = \"gdk_content_serialize_async\")]\n\npub fn content_serialize_async<\n\n P: IsA<gio::OutputStream>,\n\n Q: IsA<gio::Cancellable>,\n\n R: FnOnce(Result<(), glib::Error>) + Send + 'static,\n\n>(\n\n stream: &P,\n\n mime_type: &str,\n\n value: &glib::Value,\n\n io_priority: i32,\n\n cancellable: Option<&Q>,\n\n callback: R,\n\n) {\n\n 
assert_initialized_main_thread!();\n\n let user_data: Box<R> = Box::new(callback);\n\n unsafe extern \"C\" fn content_serialize_async_trampoline<\n\n R: FnOnce(Result<(), glib::Error>) + Send + 'static,\n\n >(\n\n _source_object: *mut glib::gobject_ffi::GObject,\n\n res: *mut gio::ffi::GAsyncResult,\n\n user_data: glib::ffi::gpointer,\n", "file_path": "gdk4/src/functions.rs", "rank": 81, "score": 172910.4713691228 }, { "content": "#[doc(alias = \"gtk_accelerator_get_label\")]\n\npub fn accelerator_get_label(\n\n accelerator_key: u32,\n\n accelerator_mods: gdk::ModifierType,\n\n) -> Option<glib::GString> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(ffi::gtk_accelerator_get_label(\n\n accelerator_key,\n\n accelerator_mods.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/auto/functions.rs", "rank": 82, "score": 170314.8630679436 }, { "content": "#[doc(alias = \"gdk_pixbuf_get_from_surface\")]\n\npub fn pixbuf_get_from_surface(\n\n surface: &cairo::Surface,\n\n src_x: i32,\n\n src_y: i32,\n\n width: i32,\n\n height: i32,\n\n) -> Option<gdk_pixbuf::Pixbuf> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib_full(ffi::gdk_pixbuf_get_from_surface(\n\n mut_override(surface.to_glib_none().0),\n\n src_x,\n\n src_y,\n\n width,\n\n height,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gdk4/src/auto/functions.rs", "rank": 83, "score": 170314.8630679436 }, { "content": "pub trait TextMarkImpl: ObjectImpl {}\n\n\n\nunsafe impl<T: TextMarkImpl> IsSubclassable<T> for TextMark {\n\n fn class_init(class: &mut glib::Class<Self>) {\n\n <Object as IsSubclassable<T>>::class_init(class);\n\n }\n\n\n\n fn instance_init(instance: &mut glib::subclass::InitializingObject<T>) {\n\n <Object as IsSubclassable<T>>::instance_init(instance);\n\n }\n\n}\n", "file_path": "gtk4/src/subclass/text_mark.rs", "rank": 84, "score": 170235.6673238962 }, { "content": "pub trait LayoutChildImpl: ObjectImpl {}\n\n\n\nunsafe impl<T: LayoutChildImpl> 
IsSubclassable<T> for LayoutChild {\n\n fn class_init(class: &mut glib::Class<Self>) {\n\n <Object as IsSubclassable<T>>::class_init(class);\n\n }\n\n\n\n fn instance_init(instance: &mut glib::subclass::InitializingObject<T>) {\n\n <Object as IsSubclassable<T>>::instance_init(instance);\n\n }\n\n}\n", "file_path": "gtk4/src/subclass/layout_child.rs", "rank": 85, "score": 170235.6673238962 }, { "content": "pub trait TextTagImpl: ObjectImpl {}\n\n\n\nunsafe impl<T: TextTagImpl> IsSubclassable<T> for TextTag {\n\n fn class_init(class: &mut glib::Class<Self>) {\n\n <Object as IsSubclassable<T>>::class_init(class);\n\n }\n\n\n\n fn instance_init(instance: &mut glib::subclass::InitializingObject<T>) {\n\n <Object as IsSubclassable<T>>::instance_init(instance);\n\n }\n\n}\n", "file_path": "gtk4/src/subclass/text_tag.rs", "rank": 86, "score": 170235.6673238962 }, { "content": "pub trait ConstraintTargetImpl: ObjectImpl {}\n\n\n\nunsafe impl<T: ConstraintTargetImpl> IsImplementable<T> for ConstraintTarget {\n\n fn interface_init(_iface: &mut glib::Interface<Self>) {}\n\n\n\n fn instance_init(_instance: &mut glib::subclass::InitializingObject<T>) {}\n\n}\n", "file_path": "gtk4/src/subclass/constraint_target.rs", "rank": 87, "score": 170235.6673238962 }, { "content": "pub trait ShortcutManagerImpl: ObjectImpl {\n\n fn add_controller(&self, shortcut_manager: &Self::Type, controller: &ShortcutController) {\n\n self.parent_add_controller(shortcut_manager, controller);\n\n }\n\n\n\n fn remove_controller(&self, shortcut_manager: &Self::Type, controller: &ShortcutController) {\n\n self.parent_remove_controller(shortcut_manager, controller)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/shortcut_manager.rs", "rank": 88, "score": 170235.6673238962 }, { "content": "pub trait ColorChooserImpl: ObjectImpl {\n\n fn add_palette(\n\n &self,\n\n color_chooser: &Self::Type,\n\n orientation: Orientation,\n\n colors_per_line: i32,\n\n colors: &[RGBA],\n\n ) {\n\n 
self.parent_add_palette(color_chooser, orientation, colors_per_line, colors);\n\n }\n\n\n\n fn color_activated(&self, color_chooser: &Self::Type, rgba: RGBA) {\n\n self.parent_color_activated(color_chooser, rgba);\n\n }\n\n\n\n fn get_rgba(&self, color_chooser: &Self::Type) -> RGBA;\n\n fn set_rgba(&self, color_chooser: &Self::Type, rgba: RGBA);\n\n}\n\n\n", "file_path": "gtk4/src/subclass/color_chooser.rs", "rank": 89, "score": 170235.6673238962 }, { "content": "pub trait BuilderScopeImpl: ObjectImpl {\n\n fn get_type_from_name(\n\n &self,\n\n builder_scope: &Self::Type,\n\n builder: &Builder,\n\n type_name: &str,\n\n ) -> glib::Type {\n\n self.parent_get_type_from_name(builder_scope, builder, type_name)\n\n }\n\n\n\n fn get_type_from_function(\n\n &self,\n\n builder_scope: &Self::Type,\n\n builder: &Builder,\n\n function_name: &str,\n\n ) -> glib::Type {\n\n self.parent_get_type_from_function(builder_scope, builder, function_name)\n\n }\n\n\n\n fn create_closure(\n\n &self,\n\n builder_scope: &Self::Type,\n\n builder: &Builder,\n\n function_name: &str,\n\n flags: BuilderClosureFlags,\n\n object: Option<&glib::Object>,\n\n ) -> Result<glib::Closure, glib::Error>;\n\n}\n\n\n", "file_path": "gtk4/src/subclass/builder_scope.rs", "rank": 90, "score": 170235.6673238962 }, { "content": "pub trait RangeImpl: RangeImplExt + WidgetImpl {\n\n fn adjust_bounds(&self, range: &Self::Type, new_value: f64) {\n\n self.parent_adjust_bounds(range, new_value)\n\n }\n\n\n\n fn change_value(&self, range: &Self::Type, scroll_type: ScrollType, new_value: f64) -> bool {\n\n self.parent_change_value(range, scroll_type, new_value)\n\n }\n\n\n\n fn get_range_border(&self, range: &Self::Type) -> Border {\n\n self.parent_get_range_border(range)\n\n }\n\n\n\n fn move_slider(&self, range: &Self::Type, scroll_type: ScrollType) {\n\n self.parent_move_slider(range, scroll_type)\n\n }\n\n\n\n fn value_changed(&self, range: &Self::Type) {\n\n self.parent_value_changed(range)\n\n }\n\n}\n\n\n", 
"file_path": "gtk4/src/subclass/range.rs", "rank": 91, "score": 169282.04135182637 }, { "content": "pub trait EntryImpl: EntryImplExt + WidgetImpl {\n\n fn activate(&self, entry: &Self::Type) {\n\n self.parent_activate(entry)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/entry.rs", "rank": 92, "score": 169282.04135182637 }, { "content": "pub trait FrameImpl: FrameImplExt + WidgetImpl {\n\n fn compute_child_allocation(&self, frame: &Self::Type) -> Allocation {\n\n self.parent_compute_child_allocation(frame)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/frame.rs", "rank": 93, "score": 169282.04135182637 }, { "content": "pub trait PopoverImpl: PopoverImplExt + WidgetImpl {\n\n fn activate_default(&self, button: &Self::Type) {\n\n self.parent_activate_default(button)\n\n }\n\n\n\n fn closed(&self, button: &Self::Type) {\n\n self.parent_closed(button)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/popover.rs", "rank": 94, "score": 169282.04135182637 }, { "content": "pub trait FilterImpl: FilterImplExt + ObjectImpl {\n\n fn get_strictness(&self, filter: &Self::Type) -> FilterMatch {\n\n self.parent_get_strictness(filter)\n\n }\n\n fn match_(&self, filter: &Self::Type, item: &Object) -> bool {\n\n self.parent_match_(filter, item)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/filter.rs", "rank": 95, "score": 169267.8941837917 }, { "content": "pub trait SorterImpl: SorterImplExt + ObjectImpl {\n\n fn compare(&self, sorter: &Self::Type, item1: &Object, item2: &Object) -> Ordering {\n\n self.parent_compare(sorter, item1, item2)\n\n }\n\n fn get_order(&self, sorter: &Self::Type) -> SorterOrder {\n\n self.parent_get_order(sorter)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/sorter.rs", "rank": 96, "score": 169267.8941837917 }, { "content": "pub trait AdjustmentImpl: AdjustmentImplExt + ObjectImpl {\n\n fn changed(&self, adjustment: &Self::Type) {\n\n self.parent_changed(adjustment)\n\n }\n\n\n\n fn value_changed(&self, adjustment: &Self::Type) {\n\n 
self.parent_value_changed(adjustment)\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/subclass/adjustment.rs", "rank": 97, "score": 169267.8941837917 }, { "content": "#[doc(alias = \"gtk_accelerator_valid\")]\n\npub fn accelerator_valid(keyval: gdk::keys::Key, modifiers: gdk::ModifierType) -> bool {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n from_glib(ffi::gtk_accelerator_valid(\n\n keyval.to_glib(),\n\n modifiers.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gtk4/src/functions.rs", "rank": 98, "score": 169114.90738240804 }, { "content": "// find the #[@attr_name] attribute in @attrs\n\nfn find_attribute_meta(attrs: &[Attribute], attr_name: &str) -> Result<Option<MetaList>> {\n\n let meta = match attrs.iter().find(|a| a.path.is_ident(attr_name)) {\n\n Some(a) => a.parse_meta(),\n\n _ => return Ok(None),\n\n };\n\n match meta? {\n\n Meta::List(n) => Ok(Some(n)),\n\n _ => bail!(\"wrong meta type\"),\n\n }\n\n}\n\n\n", "file_path": "gtk4-macros/src/attribute_parser.rs", "rank": 99, "score": 168625.428676792 } ]
Rust
packages/vm/src/middleware/deterministic.rs
slave5vw/cosmwasm
220e39c8977eb0f0391a429c146872ad59b16426
use wasmer::wasmparser::Operator; use wasmer::{ FunctionMiddleware, LocalFunctionIndex, MiddlewareError, MiddlewareReaderState, ModuleMiddleware, }; #[derive(Debug)] pub struct Deterministic {} impl Deterministic { pub fn new() -> Self { Self {} } } impl ModuleMiddleware for Deterministic { fn generate_function_middleware(&self, _: LocalFunctionIndex) -> Box<dyn FunctionMiddleware> { Box::new(FunctionDeterministic {}) } } #[derive(Debug)] pub struct FunctionDeterministic {} impl FunctionMiddleware for FunctionDeterministic { fn feed<'a>( &mut self, operator: Operator<'a>, state: &mut MiddlewareReaderState<'a>, ) -> Result<(), MiddlewareError> { match operator { Operator::Unreachable | Operator::Nop | Operator::Block { .. } | Operator::Loop { .. } | Operator::If { .. } | Operator::Else | Operator::End | Operator::Br { .. } | Operator::BrIf { .. } | Operator::BrTable { .. } | Operator::Return | Operator::Call { .. } | Operator::CallIndirect { .. } | Operator::Drop | Operator::Select | Operator::LocalGet { .. } | Operator::LocalSet { .. } | Operator::LocalTee { .. } | Operator::GlobalGet { .. } | Operator::GlobalSet { .. } | Operator::I32Load { .. } | Operator::I64Load { .. } | Operator::I32Load8S { .. } | Operator::I32Load8U { .. } | Operator::I32Load16S { .. } | Operator::I32Load16U { .. } | Operator::I64Load8S { .. } | Operator::I64Load8U { .. } | Operator::I64Load16S { .. } | Operator::I64Load16U { .. } | Operator::I64Load32S { .. } | Operator::I64Load32U { .. } | Operator::I32Store { .. } | Operator::I64Store { .. } | Operator::I32Store8 { .. } | Operator::I32Store16 { .. } | Operator::I64Store8 { .. } | Operator::I64Store16 { .. } | Operator::I64Store32 { .. } | Operator::MemorySize { .. } | Operator::MemoryGrow { .. } | Operator::I32Const { .. } | Operator::I64Const { .. 
} | Operator::I32Eqz | Operator::I32Eq | Operator::I32Ne | Operator::I32LtS | Operator::I32LtU | Operator::I32GtS | Operator::I32GtU | Operator::I32LeS | Operator::I32LeU | Operator::I32GeS | Operator::I32GeU | Operator::I64Eqz | Operator::I64Eq | Operator::I64Ne | Operator::I64LtS | Operator::I64LtU | Operator::I64GtS | Operator::I64GtU | Operator::I64LeS | Operator::I64LeU | Operator::I64GeS | Operator::I64GeU | Operator::I32Clz | Operator::I32Ctz | Operator::I32Popcnt | Operator::I32Add | Operator::I32Sub | Operator::I32Mul | Operator::I32DivS | Operator::I32DivU | Operator::I32RemS | Operator::I32RemU | Operator::I32And | Operator::I32Or | Operator::I32Xor | Operator::I32Shl | Operator::I32ShrS | Operator::I32ShrU | Operator::I32Rotl | Operator::I32Rotr | Operator::I64Clz | Operator::I64Ctz | Operator::I64Popcnt | Operator::I64Add | Operator::I64Sub | Operator::I64Mul | Operator::I64DivS | Operator::I64DivU | Operator::I64RemS | Operator::I64RemU | Operator::I64And | Operator::I64Or | Operator::I64Xor | Operator::I64Shl | Operator::I64ShrS | Operator::I64ShrU | Operator::I64Rotl | Operator::I64Rotr | Operator::I32WrapI64 | Operator::I32Extend8S | Operator::I32Extend16S | Operator::I64Extend8S | Operator::I64Extend16S | Operator::I64ExtendI32S | Operator::I64ExtendI32U => { state.push_operator(operator); Ok(()) } _ => { let msg = format!("Non-determinstic operator detected: {:?}", operator); Err(MiddlewareError::new("Deterministic", msg)) } } } } #[cfg(test)] mod tests { use super::*; use std::sync::Arc; use wasmer::{CompilerConfig, Cranelift, Module, Store, JIT}; #[test] fn valid_wasm_instance_sanity() { let wasm = wat::parse_str( r#" (module (func (export "sum") (param i32 i32) (result i32) get_local 0 get_local 1 i32.add )) "#, ) .unwrap(); let deterministic = Arc::new(Deterministic::new()); let mut compiler_config = Cranelift::default(); compiler_config.push_middleware(deterministic); let store = Store::new(&JIT::new(compiler_config).engine()); let result = 
Module::new(&store, &wasm); assert!(result.is_ok()); } #[test] fn parser_floats_are_not_supported() { let wasm = wat::parse_str( r#" (module (func $to_float (param i32) (result f32) get_local 0 f32.convert_u/i32 )) "#, ) .unwrap(); let deterministic = Arc::new(Deterministic::new()); let mut compiler_config = Cranelift::default(); compiler_config.push_middleware(deterministic); let store = Store::new(&JIT::new(compiler_config).engine()); let result = Module::new(&store, &wasm); assert!(result.is_err()); } }
use wasmer::wasmparser::Operator; use wasmer::{ FunctionMiddleware, LocalFunctionIndex, MiddlewareError, MiddlewareReaderState, ModuleMiddleware, }; #[derive(Debug)] pub struct Deterministic {} impl Deterministic { pub fn new() -> Self { Self {} } } impl ModuleMiddleware for Deterministic { fn generate_function_middleware(&self, _: LocalFunctionIndex) -> Box<dyn FunctionMiddleware> { Box::new(FunctionDeterministic {}) } } #[derive(Debug)] pub struct FunctionDeterministic {} impl FunctionMiddleware for FunctionDeterministic { fn feed<'a>( &mut self, operator: Operator<'a>, state: &mut MiddlewareReaderState<'a>, ) -> Result<(), MiddlewareError> { match operator { Operator::Unreachable | Operator::Nop | Operator::Block { .. } | Operator::Loop { .. } | Operator::If { .. } | Operator::Else | Operator::End | Operator::Br { .. } | Operator::BrIf { .. } | Operator::BrTable { .. } | Operator::Return | Operator::Call { .. } | Operator::CallIndirect { .. } | Operator::Drop | Operator::Select | Operator::LocalGet { .. } | Operator::LocalSet { .. } | Operator::LocalTee { .. } | Operator::GlobalGet { .. } | Operator::GlobalSet { .. } | Operator::I32Load { .. } | Operator::I64Load { .. } | Operator::I32Load8S { .. } | Operator::I32Load8U { .. } | Operator::I32Load16S { .. } | Operator::I32Load16U { .. } | Operator::I64Load8S { .. } | Operator::I64Load8U { .. } | Operator::I64Load16S { .. } | Operator::I64Load16U { .. } | Operator::I64Load32S { .. } | Operator::I64Load32U { .. } | Operator::I32Store { .. } | Operator::I64Store { .. } | Operator::I32Store8 { .. } | Operator::I32Store16 { .. } | Operator::I64Store8 { .. } | Operator::I64Store16 { .. } | Operator::I64Store32 { .. } | Operator::MemorySize { .. } | Operator::MemoryGrow { .. } | Operator::I32Const { .. } | Operator::I64Const { .. 
} | Operator::I32Eqz | Operator::I32Eq | Operator::I32Ne | Operator::I32LtS | Operator::I32LtU | Operator::I32GtS | Operator::I32GtU | Operator::I32LeS | Operator::I32LeU | Operator::I32GeS | Operator::I32GeU | Operator::I64Eqz | Operator::I64Eq | Operator::I64Ne | Operator::I64LtS | Operator::I64LtU | Operator::I64GtS | Operator::I64GtU | Operator::I64LeS | Operator::I64LeU | Operator::I64GeS | Operator::I64GeU | Operator::I32Clz | Operator::I32Ctz | Operator::I32Popcnt | Operator::I32Add | Operator::I32Sub | Operator::I32Mul | Operator::I32DivS | Operator::I32DivU | Operator::I32RemS | Operator::I32RemU | Operator::I32And | Operator::I32Or | Operator::I32Xor | Operator::I32Shl | Operator::I32ShrS | Operator::I32ShrU | Operator::I32Rotl | Operator::I32Rotr | Operator::I64Clz | Operator::I64Ctz | Operator::I64Popcnt | Operator::I64Add | Operator::I64Sub | Operator::I64Mul | Operator::I64DivS | Operator::I64DivU | Operator::I64RemS | Operator::I64RemU | Operator::I64And | Operator::I64Or | Operator::I64Xor | Operator::I64Shl | Operator::I64ShrS | Operator::I64ShrU | Operator::I64Rotl | Operator::I64Rotr | Operator::I32WrapI64 | Operator::I32Extend8S | Operator::I32Extend16S | Operator::I64Extend8S | Operator::I64Extend16S | Operator::I64ExtendI32S | Operator::I64ExtendI32U => { state.push_operator(operator); Ok(()) } _ => { let msg = format!("Non-determinstic operator detected: {:?}", operator); Err(MiddlewareError::new("Deterministic", msg)) } } } } #[cfg(test)] mod tests { use super::*; use std::sync::Arc; use wasmer::{CompilerConfig, Cranelift, Module, Store, JIT}; #[test] fn valid_wasm_instance_sanity() {
let deterministic = Arc::new(Deterministic::new()); let mut compiler_config = Cranelift::default(); compiler_config.push_middleware(deterministic); let store = Store::new(&JIT::new(compiler_config).engine()); let result = Module::new(&store, &wasm); assert!(result.is_ok()); } #[test] fn parser_floats_are_not_supported() { let wasm = wat::parse_str( r#" (module (func $to_float (param i32) (result f32) get_local 0 f32.convert_u/i32 )) "#, ) .unwrap(); let deterministic = Arc::new(Deterministic::new()); let mut compiler_config = Cranelift::default(); compiler_config.push_middleware(deterministic); let store = Store::new(&JIT::new(compiler_config).engine()); let result = Module::new(&store, &wasm); assert!(result.is_err()); } }
let wasm = wat::parse_str( r#" (module (func (export "sum") (param i32 i32) (result i32) get_local 0 get_local 1 i32.add )) "#, ) .unwrap();
assignment_statement
[ { "content": "pub fn config(storage: &mut dyn Storage) -> Singleton<State> {\n\n singleton(storage, CONFIG_KEY)\n\n}\n\n\n", "file_path": "contracts/reflect/src/state.rs", "rank": 0, "score": 281227.80228281906 }, { "content": "#[entry_point]\n\npub fn init(deps: DepsMut, _env: Env, info: MessageInfo, msg: InitMsg) -> StdResult<InitResponse> {\n\n // ensure the validator is registered\n\n let vals = deps.querier.query_validators()?;\n\n if !vals.iter().any(|v| v.address == msg.validator) {\n\n return Err(StdError::generic_err(format!(\n\n \"{} is not in the current validator set\",\n\n msg.validator\n\n )));\n\n }\n\n\n\n let token = TokenInfoResponse {\n\n name: msg.name,\n\n symbol: msg.symbol,\n\n decimals: msg.decimals,\n\n };\n\n token_info(deps.storage).save(&token)?;\n\n\n\n let denom = deps.querier.query_bonded_denom()?;\n\n let invest = InvestmentInfo {\n\n owner: deps.api.canonical_address(&info.sender)?,\n", "file_path": "contracts/staking/src/contract.rs", "rank": 1, "score": 253283.8093340361 }, { "content": "#[entry_point]\n\npub fn init(deps: DepsMut, _env: Env, _info: MessageInfo, msg: InitMsg) -> StdResult<InitResponse> {\n\n // we store the reflect_id for creating accounts later\n\n let cfg = Config {\n\n reflect_code_id: msg.reflect_code_id,\n\n };\n\n config(deps.storage).save(&cfg)?;\n\n\n\n Ok(InitResponse::default())\n\n}\n\n\n", "file_path": "contracts/ibc-reflect/src/contract.rs", "rank": 2, "score": 250641.59643675713 }, { "content": "/// balances are state of the erc20 tokens\n\npub fn balances(storage: &mut dyn Storage) -> Bucket<Uint128> {\n\n bucket(storage, PREFIX_BALANCE)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 3, "score": 240784.59732500376 }, { "content": "/// claims are the claims to money being unbonded\n\npub fn claims(storage: &mut dyn Storage) -> Bucket<Uint128> {\n\n bucket(storage, PREFIX_CLAIMS)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 4, "score": 240779.3356561612 }, 
{ "content": "pub fn total_supply(storage: &mut dyn Storage) -> Singleton<Supply> {\n\n singleton(storage, KEY_TOTAL_SUPPLY)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 5, "score": 237858.7412667757 }, { "content": "pub fn config(storage: &mut dyn Storage) -> Singleton<Config> {\n\n singleton(storage, KEY_CONFIG)\n\n}\n\n\n", "file_path": "contracts/ibc-reflect/src/state.rs", "rank": 6, "score": 237858.7412667757 }, { "content": "/// nextval increments the counter by 1 and returns the new value.\n\n/// On the first time it is called (no sequence info in db) it will return 1.\n\npub fn nextval(seq: &mut Singleton<u64>) -> StdResult<u64> {\n\n let val = currval(&seq)? + 1;\n\n seq.save(&val)?;\n\n Ok(val)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use cosmwasm_std::testing::MockStorage;\n\n\n\n #[test]\n\n fn walk_through_sequence() {\n\n let mut store = MockStorage::new();\n\n let mut seq = sequence(&mut store, b\"seq\");\n\n\n\n assert_eq!(currval(&seq).unwrap(), 0);\n\n assert_eq!(nextval(&mut seq).unwrap(), 1);\n\n assert_eq!(nextval(&mut seq).unwrap(), 2);\n\n assert_eq!(nextval(&mut seq).unwrap(), 3);\n", "file_path": "packages/storage/src/sequence.rs", "rank": 7, "score": 237441.20400205866 }, { "content": "/// accounts is lookup of channel_id to reflect contract\n\npub fn accounts(storage: &mut dyn Storage) -> Bucket<HumanAddr> {\n\n bucket(storage, PREFIX_ACCOUNTS)\n\n}\n\n\n", "file_path": "contracts/ibc-reflect/src/state.rs", "rank": 8, "score": 235041.9965561165 }, { "content": "pub fn invest_info(storage: &mut dyn Storage) -> Singleton<InvestmentInfo> {\n\n singleton(storage, KEY_INVESTMENT)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 9, "score": 235041.9965561165 }, { "content": "/// Runs a series of IO tests, hammering especially on allocate and deallocate.\n\n/// This could be especially useful when run with some kind of leak detector.\n\npub fn test_io<A, S, Q>(instance: &mut 
Instance<A, S, Q>)\n\nwhere\n\n A: Api + 'static,\n\n S: Storage + 'static,\n\n Q: Querier + 'static,\n\n{\n\n let sizes: Vec<usize> = vec![0, 1, 3, 10, 200, 2000, 5 * 1024];\n\n let bytes: Vec<u8> = vec![0x00, 0xA5, 0xFF];\n\n\n\n for size in sizes.into_iter() {\n\n for byte in bytes.iter() {\n\n let original = vec![*byte; size];\n\n let wasm_ptr = instance\n\n .allocate(original.len())\n\n .expect(\"Could not allocate memory\");\n\n instance\n\n .write_memory(wasm_ptr, &original)\n\n .expect(\"Could not write data\");\n\n let wasm_data = instance.read_memory(wasm_ptr, size).expect(\"error reading\");\n\n assert_eq!(\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 10, "score": 234345.25199057756 }, { "content": "pub fn query(deps: Deps, env: Env, msg: QueryMsg) -> StdResult<QueryResponse> {\n\n match msg {\n\n QueryMsg::Verifier {} => to_binary(&query_verifier(deps)?),\n\n QueryMsg::OtherBalance { address } => to_binary(&query_other_balance(deps, address)?),\n\n QueryMsg::Recurse { depth, work } => {\n\n to_binary(&query_recurse(deps, depth, work, env.contract.address)?)\n\n }\n\n }\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 11, "score": 232543.25311447476 }, { "content": "pub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<QueryResponse> {\n\n match msg {\n\n QueryMsg::Count {} => to_binary(&query_count(deps)?),\n\n QueryMsg::Sum {} => to_binary(&query_sum(deps)?),\n\n QueryMsg::Reducer {} => to_binary(&query_reducer(deps)?),\n\n QueryMsg::List {} => to_binary(&query_list(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/queue/src/contract.rs", "rank": 12, "score": 232543.25311447476 }, { "content": "pub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<QueryResponse> {\n\n match msg {\n\n QueryMsg::Owner {} => to_binary(&query_owner(deps)?),\n\n QueryMsg::Capitalized { text } => to_binary(&query_capitalized(deps, text)?),\n\n QueryMsg::Chain { request } => to_binary(&query_chain(deps, 
&request)?),\n\n QueryMsg::Raw { contract, key } => to_binary(&query_raw(deps, contract, key)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/reflect/src/contract.rs", "rank": 13, "score": 232543.25311447476 }, { "content": "#[entry_point]\n\npub fn query(deps: Deps, _env: Env, msg: QueryMsg) -> StdResult<QueryResponse> {\n\n match msg {\n\n QueryMsg::TokenInfo {} => to_binary(&query_token_info(deps)?),\n\n QueryMsg::Investment {} => to_binary(&query_investment(deps)?),\n\n QueryMsg::Balance { address } => to_binary(&query_balance(deps, address)?),\n\n QueryMsg::Claims { address } => to_binary(&query_claims(deps, address)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 14, "score": 232543.25311447476 }, { "content": "pub fn token_info(storage: &mut dyn Storage) -> Singleton<TokenInfoResponse> {\n\n singleton(storage, KEY_TOKEN_INFO)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 15, "score": 232323.65929407097 }, { "content": "/// Shortcut helper as the construction of WasmMsg::Instantiate can be quite verbose in contract code\n\npub fn wasm_instantiate<T>(\n\n code_id: u64,\n\n msg: &T,\n\n send: Vec<Coin>,\n\n label: Option<String>,\n\n) -> StdResult<WasmMsg>\n\nwhere\n\n T: Serialize,\n\n{\n\n let payload = to_binary(msg)?;\n\n Ok(WasmMsg::Instantiate {\n\n code_id,\n\n msg: payload,\n\n send,\n\n label,\n\n })\n\n}\n\n\n", "file_path": "packages/std/src/results/cosmos_msg.rs", "rank": 16, "score": 230713.6230709313 }, { "content": "/// Compiles a given Wasm bytecode into a module.\n\n/// The given memory limit (in bytes) is used when memories are created.\n\npub fn compile_and_use(code: &[u8], memory_limit: Option<Size>) -> VmResult<Module> {\n\n let store = make_compile_time_store(memory_limit);\n\n let module = Module::new(&store, code)?;\n\n Ok(module)\n\n}\n", "file_path": "packages/vm/src/wasm_backend/compile.rs", "rank": 17, "score": 230221.17867112433 }, { "content": "#[entry_point]\n\npub fn query(deps: 
Deps, _env: Env, msg: QueryMsg) -> StdResult<QueryResponse> {\n\n match msg {\n\n QueryMsg::Account { channel_id } => to_binary(&query_account(deps, channel_id)?),\n\n QueryMsg::ListAccounts {} => to_binary(&query_list_accounts(deps)?),\n\n }\n\n}\n\n\n", "file_path": "contracts/ibc-reflect/src/contract.rs", "rank": 18, "score": 230179.7598445887 }, { "content": "/// Shortcut helper as the construction of WasmMsg::Instantiate can be quite verbose in contract code\n\npub fn wasm_execute<T, U>(contract_addr: T, msg: &U, send: Vec<Coin>) -> StdResult<WasmMsg>\n\nwhere\n\n T: Into<HumanAddr>,\n\n U: Serialize,\n\n{\n\n let payload = to_binary(msg)?;\n\n Ok(WasmMsg::Execute {\n\n contract_addr: contract_addr.into(),\n\n msg: payload,\n\n send,\n\n })\n\n}\n\n\n\nimpl<T: Clone + fmt::Debug + PartialEq + JsonSchema> From<BankMsg> for CosmosMsg<T> {\n\n fn from(msg: BankMsg) -> Self {\n\n CosmosMsg::Bank(msg)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"staking\")]\n", "file_path": "packages/std/src/results/cosmos_msg.rs", "rank": 19, "score": 229980.73091645184 }, { "content": "/// A prepared and sufficiently large memory Region is expected at ptr that points to pre-allocated memory.\n\n///\n\n/// Returns number of bytes written on success.\n\npub fn write_region(memory: &wasmer::Memory, ptr: u32, data: &[u8]) -> VmResult<()> {\n\n let mut region = get_region(memory, ptr)?;\n\n\n\n let region_capacity = region.capacity as usize;\n\n if data.len() > region_capacity {\n\n return Err(CommunicationError::region_too_small(region_capacity, data.len()).into());\n\n }\n\n match WasmPtr::<u8, Array>::new(region.offset).deref(memory, 0, region.capacity) {\n\n Some(cells) => {\n\n // In case you want to do some premature optimization, this shows how to cast a `&'mut [Cell<u8>]` to `&mut [u8]`:\n\n // https://github.com/wasmerio/wasmer/blob/0.13.1/lib/wasi/src/syscalls/mod.rs#L79-L81\n\n for i in 0..data.len() {\n\n cells[i].set(data[i])\n\n }\n\n region.length = data.len() as u32;\n\n 
set_region(memory, ptr, region)?;\n\n Ok(())\n\n },\n\n None => Err(CommunicationError::deref_err(region.offset, format!(\n\n \"Tried to access memory of region {:?} in wasm memory of size {} bytes. This typically happens when the given Region pointer does not point to a proper Region struct.\",\n\n region,\n\n memory.size().bytes().0\n\n )).into()),\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/memory.rs", "rank": 20, "score": 223519.22272684413 }, { "content": "/// Created a store with no compiler and the given memory limit (in bytes)\n\n/// If memory_limit is None, no limit is applied.\n\npub fn make_runtime_store(memory_limit: Option<Size>) -> Store {\n\n let engine = JIT::headless().engine();\n\n make_store_with_engine(&engine, memory_limit)\n\n}\n\n\n", "file_path": "packages/vm/src/wasm_backend/store.rs", "rank": 21, "score": 218245.24870611748 }, { "content": "/// Created a store with the default compiler and the given memory limit (in bytes).\n\n/// If memory_limit is None, no limit is applied.\n\npub fn make_compile_time_store(memory_limit: Option<Size>) -> Store {\n\n let gas_limit = 0;\n\n let deterministic = Arc::new(Deterministic::new());\n\n let metering = Arc::new(Metering::new(gas_limit, cost));\n\n\n\n #[cfg(feature = \"cranelift\")]\n\n {\n\n let mut config = Cranelift::default();\n\n config.push_middleware(deterministic);\n\n config.push_middleware(metering);\n\n let engine = JIT::new(config).engine();\n\n make_store_with_engine(&engine, memory_limit)\n\n }\n\n\n\n #[cfg(not(feature = \"cranelift\"))]\n\n {\n\n let mut config = Singlepass::default();\n\n config.push_middleware(deterministic);\n\n config.push_middleware(metering);\n\n let engine = JIT::new(config).engine();\n\n make_store_with_engine(&engine, memory_limit)\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/wasm_backend/store.rs", "rank": 22, "score": 216484.60370434215 }, { "content": "/// Compiles a given Wasm bytecode into a module.\n\n/// The resulting module has no memory 
limit. This\n\n/// should only be used to compile for caching.\n\npub fn compile_only(code: &[u8]) -> VmResult<Module> {\n\n let store = make_compile_time_store(None);\n\n let module = Module::new(&store, code)?;\n\n Ok(module)\n\n}\n\n\n", "file_path": "packages/vm/src/wasm_backend/compile.rs", "rank": 23, "score": 213028.40530758232 }, { "content": "pub fn config_read(storage: &dyn Storage) -> ReadonlySingleton<State> {\n\n singleton_read(storage, CONFIG_KEY)\n\n}\n", "file_path": "contracts/reflect/src/state.rs", "rank": 24, "score": 212776.66087558464 }, { "content": "/// Creates a memory region of capacity `size` and length 0. Returns a pointer to the Region.\n\n/// This is the same as the `allocate` export, but designed to be called internally.\n\npub fn alloc(size: usize) -> *mut Region {\n\n let data: Vec<u8> = Vec::with_capacity(size);\n\n let data_ptr = data.as_ptr() as usize;\n\n\n\n let region = build_region_from_components(\n\n u32::try_from(data_ptr).expect(\"pointer doesn't fit in u32\"),\n\n u32::try_from(data.capacity()).expect(\"capacity doesn't fit in u32\"),\n\n 0,\n\n );\n\n mem::forget(data);\n\n Box::into_raw(region)\n\n}\n\n\n", "file_path": "packages/std/src/memory.rs", "rank": 25, "score": 211700.40200002398 }, { "content": "/// Expects a (fixed size) Region struct at ptr, which is read. This links to the\n\n/// memory region, which is copied in the second step.\n\n/// Errors if the length of the region exceeds `max_length`.\n\npub fn read_region(memory: &wasmer::Memory, ptr: u32, max_length: usize) -> VmResult<Vec<u8>> {\n\n let region = get_region(memory, ptr)?;\n\n\n\n if region.length > to_u32(max_length)? 
{\n\n return Err(\n\n CommunicationError::region_length_too_big(region.length as usize, max_length).into(),\n\n );\n\n }\n\n\n\n match WasmPtr::<u8, Array>::new(region.offset).deref(memory, 0, region.length) {\n\n Some(cells) => {\n\n // In case you want to do some premature optimization, this shows how to cast a `&'mut [Cell<u8>]` to `&mut [u8]`:\n\n // https://github.com/wasmerio/wasmer/blob/0.13.1/lib/wasi/src/syscalls/mod.rs#L79-L81\n\n let len = region.length as usize;\n\n let mut result = vec![0u8; len];\n\n for i in 0..len {\n\n result[i] = cells[i].get();\n\n }\n\n Ok(result)\n\n }\n\n None => Err(CommunicationError::deref_err(region.offset, format!(\n\n \"Tried to access memory of region {:?} in wasm memory of size {} bytes. This typically happens when the given Region pointer does not point to a proper Region struct.\",\n\n region,\n\n memory.size().bytes().0\n\n )).into()),\n\n }\n\n}\n\n\n\n/// maybe_read_region is like read_region, but gracefully handles null pointer (0) by returning None\n\n/// meant to be used where the argument is optional (like scan)\n", "file_path": "packages/vm/src/memory.rs", "rank": 26, "score": 210932.0887411186 }, { "content": "pub fn custom_query_execute(query: &SpecialQuery) -> ContractResult<Binary> {\n\n let msg = match query {\n\n SpecialQuery::Ping {} => \"pong\".to_string(),\n\n SpecialQuery::Capitalized { text } => text.to_uppercase(),\n\n };\n\n to_binary(&SpecialResponse { msg }).into()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use cosmwasm_std::{from_binary, QuerierWrapper, QueryRequest};\n\n\n\n #[test]\n\n fn custom_query_execute_ping() {\n\n let res = custom_query_execute(&SpecialQuery::Ping {}).unwrap();\n\n let response: SpecialResponse = from_binary(&res).unwrap();\n\n assert_eq!(response.msg, \"pong\");\n\n }\n\n\n", "file_path": "contracts/reflect/src/testing.rs", "rank": 27, "score": 209664.74448282478 }, { "content": "/// Similar to alloc, but instead of creating a new vector it 
consumes an existing one and returns\n\n/// a pointer to the Region (preventing the memory from being freed until explicitly called later).\n\n///\n\n/// The resulting Region has capacity = length, i.e. the buffer's capacity is ignored.\n\npub fn release_buffer(buffer: Vec<u8>) -> *mut Region {\n\n let region = build_region(&buffer);\n\n mem::forget(buffer);\n\n Box::into_raw(region)\n\n}\n\n\n\n/// Return the data referenced by the Region and\n\n/// deallocates the Region (and the vector when finished).\n\n/// Warning: only use this when you are sure the caller will never use (or free) the Region later\n\n///\n\n/// # Safety\n\n///\n\n/// The ptr must refer to a valid Region, which was previously returned by alloc,\n\n/// and not yet deallocated. This call will deallocate the Region and return an owner vector\n\n/// to the caller containing the referenced data.\n\n///\n\n/// Naturally, calling this function twice on the same pointer will double deallocate data\n\n/// and lead to a crash. 
Make sure to call it exactly once (either consuming the input in\n\n/// the wasm code OR deallocating the buffer from the caller).\n\npub unsafe fn consume_region(ptr: *mut Region) -> Vec<u8> {\n", "file_path": "packages/std/src/memory.rs", "rank": 28, "score": 203337.65664831124 }, { "content": "pub fn mock_instance(\n\n wasm: &[u8],\n\n contract_balance: &[Coin],\n\n) -> Instance<MockApi, MockStorage, MockQuerier> {\n\n mock_instance_with_options(\n\n wasm,\n\n MockInstanceOptions {\n\n contract_balance: Some(contract_balance),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 29, "score": 196637.60320239305 }, { "content": "pub fn remove_schemas(schemas_dir: &path::Path) -> Result<(), io::Error> {\n\n let file_paths = fs::read_dir(schemas_dir)?\n\n .filter_map(Result::ok) // skip read errors on entries\n\n .map(|entry| entry.path())\n\n .filter(|path| is_regular_file(path).unwrap_or(false)) // skip directories and symlinks\n\n .filter(|path| !is_hidden(path)) // skip hidden\n\n .filter(|path| is_json(path)) // skip non JSON\n\n ;\n\n\n\n for file_path in file_paths {\n\n println!(\"Removing {:?} …\", file_path);\n\n fs::remove_file(file_path)?;\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::ffi::OsStr;\n", "file_path": "packages/schema/src/remove.rs", "rank": 30, "score": 195406.32140714923 }, { "content": "/// A drop-in replacement for cosmwasm_std::testing::mock_dependencies\n\n/// this uses our CustomQuerier.\n\npub fn mock_dependencies_with_custom_querier(\n\n contract_balance: &[Coin],\n\n) -> OwnedDeps<MockStorage, MockApi, MockQuerier<SpecialQuery>> {\n\n let contract_addr = HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n let custom_querier: MockQuerier<SpecialQuery> =\n\n MockQuerier::new(&[(&contract_addr, contract_balance)])\n\n .with_custom_handler(|query| SystemResult::Ok(custom_query_execute(&query)));\n\n OwnedDeps {\n\n storage: 
MockStorage::default(),\n\n api: MockApi::default(),\n\n querier: custom_querier,\n\n }\n\n}\n\n\n", "file_path": "contracts/reflect/src/testing.rs", "rank": 31, "score": 194120.56908684404 }, { "content": "/// A drop-in replacement for cosmwasm_vm::testing::mock_dependencies\n\n/// that supports SpecialQuery.\n\npub fn mock_dependencies_with_custom_querier(\n\n contract_balance: &[Coin],\n\n) -> Backend<MockApi, MockStorage, MockQuerier<SpecialQuery>> {\n\n let contract_addr = HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n let custom_querier: MockQuerier<SpecialQuery> =\n\n MockQuerier::new(&[(&contract_addr, contract_balance)])\n\n .with_custom_handler(|query| SystemResult::Ok(custom_query_execute(query)));\n\n\n\n Backend {\n\n api: MockApi::default(),\n\n storage: MockStorage::default(),\n\n querier: custom_querier,\n\n }\n\n}\n\n\n", "file_path": "contracts/reflect/tests/integration.rs", "rank": 32, "score": 194115.72406796776 }, { "content": "pub fn mock_instance_with_balances(\n\n wasm: &[u8],\n\n balances: &[(&HumanAddr, &[Coin])],\n\n) -> Instance<MockApi, MockStorage, MockQuerier> {\n\n mock_instance_with_options(\n\n wasm,\n\n MockInstanceOptions {\n\n balances,\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 33, "score": 194111.01151575308 }, { "content": "/// Initializes the querier along with the mock_dependencies.\n\n/// Sets all balances provided (yoy must explicitly set contract balance if desired)\n\npub fn mock_backend_with_balances(\n\n balances: &[(&HumanAddr, &[Coin])],\n\n) -> Backend<MockApi, MockStorage, MockQuerier> {\n\n Backend {\n\n api: MockApi::default(),\n\n storage: MockStorage::default(),\n\n querier: MockQuerier::new(balances),\n\n }\n\n}\n\n\n\n/// Zero-pads all human addresses to make them fit the canonical_length and\n\n/// trims off zeros for the reverse operation.\n\n/// This is not really smart, but allows us to see a difference (and consistent length for canonical 
adddresses).\n\n#[derive(Copy, Clone)]\n\npub struct MockApi {\n\n /// Length of canonical addresses created with this API. Contracts should not make any assumtions\n\n /// what this value is.\n\n pub canonical_length: usize,\n\n /// When set, all calls to the API fail with BackendError::Unknown containing this message\n\n backend_error: Option<&'static str>,\n", "file_path": "packages/vm/src/testing/mock.rs", "rank": 34, "score": 194111.0115157531 }, { "content": "pub fn mock_instance_with_options(\n\n wasm: &[u8],\n\n options: MockInstanceOptions,\n\n) -> Instance<MockApi, MockStorage, MockQuerier> {\n\n check_wasm(wasm, &options.supported_features).unwrap();\n\n let contract_address = HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n\n\n // merge balances\n\n let mut balances = options.balances.to_vec();\n\n if let Some(contract_balance) = options.contract_balance {\n\n // Remove old entry if exists\n\n if let Some(pos) = balances.iter().position(|item| *item.0 == contract_address) {\n\n balances.remove(pos);\n\n }\n\n balances.push((&contract_address, contract_balance));\n\n }\n\n\n\n let api = if let Some(backend_error) = options.backend_error {\n\n MockApi::new_failing(backend_error)\n\n } else {\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 35, "score": 194111.01151575308 }, { "content": "pub fn mock_instance_with_failing_api(\n\n wasm: &[u8],\n\n contract_balance: &[Coin],\n\n backend_error: &'static str,\n\n) -> Instance<MockApi, MockStorage, MockQuerier> {\n\n mock_instance_with_options(\n\n wasm,\n\n MockInstanceOptions {\n\n contract_balance: Some(contract_balance),\n\n backend_error: Some(backend_error),\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 36, "score": 191681.13781187386 }, { "content": "pub fn mock_instance_with_gas_limit(\n\n wasm: &[u8],\n\n gas_limit: u64,\n\n) -> Instance<MockApi, MockStorage, MockQuerier> {\n\n mock_instance_with_options(\n\n wasm,\n\n 
MockInstanceOptions {\n\n gas_limit,\n\n ..Default::default()\n\n },\n\n )\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct MockInstanceOptions<'a> {\n\n // dependencies\n\n pub balances: &'a [(&'a HumanAddr, &'a [Coin])],\n\n /// This option is merged into balances and might override an existing value\n\n pub contract_balance: Option<&'a [Coin]>,\n\n /// When set, all calls to the API fail with BackendError::Unknown containing this message\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 37, "score": 191681.13781187386 }, { "content": "#[proc_macro_attribute]\n\npub fn entry_point(_attr: TokenStream, mut item: TokenStream) -> TokenStream {\n\n let cloned = item.clone();\n\n let function = parse_macro_input!(cloned as syn::ItemFn);\n\n let name = function.sig.ident.to_string();\n\n // The first argument is `deps`, the rest is region pointers\n\n let args = function.sig.inputs.len() - 1;\n\n\n\n // E.g. \"ptr0: u32, ptr1: u32, ptr2: u32, \"\n\n let typed_ptrs = (0..args).fold(String::new(), |acc, i| format!(\"{}ptr{}: u32, \", acc, i));\n\n // E.g. 
\"ptr0, ptr1, ptr2, \"\n\n let ptrs = (0..args).fold(String::new(), |acc, i| format!(\"{}ptr{}, \", acc, i));\n\n\n\n let new_code = format!(\n\n r##\"\n\n #[cfg(target_arch = \"wasm32\")]\n\n mod __wasm_export_{name} {{ // new module to avoid conflict of function name\n\n #[no_mangle]\n\n extern \"C\" fn {name}({typed_ptrs}) -> u32 {{\n\n cosmwasm_std::do_{name}(&super::{name}, {ptrs})\n\n }}\n", "file_path": "packages/derive/src/lib.rs", "rank": 38, "score": 191608.8122899543 }, { "content": "/// reinvest will withdraw all pending rewards,\n\n/// then issue a callback to itself via _bond_all_tokens\n\n/// to reinvest the new earnings (and anything else that accumulated)\n\npub fn reinvest(deps: DepsMut, env: Env, _info: MessageInfo) -> StdResult<HandleResponse> {\n\n let contract_addr = env.contract.address;\n\n let invest = invest_info_read(deps.storage).load()?;\n\n let msg = to_binary(&HandleMsg::_BondAllTokens {})?;\n\n\n\n // and bond them to the validator\n\n let res = HandleResponse {\n\n messages: vec![\n\n StakingMsg::Withdraw {\n\n validator: invest.validator,\n\n recipient: Some(contract_addr.clone()),\n\n }\n\n .into(),\n\n WasmMsg::Execute {\n\n contract_addr,\n\n msg,\n\n send: vec![],\n\n }\n\n .into(),\n\n ],\n\n attributes: vec![],\n\n data: None,\n\n };\n\n Ok(res)\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 39, "score": 191424.47618581256 }, { "content": "pub fn bond(deps: DepsMut, env: Env, info: MessageInfo) -> StdResult<HandleResponse> {\n\n let sender_raw = deps.api.canonical_address(&info.sender)?;\n\n\n\n // ensure we have the proper denom\n\n let invest = invest_info_read(deps.storage).load()?;\n\n // payment finds the proper coin (or throws an error)\n\n let payment = info\n\n .sent_funds\n\n .iter()\n\n .find(|x| x.denom == invest.bond_denom)\n\n .ok_or_else(|| StdError::generic_err(format!(\"No {} tokens sent\", &invest.bond_denom)))?;\n\n\n\n // bonded is the total number of tokens we have delegated from 
this address\n\n let bonded = get_bonded(&deps.querier, &env.contract.address)?;\n\n\n\n // calculate to_mint and update total supply\n\n let mut totals = total_supply(deps.storage);\n\n let mut supply = totals.load()?;\n\n // TODO: this is just temporary check - we should use dynamic query or have a way to recover\n\n assert_bonds(&supply, bonded)?;\n", "file_path": "contracts/staking/src/contract.rs", "rank": 40, "score": 191419.7072864038 }, { "content": "pub fn claim(deps: DepsMut, env: Env, info: MessageInfo) -> StdResult<HandleResponse> {\n\n // find how many tokens the contract has\n\n let invest = invest_info_read(deps.storage).load()?;\n\n let mut balance = deps\n\n .querier\n\n .query_balance(&env.contract.address, &invest.bond_denom)?;\n\n if balance.amount < invest.min_withdrawal {\n\n return Err(StdError::generic_err(\n\n \"Insufficient balance in contract to process claim\",\n\n ));\n\n }\n\n\n\n // check how much to send - min(balance, claims[sender]), and reduce the claim\n\n let sender_raw = deps.api.canonical_address(&info.sender)?;\n\n let mut to_send = balance.amount;\n\n claims(deps.storage).update(sender_raw.as_slice(), |claim| {\n\n let claim = claim.ok_or_else(|| StdError::generic_err(\"no claim for this address\"))?;\n\n to_send = to_send.min(claim);\n\n claim - to_send\n\n })?;\n", "file_path": "contracts/staking/src/contract.rs", "rank": 41, "score": 191419.70728640383 }, { "content": "/// enforces ordering and versioing constraints\n\npub fn ibc_channel_open(_deps: DepsMut, _env: Env, channel: IbcChannel) -> StdResult<()> {\n\n if channel.order != IbcOrder::Ordered {\n\n return Err(StdError::generic_err(\"Only supports ordered channels\"));\n\n }\n\n if channel.version.as_str() != IBC_VERSION {\n\n return Err(StdError::generic_err(format!(\n\n \"Must set version to `{}`\",\n\n IBC_VERSION\n\n )));\n\n }\n\n // TODO: do we need to check counterparty version as well?\n\n // This flow needs to be well documented\n\n if let 
Some(counter_version) = channel.counterparty_version {\n\n if counter_version.as_str() != IBC_VERSION {\n\n return Err(StdError::generic_err(format!(\n\n \"Counterparty version must be `{}`\",\n\n IBC_VERSION\n\n )));\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n#[entry_point]\n", "file_path": "contracts/ibc-reflect/src/contract.rs", "rank": 42, "score": 190912.13064754586 }, { "content": "/// Returns a default enviroment with height, time, chain_id, and contract address\n\n/// You can submit as is to most contracts, or modify height/time if you want to\n\n/// test for expiration.\n\n///\n\n/// This is intended for use in test code only.\n\npub fn mock_env() -> Env {\n\n Env {\n\n block: BlockInfo {\n\n height: 12_345,\n\n time: 1_571_797_419,\n\n time_nanos: 879305533,\n\n chain_id: \"cosmos-testnet-14002\".to_string(),\n\n },\n\n contract: ContractInfo {\n\n address: HumanAddr::from(MOCK_CONTRACT_ADDR),\n\n },\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/testing/mock.rs", "rank": 43, "score": 190592.93060549724 }, { "content": "/// Implementation for check_wasm, based on static analysis of the bytecode.\n\n/// This is used for code upload, to perform check before compiling the Wasm.\n\npub fn required_features_from_module(module: &Module) -> HashSet<String> {\n\n match module.export_section() {\n\n None => HashSet::new(),\n\n Some(export_section) => export_section\n\n .entries()\n\n .iter()\n\n .filter_map(|entry| {\n\n if let Internal::Function(_) = entry.internal() {\n\n let name = entry.field();\n\n if name.starts_with(REQUIRES_PREFIX) && name.len() > REQUIRES_PREFIX.len() {\n\n let (_, required_feature) = name.split_at(REQUIRES_PREFIX.len());\n\n return Some(required_feature.to_string());\n\n }\n\n }\n\n None\n\n })\n\n .collect(),\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/features.rs", "rank": 44, "score": 189918.48482454862 }, { "content": "// use this if you want to override the auto-detected name of the object.\n\n// very useful when creating an 
alias for a type-alias.\n\npub fn export_schema_with_title(schema: &mut RootSchema, out_dir: &PathBuf, title: &str) {\n\n // set the title explicitly on the schemas metadata\n\n let metadata = &mut schema.schema.metadata;\n\n if let Some(data) = metadata {\n\n data.title = Some(title.to_string());\n\n }\n\n write_schema(schema, out_dir, &title);\n\n}\n\n\n", "file_path": "packages/schema/src/export.rs", "rank": 45, "score": 186553.55358821095 }, { "content": "pub fn required_features_from_wasmer_instance(wasmer_instance: &WasmerInstance) -> HashSet<String> {\n\n let module = wasmer_instance.module();\n\n module\n\n .exports()\n\n .filter_map(|export| {\n\n if let ExternType::Function { .. } = export.ty() {\n\n let name = export.name();\n\n if name.starts_with(REQUIRES_PREFIX) && name.len() > REQUIRES_PREFIX.len() {\n\n let required_feature = name.to_string().split_off(REQUIRES_PREFIX.len());\n\n return Some(required_feature);\n\n }\n\n }\n\n None\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "packages/vm/src/features.rs", "rank": 46, "score": 184601.18400248466 }, { "content": "/// An alias of PrefixedStorage::new for less verbose usage\n\npub fn prefixed<'a>(storage: &'a mut dyn Storage, namespace: &[u8]) -> PrefixedStorage<'a> {\n\n PrefixedStorage::new(storage, namespace)\n\n}\n\n\n", "file_path": "packages/storage/src/prefixed_storage.rs", "rank": 47, "score": 182241.69483824787 }, { "content": "/// Sequence creates a custom Singleton to hold an empty sequence\n\npub fn sequence<'a>(storage: &'a mut dyn Storage, key: &[u8]) -> Singleton<'a, u64> {\n\n Singleton::new(storage, key)\n\n}\n\n\n", "file_path": "packages/storage/src/sequence.rs", "rank": 48, "score": 181920.84409968898 }, { "content": "// query mimicks the call signature of the smart contracts.\n\n// thus it moves env and msg rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn query<A, S, Q, M>(\n\n instance: &mut Instance<A, S, Q>,\n\n env: 
Env,\n\n msg: M,\n\n) -> ContractResult<QueryResponse>\n\nwhere\n\n A: Api + 'static,\n\n S: Storage + 'static,\n\n Q: Querier + 'static,\n\n M: Serialize + JsonSchema,\n\n{\n\n let serialized_msg = to_vec(&msg).expect(\"Testing error: Could not seralize request message\");\n\n call_query(instance, &env, &serialized_msg).expect(\"VM error\")\n\n}\n", "file_path": "packages/vm/src/testing/calls.rs", "rank": 49, "score": 179504.69039037856 }, { "content": "/// An alias of Singleton::new for less verbose usage\n\npub fn singleton<'a, T>(storage: &'a mut dyn Storage, key: &[u8]) -> Singleton<'a, T>\n\nwhere\n\n T: Serialize + DeserializeOwned,\n\n{\n\n Singleton::new(storage, key)\n\n}\n\n\n", "file_path": "packages/storage/src/singleton.rs", "rank": 50, "score": 177707.62620416176 }, { "content": "/// An alias of Bucket::new for less verbose usage\n\npub fn bucket<'a, T>(storage: &'a mut dyn Storage, namespace: &[u8]) -> Bucket<'a, T>\n\nwhere\n\n T: Serialize + DeserializeOwned,\n\n{\n\n Bucket::new(storage, namespace)\n\n}\n\n\n", "file_path": "packages/storage/src/bucket.rs", "rank": 51, "score": 177707.62620416176 }, { "content": "// ibc_channel_open mimicks the call signature of the smart contracts.\n\n// thus it moves env and channel rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn ibc_channel_open<A, S, Q>(\n\n instance: &mut Instance<A, S, Q>,\n\n env: Env,\n\n channel: IbcChannel,\n\n) -> ContractResult<()>\n\nwhere\n\n A: Api + 'static,\n\n S: Storage + 'static,\n\n Q: Querier + 'static,\n\n{\n\n call_ibc_channel_open(instance, &env, &channel).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/ibc_calls.rs", "rank": 52, "score": 177521.87924987107 }, { "content": "/// Creates InstanceOptions for testing\n\npub fn mock_instance_options() -> (InstanceOptions, Option<Size>) {\n\n (\n\n InstanceOptions {\n\n gas_limit: DEFAULT_GAS_LIMIT,\n\n print_debug: DEFAULT_PRINT_DEBUG,\n\n },\n\n 
DEFAULT_MEMORY_LIMIT,\n\n )\n\n}\n\n\n", "file_path": "packages/vm/src/testing/instance.rs", "rank": 53, "score": 175346.87422588991 }, { "content": "// init mimicks the call signature of the smart contracts.\n\n// thus it moves env and msg rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn init<A, S, Q, M, U>(\n\n instance: &mut Instance<A, S, Q>,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: M,\n\n) -> ContractResult<InitResponse<U>>\n\nwhere\n\n A: Api + 'static,\n\n S: Storage + 'static,\n\n Q: Querier + 'static,\n\n M: Serialize + JsonSchema,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n{\n\n let serialized_msg = to_vec(&msg).expect(\"Testing error: Could not seralize request message\");\n\n call_init(instance, &env, &info, &serialized_msg).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/calls.rs", "rank": 54, "score": 174779.97350197696 }, { "content": "// migrate mimicks the call signature of the smart contracts.\n\n// thus it moves env and msg rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn migrate<A, S, Q, M, U>(\n\n instance: &mut Instance<A, S, Q>,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: M,\n\n) -> ContractResult<MigrateResponse<U>>\n\nwhere\n\n A: Api + 'static,\n\n S: Storage + 'static,\n\n Q: Querier + 'static,\n\n M: Serialize + JsonSchema,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n{\n\n let serialized_msg = to_vec(&msg).expect(\"Testing error: Could not seralize request message\");\n\n call_migrate(instance, &env, &info, &serialized_msg).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/calls.rs", "rank": 55, "score": 174779.97350197696 }, { "content": "// handle mimicks the call signature of the smart contracts.\n\n// thus it moves env and msg rather than take them as reference.\n\n// this is inefficient here, but only used in 
test code\n\npub fn handle<A, S, Q, M, U>(\n\n instance: &mut Instance<A, S, Q>,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: M,\n\n) -> ContractResult<HandleResponse<U>>\n\nwhere\n\n A: Api + 'static,\n\n S: Storage + 'static,\n\n Q: Querier + 'static,\n\n M: Serialize + JsonSchema,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n{\n\n let serialized_msg = to_vec(&msg).expect(\"Testing error: Could not seralize request message\");\n\n call_handle(instance, &env, &info, &serialized_msg).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/calls.rs", "rank": 56, "score": 174779.97350197696 }, { "content": "fn do_storage_loop(deps: DepsMut) -> Result<HandleResponse, HackError> {\n\n let mut test_case = 0u64;\n\n loop {\n\n deps.storage\n\n .set(b\"test.key\", test_case.to_string().as_bytes());\n\n test_case += 1;\n\n }\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 57, "score": 174204.81117496284 }, { "content": "pub fn query_investment(deps: Deps) -> StdResult<InvestmentResponse> {\n\n let invest = invest_info_read(deps.storage).load()?;\n\n let supply = total_supply_read(deps.storage).load()?;\n\n\n\n let res = InvestmentResponse {\n\n owner: deps.api.human_address(&invest.owner)?,\n\n exit_tax: invest.exit_tax,\n\n validator: invest.validator,\n\n min_withdrawal: invest.min_withdrawal,\n\n token_supply: supply.issued,\n\n staked_tokens: coin(supply.bonded.u128(), &invest.bond_denom),\n\n nominal_value: if supply.issued.is_zero() {\n\n FALLBACK_RATIO\n\n } else {\n\n Decimal::from_ratio(supply.bonded, supply.issued)\n\n },\n\n };\n\n Ok(res)\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 58, "score": 172885.15058219616 }, { "content": "// ibc_packet_timeout mimicks the call signature of the smart contracts.\n\n// thus it moves env and packet rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn ibc_packet_timeout<A, S, Q, 
U>(\n\n instance: &mut Instance<A, S, Q>,\n\n env: Env,\n\n packet: IbcPacket,\n\n) -> ContractResult<IbcBasicResponse<U>>\n\nwhere\n\n A: Api + 'static,\n\n S: Storage + 'static,\n\n Q: Querier + 'static,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n{\n\n call_ibc_packet_timeout(instance, &env, &packet).expect(\"VM error\")\n\n}\n", "file_path": "packages/vm/src/testing/ibc_calls.rs", "rank": 59, "score": 172738.0706536814 }, { "content": "// ibc_packet_receive mimicks the call signature of the smart contracts.\n\n// thus it moves env and packet rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn ibc_packet_receive<A, S, Q, U>(\n\n instance: &mut Instance<A, S, Q>,\n\n env: Env,\n\n packet: IbcPacket,\n\n) -> ContractResult<IbcReceiveResponse<U>>\n\nwhere\n\n A: Api + 'static,\n\n S: Storage + 'static,\n\n Q: Querier + 'static,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n{\n\n call_ibc_packet_receive(instance, &env, &packet).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/ibc_calls.rs", "rank": 60, "score": 172738.0706536814 }, { "content": "// ibc_channel_connect mimicks the call signature of the smart contracts.\n\n// thus it moves env and channel rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn ibc_channel_connect<A, S, Q, U>(\n\n instance: &mut Instance<A, S, Q>,\n\n env: Env,\n\n channel: IbcChannel,\n\n) -> ContractResult<IbcBasicResponse<U>>\n\nwhere\n\n A: Api + 'static,\n\n S: Storage + 'static,\n\n Q: Querier + 'static,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n{\n\n call_ibc_channel_connect(instance, &env, &channel).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/ibc_calls.rs", "rank": 61, "score": 172738.0706536814 }, { "content": "// ibc_channel_close mimicks the call signature of the smart contracts.\n\n// 
thus it moves env and channel rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn ibc_channel_close<A, S, Q, U>(\n\n instance: &mut Instance<A, S, Q>,\n\n env: Env,\n\n channel: IbcChannel,\n\n) -> ContractResult<IbcBasicResponse<U>>\n\nwhere\n\n A: Api + 'static,\n\n S: Storage + 'static,\n\n Q: Querier + 'static,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n{\n\n call_ibc_channel_close(instance, &env, &channel).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/ibc_calls.rs", "rank": 62, "score": 172738.0706536814 }, { "content": "// ibc_packet_ack mimicks the call signature of the smart contracts.\n\n// thus it moves env and acknowledgement rather than take them as reference.\n\n// this is inefficient here, but only used in test code\n\npub fn ibc_packet_ack<A, S, Q, U>(\n\n instance: &mut Instance<A, S, Q>,\n\n env: Env,\n\n ack: IbcAcknowledgement,\n\n) -> ContractResult<IbcBasicResponse<U>>\n\nwhere\n\n A: Api + 'static,\n\n S: Storage + 'static,\n\n Q: Querier + 'static,\n\n U: DeserializeOwned + Clone + PartialEq + JsonSchema + fmt::Debug,\n\n{\n\n call_ibc_packet_ack(instance, &env, &ack).expect(\"VM error\")\n\n}\n\n\n", "file_path": "packages/vm/src/testing/ibc_calls.rs", "rank": 63, "score": 172738.0706536814 }, { "content": "/// currval returns the last value returned by nextval. 
If the sequence has never been used,\n\n/// then it will return 0.\n\npub fn currval(seq: &Singleton<u64>) -> StdResult<u64> {\n\n Ok(seq.may_load()?.unwrap_or_default())\n\n}\n\n\n", "file_path": "packages/storage/src/sequence.rs", "rank": 64, "score": 172677.5765647717 }, { "content": "pub fn to_binary<T>(data: &T) -> StdResult<Binary>\n\nwhere\n\n T: Serialize + ?Sized,\n\n{\n\n to_vec(data).map(Binary)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use serde::Deserialize;\n\n\n\n #[derive(Serialize, Deserialize, Debug, PartialEq)]\n\n #[serde(rename_all = \"snake_case\")]\n\n enum SomeMsg {\n\n Refund {},\n\n ReleaseAll {\n\n image: String,\n\n amount: u32,\n\n time: u64,\n", "file_path": "packages/std/src/serde.rs", "rank": 65, "score": 172672.761273735 }, { "content": "fn enqueue(storage: &mut dyn Storage, value: i32) -> StdResult<()> {\n\n // find the last element in the queue and extract key\n\n let last_item = storage.range(None, None, Order::Descending).next();\n\n\n\n let new_key = match last_item {\n\n None => FIRST_KEY,\n\n Some((key, _)) => {\n\n key[0] + 1 // all keys are one byte\n\n }\n\n };\n\n let new_value = to_vec(&Item { value })?;\n\n\n\n storage.set(&[new_key], &new_value);\n\n Ok(())\n\n}\n\n\n", "file_path": "contracts/queue/src/contract.rs", "rank": 66, "score": 171353.0833499421 }, { "content": "pub fn balances_read(storage: &dyn Storage) -> ReadonlyBucket<Uint128> {\n\n bucket_read(storage, PREFIX_BALANCE)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 67, "score": 170921.40229375986 }, { "content": "pub fn claims_read(storage: &dyn Storage) -> ReadonlyBucket<Uint128> {\n\n bucket_read(storage, PREFIX_CLAIMS)\n\n}\n\n\n\n/// Investment info is fixed at initialization, and is used to control the function of the contract\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\npub struct InvestmentInfo {\n\n /// owner created the contract and takes a cut\n\n pub owner: 
CanonicalAddr,\n\n /// this is the denomination we can stake (and only one we accept for payments)\n\n pub bond_denom: String,\n\n /// this is how much the owner takes as a cut when someone unbonds\n\n pub exit_tax: Decimal,\n\n /// All tokens are bonded to this validator\n\n /// FIXME: humanize/canonicalize address doesn't work for validator addrresses\n\n pub validator: HumanAddr,\n\n /// This is the minimum amount we will pull out to reinvest, as well as a minumum\n\n /// that can be unbonded (to avoid needless staking tx)\n\n pub min_withdrawal: Uint128,\n\n}\n", "file_path": "contracts/staking/src/state.rs", "rank": 68, "score": 170921.40229375986 }, { "content": "/// Reads in a Region at ptr in wasm memory and returns a copy of it\n\nfn get_region(memory: &wasmer::Memory, ptr: u32) -> CommunicationResult<Region> {\n\n let wptr = WasmPtr::<Region>::new(ptr);\n\n match wptr.deref(memory) {\n\n Some(cell) => {\n\n let region = cell.get();\n\n validate_region(&region)?;\n\n Ok(region)\n\n }\n\n None => Err(CommunicationError::deref_err(\n\n ptr,\n\n \"Could not dereference this pointer to a Region\",\n\n )),\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/memory.rs", "rank": 69, "score": 169587.54733647735 }, { "content": "pub fn config_read(storage: &dyn Storage) -> ReadonlySingleton<Config> {\n\n singleton_read(storage, KEY_CONFIG)\n\n}\n", "file_path": "contracts/ibc-reflect/src/state.rs", "rank": 70, "score": 168817.4631626747 }, { "content": "pub fn total_supply_read(storage: &dyn Storage) -> ReadonlySingleton<Supply> {\n\n singleton_read(storage, KEY_TOTAL_SUPPLY)\n\n}\n", "file_path": "contracts/staking/src/state.rs", "rank": 71, "score": 168817.4631626747 }, { "content": "pub fn query_token_info(deps: Deps) -> StdResult<TokenInfoResponse> {\n\n token_info_read(deps.storage).load()\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 72, "score": 168765.53271792486 }, { "content": "pub fn from_slice<'a, T>(value: &'a [u8]) -> 
VmResult<T>\n\nwhere\n\n T: Deserialize<'a>,\n\n{\n\n serde_json::from_slice(value).map_err(|e| VmError::parse_err(type_name::<T>(), e))\n\n}\n\n\n", "file_path": "packages/vm/src/serde.rs", "rank": 73, "score": 168472.3648680687 }, { "content": "pub fn to_vec<T>(data: &T) -> StdResult<Vec<u8>>\n\nwhere\n\n T: Serialize + ?Sized,\n\n{\n\n serde_json_wasm::to_vec(data).map_err(|e| StdError::serialize_err(type_name::<T>(), e))\n\n}\n\n\n", "file_path": "packages/std/src/serde.rs", "rank": 74, "score": 168311.4658133577 }, { "content": "pub fn to_vec<T>(data: &T) -> VmResult<Vec<u8>>\n\nwhere\n\n T: Serialize + ?Sized,\n\n{\n\n serde_json::to_vec(data).map_err(|e| VmError::serialize_err(type_name::<T>(), e))\n\n}\n", "file_path": "packages/vm/src/serde.rs", "rank": 75, "score": 168311.4658133577 }, { "content": "pub fn query_list_accounts(deps: Deps) -> StdResult<ListAccountsResponse> {\n\n let accounts: StdResult<Vec<_>> = accounts_read(deps.storage)\n\n .range(None, None, Order::Ascending)\n\n .map(|r| {\n\n let (k, account) = r?;\n\n Ok(AccountInfo {\n\n account,\n\n channel_id: String::from_utf8(k)?,\n\n })\n\n })\n\n .collect();\n\n Ok(ListAccountsResponse {\n\n accounts: accounts?,\n\n })\n\n}\n\n\n\n#[entry_point]\n", "file_path": "contracts/ibc-reflect/src/contract.rs", "rank": 76, "score": 166811.81460331922 }, { "content": "pub fn invest_info_read(storage: &dyn Storage) -> ReadonlySingleton<InvestmentInfo> {\n\n singleton_read(storage, KEY_INVESTMENT)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 77, "score": 166787.0284855591 }, { "content": "pub fn accounts_read(storage: &dyn Storage) -> ReadonlyBucket<HumanAddr> {\n\n bucket_read(storage, PREFIX_ACCOUNTS)\n\n}\n\n\n", "file_path": "contracts/ibc-reflect/src/state.rs", "rank": 78, "score": 166787.0284855591 }, { "content": "pub fn from_slice<T: DeserializeOwned>(value: &[u8]) -> StdResult<T> {\n\n serde_json_wasm::from_slice(value).map_err(|e| StdError::parse_err(type_name::<T>(), 
e))\n\n}\n\n\n", "file_path": "packages/std/src/serde.rs", "rank": 79, "score": 166215.03582616945 }, { "content": "pub fn from_binary<T: DeserializeOwned>(value: &Binary) -> StdResult<T> {\n\n from_slice(value.as_slice())\n\n}\n\n\n", "file_path": "packages/std/src/serde.rs", "rank": 80, "score": 166215.03582616945 }, { "content": "/// Overrides a Region at ptr in wasm memory with data\n\nfn set_region(memory: &wasmer::Memory, ptr: u32, data: Region) -> CommunicationResult<()> {\n\n let wptr = WasmPtr::<Region>::new(ptr);\n\n\n\n match wptr.deref(memory) {\n\n Some(cell) => {\n\n cell.set(data);\n\n Ok(())\n\n }\n\n None => Err(CommunicationError::deref_err(\n\n ptr,\n\n \"Could not dereference this pointer to a Region\",\n\n )),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "packages/vm/src/memory.rs", "rank": 81, "score": 165435.01534682844 }, { "content": "fn cost(_operator: &Operator) -> u64 {\n\n // A flat fee in order to maintain roughly the same pricing as with Wasmer 0.17\n\n // (https://github.com/wasmerio/wasmer/blob/0.17.1/lib/middleware-common/src/metering.rs#L43-L113).\n\n // This should become more advanced soon: https://github.com/CosmWasm/cosmwasm/issues/670\n\n 1\n\n}\n\n\n", "file_path": "packages/vm/src/wasm_backend/store.rs", "rank": 82, "score": 165396.24235340892 }, { "content": "fn get_count(deps: &mut Instance<MockApi, MockStorage, MockQuerier>) -> u32 {\n\n let data = query(deps, mock_env(), QueryMsg::Count {}).unwrap();\n\n let res: CountResponse = from_binary(&data).unwrap();\n\n res.count\n\n}\n\n\n", "file_path": "contracts/queue/tests/integration.rs", "rank": 83, "score": 165095.93282606013 }, { "content": "fn get_sum(deps: &mut Instance<MockApi, MockStorage, MockQuerier>) -> i32 {\n\n let data = query(deps, mock_env(), QueryMsg::Sum {}).unwrap();\n\n let res: SumResponse = from_binary(&data).unwrap();\n\n res.sum\n\n}\n\n\n", "file_path": "contracts/queue/tests/integration.rs", 
"rank": 84, "score": 165095.93282606013 }, { "content": "pub fn token_info_read(storage: &dyn Storage) -> ReadonlySingleton<TokenInfoResponse> {\n\n singleton_read(storage, KEY_TOKEN_INFO)\n\n}\n\n\n", "file_path": "contracts/staking/src/state.rs", "rank": 85, "score": 164826.31240271882 }, { "content": "fn make_init_msg() -> (InitMsg, HumanAddr) {\n\n let verifier = HumanAddr::from(\"verifies\");\n\n let beneficiary = HumanAddr::from(\"benefits\");\n\n let creator = HumanAddr::from(\"creator\");\n\n (\n\n InitMsg {\n\n verifier: verifier.clone(),\n\n beneficiary: beneficiary.clone(),\n\n },\n\n creator,\n\n )\n\n}\n\n\n", "file_path": "contracts/hackatom/tests/integration.rs", "rank": 86, "score": 163846.4635294639 }, { "content": "fn check_wasm_memories(module: &Module) -> VmResult<()> {\n\n let section = match module.memory_section() {\n\n Some(section) => section,\n\n None => {\n\n return Err(VmError::static_validation_err(\n\n \"Wasm contract doesn't have a memory section\",\n\n ));\n\n }\n\n };\n\n\n\n let memories = section.entries();\n\n if memories.len() != 1 {\n\n return Err(VmError::static_validation_err(\n\n \"Wasm contract must contain exactly one memory\",\n\n ));\n\n }\n\n\n\n let memory = memories[0];\n\n // println!(\"Memory: {:?}\", memory);\n\n let limits = memory.limits();\n", "file_path": "packages/vm/src/compatibility.rs", "rank": 87, "score": 162565.8881448442 }, { "content": "fn check_wasm_exports(module: &Module) -> VmResult<()> {\n\n let available_exports: Vec<String> = module.export_section().map_or(vec![], |export_section| {\n\n export_section\n\n .entries()\n\n .iter()\n\n .map(|entry| entry.field().to_string())\n\n .collect()\n\n });\n\n\n\n for required_export in REQUIRED_EXPORTS {\n\n if !available_exports.iter().any(|x| x == required_export) {\n\n return Err(VmError::static_validation_err(format!(\n\n \"Wasm contract doesn't have required export: \\\"{}\\\". Exports required by VM: {:?}. 
Contract version too old for this VM?\",\n\n required_export, REQUIRED_EXPORTS\n\n )));\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "packages/vm/src/compatibility.rs", "rank": 88, "score": 162565.8881448442 }, { "content": "pub fn query_claims(deps: Deps, address: HumanAddr) -> StdResult<ClaimsResponse> {\n\n let address_raw = deps.api.canonical_address(&address)?;\n\n let claims = claims_read(deps.storage)\n\n .may_load(address_raw.as_slice())?\n\n .unwrap_or_default();\n\n Ok(ClaimsResponse { claims })\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 89, "score": 162238.12983448076 }, { "content": "pub fn query_balance(deps: Deps, address: HumanAddr) -> StdResult<BalanceResponse> {\n\n let address_raw = deps.api.canonical_address(&address)?;\n\n let balance = balances_read(deps.storage)\n\n .may_load(address_raw.as_slice())?\n\n .unwrap_or_default();\n\n Ok(BalanceResponse { balance })\n\n}\n\n\n", "file_path": "contracts/staking/src/contract.rs", "rank": 90, "score": 162238.12983448076 }, { "content": "#[test]\n\nfn ibc_entry_points_are_detected() {\n\n let deps = mock_instance(WASM, &[]);\n\n assert_eq!(deps.has_ibc_entry_points(), true);\n\n}\n\n\n", "file_path": "contracts/ibc-reflect/tests/integration.rs", "rank": 91, "score": 161317.68835296083 }, { "content": "/// Checks if the data is valid wasm and compatibility with the CosmWasm API (imports and exports)\n\npub fn check_wasm(wasm_code: &[u8], supported_features: &HashSet<String>) -> VmResult<()> {\n\n let module = deserialize(wasm_code)?;\n\n check_wasm_memories(&module)?;\n\n check_wasm_exports(&module)?;\n\n check_wasm_imports(&module, SUPPORTED_IMPORTS)?;\n\n check_wasm_features(&module, supported_features)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "packages/vm/src/compatibility.rs", "rank": 92, "score": 160350.36382270342 }, { "content": "pub fn query_account(deps: Deps, channel_id: String) -> StdResult<AccountResponse> {\n\n let account = 
accounts_read(deps.storage).load(channel_id.as_bytes())?;\n\n Ok(AccountResponse {\n\n account: Some(account),\n\n })\n\n}\n\n\n", "file_path": "contracts/ibc-reflect/src/contract.rs", "rank": 93, "score": 160350.36382270342 }, { "content": "/// All external requirements that can be injected for unit tests.\n\n/// It sets the given balance for the contract itself, nothing else\n\npub fn mock_backend(contract_balance: &[Coin]) -> Backend<MockApi, MockStorage, MockQuerier> {\n\n let contract_addr = HumanAddr::from(MOCK_CONTRACT_ADDR);\n\n Backend {\n\n api: MockApi::default(),\n\n storage: MockStorage::default(),\n\n querier: MockQuerier::new(&[(&contract_addr, contract_balance)]),\n\n }\n\n}\n\n\n", "file_path": "packages/vm/src/testing/mock.rs", "rank": 94, "score": 158386.74442027486 }, { "content": "fn do_release(deps: DepsMut, env: Env, info: MessageInfo) -> Result<HandleResponse, HackError> {\n\n let data = deps\n\n .storage\n\n .get(CONFIG_KEY)\n\n .ok_or_else(|| StdError::not_found(\"State\"))?;\n\n let state: State = from_slice(&data)?;\n\n\n\n if deps.api.canonical_address(&info.sender)? 
== state.verifier {\n\n let to_addr = deps.api.human_address(&state.beneficiary)?;\n\n let balance = deps.querier.query_all_balances(&env.contract.address)?;\n\n\n\n let mut ctx = Context::new();\n\n ctx.add_attribute(\"action\", \"release\");\n\n ctx.add_attribute(\"destination\", &to_addr);\n\n ctx.add_message(BankMsg::Send {\n\n to_address: to_addr,\n\n amount: balance,\n\n });\n\n ctx.set_data(&[0xF0, 0x0B, 0xAA]);\n\n Ok(ctx.into())\n\n } else {\n\n Err(HackError::Unauthorized {})\n\n }\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 95, "score": 157604.9392705633 }, { "content": "pub fn migrate(\n\n deps: DepsMut,\n\n _env: Env,\n\n _info: MessageInfo,\n\n msg: MigrateMsg,\n\n) -> Result<MigrateResponse, HackError> {\n\n let data = deps\n\n .storage\n\n .get(CONFIG_KEY)\n\n .ok_or_else(|| StdError::not_found(\"State\"))?;\n\n let mut config: State = from_slice(&data)?;\n\n config.verifier = deps.api.canonical_address(&msg.verifier)?;\n\n deps.storage.set(CONFIG_KEY, &to_vec(&config)?);\n\n\n\n Ok(MigrateResponse::default())\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 96, "score": 156682.74940229772 }, { "content": "#[entry_point]\n\npub fn init(\n\n _deps: DepsMut,\n\n _env: Env,\n\n _info: MessageInfo,\n\n _msg: InitMsg,\n\n) -> StdResult<InitResponse> {\n\n Err(StdError::generic_err(\n\n \"You can only use this contract for migrations\",\n\n ))\n\n}\n\n\n", "file_path": "contracts/burner/src/contract.rs", "rank": 97, "score": 156682.74940229772 }, { "content": "pub fn handle(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: HandleMsg,\n\n) -> Result<HandleResponse, HackError> {\n\n match msg {\n\n HandleMsg::Release {} => do_release(deps, env, info),\n\n HandleMsg::CpuLoop {} => do_cpu_loop(),\n\n HandleMsg::StorageLoop {} => do_storage_loop(deps),\n\n HandleMsg::MemoryLoop {} => do_memory_loop(),\n\n HandleMsg::AllocateLargeMemory { pages } => do_allocate_large_memory(pages),\n\n 
HandleMsg::Panic {} => do_panic(),\n\n HandleMsg::UserErrorsInApiCalls {} => do_user_errors_in_api_calls(deps.api),\n\n }\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 98, "score": 156682.74940229772 }, { "content": "pub fn init(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n msg: InitMsg,\n\n) -> Result<InitResponse, HackError> {\n\n deps.api.debug(\"here we go 🚀\");\n\n\n\n deps.storage.set(\n\n CONFIG_KEY,\n\n &to_vec(&State {\n\n verifier: deps.api.canonical_address(&msg.verifier)?,\n\n beneficiary: deps.api.canonical_address(&msg.beneficiary)?,\n\n funder: deps.api.canonical_address(&info.sender)?,\n\n })?,\n\n );\n\n\n\n // This adds some unrelated event attribute for testing purposes\n\n let mut ctx = Context::new();\n\n ctx.add_attribute(\"Let the\", \"hacking begin\");\n\n Ok(ctx.try_into()?)\n\n}\n\n\n", "file_path": "contracts/hackatom/src/contract.rs", "rank": 99, "score": 156682.74940229772 } ]
Rust
strum_macros/src/macros/enum_iter.rs
orenbenkiki/strum
d5f660a3737ca0db3a5d8384a772e2b92e4ec0ed
use proc_macro2::{Span, TokenStream}; use quote::quote; use syn::{Data, DeriveInput, Fields, Ident}; use crate::helpers::{non_enum_error, HasStrumVariantProperties, HasTypeProperties}; pub fn enum_iter_inner(ast: &DeriveInput) -> syn::Result<TokenStream> { let name = &ast.ident; let gen = &ast.generics; let (impl_generics, ty_generics, where_clause) = gen.split_for_impl(); let vis = &ast.vis; let type_properties = ast.get_type_properties()?; let strum_module_path = type_properties.crate_module_path(); if gen.lifetimes().count() > 0 { return Err(syn::Error::new( Span::call_site(), "This macro doesn't support enums with lifetimes. \ The resulting enums would be unbounded.", )); } let phantom_data = if gen.type_params().count() > 0 { let g = gen.type_params().map(|param| &param.ident); quote! { < ( #(#g),* ) > } } else { quote! { < () > } }; let variants = match &ast.data { Data::Enum(v) => &v.variants, _ => return Err(non_enum_error()), }; let mut arms = Vec::new(); let mut idx = 0usize; for variant in variants { if variant.get_variant_properties()?.disabled.is_some() { continue; } let ident = &variant.ident; let params = match &variant.fields { Fields::Unit => quote! {}, Fields::Unnamed(fields) => { let defaults = ::core::iter::repeat(quote!(::core::default::Default::default())) .take(fields.unnamed.len()); quote! { (#(#defaults),*) } } Fields::Named(fields) => { let fields = fields .named .iter() .map(|field| field.ident.as_ref().unwrap()); quote! { {#(#fields: ::core::default::Default::default()),*} } } }; arms.push(quote! {#idx => ::core::option::Option::Some(#name::#ident #params)}); idx += 1; } let variant_count = arms.len(); arms.push(quote! { _ => ::core::option::Option::None }); let iter_name = syn::parse_str::<Ident>(&format!("{}Iter", name)).unwrap(); Ok(quote! 
{ #[doc = "An iterator over the variants of [Self]"] #vis struct #iter_name #ty_generics { idx: usize, back_idx: usize, marker: ::core::marker::PhantomData #phantom_data, } impl #impl_generics #iter_name #ty_generics #where_clause { fn get(&self, idx: usize) -> Option<#name #ty_generics> { match idx { #(#arms),* } } } impl #impl_generics #strum_module_path::IntoEnumIterator for #name #ty_generics #where_clause { type Iterator = #iter_name #ty_generics; fn iter() -> #iter_name #ty_generics { #iter_name { idx: 0, back_idx: 0, marker: ::core::marker::PhantomData, } } } impl #impl_generics Iterator for #iter_name #ty_generics #where_clause { type Item = #name #ty_generics; fn next(&mut self) -> Option<<Self as Iterator>::Item> { self.nth(0) } fn size_hint(&self) -> (usize, Option<usize>) { let t = if self.idx + self.back_idx >= #variant_count { 0 } else { #variant_count - self.idx - self.back_idx }; (t, Some(t)) } fn nth(&mut self, n: usize) -> Option<<Self as Iterator>::Item> { let idx = self.idx + n + 1; if idx + self.back_idx > #variant_count { self.idx = #variant_count; None } else { self.idx = idx; self.get(idx - 1) } } } impl #impl_generics ExactSizeIterator for #iter_name #ty_generics #where_clause { fn len(&self) -> usize { self.size_hint().0 } } impl #impl_generics DoubleEndedIterator for #iter_name #ty_generics #where_clause { fn next_back(&mut self) -> Option<<Self as Iterator>::Item> { let back_idx = self.back_idx + 1; if self.idx + back_idx > #variant_count { self.back_idx = #variant_count; None } else { self.back_idx = back_idx; self.get(#variant_count - self.back_idx) } } } impl #impl_generics Clone for #iter_name #ty_generics #where_clause { fn clone(&self) -> #iter_name #ty_generics { #iter_name { idx: self.idx, back_idx: self.back_idx, marker: self.marker.clone(), } } } }) }
use proc_macro2::{Span, TokenStream}; use quote::quote; use syn::{Data, DeriveInput, Fields, Ident}; use crate::helpers::{non_enum_error, HasStrumVariantProperties, HasTypeProperties};
pub fn enum_iter_inner(ast: &DeriveInput) -> syn::Result<TokenStream> { let name = &ast.ident; let gen = &ast.generics; let (impl_generics, ty_generics, where_clause) = gen.split_for_impl(); let vis = &ast.vis; let type_properties = ast.get_type_properties()?; let strum_module_path = type_properties.crate_module_path(); if gen.lifetimes().count() > 0 { return Err(syn::Error::new( Span::call_site(), "This macro doesn't support enums with lifetimes. \ The resulting enums would be unbounded.", )); } let phantom_data = if gen.type_params().count() > 0 { let g = gen.type_params().map(|param| &param.ident); quote! { < ( #(#g),* ) > } } else { quote! { < () > } }; let variants = match &ast.data { Data::Enum(v) => &v.variants, _ => return Err(non_enum_error()), }; let mut arms = Vec::new(); let mut idx = 0usize; for variant in variants { if variant.get_variant_properties()?.disabled.is_some() { continue; } let ident = &variant.ident; let params = match &variant.fields { Fields::Unit => quote! {}, Fields::Unnamed(fields) => { let defaults = ::core::iter::repeat(quote!(::core::default::Default::default())) .take(fields.unnamed.len()); quote! { (#(#defaults),*) } } Fields::Named(fields) => { let fields = fields .named .iter() .map(|field| field.ident.as_ref().unwrap()); quote! { {#(#fields: ::core::default::Default::default()),*} } } }; arms.push(quote! {#idx => ::core::option::Option::Some(#name::#ident #params)}); idx += 1; } let variant_count = arms.len(); arms.push(quote! { _ => ::core::option::Option::None }); let iter_name = syn::parse_str::<Ident>(&format!("{}Iter", name)).unwrap(); Ok(quote! 
{ #[doc = "An iterator over the variants of [Self]"] #vis struct #iter_name #ty_generics { idx: usize, back_idx: usize, marker: ::core::marker::PhantomData #phantom_data, } impl #impl_generics #iter_name #ty_generics #where_clause { fn get(&self, idx: usize) -> Option<#name #ty_generics> { match idx { #(#arms),* } } } impl #impl_generics #strum_module_path::IntoEnumIterator for #name #ty_generics #where_clause { type Iterator = #iter_name #ty_generics; fn iter() -> #iter_name #ty_generics { #iter_name { idx: 0, back_idx: 0, marker: ::core::marker::PhantomData, } } } impl #impl_generics Iterator for #iter_name #ty_generics #where_clause { type Item = #name #ty_generics; fn next(&mut self) -> Option<<Self as Iterator>::Item> { self.nth(0) } fn size_hint(&self) -> (usize, Option<usize>) { let t = if self.idx + self.back_idx >= #variant_count { 0 } else { #variant_count - self.idx - self.back_idx }; (t, Some(t)) } fn nth(&mut self, n: usize) -> Option<<Self as Iterator>::Item> { let idx = self.idx + n + 1; if idx + self.back_idx > #variant_count { self.idx = #variant_count; None } else { self.idx = idx; self.get(idx - 1) } } } impl #impl_generics ExactSizeIterator for #iter_name #ty_generics #where_clause { fn len(&self) -> usize { self.size_hint().0 } } impl #impl_generics DoubleEndedIterator for #iter_name #ty_generics #where_clause { fn next_back(&mut self) -> Option<<Self as Iterator>::Item> { let back_idx = self.back_idx + 1; if self.idx + back_idx > #variant_count { self.back_idx = #variant_count; None } else { self.back_idx = back_idx; self.get(#variant_count - self.back_idx) } } } impl #impl_generics Clone for #iter_name #ty_generics #where_clause { fn clone(&self) -> #iter_name #ty_generics { #iter_name { idx: self.idx, back_idx: self.back_idx, marker: self.marker.clone(), } } } }) }
function_block-full_function
[ { "content": "struct Prop(Ident, LitStr);\n\n\n\nimpl Parse for Prop {\n\n fn parse(input: ParseStream) -> syn::Result<Self> {\n\n use syn::ext::IdentExt;\n\n\n\n let k = Ident::parse_any(input)?;\n\n let _: Token![=] = input.parse()?;\n\n let v = input.parse()?;\n\n\n\n Ok(Prop(k, v))\n\n }\n\n}\n\n\n\nimpl Spanned for VariantMeta {\n\n fn span(&self) -> Span {\n\n match self {\n\n VariantMeta::Message { kw, .. } => kw.span,\n\n VariantMeta::DetailedMessage { kw, .. } => kw.span,\n\n VariantMeta::Documentation { value } => value.span(),\n\n VariantMeta::Serialize { kw, .. } => kw.span,\n\n VariantMeta::ToString { kw, .. } => kw.span,\n\n VariantMeta::Disabled(kw) => kw.span,\n\n VariantMeta::Default(kw) => kw.span,\n\n VariantMeta::AsciiCaseInsensitive { kw, .. } => kw.span,\n\n VariantMeta::Props { kw, .. } => kw.span,\n\n }\n\n }\n\n}\n\n\n", "file_path": "strum_macros/src/helpers/metadata.rs", "rank": 0, "score": 58699.21703490926 }, { "content": "fn debug_print_generated(ast: &DeriveInput, toks: &TokenStream) {\n\n let debug = env::var(\"STRUM_DEBUG\");\n\n if let Ok(s) = debug {\n\n if s == \"1\" {\n\n println!(\"{}\", toks);\n\n }\n\n\n\n if ast.ident == s {\n\n println!(\"{}\", toks);\n\n }\n\n }\n\n}\n\n\n\n/// Converts strings to enum variants based on their name.\n\n///\n\n/// auto-derives `std::str::FromStr` on the enum (for Rust 1.34 and above, `std::convert::TryFrom<&str>`\n\n/// will be derived as well). Each variant of the enum will match on it's own name.\n\n/// This can be overridden using `serialize=\"DifferentName\"` or `to_string=\"DifferentName\"`\n\n/// on the attribute as shown below.\n\n/// Multiple deserializations can be added to the same variant. 
If the variant contains additional data,\n", "file_path": "strum_macros/src/lib.rs", "rank": 1, "score": 46054.072262638 }, { "content": "pub fn from_repr_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {\n\n let name = &ast.ident;\n\n let gen = &ast.generics;\n\n let (impl_generics, ty_generics, where_clause) = gen.split_for_impl();\n\n let vis = &ast.vis;\n\n let attrs = &ast.attrs;\n\n\n\n let mut discriminant_type: Type = syn::parse(\"usize\".parse().unwrap()).unwrap();\n\n for attr in attrs {\n\n let path = &attr.path;\n\n let tokens = &attr.tokens;\n\n if path.leading_colon.is_some() {\n\n continue;\n\n }\n\n if path.segments.len() != 1 {\n\n continue;\n\n }\n\n let segment = path.segments.first().unwrap();\n\n if segment.ident != \"repr\" {\n\n continue;\n", "file_path": "strum_macros/src/macros/from_repr.rs", "rank": 2, "score": 40511.391489987865 }, { "content": "pub fn display_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {\n\n let name = &ast.ident;\n\n let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();\n\n let variants = match &ast.data {\n\n Data::Enum(v) => &v.variants,\n\n _ => return Err(non_enum_error()),\n\n };\n\n\n\n let type_properties = ast.get_type_properties()?;\n\n\n\n let mut arms = Vec::new();\n\n for variant in variants {\n\n let ident = &variant.ident;\n\n let variant_properties = variant.get_variant_properties()?;\n\n\n\n if variant_properties.disabled.is_some() {\n\n continue;\n\n }\n\n\n\n // Look at all the serialize attributes.\n", "file_path": "strum_macros/src/macros/strings/display.rs", "rank": 3, "score": 40511.391489987865 }, { "content": "fn get_arms(ast: &DeriveInput) -> syn::Result<Vec<TokenStream>> {\n\n let name = &ast.ident;\n\n let mut arms = Vec::new();\n\n let variants = match &ast.data {\n\n Data::Enum(v) => &v.variants,\n\n _ => return Err(non_enum_error()),\n\n };\n\n\n\n let type_properties = ast.get_type_properties()?;\n\n\n\n for variant in variants {\n\n let ident = 
&variant.ident;\n\n let variant_properties = variant.get_variant_properties()?;\n\n\n\n if variant_properties.disabled.is_some() {\n\n continue;\n\n }\n\n\n\n // Look at all the serialize attributes.\n\n // Use `to_string` attribute (not `as_ref_str` or something) to keep things consistent\n", "file_path": "strum_macros/src/macros/strings/as_ref_str.rs", "rank": 5, "score": 40511.391489987865 }, { "content": "pub fn enum_discriminants_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {\n\n let name = &ast.ident;\n\n let vis = &ast.vis;\n\n\n\n let variants = match &ast.data {\n\n Data::Enum(v) => &v.variants,\n\n _ => return Err(non_enum_error()),\n\n };\n\n\n\n // Derives for the generated enum\n\n let type_properties = ast.get_type_properties()?;\n\n\n\n let derives = type_properties.discriminant_derives;\n\n\n\n let derives = quote! {\n\n #[derive(Clone, Copy, Debug, PartialEq, Eq, #(#derives),*)]\n\n };\n\n\n\n // Work out the name\n\n let default_name = syn::Ident::new(&format!(\"{}Discriminants\", name), Span::call_site());\n", "file_path": "strum_macros/src/macros/enum_discriminants.rs", "rank": 6, "score": 40511.391489987865 }, { "content": "pub fn enum_properties_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {\n\n let name = &ast.ident;\n\n let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();\n\n let variants = match &ast.data {\n\n Data::Enum(v) => &v.variants,\n\n _ => return Err(non_enum_error()),\n\n };\n\n let type_properties = ast.get_type_properties()?;\n\n let strum_module_path = type_properties.crate_module_path();\n\n\n\n let mut arms = Vec::new();\n\n for variant in variants {\n\n let ident = &variant.ident;\n\n let variant_properties = variant.get_variant_properties()?;\n\n let mut string_arms = Vec::new();\n\n let mut bool_arms = Vec::new();\n\n let mut num_arms = Vec::new();\n\n // But you can disable the messages.\n\n if variant_properties.disabled.is_some() {\n\n continue;\n", "file_path": 
"strum_macros/src/macros/enum_properties.rs", "rank": 7, "score": 40511.391489987865 }, { "content": "pub fn from_string_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {\n\n let name = &ast.ident;\n\n let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();\n\n let variants = match &ast.data {\n\n Data::Enum(v) => &v.variants,\n\n _ => return Err(non_enum_error()),\n\n };\n\n\n\n let type_properties = ast.get_type_properties()?;\n\n let strum_module_path = type_properties.crate_module_path();\n\n\n\n let mut default_kw = None;\n\n let mut default = quote! { _ => ::core::result::Result::Err(#strum_module_path::ParseError::VariantNotFound) };\n\n let mut arms = Vec::new();\n\n for variant in variants {\n\n let ident = &variant.ident;\n\n let variant_properties = variant.get_variant_properties()?;\n\n\n\n if variant_properties.disabled.is_some() {\n\n continue;\n", "file_path": "strum_macros/src/macros/strings/from_string.rs", "rank": 8, "score": 40511.391489987865 }, { "content": "pub fn to_string_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {\n\n let name = &ast.ident;\n\n let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();\n\n let variants = match &ast.data {\n\n Data::Enum(v) => &v.variants,\n\n _ => return Err(non_enum_error()),\n\n };\n\n\n\n let type_properties = ast.get_type_properties()?;\n\n let mut arms = Vec::new();\n\n for variant in variants {\n\n let ident = &variant.ident;\n\n let variant_properties = variant.get_variant_properties()?;\n\n\n\n if variant_properties.disabled.is_some() {\n\n continue;\n\n }\n\n\n\n // Look at all the serialize attributes.\n\n let output = variant_properties.get_preferred_name(type_properties.case_style);\n", "file_path": "strum_macros/src/macros/strings/to_string.rs", "rank": 9, "score": 40511.391489987865 }, { "content": "pub fn enum_variant_names_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {\n\n let name = &ast.ident;\n\n let gen = 
&ast.generics;\n\n let (impl_generics, ty_generics, where_clause) = gen.split_for_impl();\n\n\n\n let variants = match &ast.data {\n\n Data::Enum(v) => &v.variants,\n\n _ => return Err(non_enum_error()),\n\n };\n\n\n\n // Derives for the generated enum\n\n let type_properties = ast.get_type_properties()?;\n\n let strum_module_path = type_properties.crate_module_path();\n\n\n\n let names = variants\n\n .iter()\n\n .map(|v| {\n\n let props = v.get_variant_properties()?;\n\n Ok(props.get_preferred_name(type_properties.case_style))\n\n })\n\n .collect::<syn::Result<Vec<_>>>()?;\n\n\n\n Ok(quote! {\n\n impl #impl_generics #strum_module_path::VariantNames for #name #ty_generics #where_clause {\n\n const VARIANTS: &'static [&'static str] = &[ #(#names),* ];\n\n }\n\n })\n\n}\n", "file_path": "strum_macros/src/macros/enum_variant_names.rs", "rank": 10, "score": 40511.391489987865 }, { "content": "pub fn enum_message_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {\n\n let name = &ast.ident;\n\n let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();\n\n let variants = match &ast.data {\n\n Data::Enum(v) => &v.variants,\n\n _ => return Err(non_enum_error()),\n\n };\n\n\n\n let type_properties = ast.get_type_properties()?;\n\n let strum_module_path = type_properties.crate_module_path();\n\n\n\n let mut arms = Vec::new();\n\n let mut detailed_arms = Vec::new();\n\n let mut documentation_arms = Vec::new();\n\n let mut serializations = Vec::new();\n\n\n\n for variant in variants {\n\n let variant_properties = variant.get_variant_properties()?;\n\n let messages = variant_properties.message.as_ref();\n\n let detailed_messages = variant_properties.detailed_message.as_ref();\n", "file_path": "strum_macros/src/macros/enum_messages.rs", "rank": 11, "score": 40511.391489987865 }, { "content": "pub fn as_ref_str_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {\n\n let name = &ast.ident;\n\n let (impl_generics, ty_generics, where_clause) = 
ast.generics.split_for_impl();\n\n let arms = get_arms(ast)?;\n\n Ok(quote! {\n\n impl #impl_generics ::core::convert::AsRef<str> for #name #ty_generics #where_clause {\n\n fn as_ref(&self) -> &str {\n\n match *self {\n\n #(#arms),*\n\n }\n\n }\n\n }\n\n })\n\n}\n\n\n\npub enum GenerateTraitVariant {\n\n AsStaticStr,\n\n From,\n\n}\n\n\n", "file_path": "strum_macros/src/macros/strings/as_ref_str.rs", "rank": 12, "score": 40511.391489987865 }, { "content": "#[allow(dead_code)]\n\n#[derive(Debug, EnumDiscriminants)]\n\n#[strum_discriminants(derive(EnumIter))]\n\nenum WithFields {\n\n Variant0(NonDefault),\n\n Variant1 { a: NonDefault },\n\n}\n\n\n", "file_path": "strum_tests/tests/enum_discriminants.rs", "rank": 13, "score": 34144.904862710544 }, { "content": "#[test]\n\nfn fields_test() {\n\n let discriminants = WithFieldsDiscriminants::iter().collect::<Vec<_>>();\n\n let expected = vec![\n\n WithFieldsDiscriminants::Variant0,\n\n WithFieldsDiscriminants::Variant1,\n\n ];\n\n\n\n assert_eq!(expected, discriminants);\n\n}\n\n\n", "file_path": "strum_tests/tests/enum_discriminants.rs", "rank": 14, "score": 33195.177709361546 }, { "content": "#[proc_macro_derive(EnumString, attributes(strum))]\n\npub fn from_string(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n let toks =\n\n macros::from_string::from_string_inner(&ast).unwrap_or_else(|err| err.to_compile_error());\n\n debug_print_generated(&ast, &toks);\n\n toks.into()\n\n}\n\n\n\n/// Converts enum variants to `&'static str`.\n\n///\n\n/// Implements `AsRef<str>` on your enum using the same rules as\n\n/// `Display` for determining what string is returned. 
The difference is that `as_ref()` returns\n\n/// a `&str` instead of a `String` so you don't allocate any additional memory with each call.\n\n///\n\n/// ```\n\n/// // You need to bring the AsRef trait into scope to use it\n\n/// use std::convert::AsRef;\n\n/// use strum_macros::AsRefStr;\n\n///\n", "file_path": "strum_macros/src/lib.rs", "rank": 15, "score": 28727.6670043459 }, { "content": "#[proc_macro_derive(ToString, attributes(strum))]\n\npub fn to_string(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n let toks =\n\n macros::to_string::to_string_inner(&ast).unwrap_or_else(|err| err.to_compile_error());\n\n debug_print_generated(&ast, &toks);\n\n toks.into()\n\n}\n\n\n\n/// Converts enum variants to strings.\n\n///\n\n/// Deriving `Display` on an enum prints out the given enum. This enables you to perform round\n\n/// trip style conversions from enum into string and back again for unit style variants. `Display`\n\n/// choose which serialization to used based on the following criteria:\n\n///\n\n/// 1. If there is a `to_string` property, this value will be used. There can only be one per variant.\n\n/// 1. Of the various `serialize` properties, the value with the longest length is chosen. If that\n\n/// behavior isn't desired, you should use `to_string`.\n\n/// 1. 
The name of the variant will be used if there are no `serialize` or `to_string` attributes.\n\n///\n", "file_path": "strum_macros/src/lib.rs", "rank": 16, "score": 28727.6670043459 }, { "content": "#[proc_macro_derive(EnumDiscriminants, attributes(strum, strum_discriminants))]\n\npub fn enum_discriminants(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n let toks = macros::enum_discriminants::enum_discriminants_inner(&ast)\n\n .unwrap_or_else(|err| err.to_compile_error());\n\n debug_print_generated(&ast, &toks);\n\n toks.into()\n\n}\n\n\n\n/// Add a constant `usize` equal to the number of variants.\n\n///\n\n/// For a given enum generates implementation of `strum::EnumCount`,\n\n/// which adds a static property `COUNT` of type usize that holds the number of variants.\n\n///\n\n/// ```\n\n/// use strum::{EnumCount, IntoEnumIterator};\n\n/// use strum_macros::{EnumCount as EnumCountMacro, EnumIter};\n\n///\n\n/// #[derive(Debug, EnumCountMacro, EnumIter)]\n\n/// enum Week {\n", "file_path": "strum_macros/src/lib.rs", "rank": 17, "score": 28727.6670043459 }, { "content": "#[proc_macro_derive(EnumProperty, attributes(strum))]\n\npub fn enum_properties(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n let toks = macros::enum_properties::enum_properties_inner(&ast)\n\n .unwrap_or_else(|err| err.to_compile_error());\n\n debug_print_generated(&ast, &toks);\n\n toks.into()\n\n}\n\n\n\n/// Generate a new type with only the discriminant names.\n\n///\n\n/// Given an enum named `MyEnum`, generates another enum called `MyEnumDiscriminants` with the same\n\n/// variants but without any data fields. This is useful when you wish to determine the variant of\n\n/// an `enum` but one or more of the variants contains a non-`Default` field. 
`From`\n\n/// implementations are generated so that you can easily convert from `MyEnum` to\n\n/// `MyEnumDiscriminants`.\n\n///\n\n/// By default, the generated enum has the following derives: `Clone, Copy, Debug, PartialEq, Eq`.\n\n/// You can add additional derives using the `#[strum_discriminants(derive(AdditionalDerive))]`\n\n/// attribute.\n", "file_path": "strum_macros/src/lib.rs", "rank": 18, "score": 28727.6670043459 }, { "content": "#[proc_macro_derive(EnumMessage, attributes(strum))]\n\npub fn enum_messages(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n let toks = macros::enum_messages::enum_message_inner(&ast)\n\n .unwrap_or_else(|err| err.to_compile_error());\n\n debug_print_generated(&ast, &toks);\n\n toks.into()\n\n}\n\n\n\n/// Add custom properties to enum variants.\n\n///\n\n/// Enables the encoding of arbitary constants into enum variants. This method\n\n/// currently only supports adding additional string values. Other types of literals are still\n\n/// experimental in the rustc compiler. The generated code works by nesting match statements.\n\n/// The first match statement matches on the type of the enum, and the inner match statement\n\n/// matches on the name of the property requested. 
This design works well for enums with a small\n\n/// number of variants and properties, but scales linearly with the number of variants so may not\n\n/// be the best choice in all situations.\n\n///\n\n/// ```\n", "file_path": "strum_macros/src/lib.rs", "rank": 19, "score": 28727.6670043459 }, { "content": "pub fn as_static_str(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n let toks = macros::as_ref_str::as_static_str_inner(\n\n &ast,\n\n &macros::as_ref_str::GenerateTraitVariant::AsStaticStr,\n\n )\n\n .unwrap_or_else(|err| err.to_compile_error());\n\n debug_print_generated(&ast, &toks);\n\n toks.into()\n\n}\n\n\n\n/// Implements `From<MyEnum> for &'static str` on an enum.\n\n///\n\n/// Implements `From<YourEnum>` and `From<&'a YourEnum>` for `&'static str`. This is\n\n/// useful for turning an enum variant into a static string.\n\n/// The Rust `std` provides a blanket impl of the reverse direction - i.e. `impl Into<&'static str> for YourEnum`.\n\n///\n\n/// ```\n\n/// use strum_macros::IntoStaticStr;\n", "file_path": "strum_macros/src/lib.rs", "rank": 20, "score": 28727.6670043459 }, { "content": "#[proc_macro_derive(FromRepr, attributes(strum))]\n\npub fn from_repr(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n let toks =\n\n macros::from_repr::from_repr_inner(&ast).unwrap_or_else(|err| err.to_compile_error());\n\n debug_print_generated(&ast, &toks);\n\n toks.into()\n\n}\n\n\n\n/// Add a verbose message to an enum variant.\n\n///\n\n/// Encode strings into the enum itself. 
The `strum_macros::EmumMessage` macro implements the `strum::EnumMessage` trait.\n\n/// `EnumMessage` looks for `#[strum(message=\"...\")]` attributes on your variants.\n\n/// You can also provided a `detailed_message=\"...\"` attribute to create a seperate more detailed message than the first.\n\n/// ```\n\n/// // You need to bring the trait into scope to use it\n\n/// use strum::EnumMessage;\n\n/// use strum_macros;\n\n///\n\n/// #[derive(strum_macros::EnumMessage, Debug)]\n", "file_path": "strum_macros/src/lib.rs", "rank": 21, "score": 28727.6670043459 }, { "content": "#[proc_macro_derive(AsRefStr, attributes(strum))]\n\npub fn as_ref_str(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n let toks =\n\n macros::as_ref_str::as_ref_str_inner(&ast).unwrap_or_else(|err| err.to_compile_error());\n\n debug_print_generated(&ast, &toks);\n\n toks.into()\n\n}\n\n\n\n/// Implements `Strum::VariantNames` which adds an associated constant `VARIANTS` which is an array of discriminant names.\n\n///\n\n/// Adds an `impl` block for the `enum` that adds a static `VARIANTS` array of `&'static str` that are the discriminant names.\n\n/// This will respect the `serialize_all` attribute on the `enum` (like `#[strum(serialize_all = \"snake_case\")]`.\n\n///\n\n/// ```\n\n/// // import the macros needed\n\n/// use strum_macros::{EnumString, EnumVariantNames};\n\n/// // You need to import the trait, to have access to VARIANTS\n\n/// use strum::VariantNames;\n\n///\n\n/// #[derive(Debug, EnumString, EnumVariantNames)]\n\n/// #[strum(serialize_all = \"kebab_case\")]\n\n/// enum Color {\n\n/// Red,\n\n/// Blue,\n\n/// Yellow,\n\n/// RebeccaPurple,\n\n/// }\n\n/// assert_eq!([\"red\", \"blue\", \"yellow\", \"rebecca-purple\"], Color::VARIANTS);\n\n/// ```\n", "file_path": "strum_macros/src/lib.rs", "rank": 22, "score": 28727.6670043459 }, { "content": "#[proc_macro_derive(EnumIter, attributes(strum))]\n\npub fn 
enum_iter(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n let toks =\n\n macros::enum_iter::enum_iter_inner(&ast).unwrap_or_else(|err| err.to_compile_error());\n\n debug_print_generated(&ast, &toks);\n\n toks.into()\n\n}\n\n\n\n/// Add a function to enum that allows accessing variants by its discriminant\n\n///\n\n/// This macro adds a standalone function to obtain an enum variant by its discriminant. The macro adds\n\n/// `from_repr(discriminant: usize) -> Option<YourEnum>` as a standalone function on the enum. For\n\n/// variants with additional data, the returned variant will use the `Default` trait to fill the\n\n/// data. The discriminant follows the same rules as `rustc`. The first discriminant is zero and each\n\n/// successive variant has a discriminant of one greater than the previous variant, expect where an\n\n/// explicit discriminant is specified. The type of the discriminant will match the `repr` type if\n\n/// it is specifed.\n\n///\n\n/// When the macro is applied using rustc >= 1.46 and when there is no additional data on any of\n", "file_path": "strum_macros/src/lib.rs", "rank": 23, "score": 28727.6670043459 }, { "content": "#[proc_macro_derive(EnumVariantNames, attributes(strum))]\n\npub fn variant_names(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n let toks = macros::enum_variant_names::enum_variant_names_inner(&ast)\n\n .unwrap_or_else(|err| err.to_compile_error());\n\n debug_print_generated(&ast, &toks);\n\n toks.into()\n\n}\n\n\n\n#[proc_macro_derive(AsStaticStr, attributes(strum))]\n\n#[deprecated(\n\n since = \"0.22.0\",\n\n note = \"please use `#[derive(IntoStaticStr)]` instead\"\n\n)]\n", "file_path": "strum_macros/src/lib.rs", "rank": 24, "score": 28727.6670043459 }, { "content": "#[proc_macro_derive(Display, attributes(strum))]\n\npub fn display(input: 
proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n let toks = macros::display::display_inner(&ast).unwrap_or_else(|err| err.to_compile_error());\n\n debug_print_generated(&ast, &toks);\n\n toks.into()\n\n}\n\n\n\n/// Creates a new type that iterates of the variants of an enum.\n\n///\n\n/// Iterate over the variants of an Enum. Any additional data on your variants will be set to `Default::default()`.\n\n/// The macro implements `strum::IntoEnumIter` on your enum and creates a new type called `YourEnumIter` that is the iterator object.\n\n/// You cannot derive `EnumIter` on any type with a lifetime bound (`<'a>`) because the iterator would surely\n\n/// create [unbounded lifetimes](https://doc.rust-lang.org/nightly/nomicon/unbounded-lifetimes.html).\n\n///\n\n/// ```\n\n///\n\n/// // You need to bring the trait into scope to use it!\n\n/// use strum::IntoEnumIterator;\n\n/// use strum_macros::EnumIter;\n", "file_path": "strum_macros/src/lib.rs", "rank": 25, "score": 28727.6670043459 }, { "content": "#[proc_macro_derive(EnumCount, attributes(strum))]\n\npub fn enum_count(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = syn::parse_macro_input!(input as DeriveInput);\n\n let toks =\n\n macros::enum_count::enum_count_inner(&ast).unwrap_or_else(|err| err.to_compile_error());\n\n debug_print_generated(&ast, &toks);\n\n toks.into()\n\n}\n", "file_path": "strum_macros/src/lib.rs", "rank": 26, "score": 28727.6670043459 }, { "content": "#[proc_macro_derive(IntoStaticStr, attributes(strum))]\n\npub fn into_static_str(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n let toks = macros::as_ref_str::as_static_str_inner(\n\n &ast,\n\n &macros::as_ref_str::GenerateTraitVariant::From,\n\n )\n\n .unwrap_or_else(|err| err.to_compile_error());\n\n debug_print_generated(&ast, &toks);\n\n 
toks.into()\n\n}\n\n\n\n/// implements `std::string::ToString` on en enum\n\n///\n\n/// ```\n\n/// // You need to bring the ToString trait into scope to use it\n\n/// use std::string::ToString;\n\n/// use strum_macros;\n\n///\n\n/// #[derive(strum_macros::ToString, Debug)]\n", "file_path": "strum_macros/src/lib.rs", "rank": 27, "score": 28727.6670043459 }, { "content": "use heck::ToShoutySnakeCase;\n\nuse proc_macro2::{Span, TokenStream};\n\nuse quote::{format_ident, quote};\n\nuse syn::{Data, DeriveInput, Fields, PathArguments, Type, TypeParen};\n\n\n\nuse crate::helpers::{non_enum_error, HasStrumVariantProperties};\n\n\n", "file_path": "strum_macros/src/macros/from_repr.rs", "rank": 29, "score": 9.472179410790037 }, { "content": " // See <https://github.com/dtolnay/syn/issues/433>\n\n // ---\n\n // let is_copy = unique_meta_list(type_meta.iter(), \"derive\")\n\n // .map(extract_list_metas)\n\n // .map(|metas| {\n\n // metas\n\n // .filter_map(get_meta_ident)\n\n // .any(|derive| derive.to_string() == \"Copy\")\n\n // }).unwrap_or(false);\n\n\n\n let arms = variants\n\n .iter()\n\n .map(|variant| {\n\n let ident = &variant.ident;\n\n let params = match &variant.fields {\n\n Fields::Unit => quote! {},\n\n Fields::Unnamed(_fields) => {\n\n quote! { (..) }\n\n }\n\n Fields::Named(_fields) => {\n", "file_path": "strum_macros/src/macros/enum_discriminants.rs", "rank": 30, "score": 7.820751861969361 }, { "content": "\n\n let params = match variant.fields {\n\n Fields::Unit => quote! {},\n\n Fields::Unnamed(..) => quote! { (..) },\n\n Fields::Named(..) => quote! { {..} },\n\n };\n\n\n\n arms.push(quote! { #name::#ident #params => ::std::string::String::from(#output) });\n\n }\n\n\n\n if arms.len() < variants.len() {\n\n arms.push(quote! { _ => panic!(\"to_string() called on disabled variant.\") });\n\n }\n\n\n\n Ok(quote! 
{\n\n #[allow(clippy::use_self)]\n\n impl #impl_generics ::std::string::ToString for #name #ty_generics #where_clause {\n\n fn to_string(&self) -> ::std::string::String {\n\n match *self {\n\n #(#arms),*\n\n }\n\n }\n\n }\n\n })\n\n}\n", "file_path": "strum_macros/src/macros/strings/to_string.rs", "rank": 31, "score": 7.631615600285128 }, { "content": " let documentation = &variant_properties.documentation;\n\n let ident = &variant.ident;\n\n\n\n let params = match variant.fields {\n\n Fields::Unit => quote! {},\n\n Fields::Unnamed(..) => quote! { (..) },\n\n Fields::Named(..) => quote! { {..} },\n\n };\n\n\n\n // You can't disable getting the serializations.\n\n {\n\n let serialization_variants =\n\n variant_properties.get_serializations(type_properties.case_style);\n\n\n\n let count = serialization_variants.len();\n\n serializations.push(quote! {\n\n &#name::#ident #params => {\n\n static ARR: [&'static str; #count] = [#(#serialization_variants),*];\n\n &ARR\n\n }\n", "file_path": "strum_macros/src/macros/enum_messages.rs", "rank": 32, "score": 7.345284776188693 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse syn::{Data, DeriveInput, Fields};\n\n\n\nuse crate::helpers::{non_enum_error, HasStrumVariantProperties, HasTypeProperties};\n\n\n", "file_path": "strum_macros/src/macros/enum_properties.rs", "rank": 33, "score": 7.31824640206011 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse syn::{Data, DeriveInput, Fields};\n\n\n\nuse crate::helpers::{non_enum_error, HasStrumVariantProperties, HasTypeProperties};\n\n\n", "file_path": "strum_macros/src/macros/strings/to_string.rs", "rank": 34, "score": 7.31824640206011 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse syn::{Data, DeriveInput, Fields};\n\n\n\nuse crate::helpers::{non_enum_error, HasStrumVariantProperties, HasTypeProperties};\n\n\n", "file_path": "strum_macros/src/macros/strings/display.rs", "rank": 35, "score": 7.31824640206011 
}, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse syn::{parse_quote, Data, DeriveInput, Fields};\n\n\n\nuse crate::helpers::{non_enum_error, HasStrumVariantProperties, HasTypeProperties};\n\n\n", "file_path": "strum_macros/src/macros/strings/as_ref_str.rs", "rank": 36, "score": 7.201904838279564 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse syn::{Data, DeriveInput, Fields, LitStr};\n\n\n\nuse crate::helpers::{non_enum_error, HasStrumVariantProperties, HasTypeProperties};\n\n\n", "file_path": "strum_macros/src/macros/enum_messages.rs", "rank": 37, "score": 7.201904838279564 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse syn::{Data, DeriveInput, Fields};\n\n\n\nuse crate::helpers::{\n\n non_enum_error, occurrence_error, HasStrumVariantProperties, HasTypeProperties,\n\n};\n\n\n", "file_path": "strum_macros/src/macros/strings/from_string.rs", "rank": 38, "score": 7.201904838279564 }, { "content": " let mut prev_const_var_ident = None;\n\n for variant in variants {\n\n if variant.get_variant_properties()?.disabled.is_some() {\n\n continue;\n\n }\n\n\n\n let ident = &variant.ident;\n\n let params = match &variant.fields {\n\n Fields::Unit => quote! {},\n\n Fields::Unnamed(fields) => {\n\n has_additional_data = true;\n\n let defaults = ::core::iter::repeat(quote!(::core::default::Default::default()))\n\n .take(fields.unnamed.len());\n\n quote! 
{ (#(#defaults),*) }\n\n }\n\n Fields::Named(fields) => {\n\n has_additional_data = true;\n\n let fields = fields\n\n .named\n\n .iter()\n", "file_path": "strum_macros/src/macros/from_repr.rs", "rank": 39, "score": 7.190492968414006 }, { "content": "use std::default::Default;\n\nuse syn::{Ident, LitStr, Variant};\n\n\n\nuse super::case_style::{CaseStyle, CaseStyleHelpers};\n\nuse super::metadata::{kw, VariantExt, VariantMeta};\n\nuse super::occurrence_error;\n\n\n", "file_path": "strum_macros/src/helpers/variant_props.rs", "rank": 40, "score": 7.150080038009833 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse std::default::Default;\n\nuse syn::{parse_quote, DeriveInput, Ident, Path, Visibility};\n\n\n\nuse super::case_style::CaseStyle;\n\nuse super::metadata::{DeriveInputExt, EnumDiscriminantsMeta, EnumMeta};\n\nuse super::occurrence_error;\n\n\n", "file_path": "strum_macros/src/helpers/type_props.rs", "rank": 41, "score": 6.9208689600656195 }, { "content": " Fields::Unit => quote! {},\n\n Fields::Unnamed(fields) => {\n\n let defaults =\n\n ::core::iter::repeat(quote!(Default::default())).take(fields.unnamed.len());\n\n quote! { (#(#defaults),*) }\n\n }\n\n Fields::Named(fields) => {\n\n let fields = fields\n\n .named\n\n .iter()\n\n .map(|field| field.ident.as_ref().unwrap());\n\n quote! { {#(#fields: Default::default()),*} }\n\n }\n\n };\n\n\n\n arms.push(quote! 
{ #(#attrs => ::core::result::Result::Ok(#name::#ident #params)),* });\n\n }\n\n\n\n arms.push(default);\n\n\n", "file_path": "strum_macros/src/macros/strings/from_string.rs", "rank": 42, "score": 6.854575423037257 }, { "content": "use proc_macro2::{Span, TokenStream, TokenTree};\n\nuse quote::{quote, ToTokens};\n\nuse syn::parse_quote;\n\nuse syn::{Data, DeriveInput, Fields};\n\n\n\nuse crate::helpers::{non_enum_error, strum_discriminants_passthrough_error, HasTypeProperties};\n\n\n\n/// Attributes to copy from the main enum's variants to the discriminant enum's variants.\n\n///\n\n/// Attributes not in this list may be for other `proc_macro`s on the main enum, and may cause\n\n/// compilation problems when copied across.\n\nconst ATTRIBUTES_TO_COPY: &[&str] = &[\"doc\", \"cfg\", \"allow\", \"deny\", \"strum_discriminants\"];\n\n\n", "file_path": "strum_macros/src/macros/enum_discriminants.rs", "rank": 44, "score": 6.005617829907104 }, { "content": "use proc_macro2::{Span, TokenStream};\n\nuse syn::{\n\n parenthesized,\n\n parse::{Parse, ParseStream},\n\n parse2, parse_str,\n\n punctuated::Punctuated,\n\n spanned::Spanned,\n\n Attribute, DeriveInput, Ident, Lit, LitBool, LitStr, Meta, MetaNameValue, Path, Token, Variant, Visibility,\n\n};\n\n\n\nuse super::case_style::CaseStyle;\n\n\n\npub mod kw {\n\n use syn::custom_keyword;\n\n pub use syn::token::Crate;\n\n\n\n // enum metadata\n\n custom_keyword!(serialize_all);\n\n\n\n // enum discriminant metadata\n", "file_path": "strum_macros/src/helpers/metadata.rs", "rank": 45, "score": 5.959888266646222 }, { "content": " // (i.e. always `enum.as_ref().to_string() == enum.to_string()`).\n\n let output = variant_properties.get_preferred_name(type_properties.case_style);\n\n let params = match variant.fields {\n\n Fields::Unit => quote! {},\n\n Fields::Unnamed(..) => quote! { (..) },\n\n Fields::Named(..) => quote! { {..} },\n\n };\n\n\n\n arms.push(quote! 
{ #name::#ident #params => #output });\n\n }\n\n\n\n if arms.len() < variants.len() {\n\n arms.push(quote! {\n\n _ => panic!(\n\n \"AsRef::<str>::as_ref() or AsStaticRef::<str>::as_static() \\\n\n called on disabled variant.\",\n\n )\n\n });\n\n }\n\n\n\n Ok(arms)\n\n}\n\n\n", "file_path": "strum_macros/src/macros/strings/as_ref_str.rs", "rank": 46, "score": 5.782436141329776 }, { "content": " }\n\n\n\n let params = match variant.fields {\n\n Fields::Unit => quote! {},\n\n Fields::Unnamed(..) => quote! { (..) },\n\n Fields::Named(..) => quote! { {..} },\n\n };\n\n\n\n for (key, value) in variant_properties.string_props {\n\n string_arms.push(quote! { #key => ::core::option::Option::Some( #value )});\n\n }\n\n\n\n string_arms.push(quote! { _ => ::core::option::Option::None });\n\n bool_arms.push(quote! { _ => ::core::option::Option::None });\n\n num_arms.push(quote! { _ => ::core::option::Option::None });\n\n\n\n arms.push(quote! {\n\n &#name::#ident #params => {\n\n match prop {\n\n #(#string_arms),*\n", "file_path": "strum_macros/src/macros/enum_properties.rs", "rank": 48, "score": 5.44866365726444 }, { "content": "use heck::{\n\n ToKebabCase, ToLowerCamelCase, ToShoutySnakeCase, ToSnakeCase, ToTitleCase, ToUpperCamelCase,\n\n};\n\nuse std::str::FromStr;\n\nuse syn::{\n\n parse::{Parse, ParseStream},\n\n Ident, LitStr,\n\n};\n\n\n\n#[allow(clippy::enum_variant_names)]\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]\n\npub enum CaseStyle {\n\n CamelCase,\n\n KebabCase,\n\n MixedCase,\n\n ShoutySnakeCase,\n\n SnakeCase,\n\n TitleCase,\n\n UpperCase,\n\n LowerCase,\n", "file_path": "strum_macros/src/helpers/case_style.rs", "rank": 49, "score": 5.330885358253186 }, { "content": " let output = variant_properties.get_preferred_name(type_properties.case_style);\n\n\n\n let params = match variant.fields {\n\n Fields::Unit => quote! {},\n\n Fields::Unnamed(..) => quote! { (..) },\n\n Fields::Named(..) => quote! { {..} },\n\n };\n\n\n\n arms.push(quote! 
{ #name::#ident #params => f.pad(#output) });\n\n }\n\n\n\n if arms.len() < variants.len() {\n\n arms.push(quote! { _ => panic!(\"fmt() called on disabled variant.\") });\n\n }\n\n\n\n Ok(quote! {\n\n impl #impl_generics ::core::fmt::Display for #name #ty_generics #where_clause {\n\n fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::result::Result<(), ::core::fmt::Error> {\n\n match *self {\n\n #(#arms),*\n\n }\n\n }\n\n }\n\n })\n\n}\n", "file_path": "strum_macros/src/macros/strings/display.rs", "rank": 50, "score": 5.2804774677868505 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse syn::{Data, DeriveInput};\n\n\n\nuse crate::helpers::{non_enum_error, HasTypeProperties};\n\n\n\npub(crate) fn enum_count_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {\n\n let n = match &ast.data {\n\n Data::Enum(v) => v.variants.len(),\n\n _ => return Err(non_enum_error()),\n\n };\n\n let type_properties = ast.get_type_properties()?;\n\n let strum_module_path = type_properties.crate_module_path();\n\n\n\n // Used in the quasi-quotation below as `#name`\n\n let name = &ast.ident;\n\n\n\n // Helper is provided for handling complex generic types correctly and effortlessly\n\n let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();\n\n\n\n Ok(quote! {\n\n // Implementation\n\n impl #impl_generics #strum_module_path::EnumCount for #name #ty_generics #where_clause {\n\n const COUNT: usize = #n;\n\n }\n\n })\n\n}\n", "file_path": "strum_macros/src/macros/enum_count.rs", "rank": 51, "score": 5.070169011791643 }, { "content": " .map(|field| field.ident.as_ref().unwrap());\n\n quote! { {#(#fields: ::core::default::Default::default()),*} }\n\n }\n\n };\n\n\n\n let const_var_str = format!(\"{}_DISCRIMINANT\", variant.ident).to_shouty_snake_case();\n\n let const_var_ident = format_ident!(\"{}\", const_var_str);\n\n\n\n let const_val_expr = match &variant.discriminant {\n\n Some((_, expr)) => quote! 
{ #expr },\n\n None => match &prev_const_var_ident {\n\n Some(prev) => quote! { #prev + 1 },\n\n None => quote! { 0 },\n\n },\n\n };\n\n\n\n constant_defs.push(quote! {const #const_var_ident: #discriminant_type = #const_val_expr;});\n\n arms.push(quote! {v if v == #const_var_ident => ::core::option::Option::Some(#name::#ident #params)});\n\n\n\n prev_const_var_ident = Some(const_var_ident);\n", "file_path": "strum_macros/src/macros/from_repr.rs", "rank": 52, "score": 4.591015673155843 }, { "content": " }\n\n\n\n if let Some(kw) = variant_properties.default {\n\n if let Some(fst_kw) = default_kw {\n\n return Err(occurrence_error(fst_kw, kw, \"default\"));\n\n }\n\n\n\n match &variant.fields {\n\n Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {}\n\n _ => {\n\n return Err(syn::Error::new_spanned(\n\n variant,\n\n \"Default only works on newtype structs with a single String field\",\n\n ))\n\n }\n\n }\n\n\n\n default_kw = Some(kw);\n\n default = quote! {\n\n default => ::core::result::Result::Ok(#name::#ident(default.into()))\n", "file_path": "strum_macros/src/macros/strings/from_string.rs", "rank": 53, "score": 4.455552163845091 }, { "content": "use proc_macro2::TokenStream;\n\nuse quote::quote;\n\nuse syn::{Data, DeriveInput};\n\n\n\nuse crate::helpers::{non_enum_error, HasStrumVariantProperties, HasTypeProperties};\n\n\n", "file_path": "strum_macros/src/macros/enum_variant_names.rs", "rank": 54, "score": 4.417969666107224 }, { "content": "pub use self::case_style::CaseStyleHelpers;\n\npub use self::type_props::HasTypeProperties;\n\npub use self::variant_props::HasStrumVariantProperties;\n\n\n\npub mod case_style;\n\nmod metadata;\n\npub mod type_props;\n\npub mod variant_props;\n\n\n\nuse proc_macro2::Span;\n\nuse quote::ToTokens;\n\nuse syn::spanned::Spanned;\n\n\n", "file_path": "strum_macros/src/helpers/mod.rs", "rank": 55, "score": 4.405140189010975 }, { "content": "#![allow(deprecated)]\n\n\n\nuse std::str::FromStr;\n\nuse 
std::string::ToString;\n\nuse strum::{EnumString, ToString};\n\n\n\n#[derive(Debug, Eq, PartialEq, EnumString, ToString)]\n", "file_path": "strum_tests/tests/to_string.rs", "rank": 56, "score": 4.1922183255973255 }, { "content": "}\n\n\n\n/// This trait designates that an `Enum` can be iterated over. It can\n\n/// be auto generated using `strum_macros` on your behalf.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # use std::fmt::Debug;\n\n/// // You need to bring the type into scope to use it!!!\n\n/// use strum::{EnumIter, IntoEnumIterator};\n\n///\n\n/// #[derive(EnumIter, Debug)]\n\n/// enum Color {\n\n/// Red,\n\n/// Green { range: usize },\n\n/// Blue(usize),\n\n/// Yellow,\n\n/// }\n\n///\n", "file_path": "strum/src/lib.rs", "rank": 57, "score": 4.090200374759712 }, { "content": "use structopt::StructOpt;\n\nuse strum::{EnumString, EnumVariantNames, VariantNames};\n\n\n\n#[test]\n", "file_path": "strum_tests/tests/enum_variant_names.rs", "rank": 58, "score": 4.085360039809054 }, { "content": "pub mod enum_count;\n\npub mod enum_discriminants;\n\npub mod enum_iter;\n\npub mod enum_messages;\n\npub mod enum_properties;\n\npub mod enum_variant_names;\n\npub mod from_repr;\n\n\n\nmod strings;\n\n\n\npub use self::strings::as_ref_str;\n\npub use self::strings::display;\n\npub use self::strings::from_string;\n\npub use self::strings::to_string;\n", "file_path": "strum_macros/src/macros/mod.rs", "rank": 59, "score": 4.0713221558221395 }, { "content": "use std::str::FromStr;\n\nuse strum::EnumString;\n\n\n\n#[derive(Debug, Eq, PartialEq, EnumString)]\n", "file_path": "strum_tests/tests/from_str.rs", "rank": 60, "score": 4.047865556123924 }, { "content": " let ident = self.ident.as_ref().expect(\"identifier\");\n\n LitStr::new(&ident.convert_case(case_style), ident.span())\n\n }\n\n\n\n pub fn get_preferred_name(&self, case_style: Option<CaseStyle>) -> LitStr {\n\n self.to_string.as_ref().cloned().unwrap_or_else(|| {\n\n self.serialize\n\n .iter()\n\n 
.max_by_key(|s| s.value().len())\n\n .cloned()\n\n .unwrap_or_else(|| self.ident_as_str(case_style))\n\n })\n\n }\n\n\n\n pub fn get_serializations(&self, case_style: Option<CaseStyle>) -> Vec<LitStr> {\n\n let mut attrs = self.serialize.clone();\n\n if let Some(to_string) = &self.to_string {\n\n attrs.push(to_string.clone());\n\n }\n\n\n", "file_path": "strum_macros/src/helpers/variant_props.rs", "rank": 61, "score": 4.026295951444789 }, { "content": "### Added\n\n\n\n* Added a derive to implement `From<YourEnum>` for `&'static str`. This deprecates `AsStaticStr` since\n\n the new solution doesn't require a `strum` specific trait to use.\n\n\n\n## 0.12.0\n\n\n\n### Added\n\n\n\n* Serialization case can be controlled using `#[strum(serialize_all = \"snake_case\")]`. ([#21][#21])\n\n* `#[derive(EnumDiscriminants)]` generates enum with variants without fields. ([#33][#33])\n\n\n\n[#21]: https://github.com/Peternator7/strum/issues/21\n\n[#33]: https://github.com/Peternator7/strum/issues/33\n\n\n\n## 0.10.0\n\n\n\n### Added\n\n\n\n* Implemented `Clone` for `EnumIter`s. ([#18][#18])\n\n* Added `AsStaticRef` derive to allow enums to `impl AsStaticRef<str>`. ([#23][#23])\n\n\n\n### Fixed\n\n\n\n* `#[allow(missing_docs)]` on generated `EnumIter`s. ([#19][#19])\n\n\n\n[#18]: https://github.com/Peternator7/strum/pull/18\n\n[#19]: https://github.com/Peternator7/strum/issues/19\n\n[#23]: https://github.com/Peternator7/strum/issues/23\n", "file_path": "CHANGELOG.md", "rank": 62, "score": 4.016854093105081 }, { "content": "//! # Strum\n\n//!\n\n//! Strum is a set of macros and traits for working with\n\n//! 
enums and strings easier in Rust.\n\n//!\n\n\n\n#![recursion_limit = \"128\"]\n\n\n\nextern crate proc_macro;\n\n\n\nmod helpers;\n\nmod macros;\n\n\n\nuse proc_macro2::TokenStream;\n\nuse std::env;\n\nuse syn::DeriveInput;\n\n\n", "file_path": "strum_macros/src/lib.rs", "rank": 63, "score": 3.9899530339831513 }, { "content": "/// test `serialize_all` cooperation with other macroses\n\nuse std::str::FromStr;\n\nuse std::string::ToString;\n\nuse strum::{Display, EnumString, IntoStaticStr};\n\n\n\n#[derive(Debug, Eq, PartialEq, EnumString, Display, IntoStaticStr)]\n\n#[strum(serialize_all = \"title_case\")]\n", "file_path": "strum_tests/tests/serialize_all.rs", "rank": 64, "score": 3.942400006855636 }, { "content": "//! ```rust\n\n//! use std::string::ToString;\n\n//! use strum;\n\n//! use strum_macros;\n\n//! \n\n//! #[derive(Debug, Eq, PartialEq, strum_macros::ToString)]\n\n//! #[strum(serialize_all = \"snake_case\")]\n\n//! enum Brightness {\n\n//! DarkBlack,\n\n//! Dim {\n\n//! glow: usize,\n\n//! },\n\n//! #[strum(serialize = \"bright\")]\n\n//! BrightWhite,\n\n//! }\n\n//! \n\n//! assert_eq!(\n\n//! String::from(\"dark_black\"),\n\n//! Brightness::DarkBlack.to_string().as_ref()\n\n//! 
);\n", "file_path": "strum/src/additional_attributes.rs", "rank": 65, "score": 3.720681047653657 }, { "content": "/// ```\n\n/// // You need to bring the ToString trait into scope to use it\n\n/// use std::string::ToString;\n\n/// use strum_macros::Display;\n\n///\n\n/// #[derive(Display, Debug)]\n\n/// enum Color {\n\n/// #[strum(serialize = \"redred\")]\n\n/// Red,\n\n/// Green {\n\n/// range: usize,\n\n/// },\n\n/// Blue(usize),\n\n/// Yellow,\n\n/// }\n\n///\n\n/// // uses the serialize string for Display\n\n/// let red = Color::Red;\n\n/// assert_eq!(String::from(\"redred\"), format!(\"{}\", red));\n\n/// // by default the variants Name\n\n/// let yellow = Color::Yellow;\n\n/// assert_eq!(String::from(\"Yellow\"), yellow.to_string());\n\n/// // or for string formatting\n\n/// println!(\n\n/// \"blue: {} green: {}\",\n\n/// Color::Blue(10),\n\n/// Color::Green { range: 42 }\n\n/// );\n\n/// ```\n", "file_path": "strum_macros/src/lib.rs", "rank": 66, "score": 3.6960259868153855 }, { "content": " if attrs.is_empty() {\n\n attrs.push(self.ident_as_str(case_style));\n\n }\n\n\n\n attrs\n\n }\n\n}\n\n\n\nimpl HasStrumVariantProperties for Variant {\n\n fn get_variant_properties(&self) -> syn::Result<StrumVariantProperties> {\n\n let mut output = StrumVariantProperties {\n\n ident: Some(self.ident.clone()),\n\n ..Default::default()\n\n };\n\n\n\n let mut message_kw = None;\n\n let mut detailed_message_kw = None;\n\n let mut to_string_kw = None;\n\n let mut disabled_kw = None;\n\n let mut default_kw = None;\n", "file_path": "strum_macros/src/helpers/variant_props.rs", "rank": 67, "score": 3.621069388564398 }, { "content": "use strum::EnumProperty;\n\n\n\n#[derive(Debug, EnumProperty)]\n", "file_path": "strum_tests/tests/enum_props.rs", "rank": 68, "score": 3.6088296058701004 }, { "content": "/// This trait designates that an `Enum` can be iterated over. 
It can\n\n/// be auto generated using `strum_macros` on your behalf.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # use std::fmt::Debug;\n\n/// // You need to bring the type into scope to use it!!!\n\n/// use strum::{EnumIter, IntoEnumIterator};\n\n///\n\n/// #[derive(EnumIter, Debug)]\n\n/// enum Color {\n\n/// Red,\n\n/// Green { range: usize },\n\n/// Blue(usize),\n\n/// Yellow,\n\n/// }\n\n///\n\n/// // Iterate over the items in an enum and perform some function on them.\n\n/// fn generic_iterator<E, F>(pred: F)\n\n/// where\n\n/// E: IntoEnumIterator,\n\n/// F: Fn(E),\n\n/// {\n\n/// for e in E::iter() {\n\n/// pred(e)\n\n/// }\n\n/// }\n\n///\n\n/// generic_iterator::<Color, _>(|color| println!(\"{:?}\", color));\n\n/// ```\n\npub trait IntoEnumIterator: Sized {\n\n type Iterator: Iterator<Item = Self>;\n\n\n\n fn iter() -> Self::Iterator;\n\n}\n\n\n", "file_path": "strum/src/lib.rs", "rank": 69, "score": 3.60538616970004 }, { "content": "#![allow(deprecated)]\n\n\n\nuse std::str::FromStr;\n\nuse strum::{AsRefStr, AsStaticRef, AsStaticStr, EnumString, IntoStaticStr};\n\n\n\n#[derive(Debug, Eq, PartialEq, EnumString, AsRefStr, AsStaticStr, IntoStaticStr)]\n", "file_path": "strum_tests/tests/as_ref_str.rs", "rank": 70, "score": 3.5295686078312145 }, { "content": "use strum::FromRepr;\n\n\n\n#[derive(Debug, FromRepr, PartialEq)]\n\n#[repr(u8)]\n", "file_path": "strum_tests/tests/from_repr.rs", "rank": 71, "score": 3.4944582714166144 }, { "content": "/// Some(\"Variant zero\")\n\n/// );\n\n/// ```\n\n///\n\n/// It is also possible to specify the visibility (e.g. `pub`/`pub(crate)`/etc.)\n\n/// of the generated enum. 
By default, the generated enum inherits the\n\n/// visibility of the parent enum it was generated from.\n\n///\n\n/// ```nocompile\n\n/// use strum_macros::EnumDiscriminants;\n\n///\n\n/// // You can set the visibility of the generated enum using the `#[strum_discriminants(vis(..))]` attribute:\n\n/// mod inner {\n\n/// use strum_macros::EnumDiscriminants;\n\n///\n\n/// # #[allow(dead_code)]\n\n/// #[derive(Debug, EnumDiscriminants)]\n\n/// #[strum_discriminants(vis(pub))]\n\n/// #[strum_discriminants(name(PubDiscriminants))]\n\n/// enum PrivateEnum {\n", "file_path": "strum_macros/src/lib.rs", "rank": 72, "score": 3.4491136185537705 }, { "content": "use enum_variant_type::EnumVariantType;\n\nuse strum::{Display, EnumDiscriminants, EnumIter, EnumMessage, EnumString, IntoEnumIterator};\n\n\n\n#[allow(dead_code)]\n\n#[derive(Debug, Eq, PartialEq, EnumDiscriminants)]\n\n#[strum_discriminants(derive(EnumIter))]\n", "file_path": "strum_tests/tests/enum_discriminants.rs", "rank": 73, "score": 3.446817813193452 }, { "content": "use strum::EnumMessage;\n\n\n\n#[derive(Debug, Eq, PartialEq, EnumMessage)]\n", "file_path": "strum_tests/tests/enum_message.rs", "rank": 74, "score": 3.439948698225949 }, { "content": "/// they will be set to their default values upon deserialization.\n\n///\n\n/// The `default` attribute can be applied to a tuple variant with a single data parameter. When a match isn't\n\n/// found, the given variant will be returned and the input string will be captured in the parameter.\n\n///\n\n/// Note that the implementation of `FromStr` by default only matches on the name of the\n\n/// variant. 
There is an option to match on different case conversions through the\n\n/// `#[strum(serialize_all = \"snake_case\")]` type attribute.\n\n///\n\n/// See the [Additional Attributes](https://docs.rs/strum/0.22/strum/additional_attributes/index.html)\n\n/// Section for more information on using this feature.\n\n///\n\n/// # Example howto use `EnumString`\n\n/// ```\n\n/// use std::str::FromStr;\n\n/// use strum_macros::EnumString;\n\n///\n\n/// #[derive(Debug, PartialEq, EnumString)]\n\n/// enum Color {\n\n/// Red,\n", "file_path": "strum_macros/src/lib.rs", "rank": 75, "score": 3.423481403070199 }, { "content": "use strum_macros::AsRefStr;\n\n\n\n#[derive(Debug, Eq, PartialEq, AsRefStr)]\n", "file_path": "strum_tests/tests/as_ref_no_strum.rs", "rank": 76, "score": 3.3871135800878664 }, { "content": "/// `EnumProperty` is a trait that makes it possible to store additional information\n\n/// with enum variants. This trait is designed to be used with the macro of the same\n\n/// name in the `strum_macros` crate. 
Currently, the only string literals are supported\n\n/// in attributes, the other methods will be implemented as additional attribute types\n\n/// become stabilized.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # use std::fmt::Debug;\n\n/// // You need to bring the type into scope to use it!!!\n\n/// use strum::EnumProperty;\n\n///\n\n/// #[derive(PartialEq, Eq, Debug, EnumProperty)]\n\n/// enum Class {\n\n/// #[strum(props(Teacher=\"Ms.Frizzle\", Room=\"201\"))]\n\n/// History,\n\n/// #[strum(props(Teacher=\"Mr.Smith\"))]\n\n/// #[strum(props(Room=\"103\"))]\n\n/// Mathematics,\n\n/// #[strum(props(Time=\"2:30\"))]\n\n/// Science,\n\n/// }\n\n///\n\n/// let history = Class::History;\n\n/// assert_eq!(\"Ms.Frizzle\", history.get_str(\"Teacher\").unwrap());\n\n/// ```\n\npub trait EnumProperty {\n\n fn get_str(&self, prop: &str) -> Option<&'static str>;\n\n fn get_int(&self, _prop: &str) -> Option<usize> {\n\n Option::None\n\n }\n\n\n\n fn get_bool(&self, _prop: &str) -> Option<bool> {\n\n Option::None\n\n }\n\n}\n\n\n\n/// A cheap reference-to-reference conversion. Used to convert a value to a\n\n/// reference value with `'static` lifetime within generic code.\n\n#[deprecated(\n\n since = \"0.22.0\",\n\n note = \"please use `#[derive(IntoStaticStr)]` instead\"\n\n)]\n", "file_path": "strum/src/lib.rs", "rank": 77, "score": 3.3455753846501626 }, { "content": "use strum::{EnumIter, IntoEnumIterator};\n\n\n\n#[derive(Debug, Eq, PartialEq, EnumIter)]\n", "file_path": "strum_tests/tests/enum_iter.rs", "rank": 78, "score": 3.3358769288778465 }, { "content": "use strum::{Display, EnumString};\n\n\n\n#[derive(Debug, Eq, PartialEq, EnumString, Display)]\n", "file_path": "strum_tests/tests/display.rs", "rank": 79, "score": 3.3358769288778465 }, { "content": "/// Associates additional pieces of information with an Enum. 
This can be\n\n/// autoimplemented by deriving `EnumMessage` and annotating your variants with\n\n/// `#[strum(message=\"...\")]`.\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # use std::fmt::Debug;\n\n/// // You need to bring the type into scope to use it!!!\n\n/// use strum::EnumMessage;\n\n///\n\n/// #[derive(PartialEq, Eq, Debug, EnumMessage)]\n\n/// enum Pet {\n\n/// #[strum(message=\"I have a dog\")]\n\n/// #[strum(detailed_message=\"My dog's name is Spots\")]\n\n/// Dog,\n\n/// /// I am documented.\n\n/// #[strum(message=\"I don't have a cat\")]\n\n/// Cat,\n\n/// }\n\n///\n\n/// let my_pet = Pet::Dog;\n\n/// assert_eq!(\"I have a dog\", my_pet.get_message().unwrap());\n\n/// ```\n\npub trait EnumMessage {\n\n fn get_message(&self) -> Option<&'static str>;\n\n fn get_detailed_message(&self) -> Option<&'static str>;\n\n fn get_documentation(&self) -> Option<&'static str>;\n\n fn get_serializations(&self) -> &'static [&'static str];\n\n}\n\n\n", "file_path": "strum/src/lib.rs", "rank": 80, "score": 3.311018356461323 }, { "content": "use strum::{EnumCount, EnumIter, IntoEnumIterator};\n\n\n\n#[derive(Debug, EnumCount, EnumIter)]\n", "file_path": "strum_tests/tests/enum_count.rs", "rank": 81, "score": 3.28616728582617 }, { "content": "//! If the whole enum is marked `ascii_case_insensitive`, you can specify `ascii_case_insensitive = false`\n\n//! to disable case insensitivity on this v ariant.\n\n//!\n\n//! - `message=\"..\"`: Adds a message to enum variant. This is used in conjunction with the `EnumMessage`\n\n//! trait to associate a message with a variant. If `detailed_message` is not provided,\n\n//! then `message` will also be returned when `get_detailed_message` is called.\n\n//!\n\n//! - `detailed_message=\"..\"`: Adds a more detailed message to a variant. If this value is omitted, then\n\n//! `message` will be used in it's place.\n\n//!\n\n//! 
- Structured documentation, as in `/// ...`: If using `EnumMessage`, is accessible via get_documentation().\n\n//!\n\n//! - `props(key=\"value\")`: Enables associating additional information with a given variant.\n", "file_path": "strum/src/additional_attributes.rs", "rank": 82, "score": 3.2620413755041615 }, { "content": "#![no_std]\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use core::str::FromStr;\n\n use strum::EnumString;\n\n\n\n #[derive(Debug, Eq, PartialEq, EnumString)]\n\n enum Color {\n\n Red,\n\n Blue {\n\n hue: usize,\n\n },\n\n #[strum(serialize = \"y\", serialize = \"yellow\")]\n\n Yellow,\n\n #[strum(to_string = \"purp\")]\n\n Purple,\n\n #[strum(serialize = \"blk\", serialize = \"Black\", ascii_case_insensitive)]\n\n Black,\n\n }\n", "file_path": "strum_nostd_tests/src/lib.rs", "rank": 83, "score": 3.2203166790979134 }, { "content": "///\n\n/// Note, the variant attributes passed to the discriminant enum are filtered to avoid compilation\n\n/// errors due to the derives mismatches, thus only `#[doc]`, `#[cfg]`, `#[allow]`, and `#[deny]`\n\n/// are passed through by default. If you want to specify a custom attribute on the discriminant\n\n/// variant, wrap it with `#[strum_discriminants(...)]` attribute.\n\n///\n\n/// ```\n\n/// // Bring trait into scope\n\n/// use std::str::FromStr;\n\n/// use strum::{IntoEnumIterator, EnumMessage};\n\n/// use strum_macros::{EnumDiscriminants, EnumIter, EnumString};\n\n///\n\n/// #[derive(Debug)]\n\n/// struct NonDefault;\n\n///\n\n/// // simple example\n\n/// # #[allow(dead_code)]\n\n/// #[derive(Debug, EnumDiscriminants)]\n\n/// #[strum_discriminants(derive(EnumString, EnumMessage))]\n\n/// enum MyEnum {\n", "file_path": "strum_macros/src/lib.rs", "rank": 84, "score": 3.2144922207329256 }, { "content": " documentation_arms\n\n .push(quote! { &#name::#ident #params => ::core::option::Option::Some(#text) });\n\n } else {\n\n // Push the documentation.\n\n documentation_arms\n\n .push(quote! 
{\n\n &#name::#ident #params => ::core::option::Option::Some(concat!(#(concat!(#documentation, \"\\n\")),*))\n\n });\n\n }\n\n }\n\n }\n\n\n\n if arms.len() < variants.len() {\n\n arms.push(quote! { _ => ::core::option::Option::None });\n\n }\n\n\n\n if detailed_arms.len() < variants.len() {\n\n detailed_arms.push(quote! { _ => ::core::option::Option::None });\n\n }\n\n\n", "file_path": "strum_macros/src/macros/enum_messages.rs", "rank": 85, "score": 3.1493953167591204 }, { "content": " quote! { { .. } }\n\n }\n\n };\n\n\n\n quote! { #name::#ident #params => #discriminants_name::#ident }\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n let from_fn_body = quote! { match val { #(#arms),* } };\n\n\n\n let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();\n\n let impl_from = quote! {\n\n impl #impl_generics ::core::convert::From< #name #ty_generics > for #discriminants_name #where_clause {\n\n fn from(val: #name #ty_generics) -> #discriminants_name {\n\n #from_fn_body\n\n }\n\n }\n\n };\n\n let impl_from_ref = {\n\n let mut generics = ast.generics.clone();\n", "file_path": "strum_macros/src/macros/enum_discriminants.rs", "rank": 86, "score": 3.0851039949304178 }, { "content": "///\n\n/// use strum_macros;\n\n/// // bring the trait into scope\n\n/// use strum::EnumProperty;\n\n///\n\n/// #[derive(strum_macros::EnumProperty, Debug)]\n\n/// #[allow(dead_code)]\n\n/// enum Color {\n\n/// #[strum(props(Red = \"255\", Blue = \"255\", Green = \"255\"))]\n\n/// White,\n\n/// #[strum(props(Red = \"0\", Blue = \"0\", Green = \"0\"))]\n\n/// Black,\n\n/// #[strum(props(Red = \"0\", Blue = \"255\", Green = \"0\"))]\n\n/// Blue,\n\n/// #[strum(props(Red = \"255\", Blue = \"0\", Green = \"0\"))]\n\n/// Red,\n\n/// #[strum(props(Red = \"0\", Blue = \"0\", Green = \"255\"))]\n\n/// Green,\n\n/// }\n\n///\n", "file_path": "strum_macros/src/lib.rs", "rank": 87, "score": 3.0637288823358952 }, { "content": "/// enum Color {\n\n/// #[strum(serialize = \"redred\")]\n\n/// 
Red,\n\n/// Green {\n\n/// range: usize,\n\n/// },\n\n/// Blue(usize),\n\n/// Yellow,\n\n/// }\n\n///\n\n/// // uses the serialize string for Display\n\n/// let red = Color::Red;\n\n/// assert_eq!(String::from(\"redred\"), red.to_string());\n\n/// // by default the variants Name\n\n/// let yellow = Color::Yellow;\n\n/// assert_eq!(String::from(\"Yellow\"), yellow.to_string());\n\n/// ```\n\n#[deprecated(\n\n since = \"0.22.0\",\n\n note = \"please use `#[derive(Display)]` instead. See issue https://github.com/Peternator7/strum/issues/132\"\n\n)]\n", "file_path": "strum_macros/src/lib.rs", "rank": 88, "score": 3.0217481936412454 }, { "content": " let mut pascal = String::with_capacity(camel_case.len());\n\n let mut it = camel_case.chars();\n\n if let Some(ch) = it.next() {\n\n pascal.extend(ch.to_lowercase());\n\n }\n\n pascal.extend(it);\n\n pascal\n\n }\n\n }\n\n } else {\n\n ident_string\n\n }\n\n }\n\n}\n\n\n", "file_path": "strum_macros/src/helpers/case_style.rs", "rank": 89, "score": 3.0182422113061 }, { "content": " ATTRIBUTES_TO_COPY\n\n .iter()\n\n .any(|attr_whitelisted| attr.path.is_ident(attr_whitelisted))\n\n })\n\n .map(|attr| {\n\n if attr.path.is_ident(\"strum_discriminants\") {\n\n let passthrough_group = attr\n\n .tokens\n\n .clone()\n\n .into_iter()\n\n .next()\n\n .ok_or_else(|| strum_discriminants_passthrough_error(attr))?;\n\n let passthrough_attribute = match passthrough_group {\n\n TokenTree::Group(ref group) => group.stream(),\n\n _ => {\n\n return Err(strum_discriminants_passthrough_error(&passthrough_group));\n\n }\n\n };\n\n if passthrough_attribute.is_empty() {\n\n return Err(strum_discriminants_passthrough_error(&passthrough_group));\n", "file_path": "strum_macros/src/macros/enum_discriminants.rs", "rank": 90, "score": 2.9448346165109585 }, { "content": "\n\n let discriminants_name = type_properties.discriminant_name.unwrap_or(default_name);\n\n let discriminants_vis = type_properties\n\n .discriminant_vis\n\n .unwrap_or_else(|| 
vis.clone());\n\n\n\n // Pass through all other attributes\n\n let pass_though_attributes = type_properties.discriminant_others;\n\n\n\n // Add the variants without fields, but exclude the `strum` meta item\n\n let mut discriminants = Vec::new();\n\n for variant in variants {\n\n let ident = &variant.ident;\n\n\n\n // Don't copy across the \"strum\" meta attribute. Only passthrough the whitelisted\n\n // attributes and proxy `#[strum_discriminants(...)]` attributes\n\n let attrs = variant\n\n .attrs\n\n .iter()\n\n .filter(|attr| {\n", "file_path": "strum_macros/src/macros/enum_discriminants.rs", "rank": 91, "score": 2.9258306520418325 }, { "content": "/// the variants, the `from_repr` function is marked `const`. rustc >= 1.46 is required\n\n/// to allow `match` statements in `const fn`. The no additional data requirement is due to the\n\n/// inability to use `Default::default()` in a `const fn`.\n\n///\n\n/// You cannot derive `FromRepr` on any type with a lifetime bound (`<'a>`) because the function would surely\n\n/// create [unbounded lifetimes](https://doc.rust-lang.org/nightly/nomicon/unbounded-lifetimes.html).\n\n///\n\n/// ```\n\n///\n\n/// use strum_macros::FromRepr;\n\n///\n\n/// #[derive(FromRepr, Debug, PartialEq)]\n\n/// enum Color {\n\n/// Red,\n\n/// Green { range: usize },\n\n/// Blue(usize),\n\n/// Yellow,\n\n/// }\n\n///\n\n/// assert_eq!(Some(Color::Red), Color::from_repr(0));\n", "file_path": "strum_macros/src/lib.rs", "rank": 92, "score": 2.9024364474308255 }, { "content": "use strum::{Display, EnumCount, EnumDiscriminants, EnumString};\n\n\n\n#[allow(dead_code)]\n\n#[derive(Debug, Eq, PartialEq, EnumString, Display, EnumCount, EnumDiscriminants)]\n", "file_path": "strum_tests/src/main.rs", "rank": 93, "score": 2.8599858661558915 }, { "content": "//! # Documentation for Additional Attributes\n\n//!\n\n//! ## Attributes on Enums\n\n//!\n\n//! Strum supports several custom attributes to modify the generated code. 
At the enum level, the following attributes are supported:\n\n//!\n\n//! - `#[strum(serialize_all = \"case_style\")]` attribute can be used to change the case used when serializing to and deserializing\n\n//! from strings. This feature is enabled by [withoutboats/heck](https://github.com/withoutboats/heck) and supported case styles are:\n\n//!\n\n//! - `camelCase`\n\n//! - `PascalCase`\n\n//! - `kebab-case`\n\n//! - `snake_case`\n\n//! - `SCREAMING_SNAKE_CASE`\n\n//! - `SCREAMING-KEBAB-CASE`\n\n//! - `lowercase`\n\n//! - `UPPERCASE`\n\n//! - `title_case`\n\n//! - `mixed_case`\n\n//!\n", "file_path": "strum/src/additional_attributes.rs", "rank": 94, "score": 2.8279954175112656 }, { "content": "/// // test rename example combined with EnumIter\n\n/// assert_eq!(\n\n/// vec![MyVariants::Variant0, MyVariants::Variant1],\n\n/// MyVariants::iter().collect::<Vec<_>>()\n\n/// );\n\n///\n\n/// // Make use of the auto-From conversion to check whether an instance of `MyEnum` matches a\n\n/// // `MyEnumDiscriminants` discriminant.\n\n/// assert_eq!(\n\n/// MyEnumDiscriminants::Variant0,\n\n/// MyEnum::Variant0(NonDefault).into()\n\n/// );\n\n/// assert_eq!(\n\n/// MyEnumDiscriminants::Variant0,\n\n/// MyEnumDiscriminants::from(MyEnum::Variant0(NonDefault))\n\n/// );\n\n///\n\n/// // Make use of the EnumMessage on the `MyEnumDiscriminants` discriminant.\n\n/// assert_eq!(\n\n/// MyEnumDiscriminants::Variant0.get_message(),\n", "file_path": "strum_macros/src/lib.rs", "rank": 95, "score": 2.809977998350061 }, { "content": " #[allow(clippy::use_self)]\n\n impl #impl_generics #name #ty_generics #where_clause {\n\n #[doc = \"Try to create [Self] from the raw representation\"]\n\n #vis #const_if_possible fn from_repr(discriminant: #discriminant_type) -> Option<#name #ty_generics> {\n\n #(#constant_defs)*\n\n match discriminant {\n\n #(#arms),*\n\n }\n\n }\n\n }\n\n })\n\n}\n", "file_path": "strum_macros/src/macros/from_repr.rs", "rank": 96, "score": 2.6219575923964515 }, { "content": 
"\n\n #[test]\n\n fn from_str_no_std() {\n\n assert_eq!(Color::Yellow, Color::from_str(\"yellow\").unwrap());\n\n }\n\n\n\n #[test]\n\n #[rustversion::since(1.34)]\n\n fn try_from_str_no_std() {\n\n use core::convert::TryFrom;\n\n assert_eq!(Color::Yellow, Color::try_from(\"yellow\").unwrap());\n\n }\n\n\n\n #[test]\n\n #[rustversion::before(1.34)]\n\n fn try_from_str_no_std() {}\n\n}\n", "file_path": "strum_nostd_tests/src/lib.rs", "rank": 97, "score": 2.6219575923964515 }, { "content": " });\n\n }\n\n\n\n // But you can disable the messages.\n\n if variant_properties.disabled.is_some() {\n\n continue;\n\n }\n\n\n\n if let Some(msg) = messages {\n\n let params = params.clone();\n\n\n\n // Push the simple message.\n\n let tokens = quote! { &#name::#ident #params => ::core::option::Option::Some(#msg) };\n\n arms.push(tokens.clone());\n\n\n\n if detailed_messages.is_none() {\n\n detailed_arms.push(tokens);\n\n }\n\n }\n\n\n", "file_path": "strum_macros/src/macros/enum_messages.rs", "rank": 98, "score": 2.5729082193935606 }, { "content": "# Changelog\n\n\n\n## 0.23.1\n\n\n\n* [#193](https://github.com/Peternator7/strum/pull/193) Fixes an ambiguity introduced by #186 when your enum has a variant called Error.\n\n\n\n* [#192](https://github.com/Peternator7/strum/pull/192) The work done in #189 was lost in other PR's. This re-added the functionality to support no-std.\n\n\n\n## 0.23.0\n\n\n\n* [#185](https://github.com/Peternator7/strum/pull/185) Adds the `FromRepr` derive that adds a `from_repr(x: usize) -> Option<Self>`\n\n method to your enum. This lets you convert integer values to your enum. 
If you specify a #[repr(..)] attribute on your enum, or use\n\n an explicit discriminant, this will be incorporated into the derive.\n\n * `from_repr` will be `const` if you use a recent rust version.\n\n * This cannot be a trait method currently because only inherent methods support `const`.\n\n\n\n* [#186](https://github.com/Peternator7/strum/pull/186) Automatically implement `TryFrom<str>` for enums that implement `EnumString`.\n\n This is only enabled for rustc >= 1.34 which is when `TryFrom was stabilized.\n\n * This is a small breaking change. If you had manually implemented `TryFrom<str>` for your enum, this will cause a conflict. You\n\n can probably remove your manual implementation.\n\n\n\n* [#189](https://github.com/Peternator7/strum/pull/189) Use `core::result::Result` instead of `std::result::Result`. This should be\n\n more portable in no-std environments.\n\n\n\n## 0.22.0\n\n\n\n* [#180](https://github.com/Peternator7/strum/pull/180): Deprecates `ToString` derive. You should use `Display`\n\n instead.\n\n\n\n* [#178](https://github.com/Peternator7/strum/pull/178): Deprecates AsStaticStr. This has been undocumented for a while.\n\n The recommended method is to derive `IntoStaticStr` instead.\n\n\n\n* [#171](https://github.com/Peternator7/strum/pull/171): Improve `no-std` support. \n\n\n\n* [#170](https://github.com/Peternator7/strum/pull/170): enable changing the path to strum traits. This is necessary\n\n if you re-export strum as a submodule in another crate.\n\n\n", "file_path": "CHANGELOG.md", "rank": 99, "score": 2.4801678284423962 } ]
Rust
cargo-spatial/src/download.rs
randomPoison/spatialos-sdk-rs
6a0149a21a7de40fd4ff127820d6f04f87173454
//! Downloading of the SpatialOS SDK packages and the `spatial` CLI installer.

#[cfg(target_os = "linux")]
pub use self::linux::*;
#[cfg(target_os = "macos")]
pub use self::macos::*;
#[cfg(target_os = "windows")]
pub use self::windows::*;

use crate::{config::Config, opt::DownloadSdk};
use log::*;
use reqwest::get;
use std::fs::File;
use std::io::copy;
use std::{
    fs,
    path::{Path, PathBuf},
    process,
};

/// Worker SDK packages retrievable via `spatial package retrieve worker_sdk …`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum SpatialWorkerSdkPackage {
    CHeaders,
    CApiWin,
    CApiMac,
    CApiLinux,
}

impl SpatialWorkerSdkPackage {
    /// Package identifier as understood by the `spatial` CLI.
    fn package_name(self) -> &'static str {
        match self {
            SpatialWorkerSdkPackage::CHeaders => "c_headers",
            SpatialWorkerSdkPackage::CApiWin => "c-static-x86_64-vc140_mt-win32",
            SpatialWorkerSdkPackage::CApiMac => "c-static-x86_64-clang-macos",
            SpatialWorkerSdkPackage::CApiLinux => "c-static-x86_64-gcc510_pic-linux",
        }
    }

    /// Subdirectory of the SDK root that the package is unpacked into.
    fn relative_target_directory(self) -> &'static str {
        match self {
            SpatialWorkerSdkPackage::CHeaders => "headers",
            SpatialWorkerSdkPackage::CApiWin => "win",
            SpatialWorkerSdkPackage::CApiMac => "macos",
            SpatialWorkerSdkPackage::CApiLinux => "linux",
        }
    }
}

/// Tooling packages (schema compiler, snapshot converter) per target platform.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum SpatialToolsPackage {
    SchemaCompilerWin,
    SchemaCompilerMac,
    SchemaCompilerLinux,
    SnapshotConverterWin,
    SnapshotConverterMac,
    SnapshotConverterLinux,
}

impl SpatialToolsPackage {
    /// Package identifier as understood by the `spatial` CLI.
    fn package_name(self) -> &'static str {
        match self {
            SpatialToolsPackage::SchemaCompilerWin => "schema_compiler-x86_64-win32",
            SpatialToolsPackage::SchemaCompilerMac => "schema_compiler-x86_64-macos",
            SpatialToolsPackage::SchemaCompilerLinux => "schema_compiler-x86_64-linux",
            SpatialToolsPackage::SnapshotConverterWin => "snapshot_converter-x86_64-win32",
            SpatialToolsPackage::SnapshotConverterMac => "snapshot_converter-x86_64-macos",
            SpatialToolsPackage::SnapshotConverterLinux => "snapshot_converter-x86_64-linux",
        }
    }

    /// Subdirectory of the SDK root that the package is unpacked into.
    /// All platform variants of a tool share the same target directory.
    fn relative_target_directory(self) -> &'static str {
        match self {
            SpatialToolsPackage::SchemaCompilerWin
            | SpatialToolsPackage::SchemaCompilerMac
            | SpatialToolsPackage::SchemaCompilerLinux => "schema-compiler",
            SpatialToolsPackage::SnapshotConverterWin
            | SpatialToolsPackage::SnapshotConverterMac
            | SpatialToolsPackage::SnapshotConverterLinux => "snapshot-converter",
        }
    }
}

/// Schema library packages (standard library, plus an exhaustive test schema).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum SpatialSchemaPackage {
    StandardLibrary,
    ExhaustiveTestSchema,
}

impl SpatialSchemaPackage {
    /// Package identifier as understood by the `spatial` CLI.
    fn package_name(self) -> &'static str {
        match self {
            SpatialSchemaPackage::StandardLibrary => "standard_library",
            SpatialSchemaPackage::ExhaustiveTestSchema => "test_schema_library",
        }
    }

    /// Subdirectory of the SDK root that the package is unpacked into.
    fn relative_target_directory(self) -> &'static str {
        match self {
            SpatialSchemaPackage::StandardLibrary => "std-lib",
            SpatialSchemaPackage::ExhaustiveTestSchema => "test-schema",
        }
    }
}

/// A package from any of the `spatial` CLI package sources (`worker_sdk`,
/// `tools`, or `schema`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum SpatialPackageSource {
    WorkerSdk(SpatialWorkerSdkPackage),
    Tools(SpatialToolsPackage),
    Schema(SpatialSchemaPackage),
}

impl SpatialPackageSource {
    /// CLI arguments identifying the package: the package source followed by
    /// the package name, e.g. `["worker_sdk", "c_headers"]`.
    fn package_name(self) -> Vec<&'static str> {
        match self {
            SpatialPackageSource::WorkerSdk(package) => vec!["worker_sdk", package.package_name()],
            SpatialPackageSource::Tools(package) => vec!["tools", package.package_name()],
            SpatialPackageSource::Schema(package) => vec!["schema", package.package_name()],
        }
    }

    /// Subdirectory of the SDK root that the package is unpacked into.
    fn relative_target_directory(self) -> &'static str {
        match self {
            SpatialPackageSource::WorkerSdk(package) => package.relative_target_directory(),
            SpatialPackageSource::Tools(package) => package.relative_target_directory(),
            SpatialPackageSource::Schema(package) => package.relative_target_directory(),
        }
    }
}

// TODO: Allow users to specify which ones of these want? Currently we download
// all worker SDK flavors regardless of the host platform, since cross-builds
// may need them.
static COMMON_PACKAGES: &[SpatialPackageSource] = &[
    SpatialPackageSource::WorkerSdk(SpatialWorkerSdkPackage::CHeaders),
    SpatialPackageSource::WorkerSdk(SpatialWorkerSdkPackage::CApiLinux),
    SpatialPackageSource::WorkerSdk(SpatialWorkerSdkPackage::CApiWin),
    SpatialPackageSource::WorkerSdk(SpatialWorkerSdkPackage::CApiMac),
    SpatialPackageSource::Schema(SpatialSchemaPackage::StandardLibrary),
];

// Host tools are only downloaded for the platform cargo-spatial itself runs on.
#[cfg(target_os = "linux")]
static PLATFORM_PACKAGES: &[SpatialPackageSource] = &[
    SpatialPackageSource::Tools(SpatialToolsPackage::SchemaCompilerLinux),
    SpatialPackageSource::Tools(SpatialToolsPackage::SnapshotConverterLinux),
];

#[cfg(target_os = "windows")]
static PLATFORM_PACKAGES: &[SpatialPackageSource] = &[
    SpatialPackageSource::Tools(SpatialToolsPackage::SchemaCompilerWin),
    SpatialPackageSource::Tools(SpatialToolsPackage::SnapshotConverterWin),
];

#[cfg(target_os = "macos")]
static PLATFORM_PACKAGES: &[SpatialPackageSource] = &[
    SpatialPackageSource::Tools(SpatialToolsPackage::SchemaCompilerMac),
    SpatialPackageSource::Tools(SpatialToolsPackage::SnapshotConverterMac),
];

/// Downloads all SDK packages into the configured `spatial_lib_dir`.
///
/// The target directory comes from the config when one was loaded, falling
/// back to the `SPATIAL_LIB_DIR` environment variable otherwise. The SDK
/// version comes from `options.sdk_version` when given, otherwise from the
/// config. Any existing contents of the target directory are removed first so
/// that SDK versions are never mixed.
///
/// # Errors
///
/// Fails when no target directory can be determined, when the directory cannot
/// be (re)created, or when any individual package download fails.
pub fn download_sdk(
    config: Result<Config, Box<dyn std::error::Error>>,
    options: &DownloadSdk,
) -> Result<(), Box<dyn std::error::Error>> {
    let spatial_lib_dir = match config {
        Ok(ref config) => config.spatial_lib_dir().ok_or("spatial_lib_dir value must be set in the config, or the SPATIAL_LIB_DIR environment variable must be set")?,
        Err(_) => ::std::env::var("SPATIAL_LIB_DIR")?
    };

    let spatial_sdk_version = match options.sdk_version {
        Some(ref version) => version.clone(),
        None => config?.spatial_sdk_version,
    };

    info!("Downloading packages into: {}", spatial_lib_dir);

    // Clean the target directory so packages from an older SDK version can't
    // linger alongside the freshly downloaded ones.
    if Path::new(&spatial_lib_dir).exists() {
        fs::remove_dir_all(&spatial_lib_dir)?;
    }
    fs::create_dir_all(&spatial_lib_dir)?;

    trace!("Spatial lib directory cleaned.");

    for package in COMMON_PACKAGES {
        download_package(*package, &spatial_sdk_version, &spatial_lib_dir)?;
    }

    for package in PLATFORM_PACKAGES {
        download_package(*package, &spatial_sdk_version, &spatial_lib_dir)?;
    }

    if options.with_test_schema {
        download_package(
            SpatialPackageSource::Schema(SpatialSchemaPackage::ExhaustiveTestSchema),
            &spatial_sdk_version,
            &spatial_lib_dir,
        )?;
    }

    Ok(())
}

/// Downloads and unzips a single package by shelling out to
/// `spatial package retrieve`.
///
/// # Errors
///
/// Fails when the `spatial` CLI cannot be spawned, exits unsuccessfully, or
/// when the target path is not valid UTF-8 (the CLI takes string arguments).
fn download_package(
    package_source: SpatialPackageSource,
    sdk_version: &str,
    spatial_lib_dir: &str,
) -> Result<(), Box<dyn std::error::Error>> {
    info!("Downloading {}", package_source.package_name().join(" "));

    // Each package lands in its own subdirectory of the SDK root.
    let output_path =
        Path::new(spatial_lib_dir).join(package_source.relative_target_directory());

    let mut args = vec!["package", "retrieve"];
    args.extend(package_source.package_name());
    args.push(sdk_version);
    args.push(
        output_path
            .to_str()
            .ok_or("Package output path is not valid UTF-8")?,
    );
    args.push("--unzip");

    trace!("Running spatial command with arguments: {:?}", args);

    let process = process::Command::new("spatial").args(args).output()?;

    if !process.status.success() {
        // Surface the CLI's own output at trace level to aid debugging.
        let stdout = String::from_utf8(process.stdout)?;
        let stderr = String::from_utf8(process.stderr)?;

        trace!("{}", stdout);
        trace!("{}", stderr);

        return Err("Failed to download package.".into());
    }

    Ok(())
}

/// Downloads the installer at `download_url` into `directory` and returns the
/// path of the downloaded file.
///
/// The file name is taken from the last segment of the (post-redirect) URL,
/// falling back to `tmp.bin` when the URL has no usable segment.
///
/// # Errors
///
/// Fails when the HTTP request, file creation, or body copy fails.
fn get_installer(
    download_url: &str,
    directory: &Path,
) -> Result<PathBuf, Box<dyn std::error::Error>> {
    trace!("GET request to {}", download_url);
    let mut response = get(download_url)?;

    // Scope the borrow of `response.url()` so we can mutably borrow `response`
    // for the body copy below.
    let path = {
        let fname = response
            .url()
            .path_segments()
            .and_then(::std::iter::Iterator::last)
            .and_then(|name| if name.is_empty() { None } else { Some(name) })
            .unwrap_or("tmp.bin");

        trace!("Downloading {}", fname);
        directory.join(fname)
    };

    trace!("Creating temporary file at: {:?}", path);
    let mut dest = File::create(&path)?;

    copy(&mut response, &mut dest)?;

    Ok(path)
}

#[cfg(target_os = "linux")]
mod linux {
    /// There is no CLI installer for Linux; point the user at the manual setup
    /// instructions instead.
    pub fn download_cli() -> Result<(), Box<dyn std::error::Error>> {
        Err("Linux installer is unsupported. Follow the instructions here to install the Spatial CLI: https://docs.improbable.io/reference/latest/shared/setup/linux".to_owned().into())
    }
}

#[cfg(target_os = "windows")]
mod windows {
    use log::info;
    use std::process;
    use tempfile;

    const DOWNLOAD_LOCATION: &str =
        "https://console.improbable.io/installer/download/stable/latest/win";

    /// Downloads the Windows installer to a temporary directory and runs it.
    pub fn download_cli() -> Result<(), Box<dyn std::error::Error>> {
        // Holding the TempDir alive keeps the downloaded installer on disk
        // until this function returns.
        let tmp_dir = tempfile::TempDir::new()?;

        info!("Downloading installer.");
        let installer_path = super::get_installer(DOWNLOAD_LOCATION, tmp_dir.path())?;

        info!("Executing installer.");
        let result = process::Command::new(installer_path).status();

        match result {
            Ok(status) => {
                if !status.success() {
                    return Err("Installer returned a non-zero exit code.".to_owned().into());
                }

                Ok(())
            }
            Err(e) => {
                // Raw OS error 740 is ERROR_ELEVATION_REQUIRED on Windows.
                if let Some(code) = e.raw_os_error() {
                    if code == 740 {
                        return Err("Installer requires elevated permissions to run. Please rerun in a terminal with elevated permissions.".to_owned().into());
                    }
                }

                Err(e.into())
            }
        }
    }
}

#[cfg(target_os = "macos")]
mod macos {
    use log::info;
    use std::process;
    use tempfile;

    const DOWNLOAD_LOCATION: &str =
        "https://console.improbable.io/installer/download/stable/latest/mac";

    /// Downloads the macOS `.pkg` to a temporary directory and installs it via
    /// the system `installer` utility.
    pub fn download_cli() -> Result<(), Box<dyn std::error::Error>> {
        // Holding the TempDir alive keeps the downloaded installer on disk
        // until this function returns.
        let tmp_dir = tempfile::TempDir::new()?;

        info!("Downloading installer.");
        let installer_path = super::get_installer(DOWNLOAD_LOCATION, tmp_dir.path())?;

        info!("Executing installer.");
        let status = process::Command::new("installer")
            .arg("-pkg")
            .arg(installer_path)
            .args(&["-target", "/"])
            .status()?;

        if !status.success() {
            return Err("Installer returned a non-zero exit code.".to_owned().into());
        }

        Ok(())
    }
}
#[cfg(target_os = "linux")] pub use self::linux::*; #[cfg(target_os = "macos")] pub use self::macos::*; #[cfg(target_os = "windows")] pub use self::windows::*; use crate::{config::Config, opt::DownloadSdk}; use log::*; use reqwest::get; use std::fs::File; use std::io::copy; use std::{ fs, path::{Path, PathBuf}, process, }; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] enum SpatialWorkerSdkPackage { CHeaders, CApiWin, CApiMac, CApiLinux, } impl SpatialWorkerSdkPackage { fn package_name(self) -> &'static str { match self { SpatialWorkerSdkPackage::CHeaders => "c_headers", SpatialWorkerSdkPackage::CApiWin => "c-static-x86_64-vc140_mt-win32", SpatialWorkerSdkPackage::CApiMac => "c-static-x86_64-clang-macos", SpatialWorkerSdkPackage::CApiLinux => "c-static-x86_64-gcc510_pic-linux", } } fn relative_target_directory(self) -> &'static str { match self { SpatialWorkerSdkPackage::CHeaders => "headers", SpatialWorkerSdkPackage::CApiWin => "win", SpatialWorkerSdkPackage::CApiMac => "macos", SpatialWorkerSdkPackage::CApiLinux => "linux", } } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] enum SpatialToolsPackage { SchemaCompilerWin, SchemaCompilerMac, SchemaCompilerLinux, SnapshotConverterWin, SnapshotConverterMac, SnapshotConverterLinux, } impl SpatialToolsPackage { fn package_name(self) -> &'static str { match self { SpatialToolsPackage::SchemaCompilerWin => "schema_compiler-x86_64-win32", SpatialToolsPackage::SchemaCompilerMac => "schema_compiler-x86_64-macos", SpatialToolsPackage::SchemaCompilerLinux => "schema_compiler-x86_64-linux", SpatialToolsPackage::SnapshotConverterWin => "snapshot_converter-x86_64-win32", SpatialToolsPackage::SnapshotConverterMac => "snapshot_converter-x86_64-macos", SpatialToolsPackage::SnapshotConverterLinux => "snapshot_converter-x86_64-linux", } } fn relative_target_directory(self) -> &'static str { match self { SpatialToolsPackage::SchemaCompilerWin | SpatialToolsPackage::SchemaCompilerMac | SpatialToolsPackage::SchemaCompilerLinux 
=> "schema-compiler", SpatialToolsPackage::SnapshotConverterWin | SpatialToolsPackage::SnapshotConverterMac | SpatialToolsPackage::SnapshotConverterLinux => "snapshot-converter", } } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] enum SpatialSchemaPackage { StandardLibrary, ExhaustiveTestSchema, } impl SpatialSchemaPackage { fn package_name(self) -> &'static str { match self { SpatialSchemaPackage::StandardLibrary => "standard_library", SpatialSchemaPackage::ExhaustiveTestSchema => "test_schema_library", } } fn rel
} #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] enum SpatialPackageSource { WorkerSdk(SpatialWorkerSdkPackage), Tools(SpatialToolsPackage), Schema(SpatialSchemaPackage), } impl SpatialPackageSource { fn package_name(self) -> Vec<&'static str> { match self { SpatialPackageSource::WorkerSdk(package) => vec!["worker_sdk", package.package_name()], SpatialPackageSource::Tools(package) => vec!["tools", package.package_name()], SpatialPackageSource::Schema(package) => vec!["schema", package.package_name()], } } fn relative_target_directory(self) -> &'static str { match self { SpatialPackageSource::WorkerSdk(package) => package.relative_target_directory(), SpatialPackageSource::Tools(package) => package.relative_target_directory(), SpatialPackageSource::Schema(package) => package.relative_target_directory(), } } } static COMMON_PACKAGES: &[SpatialPackageSource] = &[ SpatialPackageSource::WorkerSdk(SpatialWorkerSdkPackage::CHeaders), SpatialPackageSource::WorkerSdk(SpatialWorkerSdkPackage::CApiLinux), SpatialPackageSource::WorkerSdk(SpatialWorkerSdkPackage::CApiWin), SpatialPackageSource::WorkerSdk(SpatialWorkerSdkPackage::CApiMac), SpatialPackageSource::Schema(SpatialSchemaPackage::StandardLibrary), ]; #[cfg(target_os = "linux")] static PLATFORM_PACKAGES: &[SpatialPackageSource] = &[ SpatialPackageSource::Tools(SpatialToolsPackage::SchemaCompilerLinux), SpatialPackageSource::Tools(SpatialToolsPackage::SnapshotConverterLinux), ]; #[cfg(target_os = "windows")] static PLATFORM_PACKAGES: &[SpatialPackageSource] = &[ SpatialPackageSource::Tools(SpatialToolsPackage::SchemaCompilerWin), SpatialPackageSource::Tools(SpatialToolsPackage::SnapshotConverterWin), ]; #[cfg(target_os = "macos")] static PLATFORM_PACKAGES: &[SpatialPackageSource] = &[ SpatialPackageSource::Tools(SpatialToolsPackage::SchemaCompilerMac), SpatialPackageSource::Tools(SpatialToolsPackage::SnapshotConverterMac), ]; pub fn download_sdk( config: Result<Config, Box<dyn std::error::Error>>, options: 
&DownloadSdk, ) -> Result<(), Box<dyn std::error::Error>> { let spatial_lib_dir = match config { Ok(ref config) => config.spatial_lib_dir().ok_or("spatial_lib_dir value must be set in the config, or the SPATIAL_LIB_DIR environment variable must be set")?, Err(_) => ::std::env::var("SPATIAL_LIB_DIR")? }; let spatial_sdk_version = match options.sdk_version { Some(ref version) => version.clone(), None => config?.spatial_sdk_version, }; info!("Downloading packages into: {}", spatial_lib_dir); if Path::new(&spatial_lib_dir).exists() { fs::remove_dir_all(&spatial_lib_dir)?; } fs::create_dir_all(&spatial_lib_dir)?; trace!("Spatial lib directory cleaned."); for package in COMMON_PACKAGES { download_package(*package, &spatial_sdk_version, &spatial_lib_dir)?; } for package in PLATFORM_PACKAGES { download_package(*package, &spatial_sdk_version, &spatial_lib_dir)?; } if options.with_test_schema { download_package( SpatialPackageSource::Schema(SpatialSchemaPackage::ExhaustiveTestSchema), &spatial_sdk_version, &spatial_lib_dir, )?; } Ok(()) } fn download_package( package_source: SpatialPackageSource, sdk_version: &str, spatial_lib_dir: &str, ) -> Result<(), Box<dyn std::error::Error>> { info!("Downloading {}", package_source.package_name().join(" ")); let mut output_path = PathBuf::new(); output_path.push(spatial_lib_dir); output_path.push(package_source.relative_target_directory()); let mut args = vec!["package", "retrieve"]; args.extend(package_source.package_name()); args.push(sdk_version); args.push(output_path.to_str().unwrap()); args.push("--unzip"); trace!("Running spatial command with arguments: {:?}", args); let process = process::Command::new("spatial").args(args).output()?; if !process.status.success() { let stdout = String::from_utf8(process.stdout)?; let stderr = String::from_utf8(process.stderr)?; trace!("{}", stdout); trace!("{}", stderr); return Err("Failed to download package.".into()); } Ok(()) } fn get_installer( download_url: &str, directory: &Path, ) -> 
Result<PathBuf, Box<dyn std::error::Error>> { trace!("GET request to {}", download_url); let mut response = get(download_url)?; let (mut dest, path) = { let fname = response .url() .path_segments() .and_then(::std::iter::Iterator::last) .and_then(|name| if name.is_empty() { None } else { Some(name) }) .unwrap_or("tmp.bin"); trace!("Downloading {}", fname); let fname = directory.join(fname); trace!("Creating temporary file at: {:?}", fname); (File::create(fname.clone())?, fname) }; copy(&mut response, &mut dest)?; Ok(path) } #[cfg(target_os = "linux")] mod linux { pub fn download_cli() -> Result<(), Box<dyn std::error::Error>> { Err("Linux installer is unsupported. Follow the instructions here to install the Spatial CLI: https://docs.improbable.io/reference/latest/shared/setup/linux".to_owned().into()) } } #[cfg(target_os = "windows")] mod windows { use log::info; use std::process; use tempfile; const DOWNLOAD_LOCATION: &str = "https://console.improbable.io/installer/download/stable/latest/win"; pub fn download_cli() -> Result<(), Box<dyn std::error::Error>> { let tmp_dir = tempfile::TempDir::new()?; info!("Downloading installer."); let installer_path = super::get_installer(DOWNLOAD_LOCATION, tmp_dir.path())?; info!("Executing installer."); let result = process::Command::new(installer_path).status(); match result { Ok(status) => { if !status.success() { return Err("Installer returned a non-zero exit code.".to_owned().into()); } Ok(()) } Err(e) => { if let Some(code) = e.raw_os_error() { if code == 740 { return Err("Installer requires elevated permissions to run. 
Please rerun in a terminal with elevated permissions.".to_owned().into()); } } Err(e.into()) } } } } #[cfg(target_os = "macos")] mod macos { use log::info; use std::process; use tempfile; const DOWNLOAD_LOCATION: &str = "https://console.improbable.io/installer/download/stable/latest/mac"; pub fn download_cli() -> Result<(), Box<dyn std::error::Error>> { let tmp_dir = tempfile::TempDir::new()?; info!("Downloading installer."); let installer_path = super::get_installer(DOWNLOAD_LOCATION, tmp_dir.path())?; info!("Executing installer."); let status = process::Command::new("installer") .arg("-pkg") .arg(installer_path) .args(&["-target", "/"]) .status()?; if !status.success() { return Err("Installer returned a non-zero exit code.".to_owned().into()); } Ok(()) } }
ative_target_directory(self) -> &'static str { match self { SpatialSchemaPackage::StandardLibrary => "std-lib", SpatialSchemaPackage::ExhaustiveTestSchema => "test-schema", } }
function_block-function_prefixed
[ { "content": "/// Formats an key-value pair into an argument string.\n\npub fn format_arg<S: AsRef<OsStr>>(prefix: &str, value: S) -> OsString {\n\n let mut arg = OsString::from(format!(\"--{}=\", prefix));\n\n arg.push(value.as_ref());\n\n arg\n\n}\n", "file_path": "cargo-spatial/src/lib.rs", "rank": 0, "score": 90548.65107011895 }, { "content": "pub fn main() {\n\n let input_filename = args().nth(1).unwrap();\n\n let output_filename = args().nth(2).unwrap();\n\n\n\n let mut input_file =\n\n File::open(input_filename).expect(\"Unable to open the test schema bundle.\");\n\n let mut contents = String::new();\n\n input_file\n\n .read_to_string(&mut contents)\n\n .expect(\"Unable to read the test schema bundle\");\n\n let generated_file = generator::generate_code(schema_bundle::load_bundle(&contents).unwrap());\n\n let mut output_file = File::create(output_filename).unwrap();\n\n output_file.write_all(generated_file.as_bytes()).unwrap();\n\n}\n", "file_path": "spatialos-sdk-code-generator/src/bin.rs", "rank": 2, "score": 87527.3893469573 }, { "content": "/// Performs code generation for the project described by `config`.\n\n///\n\n/// Assumes that the current working directory is the root directory of the project,\n\n/// i.e. 
the directory that has the `Spatial.toml` file.\n\npub fn run_codegen(config: &Config) -> Result<(), Box<dyn std::error::Error>> {\n\n assert!(\n\n crate::current_dir_is_root(),\n\n \"Current directory should be the project root\"\n\n );\n\n\n\n // Ensure that the path to the Spatial SDK has been specified.\n\n let spatial_lib_dir = config.spatial_lib_dir()\n\n .map(PathBuf::from)\n\n .ok_or(\"spatial_lib_dir value must be set in the config, or the SPATIAL_LIB_DIR environment variable must be set\")?;\n\n\n\n // Determine the paths the the schema compiler and protoc relative the the lib\n\n // dir path.\n\n let schema_compiler_path = spatial_lib_dir.join(\"schema-compiler/schema_compiler\");\n\n let std_lib_path = spatial_lib_dir.join(\"std-lib\");\n\n\n\n // Calculate the various output directories relative to `output_dir`.\n\n let output_dir = PathBuf::from(config.schema_build_dir());\n\n let bundle_json_path = output_dir.join(\"bundle.json\");\n\n let schema_descriptor_path = output_dir.join(\"schema.descriptor\");\n", "file_path": "cargo-spatial/src/codegen.rs", "rank": 3, "score": 83880.68281104487 }, { "content": "pub fn cstr_to_string(ptr: *const std::os::raw::c_char) -> String {\n\n assert!(!ptr.is_null());\n\n unsafe {\n\n CStr::from_ptr(ptr)\n\n .to_owned()\n\n .into_string()\n\n .expect(\"Failed to unwrap string\")\n\n }\n\n}\n\n\n", "file_path": "spatialos-sdk/src/worker/internal/utils.rs", "rank": 4, "score": 83189.35516901535 }, { "content": "#[test]\n\npub fn create_and_read_snapshot() {\n\n let snapshot_path = env::temp_dir().join(\"test.snapshot\");\n\n\n\n let entity = get_test_entity().expect(\"Error\");\n\n\n\n {\n\n SnapshotOutputStream::new(snapshot_path.clone())\n\n .expect(\"Error\")\n\n .write_entity(EntityId::new(1), &entity)\n\n .expect(\"Error\");\n\n }\n\n\n\n {\n\n let mut snapshot = SnapshotInputStream::new(snapshot_path).expect(\"Error\");\n\n\n\n assert!(snapshot.has_next());\n\n\n\n let entity = 
snapshot.read_entity().expect(\"Error\");\n\n\n\n let position = entity.get::<Position>();\n", "file_path": "test-suite/src/snapshot_integration_tests.rs", "rank": 5, "score": 83028.42414328185 }, { "content": "pub fn cstr_array_to_vec_string(\n\n char_ptr: *mut *const std::os::raw::c_char,\n\n count: u32,\n\n) -> Vec<String> {\n\n let mut strings = Vec::new();\n\n unsafe {\n\n for i in 0..count as isize {\n\n let ptr = char_ptr.offset(i) as *mut *const std::os::raw::c_char;\n\n assert!(!ptr.is_null());\n\n strings.push(cstr_to_string(*ptr));\n\n }\n\n }\n\n strings\n\n}\n", "file_path": "spatialos-sdk/src/worker/internal/utils.rs", "rank": 6, "score": 81031.58997203186 }, { "content": "pub fn load_bundle(data: &str) -> Result<SchemaBundle, serde_json::Error> {\n\n serde_json::from_str::<SchemaBundle>(data)\n\n}\n", "file_path": "spatialos-sdk-code-generator/src/schema_bundle.rs", "rank": 7, "score": 80991.32637678923 }, { "content": "/// Generates a random, valid component ID.\n\n///\n\n/// Component IDs are `i32` values that must be:\n\n///\n\n/// * Greater than 100.\n\n/// * Less than 536,870,911.\n\n/// * Not in the range 190,000 to 199999.\n\npub fn generate_component_id() -> i32 {\n\n use rand::Rng;\n\n\n\n let mut rng = rand::thread_rng();\n\n loop {\n\n let num = rng.gen();\n\n if num > 100 && (num < 190_000 || num > 199_999) && num < 536_870_911 {\n\n return num;\n\n }\n\n }\n\n}\n\n\n", "file_path": "cargo-spatial/src/lib.rs", "rank": 8, "score": 80029.70775329013 }, { "content": "/// Returns true if the current working directory is the project root (i.e. 
has the\n\n/// `Spatial.toml` file).\n\npub fn current_dir_is_root() -> bool {\n\n std::path::Path::new(\"./Spatial.toml\").exists()\n\n}\n\n\n", "file_path": "cargo-spatial/src/lib.rs", "rank": 9, "score": 80029.70775329013 }, { "content": "#[test]\n\npub fn writing_invalid_entity_returns_error() {\n\n let snapshot_path = env::temp_dir().join(\"test2.snapshot\");\n\n\n\n let entity = Entity::new();\n\n\n\n let error = SnapshotOutputStream::new(snapshot_path)\n\n .expect(\"Error\")\n\n .write_entity(EntityId::new(1), &entity);\n\n\n\n assert!(error.is_err());\n\n}\n\n\n", "file_path": "test-suite/src/snapshot_integration_tests.rs", "rank": 10, "score": 79178.8752365093 }, { "content": "/// Prepares and launches a local deployment.\n\n///\n\n/// Before launching the deployment, this will first run code generation and build\n\n/// workers in the project. Assumes that the current working directory is the root\n\n/// directory of the project, i.e. the directory that has the `Spatial.toml` file.\n\npub fn launch(config: &Config, launch: &LocalLaunch) -> Result<(), Box<dyn std::error::Error>> {\n\n assert!(\n\n crate::current_dir_is_root(),\n\n \"Current directory should be the project root\"\n\n );\n\n\n\n // Run codegen and such.\n\n crate::codegen::run_codegen(config)?;\n\n\n\n // Use `cargo install` to build workers and copy the exectuables to the build\n\n // directory.\n\n //\n\n // TODO: Manually copy the built executables instead of using `cargo install`.\n\n // `cargo install` doesn't use the same build cache as normal builds, so it will\n\n // sometimes result in unnecessary recompilation, which can slow down launch times.\n\n if !launch.no_build {\n\n let build_profile = match config.local_build_profile {\n\n BuildProfile::Debug => \"debug\",\n\n BuildProfile::Release => \"release\",\n\n };\n", "file_path": "cargo-spatial/src/local.rs", "rank": 11, "score": 77505.49454564134 }, { "content": "fn get_schema_type(value_type: &TypeReference) -> &str {\n\n match 
value_type {\n\n TypeReference::Primitive(primitive) => get_rust_primitive_type_tag(&primitive),\n\n TypeReference::Enum(_) => \"SchemaEnum\",\n\n TypeReference::Type(_) => \"SchemaObject\",\n\n }\n\n}\n\n\n", "file_path": "spatialos-sdk-code-generator/src/generator.rs", "rank": 12, "score": 67301.22042535621 }, { "content": "fn get_field_schema_type(field: &FieldDefinition) -> &str {\n\n match field.field_type {\n\n FieldDefinition_FieldType::Singular { ref type_reference } => {\n\n get_schema_type(type_reference)\n\n }\n\n FieldDefinition_FieldType::Option { ref inner_type } => get_schema_type(&inner_type),\n\n FieldDefinition_FieldType::List { ref inner_type } => get_schema_type(&inner_type),\n\n FieldDefinition_FieldType::Map { .. } => \"SchemaObject\",\n\n }\n\n}\n\n\n", "file_path": "spatialos-sdk-code-generator/src/generator.rs", "rank": 13, "score": 67301.22042535621 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let opt = Opt::from_args();\n\n\n\n // Initialize the logger.\n\n let verbosity = if opt.verbose {\n\n LevelFilter::Trace\n\n } else {\n\n LevelFilter::Info\n\n };\n\n SimpleLogger::init(verbosity, Default::default()).expect(\"Failed to setup logger\");\n\n\n\n // Perform the operation selected by the user.\n\n match &opt.command {\n\n Command::Codegen => codegen::run_codegen(&Config::load()?)?,\n\n\n\n Command::Local(local) => match local {\n\n Local::Launch(launch) => local::launch(&Config::load()?, launch)?,\n\n },\n\n\n\n Command::Generate { command } => match command {\n", "file_path": "cargo-spatial/src/main.rs", "rank": 14, "score": 66056.14112249648 }, { "content": "pub fn generate_code(bundle: SchemaBundle) -> String {\n\n // Set up the root package.\n\n let generated_code = Rc::new(RefCell::new(GeneratedCode {\n\n root_package: None,\n\n packages: BTreeSet::new(),\n\n enums: BTreeMap::new(),\n\n types: BTreeMap::new(),\n\n components: BTreeMap::new(),\n\n }));\n\n let mut root_package = 
Package::new(Rc::clone(&generated_code), \"\", vec![]);\n\n for file in bundle.schema_files {\n\n let package = get_or_create_packages(\n\n &mut root_package,\n\n file.package\n\n .name\n\n .split('.')\n\n .collect::<Vec<&str>>()\n\n .as_slice(),\n\n );\n\n for type_def in file.types {\n", "file_path": "spatialos-sdk-code-generator/src/generator.rs", "rank": 15, "score": 65972.61046360133 }, { "content": "fn get_rust_primitive_type_tag(primitive_type: &PrimitiveType) -> &str {\n\n match primitive_type {\n\n PrimitiveType::Invalid => panic!(\"Encountered invalid primitive.\"),\n\n PrimitiveType::Int32 => \"SchemaInt32\",\n\n PrimitiveType::Sint32 => \"SchemaSint32\",\n\n PrimitiveType::Sfixed32 => \"SchemaSfixed32\",\n\n PrimitiveType::Int64 => \"SchemaInt64\",\n\n PrimitiveType::Sint64 => \"SchemaSint64\",\n\n PrimitiveType::Sfixed64 => \"SchemaSfixed64\",\n\n PrimitiveType::Uint32 => \"SchemaUint32\",\n\n PrimitiveType::Fixed32 => \"SchemaFixed32\",\n\n PrimitiveType::Uint64 => \"SchemaUint64\",\n\n PrimitiveType::Fixed64 => \"SchemaFixed64\",\n\n PrimitiveType::Bool => \"SchemaBool\",\n\n PrimitiveType::Float => \"SchemaFloat\",\n\n PrimitiveType::Double => \"SchemaDouble\",\n\n PrimitiveType::String => \"SchemaString\",\n\n PrimitiveType::EntityId => \"SchemaEntityId\",\n\n PrimitiveType::Entity => panic!(\"Entity serialization unimplemented.\"),\n\n PrimitiveType::Bytes => \"SchemaBytes\",\n\n }\n\n}\n\n\n", "file_path": "spatialos-sdk-code-generator/src/generator.rs", "rank": 16, "score": 64384.88814813508 }, { "content": "fn check_for_flag(connection: &mut WorkerConnection, flag_name: &str) {\n\n let flag = connection.get_worker_flag(flag_name);\n\n match flag {\n\n Some(f) => println!(\"Found flag value: {}\", f),\n\n None => println!(\"Could not find flag value\"),\n\n }\n\n}\n\n\n", "file_path": "project-example/src/main.rs", "rank": 17, "score": 64101.68843793412 }, { "content": "pub fn get_connection(opt: Opt) -> Result<WorkerConnection, String> {\n\n 
let Opt {\n\n worker_type,\n\n worker_id,\n\n connect_with_poll,\n\n command,\n\n } = opt;\n\n\n\n let worker_id = worker_id.unwrap_or_else(|| format!(\"{}-{}\", &worker_type, Uuid::new_v4()));\n\n let mut future = match command {\n\n Command::Receptionist {\n\n host,\n\n port,\n\n connect_with_external_ip,\n\n } => {\n\n let params = ConnectionParameters::new(worker_type)\n\n .using_udp()\n\n .using_external_ip(connect_with_external_ip)\n\n .enable_internal_serialization();\n\n WorkerConnection::connect_receptionist_async(\n", "file_path": "project-example/src/connection_handler.rs", "rank": 18, "score": 60632.654370001284 }, { "content": "// This function ensures that given a path [\"example\", \"foo\"] and the root package, it will create\n\n// 2 packages with the following structure:\n\n// Package(\"root\", [Package(\"example\", [Package(\"foo\", [])])])\n\nfn get_or_create_packages<'a>(package: &'a mut Package, path: &[&str]) -> &'a mut Package {\n\n if path.is_empty() {\n\n return package;\n\n }\n\n // Given a package, and a path. 
If that package does not have any subpackages with the name of the \"next\"\n\n // package in the FQN, create it.\n\n let package_name = path[0];\n\n let mut package_path = package.path.clone();\n\n package_path.push(package_name.to_string());\n\n if !package.subpackages.contains_key(package_name) {\n\n package.subpackages.insert(\n\n package_name.to_string(),\n\n Package::new(\n\n Rc::clone(&package.generated_code),\n\n package_name,\n\n package_path,\n\n ),\n\n );\n\n }\n\n\n\n // Recurse into the package created above, and create more packages if needed.\n\n get_or_create_packages(\n\n package.subpackages.get_mut(package_name).unwrap(),\n\n &path[1..],\n\n )\n\n}\n\n\n", "file_path": "spatialos-sdk-code-generator/src/generator.rs", "rank": 19, "score": 53668.3208969688 }, { "content": "fn main() {\n\n let opt = Opt::from_args();\n\n let mut worker_connection = match get_connection(opt) {\n\n Ok(c) => c,\n\n Err(e) => panic!(\"{}\", e),\n\n };\n\n\n\n println!(\"Connected as: {}\", worker_connection.get_worker_id());\n\n\n\n exercise_connection_code_paths(&mut worker_connection);\n\n logic_loop(&mut worker_connection);\n\n}\n\n\n", "file_path": "project-example/src/main.rs", "rank": 20, "score": 51660.533043396004 }, { "content": "fn main() {\n\n let lib_dir = match env::var(\"SPATIAL_LIB_DIR\") {\n\n Ok(s) => s,\n\n Err(_) => panic!(\"SPATIAL_LIB_DIR environment variable not set.\"),\n\n };\n\n\n\n let package_dir = Path::new(&lib_dir).join(PACKAGE_DIR);\n\n\n\n println!(\"cargo:rustc-link-search={}\", package_dir.to_str().unwrap());\n\n\n\n for lib in LIBS.iter() {\n\n println!(\"cargo:rustc-link-lib=static={}\", lib)\n\n }\n\n\n\n #[cfg(target_os = \"macos\")]\n\n println!(\"cargo:rustc-link-lib=dylib=c++\");\n\n\n\n #[cfg(target_os = \"linux\")]\n\n println!(\"cargo:rustc-link-lib=dylib=stdc++\");\n\n\n\n #[cfg(target_os = \"windows\")]\n\n {\n\n println!(\"cargo:rustc-link-lib=dylib=gdi32\");\n\n println!(\"cargo:rustc-link-lib=dylib=user32\");\n\n 
}\n\n}\n", "file_path": "spatialos-sdk-sys/build.rs", "rank": 21, "score": 51660.533043396004 }, { "content": "fn main() {\n\n let opt = Opt::from_args();\n\n let current_dir = std::env::current_dir().expect(\"Could not find current working directory.\");\n\n\n\n let mut path_buf = PathBuf::new();\n\n path_buf.push(current_dir);\n\n path_buf.push(opt.snapshot_path);\n\n\n\n let snapshot_path = path_buf.to_str().unwrap();\n\n println!(\"Creating empty snapshot at: {}\", snapshot_path);\n\n\n\n let _stream = SnapshotOutputStream::new(snapshot_path);\n\n}\n\n\n", "file_path": "spatialos-sdk/examples/generate_snapshot.rs", "rank": 23, "score": 50138.803632512776 }, { "content": "enum IntermediateProtocolType {\n\n Tcp(Worker_TcpNetworkParameters),\n\n\n\n Udp {\n\n security_type: u8,\n\n kcp: Option<Worker_Alpha_KcpParameters>,\n\n erasure_codec: Option<Worker_ErasureCodecParameters>,\n\n heartbeat: Option<Worker_HeartbeatParameters>,\n\n flow_control: Option<Worker_Alpha_FlowControlParameters>,\n\n },\n\n}\n", "file_path": "spatialos-sdk/src/worker/parameters.rs", "rank": 29, "score": 48385.58570322805 }, { "content": "fn main() {\n\n let args = get_arguments();\n\n\n\n let headers =\n\n glob(&format!(\"{}/*.h\", args.input_dir)).expect(\"Could not glob input directory.\");\n\n\n\n let mut bindings = bindgen::Builder::default()\n\n .layout_tests(false)\n\n .derive_default(true);\n\n\n\n for path in headers {\n\n bindings = bindings.header(path.unwrap().as_path().to_str().unwrap().to_owned())\n\n }\n\n\n\n let mut out_path = PathBuf::from(args.output_dir);\n\n out_path.push(\"worker.rs\");\n\n\n\n bindings\n\n .generate()\n\n .expect(\"Could not generate bindings.\")\n\n .write_to_file(out_path)\n\n .expect(\"Could not write bindings\");\n\n}\n\n\n", "file_path": "spatialos-sdk-tools/src/generate_bindings/main.rs", "rank": 30, "score": 47480.41689432163 }, { "content": "#[test]\n\nfn position_is_serialized_correctly() {\n\n let builder = EntityBuilder::new(10.0, -10.0, 
7.5, \"rusty\");\n\n let entity = builder.build().unwrap();\n\n\n\n let maybe_position = entity.get::<Position>();\n\n assert!(maybe_position.is_some());\n\n\n\n let position = maybe_position.unwrap();\n\n\n\n approx::abs_diff_eq!(10.0, position.coords.x);\n\n approx::abs_diff_eq!(-10.0, position.coords.y);\n\n approx::abs_diff_eq!(7.5, position.coords.z);\n\n}\n\n\n", "file_path": "test-suite/src/entity_builder_tests.rs", "rank": 31, "score": 46312.593299067965 }, { "content": "#[test]\n\nfn metadata_is_serialized_correctly() {\n\n let mut builder = EntityBuilder::new(0.0, 0.0, 0.0, \"rusty\");\n\n builder.set_metadata(\"my_entity\", \"rusty\");\n\n let entity = builder.build().unwrap();\n\n\n\n let maybe_metadata = entity.get::<Metadata>();\n\n assert!(maybe_metadata.is_some());\n\n let metadata = maybe_metadata.unwrap();\n\n\n\n assert_eq!(\"my_entity\", metadata.entity_type);\n\n}\n\n\n", "file_path": "test-suite/src/entity_builder_tests.rs", "rank": 32, "score": 46312.593299067965 }, { "content": "// A trait that's implemented by a component to convert to/from schema handle types.\n\npub trait Component\n\nwhere\n\n Self: std::marker::Sized,\n\n{\n\n type Update;\n\n type CommandRequest;\n\n type CommandResponse;\n\n\n\n const ID: ComponentId;\n\n\n\n fn from_data(data: &schema::SchemaComponentData) -> Result<Self, String>;\n\n fn from_update(update: &schema::SchemaComponentUpdate) -> Result<Self::Update, String>;\n\n fn from_request(\n\n command_index: CommandIndex,\n\n request: &schema::SchemaCommandRequest,\n\n ) -> Result<Self::CommandRequest, String>;\n\n fn from_response(\n\n command_index: CommandIndex,\n\n response: &schema::SchemaCommandResponse,\n\n ) -> Result<Self::CommandResponse, String>;\n", "file_path": "spatialos-sdk/src/worker/component.rs", "rank": 33, "score": 45237.45556501597 }, { "content": "/// Connection trait to allow for mocking the connection.\n\npub trait Connection {\n\n fn send_log_message(\n\n &mut self,\n\n level: LogLevel,\n\n 
logger_name: &str,\n\n message: &str,\n\n entity_id: Option<EntityId>,\n\n );\n\n fn send_metrics(&mut self, metrics: &Metrics);\n\n\n\n fn send_reserve_entity_ids_request(\n\n &mut self,\n\n payload: ReserveEntityIdsRequest,\n\n timeout_millis: Option<u32>,\n\n ) -> RequestId<ReserveEntityIdsRequest>;\n\n fn send_create_entity_request(\n\n &mut self,\n\n entity: Entity,\n\n entity_id: Option<EntityId>,\n\n timeout_millis: Option<u32>,\n", "file_path": "spatialos-sdk/src/worker/connection.rs", "rank": 34, "score": 45237.45556501597 }, { "content": "#[test]\n\nfn error_is_returned_if_invalid_entity() {\n\n let mut builder = EntityBuilder::new(0.0, 0.0, 0.0, \"rusty\");\n\n builder.add_component(\n\n Position {\n\n coords: Coordinates {\n\n x: 0.0,\n\n y: 0.0,\n\n z: 0.0,\n\n },\n\n },\n\n \"rusty\",\n\n );\n\n let result = builder.build();\n\n\n\n assert!(result.is_err());\n\n}\n", "file_path": "test-suite/src/entity_builder_tests.rs", "rank": 35, "score": 45235.61265677959 }, { "content": "#[test]\n\nfn entity_acl_is_serialized_correctly() {\n\n let mut builder = EntityBuilder::new(0.0, 0.0, 0.0, \"position_acl\");\n\n builder.add_component(\n\n Metadata {\n\n entity_type: \"test\".to_owned(),\n\n },\n\n \"metadata_acl\",\n\n );\n\n builder.set_entity_acl_write_access(\"entity_acl_acl\");\n\n builder.add_read_access(\"client\");\n\n builder.add_read_access(\"server\");\n\n\n\n let entity = builder.build().unwrap();\n\n\n\n let maybe_acl = entity.get::<EntityAcl>();\n\n assert!(maybe_acl.is_some());\n\n\n\n let acl = maybe_acl.unwrap();\n\n\n\n // First check that we insert each layer into a different set.\n", "file_path": "test-suite/src/entity_builder_tests.rs", "rank": 36, "score": 45235.61265677959 }, { "content": "#[test]\n\nfn persistence_component_is_added_if_set() {\n\n let mut builder = EntityBuilder::new(0.0, 0.0, 0.0, \"rusty\");\n\n builder.set_persistent(\"rusty\");\n\n let entity = builder.build().unwrap();\n\n\n\n 
assert!(entity.get::<Persistence>().is_some());\n\n}\n\n\n", "file_path": "test-suite/src/entity_builder_tests.rs", "rank": 37, "score": 45235.61265677959 }, { "content": "// A trait that's implemented by a type to convert to/from schema objects.\n\npub trait TypeConversion\n\nwhere\n\n Self: std::marker::Sized,\n\n{\n\n fn from_type(input: &schema::SchemaObject) -> Result<Self, String>;\n\n fn to_type(input: &Self, output: &mut schema::SchemaObject) -> Result<(), String>;\n\n}\n\n\n", "file_path": "spatialos-sdk/src/worker/component.rs", "rank": 38, "score": 43961.492706552985 }, { "content": "// A bytes schema field.\n\npub trait SchemaBytesField {\n\n fn get(&self) -> Option<Vec<u8>> {\n\n if self.count() == 0 {\n\n None\n\n } else {\n\n Some(self.get_or_default())\n\n }\n\n }\n\n\n\n fn get_or_default(&self) -> Vec<u8>;\n\n fn index(&self, index: usize) -> Vec<u8>;\n\n fn count(&self) -> usize;\n\n fn add(&mut self, value: &[u8]);\n\n}\n\n\n\n// A string schema field.\n", "file_path": "spatialos-sdk/src/worker/internal/schema.rs", "rank": 39, "score": 41707.331740419584 }, { "content": "#[allow(clippy::ptr_arg)]\n\npub trait SchemaStringField {\n\n fn get(&self) -> Option<String> {\n\n if self.count() == 0 {\n\n None\n\n } else {\n\n Some(self.get_or_default())\n\n }\n\n }\n\n\n\n fn get_or_default(&self) -> String;\n\n fn index(&self, index: usize) -> String;\n\n fn count(&self) -> usize;\n\n\n\n fn add(&mut self, value: &String);\n\n fn add_list(&mut self, value: &[String]);\n\n}\n\n\n", "file_path": "spatialos-sdk/src/worker/internal/schema.rs", "rank": 40, "score": 41707.331740419584 }, { "content": "// An object schema field.\n\npub trait SchemaObjectField {\n\n fn get(&self) -> Option<SchemaObject> {\n\n if self.count() == 0 {\n\n None\n\n } else {\n\n Some(self.get_or_default())\n\n }\n\n }\n\n\n\n fn get_or_default(&self) -> SchemaObject;\n\n fn index(&self, index: usize) -> SchemaObject;\n\n fn count(&self) -> usize;\n\n\n\n fn add(&mut self) -> 
SchemaObject;\n\n}\n\n\n\nimpl SchemaObject {\n\n pub fn field<T>(&self, field_id: ComponentId) -> SchemaFieldContainer<T> {\n\n SchemaFieldContainer {\n\n field_id,\n", "file_path": "spatialos-sdk/src/worker/internal/schema.rs", "rank": 41, "score": 41707.331740419584 }, { "content": "fn get_arguments() -> Args {\n\n let matches = App::new(\"Spatial OS SDK Bindings Generator\")\n\n .author(\"Jamie Brynes <[email protected]>\")\n\n .about(\"Generate Rust bindings for the SpatialOS C API.\")\n\n .arg(\n\n Arg::with_name(OUTPUT_DIR_ARG)\n\n .short(\"o\")\n\n .long(OUTPUT_DIR_ARG)\n\n .takes_value(true)\n\n .value_name(&OUTPUT_DIR_ARG.to_uppercase())\n\n .required(true)\n\n .help(\"Output directory for the Rust bindings. Relative to the current working directory.\")\n\n )\n\n .arg(\n\n Arg::with_name(INPUT_DIR_ARG)\n\n .short(\"i\")\n\n .long(INPUT_DIR_ARG)\n\n .takes_value(true)\n\n .value_name(&INPUT_DIR_ARG.to_uppercase())\n\n .required(true)\n\n .help(\"Input directory for C header files. 
Relative to the current working directory.\")\n\n )\n\n .get_matches();\n\n\n\n Args {\n\n input_dir: matches.value_of(INPUT_DIR_ARG).unwrap().to_owned(),\n\n output_dir: matches.value_of(OUTPUT_DIR_ARG).unwrap().to_owned(),\n\n }\n\n}\n", "file_path": "spatialos-sdk-tools/src/generate_bindings/main.rs", "rank": 42, "score": 41321.09240286227 }, { "content": "fn print_worker_attributes(connection: &WorkerConnection) {\n\n let attrs = connection.get_worker_attributes();\n\n println!(\"The worker has the following attributes: \");\n\n for attr in attrs {\n\n println!(\"{}\", attr)\n\n }\n\n}\n\n\n", "file_path": "project-example/src/main.rs", "rank": 43, "score": 38766.418832669704 }, { "content": "// A primitive schema field.\n\npub trait SchemaPrimitiveField<T> {\n\n fn get(&self) -> Option<T> {\n\n if self.count() == 0 {\n\n None\n\n } else {\n\n Some(self.get_or_default())\n\n }\n\n }\n\n\n\n fn get_or_default(&self) -> T;\n\n fn index(&self, index: usize) -> T;\n\n fn count(&self) -> usize;\n\n\n\n fn add(&mut self, value: T);\n\n fn add_list(&mut self, value: &[T]);\n\n}\n\n\n", "file_path": "spatialos-sdk/src/worker/internal/schema.rs", "rank": 44, "score": 38104.34505141676 }, { "content": "fn send_query(c: &mut WorkerConnection) {\n\n let query = EntityQuery::new(\n\n QueryConstraint::And(vec![\n\n QueryConstraint::Or(vec![\n\n QueryConstraint::Component(0),\n\n QueryConstraint::Component(1),\n\n ]),\n\n QueryConstraint::And(vec![\n\n QueryConstraint::Sphere(10.0, 10.0, 10.0, 250.0),\n\n QueryConstraint::Not(Box::new(QueryConstraint::Component(2))),\n\n ]),\n\n QueryConstraint::EntityId(EntityId::new(10)),\n\n ]),\n\n ResultType::Count,\n\n );\n\n\n\n c.send_entity_query_request(EntityQueryRequest(query), None);\n\n}\n\n\n", "file_path": "project-example/src/main.rs", "rank": 45, "score": 38044.84888462246 }, { "content": "fn send_metrics(c: &mut WorkerConnection) {\n\n let mut m = Metrics::new()\n\n .with_load(0.2)\n\n .with_gauge_metric(\"some_metric\", 
0.15)\n\n .with_histogram_metric(\"histogram_metric\", HistogramMetric::new(&[6.7]));\n\n\n\n let gauge_metric = m.add_gauge_metric(\"another_metric\").unwrap();\n\n *gauge_metric = 0.2;\n\n\n\n let histogram_metric = m\n\n .add_histogram_metric(\"another_histogram\", &[0.1, 0.2, 0.3])\n\n .unwrap();\n\n histogram_metric.add_sample(1.0);\n\n histogram_metric.add_sample(0.5);\n\n\n\n c.send_metrics(&m);\n\n}\n", "file_path": "project-example/src/main.rs", "rank": 46, "score": 38044.84888462246 }, { "content": "fn logic_loop(c: &mut WorkerConnection) {\n\n /// Local tracking of the state of an entity's components. We only track the\n\n /// `Rotate` component because it's the only one we care about for this demo.\n\n #[derive(Debug, Default)]\n\n struct EntityState {\n\n has_authority: bool,\n\n rotate: Option<example::Rotate>,\n\n }\n\n\n\n let mut rng = rand::thread_rng();\n\n\n\n // Store the currently-visible state of the world. Entities/components are added\n\n // and removed from the world as we get ops notifying us of those changes. 
The\n\n // data in `world` also tracks which `Rotate` components we currently have\n\n // authority over, so that we know which ones we need to be updating.\n\n let mut world = HashMap::new();\n\n\n\n let mut builder = EntityBuilder::new(0.0, 0.0, 0.0, \"rusty\");\n\n\n\n builder.add_component(\n", "file_path": "project-example/src/main.rs", "rank": 47, "score": 38044.84888462246 }, { "content": "pub trait ComponentUpdate<C: Component> {\n\n fn merge(&mut self, update: Self);\n\n}\n\n\n", "file_path": "spatialos-sdk/src/worker/component.rs", "rank": 48, "score": 37272.62189528407 }, { "content": "pub trait ComponentData<C: Component> {\n\n fn merge(&mut self, update: C::Update);\n\n}\n\n\n", "file_path": "spatialos-sdk/src/worker/component.rs", "rank": 49, "score": 37272.62189528407 }, { "content": "fn exercise_connection_code_paths(c: &mut WorkerConnection) {\n\n c.send_log_message(LogLevel::Info, \"main\", \"Connected successfully!\", None);\n\n print_worker_attributes(&c);\n\n check_for_flag(c, \"my-flag\");\n\n\n\n let _ = c.get_op_list(0);\n\n c.send_reserve_entity_ids_request(ReserveEntityIdsRequest(1), None);\n\n send_query(c);\n\n\n\n let interested = vec![\n\n InterestOverride::new(1, true),\n\n InterestOverride::new(100, false),\n\n ];\n\n c.send_component_interest(EntityId::new(1), &interested);\n\n c.send_authority_loss_imminent_acknowledgement(EntityId::new(1), 1337);\n\n\n\n send_metrics(c);\n\n c.set_protocol_logging_enabled(false);\n\n\n\n println!(\"Testing completed\");\n\n}\n\n\n", "file_path": "project-example/src/main.rs", "rank": 50, "score": 35971.52761590986 }, { "content": "fn generate_module(package: &Package) -> String {\n\n let submodules = if !package.subpackages.is_empty() {\n\n package\n\n .subpackages\n\n .iter()\n\n .map(|(_, pkg)| generate_module(&pkg))\n\n .fold(\"\".to_string(), |submodule, next| submodule + \"\\n\" + &next)\n\n } else {\n\n \"\".to_string()\n\n };\n\n // Passing `package` to format! 
causes the T4 template engine to generate output.\n\n let module_contents = format!(\"{}\\n{}\", package, submodules);\n\n // The only package with a depth of 0 is the root package.\n\n if package.depth() == 0 {\n\n let allow_warnings = vec![\n\n \"#![allow(unused_imports)]\",\n\n \"#![allow(unreachable_code)]\",\n\n \"#![allow(unreachable_patterns)]\",\n\n \"#![allow(unused_variables)]\",\n\n \"#![allow(dead_code)]\",\n\n \"#![allow(non_camel_case_types)]\",\n\n \"#![allow(unused_mut)]\",\n\n ]\n\n .join(\"\\n\");\n\n format!(\"{}\\n\\n{}\", allow_warnings, module_contents)\n\n } else {\n\n format!(\"pub mod {} {{\\n{}}}\\n\", package.name, module_contents)\n\n }\n\n}\n\n\n", "file_path": "spatialos-sdk-code-generator/src/generator.rs", "rank": 51, "score": 35971.52761590986 }, { "content": "fn get_test_entity() -> Result<Entity, String> {\n\n let mut builder = EntityBuilder::new(10.0, -10.0, 0.0, \"RustWorker\");\n\n builder.set_persistent(\"RustWorker\");\n\n builder.build()\n\n}\n", "file_path": "test-suite/src/snapshot_integration_tests.rs", "rank": 52, "score": 35047.09684105969 }, { "content": "fn get_connection_poll(future: &mut WorkerConnectionFuture) -> Result<WorkerConnection, String> {\n\n for _ in 0..POLL_NUM_ATTEMPTS {\n\n println!(\"Attempting to poll.\");\n\n match future.poll() {\n\n Ok(res) => {\n\n if let Async::Ready(conn) = res {\n\n return Ok(conn);\n\n }\n\n }\n\n Err(s) => return Err(s),\n\n };\n\n\n\n ::std::thread::sleep(::std::time::Duration::from_millis(\n\n POLL_TIME_BETWEEN_ATTEMPTS_MILLIS,\n\n ));\n\n }\n\n\n\n Err(\"Max connection attempts failed.\".to_owned())\n\n}\n", "file_path": "project-example/src/connection_handler.rs", "rank": 53, "score": 28089.057588113257 }, { "content": "fn empty_string_is_none<'de, D>(d: D) -> Result<Option<String>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let o: Option<String> = Option::deserialize(d)?;\n\n Ok(o.filter(|s| !s.is_empty()))\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, 
Debug)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct SourceReference {\n\n pub line: u32,\n\n pub column: u32,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug)]\n\npub enum PrimitiveType {\n\n Invalid = 0,\n\n Int32 = 1,\n\n Int64 = 2,\n", "file_path": "spatialos-sdk-code-generator/src/schema_bundle.rs", "rank": 54, "score": 25782.196965937634 }, { "content": "use spatialos_sdk::worker::internal::schema::*;\n\nuse spatialos_sdk::worker::component::*;\n\nuse std::collections::BTreeMap;\n\n\n\nuse super::super::generated as generated;\n\n\n\n/* Enums. */\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum TestEnum {\n\n\n\n FIRST,\n\n SECOND,\n\n}\n\n\n\nimpl From<u32> for TestEnum {\n\n fn from(value: u32) -> Self {\n\n match value {\n\n\n\n 0 => TestEnum::FIRST, \n\n 1 => TestEnum::SECOND, \n", "file_path": "project-example/src/generated.rs", "rank": 56, "score": 35.449164767180946 }, { "content": "use spatialos_sdk::worker::internal::schema::*;\n\nuse spatialos_sdk::worker::component::*;\n\nuse std::collections::BTreeMap;\n\n\n\nuse <#= vec![\"super\".to_string(); self.depth() + 1].join(\"::\") #>::generated as generated;\n\n\n\n/* Enums. */<# for enum_name in &self.enums {\n\nlet enum_def = self.get_enum_definition(enum_name);\n\nlet enum_rust_name = self.rust_name(&enum_def.qualified_name);\n\n#>\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum <#= enum_rust_name #> {\n\n<# for enum_value in &enum_def.values { #>\n\n <#= enum_value.name #>,<# } #>\n\n}\n\n\n\nimpl From<u32> for <#= enum_rust_name #> {\n\n fn from(value: u32) -> Self {\n\n match value {\n\n<# for enum_value in &enum_def.values { #>\n", "file_path": "spatialos-sdk-code-generator/src/generated_code_mod.tt.rs", "rank": 57, "score": 30.732391517872784 }, { "content": "#[macro_use]\n\nextern crate lazy_static;\n\n\n\nuse std::env;\n\nuse std::path::Path;\n\n\n\n#[cfg(windows)]\n\nlazy_static! 
{\n\n static ref LIBS: Vec<&'static str> =\n\n vec![\"improbable_worker\", \"RakNetLibStatic\", \"ssl\", \"zlibstatic\",];\n\n}\n\n\n\n#[cfg(unix)]\n\nlazy_static! {\n\n static ref LIBS: Vec<&'static str> = vec![\"improbable_worker\", \"RakNetLibStatic\", \"ssl\", \"z\",];\n\n}\n\n\n\n#[cfg(target_os = \"linux\")]\n\nstatic PACKAGE_DIR: &str = \"linux\";\n\n#[cfg(target_os = \"macos\")]\n\nstatic PACKAGE_DIR: &str = \"macos\";\n\n#[cfg(target_os = \"windows\")]\n\nstatic PACKAGE_DIR: &str = \"win\";\n\n\n", "file_path": "spatialos-sdk-sys/build.rs", "rank": 58, "score": 30.216914781249002 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]\n\npub enum Authority {\n\n Authoritative,\n\n AuthorityLossImminent,\n\n NotAuthoritative,\n\n}\n\n\n\nimpl Authority {\n\n pub fn has_authority(self) -> bool {\n\n self != Authority::NotAuthoritative\n\n }\n\n}\n\n\n\nimpl From<u8> for Authority {\n\n fn from(auth: u8) -> Self {\n\n match auth {\n\n 0 => Authority::NotAuthoritative,\n", "file_path": "spatialos-sdk/src/worker/mod.rs", "rank": 59, "score": 28.894362161268905 }, { "content": " 1 => Authority::Authoritative,\n\n 2 => Authority::AuthorityLossImminent,\n\n _ => panic!(\"Unknown authority state: {}\", auth),\n\n }\n\n }\n\n}\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd)]\n\npub enum LogLevel {\n\n Debug,\n\n Info,\n\n Warn,\n\n Error,\n\n Fatal,\n\n}\n\n\n\nimpl LogLevel {\n\n fn to_worker_sdk(self) -> u8 {\n\n match self {\n\n LogLevel::Debug => 1,\n\n LogLevel::Info => 2,\n", "file_path": "spatialos-sdk/src/worker/mod.rs", "rank": 61, "score": 26.734139008966935 }, { "content": " }\n\n}\n\n\n\ninventory::submit!(VTable::new::<Position>());\n\n\n\n\n\n\n\npub mod restricted {\n\nuse spatialos_sdk::worker::internal::schema::*;\n\nuse spatialos_sdk::worker::component::*;\n\nuse std::collections::BTreeMap;\n\n\n\nuse super::super::super::generated as generated;\n\n\n\n/* Enums. 
*/\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum Connection_ConnectionStatus {\n\n\n\n UNKNOWN,\n\n AWAITING_WORKER_CONNECTION,\n", "file_path": "project-example/src/generated.rs", "rank": 62, "score": 26.316304342032467 }, { "content": "use std::marker::PhantomData;\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Default)]\n\npub struct EntityId {\n\n pub id: i64,\n\n}\n\n\n\nimpl EntityId {\n\n pub fn new(id: i64) -> EntityId {\n\n EntityId { id }\n\n }\n\n\n\n pub fn is_valid(self) -> bool {\n\n self.id > 0\n\n }\n\n}\n\n\n\nimpl Display for EntityId {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {\n\n write!(f, \"EntityId: {}\", self.id)\n", "file_path": "spatialos-sdk/src/worker/mod.rs", "rank": 63, "score": 25.515734267253016 }, { "content": "}\n\nimpl Default for Worker_FlagUpdateOp {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" Data for a log message from the SDK.\"]\n\n#[doc = \" Note: Worker_LogMessageOp has been deprecated and will be removed in a future version of\"]\n\n#[doc = \" SpatialOS.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_LogMessageOp {\n\n #[doc = \" The severity of the log message; defined in the Worker_LogLevel enumeration.\"]\n\n pub level: u8,\n\n #[doc = \" The message.\"]\n\n pub message: *const ::std::os::raw::c_char,\n\n}\n\nimpl Default for Worker_LogMessageOp {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 64, "score": 24.26522670149743 }, { "content": " }\n\n}\n\n#[doc = \" Tuning parameters for configuring protocol logging in the SDK. 
Used by\"]\n\n#[doc = \" Worker_ConnectionParameters.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_ProtocolLoggingParameters {\n\n #[doc = \" Log file names are prefixed with this prefix, are numbered, and have the extension .log.\"]\n\n pub log_prefix: *const ::std::os::raw::c_char,\n\n #[doc = \" Maximum number of log files to keep. Note that logs from any previous protocol logging\"]\n\n #[doc = \" sessions will be overwritten.\"]\n\n pub max_log_files: u32,\n\n #[doc = \" Once the size of a log file reaches this size, a new log file is created.\"]\n\n pub max_log_file_size_bytes: u32,\n\n}\n\nimpl Default for Worker_ProtocolLoggingParameters {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 65, "score": 23.43249053794343 }, { "content": " pub login_token: Worker_LoginTokenCredentials,\n\n #[doc = \" Parameters used if the WORKER_STEAM_CREDENTIALS flag is set.\"]\n\n pub steam: Worker_SteamCredentials,\n\n #[doc = \" The player identity token/login token pair used for authentication.\"]\n\n pub player_identity: Worker_PlayerIdentityCredentials,\n\n #[doc = \" Whether to use an insecure (non-TLS) connection for local development.\"]\n\n pub use_insecure_connection: u8,\n\n #[doc = \" Parameters for configuring logging.\"]\n\n pub logging: Worker_ProtocolLoggingParameters,\n\n #[doc = \" Whether to enable logging for the Locator flow.\"]\n\n pub enable_logging: u8,\n\n}\n\nimpl Default for Worker_LocatorParameters {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" Details of a specific deployment obtained via Worker_Locator_GetDeploymentListAsync.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 66, "score": 22.839887281540463 }, { "content": " }\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_OrConstraint {\n\n pub 
constraint_count: u32,\n\n pub constraints: *mut Worker_Constraint,\n\n}\n\nimpl Default for Worker_OrConstraint {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_NotConstraint {\n\n pub constraint: *mut Worker_Constraint,\n\n}\n\nimpl Default for Worker_NotConstraint {\n\n fn default() -> Self {\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 67, "score": 22.7033163020236 }, { "content": " #[doc = \" The severity of the log message; defined in the Worker_LogLevel enumeration.\"]\n\n pub level: u8,\n\n #[doc = \" The name of the logger.\"]\n\n pub logger_name: *const ::std::os::raw::c_char,\n\n #[doc = \" The full log message.\"]\n\n pub message: *const ::std::os::raw::c_char,\n\n #[doc = \" The ID of the entity this message relates to, or NULL for none.\"]\n\n pub entity_id: *const Worker_EntityId,\n\n}\n\nimpl Default for Worker_LogMessage {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" Parameters for a gauge metric.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_GaugeMetric {\n\n pub key: *const ::std::os::raw::c_char,\n\n pub value: f64,\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 68, "score": 22.452280269024136 }, { "content": " }\n\n}\n\n\n\n#[derive(Derivative)]\n\n#[derivative(\n\n Debug(bound = \"\"),\n\n Copy(bound = \"\"),\n\n Clone(bound = \"\"),\n\n PartialEq(bound = \"\"),\n\n Eq(bound = \"\"),\n\n Hash(bound = \"\")\n\n)]\n\npub struct RequestId<T> {\n\n id: i64,\n\n _type: PhantomData<*const T>,\n\n}\n\n\n\nimpl<T> Ord for RequestId<T> {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n self.id.cmp(&other.id)\n", "file_path": "spatialos-sdk/src/worker/mod.rs", "rank": 69, "score": 22.403765686312468 }, { "content": "}\n\nimpl Default for Worker_SteamCredentials {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" Parameters for 
authenticating using a Player Identity Token and Login Token.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_PlayerIdentityCredentials {\n\n #[doc = \" Authenticates a user to a single deployment. Obtained from a game authentication server\"]\n\n #[doc = \" using a PIT.\"]\n\n pub player_identity_token: *const ::std::os::raw::c_char,\n\n #[doc = \" Uniquely identifies a user across deployments, and is provided by a game authentication\"]\n\n #[doc = \" server.\"]\n\n pub login_token: *const ::std::os::raw::c_char,\n\n}\n\nimpl Default for Worker_PlayerIdentityCredentials {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 70, "score": 22.244807722869826 }, { "content": "impl ConnectionParameters {\n\n pub fn new<T: AsRef<str>>(worker_type: T) -> Self {\n\n let mut params = ConnectionParameters::default();\n\n params.worker_type =\n\n CString::new(worker_type.as_ref()).expect(\"`worker_type` contains a null byte\");\n\n params\n\n }\n\n\n\n pub fn with_protocol_logging<T: AsRef<str>>(mut self, log_prefix: T) -> Self {\n\n self.enable_protocol_logging_at_startup = true;\n\n self.protocol_logging.log_prefix =\n\n CString::new(log_prefix.as_ref()).expect(\"`log_prefix` contained a null byte\");\n\n self\n\n }\n\n\n\n pub fn using_tcp(self) -> Self {\n\n self.using_tcp_with_params(TcpNetworkParameters::default())\n\n }\n\n\n\n pub fn using_tcp_with_params(mut self, params: TcpNetworkParameters) -> Self {\n", "file_path": "spatialos-sdk/src/worker/parameters.rs", "rank": 71, "score": 22.038505660940313 }, { "content": "#[derive(Debug, Copy, Clone)]\n\npub struct Worker_LoginTokenCredentials {\n\n #[doc = \" The token would typically be provided on the command-line by the SpatialOS launcher.\"]\n\n pub token: *const ::std::os::raw::c_char,\n\n}\n\nimpl Default for Worker_LoginTokenCredentials {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc 
= \" Parameters for authenticating using Steam credentials.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_SteamCredentials {\n\n #[doc = \" Steam ticket for the steam app ID and publisher key corresponding to the project name specified\"]\n\n #[doc = \" in the Worker_LocatorParameters. Typically obtained from the steam APIs.\"]\n\n pub ticket: *const ::std::os::raw::c_char,\n\n #[doc = \" Deployment tag to request access for. If non-empty, must match the following regex:\"]\n\n #[doc = \" [A-Za-z0-9][A-Za-z0-9_]*\"]\n\n pub deployment_tag: *const ::std::os::raw::c_char,\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 72, "score": 22.014239572406595 }, { "content": "impl Default for Worker_CommandResponse {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" An object used to represent a component data snapshot by either raw schema data or some\"]\n\n#[doc = \" user-defined handle type.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_ComponentData {\n\n pub reserved: *mut ::std::os::raw::c_void,\n\n pub component_id: Worker_ComponentId,\n\n pub schema_type: *mut Schema_ComponentData,\n\n pub user_handle: *mut Worker_ComponentDataHandle,\n\n}\n\nimpl Default for Worker_ComponentData {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 73, "score": 21.842931662971278 }, { "content": " /// Resolves the unresolved path into an absolute path.\n\n fn resolve_path(&self, unresolved_path: &str) -> String {\n\n let path = Path::new(unresolved_path);\n\n\n\n if path.is_absolute() {\n\n return path.to_str().unwrap().to_owned();\n\n }\n\n\n\n let mut cwd = ::std::env::current_dir().unwrap();\n\n cwd.push(path);\n\n\n\n cwd.to_str().unwrap().to_owned()\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum 
BuildProfile {\n\n Debug,\n\n Release,\n\n}\n", "file_path": "cargo-spatial/src/config.rs", "rank": 74, "score": 21.474657682157847 }, { "content": " pub error: *const ::std::os::raw::c_char,\n\n}\n\nimpl Default for Worker_QueueStatus {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" Component update parameters. Used to modify the behaviour of a component update request.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Default, Copy, Clone)]\n\npub struct Worker_UpdateParameters {\n\n #[doc = \" Controls how the update is sent back to the worker from which it was sent. Defined in the\"]\n\n #[doc = \" Worker_ComponentUpdateLoopback enumeration.\"]\n\n pub loopback: u8,\n\n}\n\n#[doc = \" Command parameters. Used to modify the behaviour of a command request.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Default, Copy, Clone)]\n\npub struct Worker_CommandParameters {\n\n #[doc = \" Allow command requests to bypass the bridge when this worker is authoritative over the target\"]\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 75, "score": 21.409560666363674 }, { "content": " pub use_insecure_connection: u8,\n\n}\n\nimpl Default for Worker_Alpha_PlayerIdentityTokenRequest {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" The result of creating a player identity token.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_Alpha_PlayerIdentityTokenResponse {\n\n #[doc = \" The returned player identity token.\"]\n\n pub player_identity_token: *const ::std::os::raw::c_char,\n\n #[doc = \" The status code and a human readable description of the status of the request.\"]\n\n pub status: Worker_ConnectionStatus,\n\n}\n\nimpl Default for Worker_Alpha_PlayerIdentityTokenResponse {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 76, "score": 21.254693077511018 }, { "content": "use crate::worker::component::{self, 
Component, ComponentId, DATABASE};\n\nuse crate::worker::internal::schema::SchemaComponentData;\n\nuse spatialos_sdk_sys::worker::{Schema_DestroyComponentData, Worker_ComponentData, Worker_Entity};\n\nuse std::collections::HashMap;\n\nuse std::ptr;\n\nuse std::slice;\n\n\n\n#[derive(Debug)]\n\npub struct Entity {\n\n components: HashMap<ComponentId, Worker_ComponentData>,\n\n}\n\n\n\nimpl Entity {\n\n pub fn new() -> Self {\n\n Entity::default()\n\n }\n\n\n\n pub(crate) unsafe fn from_worker_sdk(raw_entity: &Worker_Entity) -> Result<Self, String> {\n\n let mut entity = Entity::new();\n\n\n", "file_path": "spatialos-sdk/src/worker/entity.rs", "rank": 77, "score": 21.221492182109113 }, { "content": "}\n\nimpl Default for Worker_GaugeMetric {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Default, Copy, Clone)]\n\npub struct Worker_HistogramMetricBucket {\n\n pub upper_bound: f64,\n\n pub samples: u32,\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_HistogramMetric {\n\n pub key: *const ::std::os::raw::c_char,\n\n pub sum: f64,\n\n pub bucket_count: u32,\n\n pub buckets: *const Worker_HistogramMetricBucket,\n\n}\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 78, "score": 20.72244843032482 }, { "content": " }\n\n\n\n pub fn with_logging(self) -> Self {\n\n self.with_logging_params(ProtocolLoggingParameters::default())\n\n }\n\n\n\n pub fn with_logging_params(mut self, params: ProtocolLoggingParameters) -> Self {\n\n self.logging = Some(params);\n\n self\n\n }\n\n}\n\n\n\npub struct PlayerIdentityCredentials {\n\n player_identity_token: CString,\n\n login_token: CString,\n\n}\n\n\n\nimpl PlayerIdentityCredentials {\n\n pub fn new<S: AsRef<str>, T: AsRef<str>>(pit: S, token: T) -> Self {\n\n PlayerIdentityCredentials {\n", "file_path": "spatialos-sdk/src/worker/locator.rs", "rank": 79, "score": 20.27803428133916 }, { "content": "#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub 
struct Worker_Constraint {\n\n #[doc = \" The type of constraint, defined using Worker_ConstraintType.\"]\n\n pub constraint_type: u8,\n\n #[doc = \" Union with fields corresponding to each constraint type.\"]\n\n pub constraint: Worker_Constraint_Union,\n\n}\n\nimpl Default for Worker_Constraint {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\npub const Worker_ResultType_WORKER_RESULT_TYPE_COUNT: Worker_ResultType = 1;\n\npub const Worker_ResultType_WORKER_RESULT_TYPE_SNAPSHOT: Worker_ResultType = 2;\n\npub type Worker_ResultType = i32;\n\n#[doc = \" An entity query.\"]\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n\npub struct Worker_EntityQuery {\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 80, "score": 20.209256251040372 }, { "content": "pub struct Worker_Deployment {\n\n #[doc = \" Name of the deployment.\"]\n\n pub deployment_name: *const ::std::os::raw::c_char,\n\n #[doc = \" The name of the assembly used by this deployment.\"]\n\n pub assembly_name: *const ::std::os::raw::c_char,\n\n #[doc = \" Description of the deployment.\"]\n\n pub description: *const ::std::os::raw::c_char,\n\n #[doc = \" Number of users currently connected to the deployment.\"]\n\n pub users_connected: u32,\n\n #[doc = \" Total user capacity of the deployment.\"]\n\n pub users_capacity: u32,\n\n}\n\nimpl Default for Worker_Deployment {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" A deployment list obtained via Worker_Locator_GetDeploymentListAsync.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 81, "score": 20.11543162654935 }, { "content": " use_insecure_connection: self.use_insecure_connection as u8,\n\n logging: match self.logging {\n\n Some(ref params) => params.to_worker_sdk(),\n\n None => ProtocolLoggingParameters::default().to_worker_sdk(),\n\n },\n\n enable_logging: self.logging.is_some() as u8,\n\n }\n\n }\n\n\n\n pub fn 
new(credentials: PlayerIdentityCredentials) -> Self {\n\n LocatorParameters {\n\n credentials,\n\n use_insecure_connection: false,\n\n logging: None,\n\n }\n\n }\n\n\n\n pub fn with_insecure_connection(mut self) -> Self {\n\n self.use_insecure_connection = true;\n\n self\n", "file_path": "spatialos-sdk/src/worker/locator.rs", "rank": 82, "score": 19.991306902441092 }, { "content": "pub struct Worker_ComponentConstraint {\n\n pub component_id: Worker_ComponentId,\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Default, Copy, Clone)]\n\npub struct Worker_SphereConstraint {\n\n pub x: f64,\n\n pub y: f64,\n\n pub z: f64,\n\n pub radius: f64,\n\n}\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_AndConstraint {\n\n pub constraint_count: u32,\n\n pub constraints: *mut Worker_Constraint,\n\n}\n\nimpl Default for Worker_AndConstraint {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 83, "score": 19.911759565500695 }, { "content": "/// # Examples\n\n///\n\n/// ```\n\n/// use spatialos_sdk::worker::commands::CommandParameters;\n\n/// use tap::*;\n\n///\n\n/// let params = CommandParameters::new()\n\n/// .tap(CommandParameters::allow_short_circuit);\n\n/// ```\n\n///\n\n/// [short-circuit]: https://docs.improbable.io/reference/14.1/shared/design/commands#component-commands\n\n/// [tap]: https://crates.io/crates/tap\n\n#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct CommandParameters {\n\n allow_short_circuit: bool,\n\n}\n\n\n\nimpl CommandParameters {\n\n /// Creates a new `CommandParameters` with default values for all parameters.\n\n pub fn new() -> CommandParameters {\n", "file_path": "spatialos-sdk/src/worker/commands.rs", "rank": 84, "score": 19.75909911382676 }, { "content": " pub op: Worker_Op_Union,\n\n}\n\nimpl Default for Worker_Op {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" An op list, usually returned by 
Worker_Connection_GetOpList.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_OpList {\n\n pub ops: *mut Worker_Op,\n\n pub op_count: u32,\n\n}\n\nimpl Default for Worker_OpList {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\npub const Worker_NetworkSecurityType_WORKER_NETWORK_SECURITY_TYPE_INSECURE:\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 85, "score": 19.72826548034094 }, { "content": " #[doc = \" Whether the worker is interested in this component.\"]\n\n pub is_interested: u8,\n\n}\n\n#[doc = \" Worker attributes that are part of a worker's runtime configuration.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_WorkerAttributes {\n\n #[doc = \" Number of worker attributes.\"]\n\n pub attribute_count: u32,\n\n #[doc = \" Will be NULL if there are no attributes associated with the worker.\"]\n\n pub attributes: *mut *const ::std::os::raw::c_char,\n\n}\n\nimpl Default for Worker_WorkerAttributes {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" Data for a disconnect message from the SDK.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 86, "score": 19.63989084964265 }, { "content": " _ => panic!(format!(\"Could not convert {} to enum TestEnum.\", value))\n\n }\n\n }\n\n}\n\n\n\nimpl TestEnum {\n\n pub(crate) fn as_u32(self) -> u32 {\n\n match self {\n\n \n\n TestEnum::FIRST => 0, \n\n TestEnum::SECOND => 1, \n\n }\n\n }\n\n}\n\n\n\n/* Types. 
*/\n\n#[derive(Debug, Clone)]\n\npub struct CommandData {\n\n pub value: i32,\n\n}\n", "file_path": "project-example/src/generated.rs", "rank": 87, "score": 19.554706518898865 }, { "content": " pub entity_id: Worker_EntityId,\n\n #[doc = \" Status code of the response, using Worker_StatusCode.\"]\n\n pub status_code: u8,\n\n #[doc = \" The error message.\"]\n\n pub message: *const ::std::os::raw::c_char,\n\n}\n\nimpl Default for Worker_DeleteEntityResponseOp {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" A response indicating the result of an entity query request.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_EntityQueryResponseOp {\n\n #[doc = \" The ID of the entity query request for which there was a response.\"]\n\n pub request_id: Worker_RequestId,\n\n #[doc = \" Status code of the response, using Worker_StatusCode.\"]\n\n pub status_code: u8,\n\n #[doc = \" The error message.\"]\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 88, "score": 19.531988246798647 }, { "content": " pub component_data_copy: Worker_ComponentDataCopy,\n\n pub component_data_deserialize: Worker_ComponentDataDeserialize,\n\n pub component_data_serialize: Worker_ComponentDataSerialize,\n\n pub component_update_free: Worker_ComponentUpdateFree,\n\n pub component_update_copy: Worker_ComponentUpdateCopy,\n\n pub component_update_deserialize: Worker_ComponentUpdateDeserialize,\n\n pub component_update_serialize: Worker_ComponentUpdateSerialize,\n\n}\n\nimpl Default for Worker_ComponentVtable {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" An object used to represent a command request by either raw schema data or some user-defined\"]\n\n#[doc = \" handle type.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_CommandRequest {\n\n pub reserved: *mut ::std::os::raw::c_void,\n\n pub component_id: Worker_ComponentId,\n", "file_path": 
"spatialos-sdk-sys/src/worker.rs", "rank": 89, "score": 19.504830318868482 }, { "content": " #[doc = \" entity-component.\"]\n\n pub allow_short_circuit: u8,\n\n}\n\n#[doc = \" Information about status of a network request.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_ConnectionStatus {\n\n #[doc = \" The status of the request. This value is a member of the enum Worker_ConnectionStatusCode.\"]\n\n pub code: u8,\n\n #[doc = \" Detailed, human readable description of the connection status.\"]\n\n #[doc = \" Will be \\\"OK\\\" if no error occurred.\"]\n\n pub detail: *const ::std::os::raw::c_char,\n\n}\n\nimpl Default for Worker_ConnectionStatus {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" The parameters used when creating a player identity token.\"]\n\n#[repr(C)]\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 90, "score": 19.480543371539834 }, { "content": "}\n\n#[doc = \" The parameters used when creating a login token.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_Alpha_LoginTokensRequest {\n\n #[doc = \" The player identity token of the player\"]\n\n pub player_identity_token: *const ::std::os::raw::c_char,\n\n #[doc = \" The worker type for which the requested LTs are scoped for.\"]\n\n pub worker_type: *const ::std::os::raw::c_char,\n\n #[doc = \" The lifetime duration of the requested LTs. 
This is an optional field.\"]\n\n #[doc = \" If the pointer is null, a default value of 15 minutes will be used.\"]\n\n pub duration_seconds: *const u32,\n\n #[doc = \" Whether to use an insecure (non-TLS) connection for local development.\"]\n\n #[doc = \" An insecure connection must be used when connecting to a local development login service.\"]\n\n #[doc = \" A secure connection must be used when connecting to a cloud development login service.\"]\n\n pub use_insecure_connection: u8,\n\n}\n\nimpl Default for Worker_Alpha_LoginTokensRequest {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 91, "score": 19.478443561177492 }, { "content": "use crate::config::Config;\n\nuse crate::format_arg;\n\nuse log::*;\n\nuse spatialos_sdk_code_generator::{generator, schema_bundle};\n\nuse std::fs::{self, File};\n\nuse std::io::prelude::*;\n\nuse std::path::*;\n\nuse std::process::Command;\n\n\n\n/// Performs code generation for the project described by `config`.\n\n///\n\n/// Assumes that the current working directory is the root directory of the project,\n\n/// i.e. 
the directory that has the `Spatial.toml` file.\n", "file_path": "cargo-spatial/src/codegen.rs", "rank": 93, "score": 18.709435389170586 }, { "content": " }\n\n}\n\n#[doc = \" Data for a set of built-in metrics reported by the SDK.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_MetricsOp {\n\n pub metrics: Worker_Metrics,\n\n}\n\nimpl Default for Worker_MetricsOp {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" Data for a critical section boundary (enter or leave) operation.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Default, Copy, Clone)]\n\npub struct Worker_CriticalSectionOp {\n\n #[doc = \" Whether the protocol is entering a critical section (true) or leaving it (false).\"]\n\n pub in_critical_section: u8,\n\n}\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 94, "score": 18.681051495560816 }, { "content": " pub request_id: Worker_RequestId,\n\n #[doc = \" Status code of the response, using Worker_StatusCode.\"]\n\n pub status_code: u8,\n\n #[doc = \" The error message.\"]\n\n pub message: *const ::std::os::raw::c_char,\n\n #[doc = \" If successful, the entity ID of the newly created entity.\"]\n\n pub entity_id: Worker_EntityId,\n\n}\n\nimpl Default for Worker_CreateEntityResponseOp {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" Data for a DeleteEntity operation.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_DeleteEntityResponseOp {\n\n #[doc = \" The ID of the delete entity request for which there was a command response.\"]\n\n pub request_id: Worker_RequestId,\n\n #[doc = \" The ID of the target entity of this request.\"]\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 95, "score": 18.276806845531304 }, { "content": " pub request: Worker_CommandRequest,\n\n}\n\nimpl Default for Worker_CommandRequestOp {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" Data for a 
CommandResponse operation.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_CommandResponseOp {\n\n #[doc = \" The ID of the command request for which there was a command response.\"]\n\n pub request_id: Worker_RequestId,\n\n #[doc = \" The ID of the entity originally targeted by the command request.\"]\n\n pub entity_id: Worker_EntityId,\n\n #[doc = \" Status code of the response, using Worker_StatusCode.\"]\n\n pub status_code: u8,\n\n #[doc = \" The error message.\"]\n\n pub message: *const ::std::os::raw::c_char,\n\n #[doc = \" The command response data. Deserialized with the corresponding vtable deserialize function and\"]\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 96, "score": 18.27680684553131 }, { "content": " log_prefix: CString,\n\n max_log_files: u32,\n\n max_log_file_size_bytes: u32,\n\n}\n\n\n\nimpl ProtocolLoggingParameters {\n\n pub fn new() -> Self {\n\n Default::default()\n\n }\n\n\n\n /// Sets the prefix string to be used for log file names.\n\n ///\n\n /// # Panics\n\n ///\n\n /// This will panic if `prefix` contains a 0 byte. 
This is a requirement imposed\n\n /// by the underlying SpatialOS API.\n\n pub fn set_prefix<T: AsRef<str>>(&mut self, prefix: T) {\n\n self.log_prefix = CString::new(prefix.as_ref()).expect(\"`prefix` contained a null byte\");\n\n }\n\n\n", "file_path": "spatialos-sdk/src/worker/parameters.rs", "rank": 97, "score": 18.255431016580182 }, { "content": " pub command_index: Worker_CommandIndex,\n\n pub schema_type: *mut Schema_CommandRequest,\n\n pub user_handle: *mut Worker_CommandRequestHandle,\n\n}\n\nimpl Default for Worker_CommandRequest {\n\n fn default() -> Self {\n\n unsafe { ::std::mem::zeroed() }\n\n }\n\n}\n\n#[doc = \" An object used to represent a command response by either raw schema data or some user-defined\"]\n\n#[doc = \" handle type.\"]\n\n#[repr(C)]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Worker_CommandResponse {\n\n pub reserved: *mut ::std::os::raw::c_void,\n\n pub component_id: Worker_ComponentId,\n\n pub command_index: Worker_CommandIndex,\n\n pub schema_type: *mut Schema_CommandResponse,\n\n pub user_handle: *mut Worker_CommandResponseHandle,\n\n}\n", "file_path": "spatialos-sdk-sys/src/worker.rs", "rank": 98, "score": 18.17902694569161 }, { "content": "use crate::worker::{component::DATABASE, vtable};\n\nuse spatialos_sdk_sys::worker::*;\n\nuse std::{\n\n ffi::{CStr, CString},\n\n ptr,\n\n};\n\n\n\npub struct ConnectionParameters {\n\n pub worker_type: CString,\n\n pub network: NetworkParameters,\n\n pub send_queue_capacity: u32,\n\n pub receive_queue_capacity: u32,\n\n pub log_message_queue_capacity: u32,\n\n pub built_in_metrics_report_period_millis: u32,\n\n pub protocol_logging: ProtocolLoggingParameters,\n\n pub enable_protocol_logging_at_startup: bool,\n\n pub thread_affinity: ThreadAffinityParameters,\n\n use_internal_serialization: bool,\n\n}\n\n\n", "file_path": "spatialos-sdk/src/worker/parameters.rs", "rank": 99, "score": 18.155361911308024 } ]
Rust
rg3d-ui/src/check_box.rs
vigdail/rg3d
b65bfdab350f8c1d48bcc288a8449cc74653ef51
use crate::grid::{Column, GridBuilder, Row}; use crate::message::MessageDirection; use crate::vector_image::{Primitive, VectorImageBuilder}; use crate::{ border::BorderBuilder, brush::Brush, core::{color::Color, pool::Handle}, message::{CheckBoxMessage, UiMessage, UiMessageData, WidgetMessage}, widget::{Widget, WidgetBuilder}, BuildContext, Control, HorizontalAlignment, NodeHandleMapping, Thickness, UiNode, UserInterface, VerticalAlignment, BRUSH_BRIGHT, BRUSH_DARK, BRUSH_LIGHT, BRUSH_TEXT, }; use rg3d_core::algebra::Vector2; use std::any::Any; use std::ops::{Deref, DerefMut}; #[derive(Clone)] pub struct CheckBox { pub widget: Widget, pub checked: Option<bool>, pub check_mark: Handle<UiNode>, pub uncheck_mark: Handle<UiNode>, pub undefined_mark: Handle<UiNode>, } crate::define_widget_deref!(CheckBox); impl Control for CheckBox { fn as_any(&self) -> &dyn Any { self } fn as_any_mut(&mut self) -> &mut dyn Any { self } fn clone_boxed(&self) -> Box<dyn Control> { Box::new(self.clone()) } fn resolve(&mut self, node_map: &NodeHandleMapping) { node_map.resolve(&mut self.check_mark); node_map.resolve(&mut self.uncheck_mark); node_map.resolve(&mut self.undefined_mark); } fn handle_routed_message(&mut self, ui: &mut UserInterface, message: &mut UiMessage) { self.widget.handle_routed_message(ui, message); match message.data() { UiMessageData::Widget(ref msg) => { match msg { WidgetMessage::MouseDown { .. } => { if message.destination() == self.handle() || self.widget.has_descendant(message.destination(), ui) { ui.capture_mouse(self.handle()); } } WidgetMessage::MouseUp { .. 
} => { if message.destination() == self.handle() || self.widget.has_descendant(message.destination(), ui) { ui.release_mouse_capture(); if let Some(value) = self.checked { ui.send_message(CheckBoxMessage::checked( self.handle(), MessageDirection::ToWidget, Some(!value), )); } else { ui.send_message(CheckBoxMessage::checked( self.handle(), MessageDirection::ToWidget, Some(true), )); } } } _ => (), } } &UiMessageData::CheckBox(CheckBoxMessage::Check(value)) if message.direction() == MessageDirection::ToWidget && message.destination() == self.handle() => { if self.checked != value { self.checked = value; ui.send_message(message.reverse()); if self.check_mark.is_some() { match value { None => { ui.send_message(WidgetMessage::visibility( self.check_mark, MessageDirection::ToWidget, false, )); ui.send_message(WidgetMessage::visibility( self.uncheck_mark, MessageDirection::ToWidget, false, )); ui.send_message(WidgetMessage::visibility( self.undefined_mark, MessageDirection::ToWidget, true, )); } Some(value) => { ui.send_message(WidgetMessage::visibility( self.check_mark, MessageDirection::ToWidget, value, )); ui.send_message(WidgetMessage::visibility( self.uncheck_mark, MessageDirection::ToWidget, !value, )); ui.send_message(WidgetMessage::visibility( self.undefined_mark, MessageDirection::ToWidget, false, )); } } } } } _ => {} } } fn remove_ref(&mut self, handle: Handle<UiNode>) { if self.check_mark == handle { self.check_mark = Handle::NONE; } if self.uncheck_mark == handle { self.uncheck_mark = Handle::NONE; } if self.undefined_mark == handle { self.undefined_mark = Handle::NONE; } } } pub struct CheckBoxBuilder { widget_builder: WidgetBuilder, checked: Option<bool>, check_mark: Option<Handle<UiNode>>, uncheck_mark: Option<Handle<UiNode>>, undefined_mark: Option<Handle<UiNode>>, content: Handle<UiNode>, } impl CheckBoxBuilder { pub fn new(widget_builder: WidgetBuilder) -> Self { Self { widget_builder, checked: Some(false), check_mark: None, uncheck_mark: None, 
undefined_mark: None, content: Handle::NONE, } } pub fn checked(mut self, value: Option<bool>) -> Self { self.checked = value; self } pub fn with_check_mark(mut self, check_mark: Handle<UiNode>) -> Self { self.check_mark = Some(check_mark); self } pub fn with_uncheck_mark(mut self, uncheck_mark: Handle<UiNode>) -> Self { self.uncheck_mark = Some(uncheck_mark); self } pub fn with_undefined_mark(mut self, undefined_mark: Handle<UiNode>) -> Self { self.undefined_mark = Some(undefined_mark); self } pub fn with_content(mut self, content: Handle<UiNode>) -> Self { self.content = content; self } pub fn build(self, ctx: &mut BuildContext) -> Handle<UiNode> { let check_mark = self.check_mark.unwrap_or_else(|| { VectorImageBuilder::new( WidgetBuilder::new() .with_vertical_alignment(VerticalAlignment::Center) .with_horizontal_alignment(HorizontalAlignment::Center) .with_foreground(BRUSH_TEXT), ) .with_primitives(vec![ Primitive::Line { begin: Vector2::new(0.0, 6.0), end: Vector2::new(6.0, 12.0), thickness: 2.0, }, Primitive::Line { begin: Vector2::new(6.0, 12.0), end: Vector2::new(12.0, 0.0), thickness: 2.0, }, ]) .build(ctx) }); ctx[check_mark].set_visibility(self.checked.unwrap_or(false)); let uncheck_mark = self.uncheck_mark.unwrap_or_else(|| { BorderBuilder::new( WidgetBuilder::new() .with_background(Brush::Solid(Color::TRANSPARENT)) .with_foreground(Brush::Solid(Color::TRANSPARENT)), ) .build(ctx) }); ctx[uncheck_mark].set_visibility(!self.checked.unwrap_or(true)); let undefined_mark = self.undefined_mark.unwrap_or_else(|| { BorderBuilder::new( WidgetBuilder::new() .with_margin(Thickness::uniform(1.0)) .with_background(BRUSH_BRIGHT) .with_foreground(Brush::Solid(Color::TRANSPARENT)), ) .build(ctx) }); ctx[undefined_mark].set_visibility(self.checked.is_none()); if self.content.is_some() { ctx[self.content].set_row(0).set_column(1); } let grid = GridBuilder::new( WidgetBuilder::new() .with_child( BorderBuilder::new( WidgetBuilder::new() .with_child(check_mark) 
.with_child(uncheck_mark) .with_child(undefined_mark) .with_background(BRUSH_DARK) .with_foreground(BRUSH_LIGHT), ) .with_stroke_thickness(Thickness::uniform(1.0)) .build(ctx), ) .with_child(self.content), ) .add_row(Row::stretch()) .add_column(Column::strict(20.0)) .add_column(Column::stretch()) .build(ctx); let cb = CheckBox { widget: self.widget_builder.with_child(grid).build(), checked: self.checked, check_mark, uncheck_mark, undefined_mark, }; ctx.add_node(UiNode::new(cb)) } } #[cfg(test)] mod test { use crate::{ check_box::CheckBoxBuilder, core::algebra::Vector2, message::{CheckBoxMessage, MessageDirection}, widget::WidgetBuilder, UserInterface, }; #[test] fn check_box() { let mut ui = UserInterface::new(Vector2::new(1000.0, 1000.0)); assert_eq!(ui.poll_message(), None); let check_box = CheckBoxBuilder::new(WidgetBuilder::new()).build(&mut ui.build_ctx()); assert_eq!(ui.poll_message(), None); let input_message = CheckBoxMessage::checked(check_box, MessageDirection::ToWidget, Some(true)); ui.send_message(input_message.clone()); assert_eq!(ui.poll_message(), Some(input_message.clone())); assert_eq!(ui.poll_message(), Some(input_message.reverse())); } }
use crate::grid::{Column, GridBuilder, Row}; use crate::message::MessageDirection; use crate::vector_image::{Primitive, VectorImageBuilder}; use crate::{ border::BorderBuilder, brush::Brush, core::{color::Color, pool::Handle}, message::{CheckBoxMessage, UiMessage, UiMessageData, WidgetMessage}, widget::{Widget, WidgetBuilder}, BuildContext, Control, HorizontalAlignment, NodeHandleMapping, Thickness, UiNode, UserInterface, VerticalAlignment, BRUSH_BRIGHT, BRUSH_DARK, BRUSH_LIGHT, BRUSH_TEXT, }; use rg3d_core::algebra::Vector2; use std::any::Any; use std::ops::{Deref, DerefMut}; #[derive(Clone)] pub struct CheckBox { pub widget: Widget, pub checked: Option<bool>, pub check_mark: Handle<UiNode>, pub uncheck_mark: Handle<UiNode>, pub undefined_mark: Handle<UiNode>, } crate::define_widget_deref!(CheckBox); impl Control for CheckBox { fn as_any(&self) -> &dyn Any { self } fn as_any_mut(&mut self) -> &mut dyn Any { self } fn clone_boxed(&self) -> Box<dyn Control> { Box::new(self.clone()) } fn resolve(&mut self, node_map: &NodeHandleMapping) { node_map.resolve(&mut self.check_mark); node_map.resolve(&mut self.uncheck_mark); node_map.resolve(&mut self.undefined_mark); } fn handle_routed_message(&mut self, ui: &mut UserInterface, message: &mut UiMessage) { self.widget.handle_routed_message(ui, message); match message.data() { UiMessageData::Widget(ref msg) => { match msg { WidgetMessage::MouseDown { .. } => { if message.destination() == self.handle() || self.widget.has_descendant(message.destination(), ui) { ui.capture_mouse(self.handle()); } } WidgetMessage::MouseUp { .. 
} => { if message.destination() == self.handle() || self.widget.has_descendant(message.destination(), ui) { ui.release_mouse_capture(); if let Some(value) = self.checked { ui.send_message(CheckBoxMessage::checked( self.handle(), MessageDirection::ToWidget, Some(!value), )); } else { ui.send_message(CheckBoxMessage::checked( self.handle(), MessageDirection::ToWidget, Some(true), )); } } } _ => (), } } &UiMessageData::CheckBox(CheckBoxMessage::Check(value)) if message.direction() == MessageDirection::ToWidget && message.destination() == self.handle() => { if self.checked != value { self.checked = value; ui.send_message(message.reverse()); if self.check_mark.is_some() { match value { None => { ui.send_message(WidgetMessage::visibility( self.check_mark, MessageDirection::ToWidget, false, )); ui.send_message(WidgetMessage::visibility( self.uncheck_mark, MessageDirection::ToWidget, false, )); ui.send_message(WidgetMessage::visibility( self.undefined_mark, MessageDirection::ToWidget, true, )); } Some(value) => { ui.send_message(WidgetMessage::visibility( self.check_mark, MessageDirection::ToWidget, value, )); ui.send_message(WidgetMessage::visibility( self.uncheck_mark, MessageDirection::ToWidget, !value, )); ui.send_message(WidgetMessage::visibility( self.undefined_mark, MessageDirection::ToWidget, false, )); } } } } } _ => {} } } fn remove_ref(&mut self, handle: Handle<UiNode>) { if self.check_mark == handle { self.check_mark = Handle::NONE; } if self.uncheck_mark == handle { self.uncheck_mark = Handle::NONE; } if self.undefined_mark == handle { self.undefined_mark = Handle::NONE; } } } pub struct CheckBoxBuilder { widget_builder: WidgetBuilder, checked: Option<bool>, check_mark: Option<Handle<UiNode>>, uncheck_mark: Option<Handle<UiNode>>, undefined_mark: Option<Handle<UiNode>>, content: Handle<UiNode>, } impl CheckBoxBuilder { pub fn new(widget_builder: WidgetBuilder) -> Self { Self { widget_builder, checked: Some(false), check_mark: None, uncheck_mark: None, 
undefined_mark: None, content: Handle::NONE, } } pub fn checked(mut self, value: Option<bool>) -> Self { self.checked = value; self } pub fn with_check_mark(mut self, check_mark: Handle<UiNode>) -> Self { self.check_mark = Some(check_mark); self } pub fn with_uncheck_mark(mut self, uncheck_mark: Handle<UiNode>) -> Self { self.uncheck_mark = Some(uncheck_mark); self } pub fn with_undefined_mark(mut self, undefined_mark: Handle<UiNode>) -> Self { self.undefined_mark = Some(undefined_mark); self } pub fn with_content(mut self, content: Handle<UiNode>) -> Self { self.content = content; self }
} #[cfg(test)] mod test { use crate::{ check_box::CheckBoxBuilder, core::algebra::Vector2, message::{CheckBoxMessage, MessageDirection}, widget::WidgetBuilder, UserInterface, }; #[test] fn check_box() { let mut ui = UserInterface::new(Vector2::new(1000.0, 1000.0)); assert_eq!(ui.poll_message(), None); let check_box = CheckBoxBuilder::new(WidgetBuilder::new()).build(&mut ui.build_ctx()); assert_eq!(ui.poll_message(), None); let input_message = CheckBoxMessage::checked(check_box, MessageDirection::ToWidget, Some(true)); ui.send_message(input_message.clone()); assert_eq!(ui.poll_message(), Some(input_message.clone())); assert_eq!(ui.poll_message(), Some(input_message.reverse())); } }
pub fn build(self, ctx: &mut BuildContext) -> Handle<UiNode> { let check_mark = self.check_mark.unwrap_or_else(|| { VectorImageBuilder::new( WidgetBuilder::new() .with_vertical_alignment(VerticalAlignment::Center) .with_horizontal_alignment(HorizontalAlignment::Center) .with_foreground(BRUSH_TEXT), ) .with_primitives(vec![ Primitive::Line { begin: Vector2::new(0.0, 6.0), end: Vector2::new(6.0, 12.0), thickness: 2.0, }, Primitive::Line { begin: Vector2::new(6.0, 12.0), end: Vector2::new(12.0, 0.0), thickness: 2.0, }, ]) .build(ctx) }); ctx[check_mark].set_visibility(self.checked.unwrap_or(false)); let uncheck_mark = self.uncheck_mark.unwrap_or_else(|| { BorderBuilder::new( WidgetBuilder::new() .with_background(Brush::Solid(Color::TRANSPARENT)) .with_foreground(Brush::Solid(Color::TRANSPARENT)), ) .build(ctx) }); ctx[uncheck_mark].set_visibility(!self.checked.unwrap_or(true)); let undefined_mark = self.undefined_mark.unwrap_or_else(|| { BorderBuilder::new( WidgetBuilder::new() .with_margin(Thickness::uniform(1.0)) .with_background(BRUSH_BRIGHT) .with_foreground(Brush::Solid(Color::TRANSPARENT)), ) .build(ctx) }); ctx[undefined_mark].set_visibility(self.checked.is_none()); if self.content.is_some() { ctx[self.content].set_row(0).set_column(1); } let grid = GridBuilder::new( WidgetBuilder::new() .with_child( BorderBuilder::new( WidgetBuilder::new() .with_child(check_mark) .with_child(uncheck_mark) .with_child(undefined_mark) .with_background(BRUSH_DARK) .with_foreground(BRUSH_LIGHT), ) .with_stroke_thickness(Thickness::uniform(1.0)) .build(ctx), ) .with_child(self.content), ) .add_row(Row::stretch()) .add_column(Column::strict(20.0)) .add_column(Column::stretch()) .build(ctx); let cb = CheckBox { widget: self.widget_builder.with_child(grid).build(), checked: self.checked, check_mark, uncheck_mark, undefined_mark, }; ctx.add_node(UiNode::new(cb)) }
function_block-full_function
[ { "content": "pub fn make_default_anchor(ctx: &mut BuildContext, row: usize, column: usize) -> Handle<UiNode> {\n\n let default_anchor_size = 30.0;\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(default_anchor_size)\n\n .with_height(default_anchor_size)\n\n .with_visibility(false)\n\n .on_row(row)\n\n .on_column(column)\n\n .with_draw_on_top(true)\n\n .with_background(Brush::Solid(DEFAULT_ANCHOR_COLOR)),\n\n )\n\n .build(ctx)\n\n}\n\n\n\nimpl TileBuilder {\n\n pub fn new(widget_builder: WidgetBuilder) -> Self {\n\n Self {\n\n widget_builder,\n\n content: TileContent::Empty,\n", "file_path": "rg3d-ui/src/dock.rs", "rank": 0, "score": 376216.7833941174 }, { "content": "pub fn make_button(ctx: &mut BuildContext, arrow: ArrowDirection, row: usize) -> Handle<UiNode> {\n\n ButtonBuilder::new(\n\n WidgetBuilder::new()\n\n .with_margin(Thickness::right(1.0))\n\n .on_row(row),\n\n )\n\n .with_back(\n\n DecoratorBuilder::new(BorderBuilder::new(\n\n WidgetBuilder::new().with_foreground(Brush::Solid(Color::opaque(90, 90, 90))),\n\n ))\n\n .with_normal_brush(Brush::Solid(Color::opaque(60, 60, 60)))\n\n .with_hover_brush(Brush::Solid(Color::opaque(80, 80, 80)))\n\n .with_pressed_brush(Brush::Solid(Color::opaque(80, 118, 178)))\n\n .build(ctx),\n\n )\n\n .with_content(make_arrow(ctx, arrow, 6.0))\n\n .build(ctx)\n\n}\n\n\n\nimpl NumericUpDownBuilder {\n", "file_path": "rg3d-ui/src/numeric.rs", "rank": 1, "score": 376216.7833941174 }, { "content": "/// Trait for all UI controls in library.\n\npub trait Control: 'static + Deref<Target = Widget> + DerefMut {\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n fn as_any_mut(&mut self) -> &mut dyn Any;\n\n\n\n fn clone_boxed(&self) -> Box<dyn Control>;\n\n\n\n fn resolve(&mut self, _node_map: &NodeHandleMapping) {}\n\n\n\n fn measure_override(&self, ui: &UserInterface, available_size: Vector2<f32>) -> Vector2<f32> {\n\n scope_profile!();\n\n\n\n self.deref().measure_override(ui, available_size)\n\n }\n\n\n\n fn 
arrange_override(&self, ui: &UserInterface, final_size: Vector2<f32>) -> Vector2<f32> {\n\n scope_profile!();\n\n\n\n self.deref().arrange_override(ui, final_size)\n\n }\n", "file_path": "rg3d-ui/src/lib.rs", "rank": 2, "score": 347255.366194612 }, { "content": "pub fn make_numeric_input(ctx: &mut BuildContext, column: usize, value: f32) -> Handle<UiNode> {\n\n NumericUpDownBuilder::new(\n\n WidgetBuilder::new()\n\n .on_row(0)\n\n .on_column(column)\n\n .with_margin(Thickness {\n\n left: 1.0,\n\n top: 0.0,\n\n right: 1.0,\n\n bottom: 0.0,\n\n }),\n\n )\n\n .with_value(value)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "rg3d-ui/src/vec/mod.rs", "rank": 3, "score": 347019.0735168791 }, { "content": "pub fn create_ui(ui: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let progress_bar;\n\n let progress_text;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n .with_height(screen_size.y)\n\n .with_child({\n\n debug_text = TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0))\n\n .with_wrap(WrapMode::Word)\n\n .build(ui);\n\n debug_text\n\n })\n\n .with_child({\n\n progress_bar =\n\n ProgressBarBuilder::new(WidgetBuilder::new().on_row(1).on_column(1)).build(ui);\n\n progress_bar\n\n })\n\n .with_child({\n", "file_path": "examples/shared/mod.rs", "rank": 4, "score": 316273.96678804676 }, { "content": "fn make_text_mark(ctx: &mut BuildContext, text: &str, row: usize, column: usize) -> Handle<UiNode> {\n\n TextBuilder::new(\n\n WidgetBuilder::new()\n\n .with_vertical_alignment(VerticalAlignment::Center)\n\n .on_row(row)\n\n .on_column(column),\n\n )\n\n .with_text(text)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "rg3d-ui/src/color.rs", "rank": 5, "score": 304733.4592713668 }, { "content": "fn create_ui(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new()).build(ctx)\n\n}\n\n\n", "file_path": "examples/lod.rs", "rank": 6, "score": 287615.84311128437 }, { 
"content": "fn create_ui(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new()).build(ctx)\n\n}\n\n\n", "file_path": "examples/navmesh.rs", "rank": 7, "score": 287615.84311128437 }, { "content": "fn create_ui(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new()).build(ctx)\n\n}\n\n\n", "file_path": "examples/scene.rs", "rank": 8, "score": 287615.84311128437 }, { "content": "fn create_ui(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new()).build(ctx)\n\n}\n\n\n\n#[wasm_bindgen]\n\nextern \"C\" {\n\n #[wasm_bindgen(js_namespace = console)]\n\n fn error(msg: String);\n\n\n\n type Error;\n\n\n\n #[wasm_bindgen(constructor)]\n\n fn new() -> Error;\n\n\n\n #[wasm_bindgen(structural, method, getter)]\n\n fn stack(error: &Error) -> String;\n\n}\n\n\n", "file_path": "examples/wasm/src/lib.rs", "rank": 9, "score": 280646.12463261327 }, { "content": "fn generate_item_container(ctx: &mut BuildContext, item: Handle<UiNode>) -> Handle<UiNode> {\n\n let item = ListViewItem {\n\n widget: WidgetBuilder::new().with_child(item).build(),\n\n };\n\n\n\n ctx.add_node(UiNode::new(item))\n\n}\n\n\n", "file_path": "rg3d-ui/src/list_view.rs", "rank": 10, "score": 272466.7799684746 }, { "content": "/// Translates window mouse button into rg3d-ui mouse button.\n\npub fn translate_button(button: crate::event::MouseButton) -> crate::gui::message::MouseButton {\n\n match button {\n\n crate::event::MouseButton::Left => crate::gui::message::MouseButton::Left,\n\n crate::event::MouseButton::Right => crate::gui::message::MouseButton::Right,\n\n crate::event::MouseButton::Middle => crate::gui::message::MouseButton::Middle,\n\n crate::event::MouseButton::Other(i) => crate::gui::message::MouseButton::Other(i),\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 11, "score": 268776.844024753 }, { "content": "/// Translates cursor icon from rg3d-ui library to glutin format.\n\npub fn 
translate_cursor_icon(icon: crate::gui::message::CursorIcon) -> crate::window::CursorIcon {\n\n match icon {\n\n crate::gui::message::CursorIcon::Default => crate::window::CursorIcon::Default,\n\n crate::gui::message::CursorIcon::Crosshair => crate::window::CursorIcon::Crosshair,\n\n crate::gui::message::CursorIcon::Hand => crate::window::CursorIcon::Hand,\n\n crate::gui::message::CursorIcon::Arrow => crate::window::CursorIcon::Arrow,\n\n crate::gui::message::CursorIcon::Move => crate::window::CursorIcon::Move,\n\n crate::gui::message::CursorIcon::Text => crate::window::CursorIcon::Text,\n\n crate::gui::message::CursorIcon::Wait => crate::window::CursorIcon::Wait,\n\n crate::gui::message::CursorIcon::Help => crate::window::CursorIcon::Help,\n\n crate::gui::message::CursorIcon::Progress => crate::window::CursorIcon::Progress,\n\n crate::gui::message::CursorIcon::NotAllowed => crate::window::CursorIcon::NotAllowed,\n\n crate::gui::message::CursorIcon::ContextMenu => crate::window::CursorIcon::ContextMenu,\n\n crate::gui::message::CursorIcon::Cell => crate::window::CursorIcon::Cell,\n\n crate::gui::message::CursorIcon::VerticalText => crate::window::CursorIcon::VerticalText,\n\n crate::gui::message::CursorIcon::Alias => crate::window::CursorIcon::Alias,\n\n crate::gui::message::CursorIcon::Copy => crate::window::CursorIcon::Copy,\n\n crate::gui::message::CursorIcon::NoDrop => crate::window::CursorIcon::NoDrop,\n\n crate::gui::message::CursorIcon::Grab => crate::window::CursorIcon::Grab,\n\n crate::gui::message::CursorIcon::Grabbing => crate::window::CursorIcon::Grabbing,\n", "file_path": "src/utils/mod.rs", "rank": 12, "score": 265779.7514632906 }, { "content": "fn make_text_title(ctx: &mut BuildContext, text: &str) -> Handle<UiNode> {\n\n TextBuilder::new(\n\n WidgetBuilder::new()\n\n .with_margin(Thickness::uniform(5.0))\n\n .on_row(0)\n\n .on_column(0),\n\n )\n\n .with_text(text)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "rg3d-ui/src/window.rs", "rank": 13, 
"score": 263554.01949377905 }, { "content": "fn make_mark(ctx: &mut BuildContext, button: HeaderButton) -> Handle<UiNode> {\n\n VectorImageBuilder::new(\n\n WidgetBuilder::new()\n\n .with_horizontal_alignment(HorizontalAlignment::Center)\n\n .with_vertical_alignment(match button {\n\n HeaderButton::Close => VerticalAlignment::Center,\n\n HeaderButton::Minimize => VerticalAlignment::Bottom,\n\n })\n\n .with_margin(match button {\n\n HeaderButton::Close => Thickness::uniform(0.0),\n\n HeaderButton::Minimize => Thickness::bottom(3.0),\n\n })\n\n .with_foreground(BRUSH_BRIGHT),\n\n )\n\n .with_primitives(match button {\n\n HeaderButton::Close => {\n\n vec![\n\n Primitive::Line {\n\n begin: Vector2::new(0.0, 0.0),\n\n end: Vector2::new(12.0, 12.0),\n", "file_path": "rg3d-ui/src/window.rs", "rank": 14, "score": 263554.01949377905 }, { "content": "fn make_header_button(ctx: &mut BuildContext, button: HeaderButton) -> Handle<UiNode> {\n\n ButtonBuilder::new(WidgetBuilder::new().with_margin(Thickness::uniform(2.0)))\n\n .with_back(\n\n DecoratorBuilder::new(\n\n BorderBuilder::new(WidgetBuilder::new())\n\n .with_stroke_thickness(Thickness::uniform(0.0)),\n\n )\n\n .with_normal_brush(Brush::Solid(Color::TRANSPARENT))\n\n .with_hover_brush(BRUSH_LIGHT)\n\n .with_pressed_brush(BRUSH_LIGHTEST)\n\n .build(ctx),\n\n )\n\n .with_content(make_mark(ctx, button))\n\n .build(ctx)\n\n}\n\n\n\nimpl<'a> WindowBuilder {\n\n pub fn new(widget_builder: WidgetBuilder) -> Self {\n\n Self {\n\n widget_builder,\n", "file_path": "rg3d-ui/src/window.rs", "rank": 15, "score": 260579.46328660686 }, { "content": "fn create_section_header(ctx: &mut BuildContext, text: &str) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new().with_margin(Thickness::uniform(1.0)))\n\n .with_text(text)\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .build(ctx)\n\n}\n\n\n", "file_path": "rg3d-ui/src/inspector/mod.rs", "rank": 16, "score": 260579.46328660686 }, { "content": "fn 
mark_handled(message: UiMessage) -> UiMessage {\n\n message.set_handled(true);\n\n message\n\n}\n\n\n\nimpl ColorPicker {\n\n fn sync_fields(&self, ui: &mut UserInterface, color: Color, hsv: Hsv) {\n\n ui.send_message(mark_handled(NumericUpDownMessage::value(\n\n self.hue,\n\n MessageDirection::ToWidget,\n\n hsv.hue(),\n\n )));\n\n\n\n ui.send_message(mark_handled(NumericUpDownMessage::value(\n\n self.saturation,\n\n MessageDirection::ToWidget,\n\n hsv.saturation(),\n\n )));\n\n\n\n ui.send_message(mark_handled(NumericUpDownMessage::value(\n", "file_path": "rg3d-ui/src/color.rs", "rank": 17, "score": 254005.96975483073 }, { "content": "fn create_item_views(items: &[Item], ctx: &mut BuildContext) -> Vec<Handle<UiNode>> {\n\n items\n\n .iter()\n\n .enumerate()\n\n .map(|(n, item)| {\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .with_child(\n\n ExpanderBuilder::new(WidgetBuilder::new())\n\n .with_header(\n\n GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_vertical_text_alignment(\n\n VerticalAlignment::Center,\n\n )\n\n .with_text(format!(\"Item {}\", n))\n\n .build(ctx),\n\n )\n", "file_path": "rg3d-ui/src/inspector/editors/collection.rs", "rank": 18, "score": 251618.76260662643 }, { "content": "/// Performs hashing of a sized value by interpreting it as raw memory.\n\npub fn hash_as_bytes<T: Sized, H: Hasher>(value: &T, hasher: &mut H) {\n\n hasher.write(value_as_u8_slice(value))\n\n}\n", "file_path": "src/utils/mod.rs", "rank": 19, "score": 244819.44435717873 }, { "content": "pub fn wrapf(mut n: f32, mut min_limit: f32, mut max_limit: f32) -> f32 {\n\n if n >= min_limit && n <= max_limit {\n\n return n;\n\n }\n\n\n\n if max_limit == 0.0 && min_limit == 0.0 {\n\n return 0.0;\n\n }\n\n\n\n max_limit -= min_limit;\n\n\n\n let offset = min_limit;\n\n min_limit = 0.0;\n\n n -= offset;\n\n\n\n let num_of_max = (n / max_limit).abs().floor();\n\n\n\n if n >= max_limit {\n\n n -= num_of_max * 
max_limit;\n\n } else if n < min_limit {\n\n n += (num_of_max + 1.0) * max_limit;\n\n }\n\n\n\n n + offset\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 20, "score": 228154.8003304491 }, { "content": "pub fn check(err_code: c_int) -> Result<(), SoundError> {\n\n if err_code < 0 {\n\n Err(SoundError::FailedToInitializeDevice(err_code_to_string(\n\n err_code,\n\n )))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl AlsaSoundDevice {\n\n pub fn new<F: FnMut(&mut [(f32, f32)]) + Send + 'static>(\n\n buffer_len_bytes: u32,\n\n callback: F,\n\n ) -> Result<Self, SoundError> {\n\n unsafe {\n\n let name = CString::new(\"default\").unwrap();\n\n // 16-bit stereo is 4 bytes, so frame count is bufferHalfSize / 4\n\n let frame_count = buffer_len_bytes / 4;\n\n let mut playback_device = std::ptr::null_mut();\n", "file_path": "rg3d-sound/src/device/alsa.rs", "rank": 21, "score": 222998.92013909158 }, { "content": "pub fn fix_shadows_distance(mut quality: QualitySettings) -> QualitySettings {\n\n // Scale distance because game world has different scale.\n\n quality.spot_shadows_distance *= 2.0;\n\n quality.point_shadows_distance *= 2.0;\n\n quality\n\n}\n", "file_path": "examples/shared/mod.rs", "rank": 22, "score": 222517.32502666634 }, { "content": "/// \"Transmutes\" value of any sized type to a slice of bytes.\n\npub fn value_as_u8_slice<T: Sized>(v: &T) -> &'_ [u8] {\n\n // SAFETY: It is safe to reinterpret data to read it.\n\n unsafe { std::slice::from_raw_parts(v as *const T as *const u8, std::mem::size_of::<T>()) }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 23, "score": 221045.03283963673 }, { "content": "pub fn make_arrow(\n\n ctx: &mut BuildContext,\n\n orientation: ArrowDirection,\n\n size: f32,\n\n) -> Handle<UiNode> {\n\n VectorImageBuilder::new(\n\n WidgetBuilder::new()\n\n .with_foreground(BRUSH_BRIGHT)\n\n .with_horizontal_alignment(HorizontalAlignment::Center)\n\n .with_vertical_alignment(VerticalAlignment::Center),\n\n )\n\n 
.with_primitives(vec![match orientation {\n\n ArrowDirection::Top => Primitive::Triangle {\n\n points: [\n\n Vector2::new(size * 0.5, 0.0),\n\n Vector2::new(size, size),\n\n Vector2::new(0.0, size),\n\n ],\n\n },\n\n ArrowDirection::Bottom => Primitive::Triangle {\n", "file_path": "rg3d-ui/src/utils.rs", "rank": 24, "score": 220987.44933732622 }, { "content": "// User interface in the engine build up on graph data structure, on tree to be\n\n// more precise. Each UI element can have single parent and multiple children.\n\n// UI uses complex layout system which automatically organizes your widgets.\n\n// In this example we'll use Grid and StackPanel layout controls. Grid can be\n\n// divided in rows and columns, its child element can set their desired column\n\n// and row and grid will automatically put them in correct position. StackPanel\n\n// will \"stack\" UI elements either on top of each other or in one line. Such\n\n// complex layout system was borrowed from WPF framework. You can read more here:\n\n// https://docs.microsoft.com/en-us/dotnet/framework/wpf/advanced/layout\n\nfn create_ui(engine: &mut Engine) -> Interface {\n\n let window_width = engine.renderer.get_frame_size().0 as f32;\n\n\n\n // Gather all suitable video modes, we'll use them to fill combo box of\n\n // available resolutions.\n\n let video_modes = engine\n\n .get_window()\n\n .primary_monitor()\n\n .unwrap()\n\n .video_modes()\n\n .filter(|vm| {\n\n // Leave only modern video modes, we are not in 1998.\n\n vm.size().width > 800 && vm.size().height > 600 && vm.bit_depth() == 32\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n let ctx = &mut engine.user_interface.build_ctx();\n\n\n\n // First of all create debug text that will show title of example and current FPS.\n\n let debug_text = TextBuilder::new(WidgetBuilder::new()).build(ctx);\n", "file_path": "examples/ui.rs", "rank": 25, "score": 220724.18147466824 }, { "content": "// MenuItem uses popup to show its content, popup can be top-most only if 
it is\n\n// direct child of root canvas of UI. This fact adds some complications to search\n\n// of parent menu - we can't just traverse the tree because popup is not a child\n\n// of menu item, instead we trying to fetch handle to parent menu item from popup's\n\n// user data and continue up-search until we find menu.\n\nfn find_menu(from: Handle<UiNode>, ui: &UserInterface) -> Handle<UiNode> {\n\n let mut handle = from;\n\n while handle.is_some() {\n\n if let Some((_, popup)) = ui.try_borrow_by_type_up::<Popup>(handle) {\n\n // Continue search from parent menu item of popup.\n\n handle = popup\n\n .user_data_ref::<Handle<UiNode>>()\n\n .cloned()\n\n .unwrap_or_default();\n\n } else {\n\n // Maybe we have Menu as parent for MenuItem.\n\n return ui.find_by_criteria_up(handle, |n| n.cast::<Menu>().is_some());\n\n }\n\n }\n\n Default::default()\n\n}\n\n\n", "file_path": "rg3d-ui/src/menu.rs", "rank": 26, "score": 218985.13202207064 }, { "content": "/// Handles error codes of coreaudio functions\n\nfn check(error: OSStatus, msg: &str) -> Result<(), SoundError> {\n\n if error == noErr as i32 {\n\n Ok(())\n\n } else {\n\n let msg = format!(\"{}. 
Error code {}\", msg, error);\n\n Err(SoundError::FailedToInitializeDevice(msg))\n\n }\n\n}\n\n\n\n/// Callback function set on `AudioQueueNewOutput`\n\nunsafe extern \"C\" fn audio_queue_callback(\n\n user_data: *mut c_void,\n\n queue: AudioQueueRef,\n\n buf: AudioQueueBufferRef,\n\n) {\n\n let inner: &mut Inner = &mut *(user_data as *mut Inner);\n\n inner.mix(); // Device::mix\n\n\n\n // set the buffer data\n\n let src = inner.out_data.as_mut_ptr() as *mut u8;\n", "file_path": "rg3d-sound/src/device/coreaudio.rs", "rank": 27, "score": 218076.87341370335 }, { "content": "pub fn make_mark(\n\n ctx: &mut BuildContext,\n\n text: &str,\n\n column: usize,\n\n color: Color,\n\n) -> Handle<UiNode> {\n\n BorderBuilder::new(\n\n WidgetBuilder::new()\n\n .on_row(0)\n\n .on_column(column)\n\n .with_background(Brush::Solid(color))\n\n .with_foreground(Brush::Solid(Color::TRANSPARENT))\n\n .with_child(\n\n TextBuilder::new(WidgetBuilder::new())\n\n .with_vertical_text_alignment(VerticalAlignment::Center)\n\n .with_text(text)\n\n .build(ctx),\n\n ),\n\n )\n\n .build(ctx)\n\n}\n", "file_path": "rg3d-ui/src/vec/mod.rs", "rank": 28, "score": 217712.71779618526 }, { "content": "pub fn create_impl(\n\n ty_args: &args::TypeArgs,\n\n field_args: impl Iterator<Item = args::FieldArgs>,\n\n impl_body: TokenStream2,\n\n) -> TokenStream2 {\n\n let ty_ident = &ty_args.ident;\n\n let generics = self::create_impl_generics(&ty_args.generics, field_args);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n quote! 
{\n\n impl #impl_generics Visit for #ty_ident #ty_generics #where_clause {\n\n fn visit(\n\n &mut self,\n\n name: &str,\n\n visitor: &mut Visitor,\n\n ) -> VisitResult {\n\n #impl_body\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "rg3d-core-derive/src/visit/utils.rs", "rank": 29, "score": 216400.82202492736 }, { "content": "pub trait MessageData: 'static + Debug + Send + Any {\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n fn clone_boxed(&self) -> Box<dyn MessageData>;\n\n\n\n fn compare(&self, other: &dyn MessageData) -> bool;\n\n}\n\n\n\nimpl<T> MessageData for T\n\nwhere\n\n T: 'static + Debug + Clone + PartialEq + Send + Any,\n\n{\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn clone_boxed(&self) -> Box<dyn MessageData> {\n\n Box::new(self.clone())\n\n }\n\n\n", "file_path": "rg3d-ui/src/message.rs", "rank": 30, "score": 216397.90318317086 }, { "content": "fn create_ui(ctx: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let progress_bar;\n\n let progress_text;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n .with_height(screen_size.y)\n\n .with_child({\n\n debug_text =\n\n TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0)).build(ctx);\n\n debug_text\n\n })\n\n .with_child({\n\n progress_bar =\n\n ProgressBarBuilder::new(WidgetBuilder::new().on_row(1).on_column(1)).build(ctx);\n\n progress_bar\n\n })\n\n .with_child({\n\n progress_text = TextBuilder::new(\n", "file_path": "examples/async.rs", "rank": 31, "score": 214712.43357483557 }, { "content": "fn create_ui(ctx: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let progress_bar;\n\n let progress_text;\n\n let cancel;\n\n let progress_grid;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n .with_height(screen_size.y)\n\n .with_child({\n\n debug_text =\n\n TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0)).build(ctx);\n\n 
debug_text\n\n })\n\n .with_child({\n\n progress_grid = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_visibility(!Path::new(LIGHTMAP_SCENE_PATH).exists())\n\n .on_column(1)\n", "file_path": "examples/lightmap.rs", "rank": 32, "score": 214712.43357483557 }, { "content": "/// Creates `impl Inspect` block\n\npub fn create_impl<'f>(\n\n ty_args: &args::TypeArgs,\n\n field_args: impl Iterator<Item = &'f args::FieldArgs>,\n\n impl_body: TokenStream2,\n\n) -> TokenStream2 {\n\n let ty_ident = &ty_args.ident;\n\n let generics = self::create_impl_generics(&ty_args.generics, field_args);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n quote! {\n\n impl #impl_generics Inspect for #ty_ident #ty_generics #where_clause {\n\n fn properties(&self) -> Vec<PropertyInfo<'_>> {\n\n #impl_body\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "rg3d-core-derive/src/inspect/utils.rs", "rank": 33, "score": 212647.2706675348 }, { "content": "/// Saves given `data` and overwrites `data_default` with the saved data.\n\n///\n\n/// Test the equality after running this method!\n\npub fn save_load<T: Visit>(test_name: &str, data: &mut T, data_default: &mut T) {\n\n // Locate output path\n\n let (bin, txt) = {\n\n let manifest_dir = env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n let root = PathBuf::from(manifest_dir).join(\"test_output\");\n\n (\n\n root.join(format!(\"{}.bin\", test_name)),\n\n root.join(format!(\"{}.txt\", test_name)),\n\n )\n\n };\n\n\n\n // Save `data`\n\n {\n\n let mut visitor = Visitor::new();\n\n data.visit(\"Data\", &mut visitor).unwrap();\n\n\n\n visitor.save_binary(&bin).unwrap();\n\n let mut file = File::create(&txt).unwrap();\n\n file.write(visitor.save_text().as_bytes()).unwrap();\n\n }\n", "file_path": "rg3d-core-derive/tests/it/visit.rs", "rank": 34, "score": 211444.64041085192 }, { "content": "fn close_menu_chain(from: Handle<UiNode>, ui: &UserInterface) {\n\n let mut handle = from;\n\n while handle.is_some() {\n\n if let 
Some((popup_handle, popup)) = ui.try_borrow_by_type_up::<Popup>(handle) {\n\n ui.send_message(PopupMessage::close(\n\n popup_handle,\n\n MessageDirection::ToWidget,\n\n ));\n\n\n\n // Continue search from parent menu item of popup.\n\n handle = popup\n\n .user_data_ref::<Handle<UiNode>>()\n\n .cloned()\n\n .unwrap_or_default();\n\n }\n\n }\n\n}\n\n\n\nimpl Control for MenuItem {\n\n fn as_any(&self) -> &dyn Any {\n", "file_path": "rg3d-ui/src/menu.rs", "rank": 35, "score": 210972.52481882364 }, { "content": "fn create_ui(engine: &mut Engine) -> Interface {\n\n let ctx = &mut engine.user_interface.build_ctx();\n\n\n\n let debug_text = TextBuilder::new(WidgetBuilder::new()).build(ctx);\n\n\n\n let definition_container = Arc::new(PropertyEditorDefinitionContainer::new());\n\n\n\n let inspector;\n\n WindowBuilder::new(WidgetBuilder::new().with_width(400.0))\n\n .with_title(WindowTitle::text(\"Inspector\"))\n\n .with_content({\n\n inspector = InspectorBuilder::new(\n\n WidgetBuilder::new().with_desired_position(Vector2::new(200.0, 200.0)),\n\n )\n\n .with_property_editor_definitions(definition_container.clone())\n\n .build(ctx);\n\n inspector\n\n })\n\n .build(ctx);\n\n\n\n Interface {\n\n debug_text,\n\n inspector,\n\n definition_container,\n\n }\n\n}\n\n\n", "file_path": "examples/inspector.rs", "rank": 36, "score": 208702.42246844436 }, { "content": "///\n\n/// Triangulates specified polygon.\n\n///\n\npub fn triangulate(vertices: &[Vector3<f32>], out_triangles: &mut Vec<[usize; 3]>) {\n\n out_triangles.clear();\n\n if vertices.len() == 3 {\n\n // Triangulating a triangle?\n\n out_triangles.push([0, 1, 2]);\n\n } else if vertices.len() == 4 {\n\n // Special case for quadrilaterals (much faster than generic)\n\n let mut start_vertex = 0;\n\n for i in 0..4 {\n\n let v = vertices[i];\n\n let v0 = vertices[(i + 3) % 4];\n\n if let Some(left) = (v0 - v).try_normalize(f32::EPSILON) {\n\n let v1 = vertices[(i + 2) % 4];\n\n if let Some(diag) = (v1 - 
v).try_normalize(f32::EPSILON) {\n\n let v2 = vertices[(i + 1) % 4];\n\n if let Some(right) = (v2 - v).try_normalize(f32::EPSILON) {\n\n // Check for concave vertex\n\n let angle = left.dot(&diag).acos() + right.dot(&diag).acos();\n\n if angle > std::f32::consts::PI {\n\n start_vertex = i;\n", "file_path": "rg3d-core/src/math/triangulator.rs", "rank": 37, "score": 206259.8254078256 }, { "content": "pub fn read_ascii<R>(reader: &mut R) -> Result<FbxDocument, FbxError>\n\nwhere\n\n R: Read + Seek,\n\n{\n\n let mut nodes: Pool<FbxNode> = Pool::new();\n\n let root_handle = nodes.spawn(FbxNode {\n\n name: String::from(\"__ROOT__\"),\n\n children: Vec::new(),\n\n parent: Handle::NONE,\n\n attributes: Vec::new(),\n\n });\n\n let mut parent_handle: Handle<FbxNode> = root_handle;\n\n let mut node_handle: Handle<FbxNode> = Handle::NONE;\n\n let mut buffer: Vec<u8> = Vec::new();\n\n let mut name: Vec<u8> = Vec::new();\n\n let mut value: Vec<u8> = Vec::new();\n\n\n\n let buf_len = reader.seek(SeekFrom::End(0))?;\n\n reader.seek(SeekFrom::Start(0))?;\n\n\n", "file_path": "src/resource/fbx/document/ascii.rs", "rank": 38, "score": 202034.74186644307 }, { "content": "pub fn read_binary<R>(file: &mut R) -> Result<FbxDocument, FbxError>\n\nwhere\n\n R: Read + Seek,\n\n{\n\n let total_length = file.seek(SeekFrom::End(0))?;\n\n file.seek(SeekFrom::Start(0))?;\n\n\n\n // Ignore all stuff until version.\n\n let mut temp = [0; 23];\n\n file.read_exact(&mut temp)?;\n\n\n\n // Verify version.\n\n let version = file.read_u32::<LittleEndian>()? 
as i32;\n\n\n\n // Anything else should be supported.\n\n if version < 7100 {\n\n return Err(FbxError::UnsupportedVersion(version));\n\n }\n\n\n\n let mut nodes = Pool::new();\n", "file_path": "src/resource/fbx/document/binary.rs", "rank": 39, "score": 202034.74186644307 }, { "content": "fn find_tree<P: AsRef<Path>>(node: Handle<UiNode>, path: &P, ui: &UserInterface) -> Handle<UiNode> {\n\n let mut tree_handle = Handle::NONE;\n\n let node_ref = ui.node(node);\n\n\n\n if let Some(tree) = node_ref.cast::<Tree>() {\n\n let tree_path = tree.user_data_ref::<PathBuf>().unwrap();\n\n if tree_path == path.as_ref() {\n\n tree_handle = node;\n\n } else {\n\n for &item in tree.items() {\n\n let tree = find_tree(item, path, ui);\n\n if tree.is_some() {\n\n tree_handle = tree;\n\n break;\n\n }\n\n }\n\n }\n\n } else if let Some(root) = node_ref.cast::<TreeRoot>() {\n\n for &item in root.items() {\n\n let tree = find_tree(item, path, ui);\n", "file_path": "rg3d-ui/src/file_browser.rs", "rank": 40, "score": 196949.17078315833 }, { "content": "// impl `#[derive(Visit)]` for `struct` or `enum`\n\npub fn impl_visit(ast: DeriveInput) -> TokenStream2 {\n\n let ty_args = args::TypeArgs::from_derive_input(&ast).unwrap();\n\n match &ty_args.data {\n\n ast::Data::Struct(ref field_args) => self::impl_visit_struct(&ty_args, field_args),\n\n ast::Data::Enum(ref variants) => self::impl_visit_enum(&ty_args, variants),\n\n }\n\n}\n\n\n", "file_path": "rg3d-core-derive/src/visit.rs", "rank": 41, "score": 194775.25412308643 }, { "content": "pub fn impl_inspect(ast: DeriveInput) -> TokenStream2 {\n\n let ty_args = args::TypeArgs::from_derive_input(&ast).unwrap();\n\n match &ty_args.data {\n\n ast::Data::Struct(ref field_args) => self::impl_inspect_struct(&ty_args, field_args),\n\n ast::Data::Enum(ref variant_args) => self::impl_inspect_enum(&ty_args, variant_args),\n\n }\n\n}\n\n\n", "file_path": "rg3d-core-derive/src/inspect.rs", "rank": 42, "score": 194763.1158449663 }, { "content": "fn 
remap_handles(old_new_mapping: &HashMap<Handle<Node>, Handle<Node>>, dest_graph: &mut Graph) {\n\n // Iterate over instantiated nodes and remap handles.\n\n for (_, &new_node_handle) in old_new_mapping.iter() {\n\n let new_node = &mut dest_graph.pool[new_node_handle];\n\n\n\n if let Node::Mesh(mesh) = new_node {\n\n for surface in mesh.surfaces_mut() {\n\n for bone_handle in surface.bones.iter_mut() {\n\n if let Some(entry) = old_new_mapping.get(bone_handle) {\n\n *bone_handle = *entry;\n\n }\n\n }\n\n }\n\n }\n\n\n\n // LODs also have handles that must be remapped too.\n\n if let Some(lod_group) = new_node.lod_group_mut() {\n\n for level in lod_group.levels.iter_mut() {\n\n level.objects.retain_mut(|object| {\n\n if let Some(entry) = old_new_mapping.get(object) {\n", "file_path": "src/scene/graph.rs", "rank": 43, "score": 192977.04034903913 }, { "content": "fn is_node_enabled(nodes: &Pool<UiNode>, handle: Handle<UiNode>) -> bool {\n\n let root_node = &nodes[handle];\n\n let mut enabled = root_node.enabled();\n\n let mut parent = root_node.parent();\n\n while parent.is_some() {\n\n let node = &nodes[parent];\n\n if !node.enabled() {\n\n enabled = false;\n\n break;\n\n }\n\n parent = node.parent();\n\n }\n\n enabled\n\n}\n\n\n\nimpl UserInterface {\n\n pub fn new(screen_size: Vector2<f32>) -> UserInterface {\n\n let (sender, receiver) = mpsc::channel();\n\n let mut ui = UserInterface {\n\n screen_size,\n", "file_path": "rg3d-ui/src/lib.rs", "rank": 44, "score": 186837.06201786152 }, { "content": "fn filtered_out(filter: &mut Option<Filter>, path: &Path) -> bool {\n\n match filter.as_mut() {\n\n Some(filter) => !filter.0.borrow_mut().deref_mut().lock().unwrap()(path),\n\n None => false,\n\n }\n\n}\n\n\n", "file_path": "rg3d-ui/src/file_browser.rs", "rank": 45, "score": 186541.3998544055 }, { "content": "/// impl `Visit` for `struct`\n\nfn impl_visit_struct(\n\n ty_args: &args::TypeArgs,\n\n field_args: &ast::Fields<args::FieldArgs>,\n\n) -> TokenStream2 {\n\n let 
visit_fn_body = if field_args.style == ast::Style::Unit {\n\n quote! { Ok(()) }\n\n } else {\n\n // `field.visit(..);` parts\n\n let field_visits =\n\n utils::create_field_visits(None, field_args.fields.iter(), field_args.style);\n\n\n\n quote! {\n\n visitor.enter_region(name)?;\n\n #(self.#field_visits)*\n\n visitor.leave_region()\n\n }\n\n };\n\n\n\n utils::create_impl(ty_args, field_args.iter().cloned(), visit_fn_body)\n\n}\n\n\n", "file_path": "rg3d-core-derive/src/visit.rs", "rank": 46, "score": 185228.15251396626 }, { "content": "fn impl_inspect_struct(\n\n ty_args: &args::TypeArgs,\n\n field_args: &ast::Fields<args::FieldArgs>,\n\n) -> TokenStream2 {\n\n let body = utils::gen_inspect_fn_body(ty_args, utils::FieldPrefix::Self_, field_args);\n\n utils::create_impl(ty_args, field_args.iter(), body)\n\n}\n\n\n", "file_path": "rg3d-core-derive/src/inspect.rs", "rank": 47, "score": 185215.84985526773 }, { "content": "fn check<S: Into<String>>(code: i32, message: S) -> Result<(), SoundError> {\n\n if code == DS_OK {\n\n Ok(())\n\n } else {\n\n Err(SoundError::FailedToInitializeDevice(message.into()))\n\n }\n\n}\n\n\n\nimpl DirectSoundDevice {\n\n pub fn new<F: FnMut(&mut [(f32, f32)]) + Send + 'static>(\n\n buffer_len_bytes: u32,\n\n callback: F,\n\n ) -> Result<Self, SoundError> {\n\n unsafe {\n\n let mut direct_sound = std::ptr::null_mut();\n\n check(\n\n DirectSoundCreate(std::ptr::null(), &mut direct_sound, std::ptr::null_mut()),\n\n \"Failed to initialize DirectSound\",\n\n )?;\n\n\n", "file_path": "rg3d-sound/src/device/dsound.rs", "rank": 48, "score": 179337.45961225897 }, { "content": "#[inline]\n\npub fn type_name_of<T>(_: T) -> &'static str {\n\n std::any::type_name::<T>()\n\n}\n\n\n\n#[cfg(feature = \"enable_profiler\")]\n\n#[macro_export]\n\nmacro_rules! 
scope_profile {\n\n () => {\n\n let function_name = {\n\n fn scope() {}\n\n $crate::profiler::type_name_of(scope)\n\n };\n\n let _scope_guard = $crate::profiler::ScopeDefinition::new(function_name, line!());\n\n };\n\n}\n\n\n\n#[cfg(not(feature = \"enable_profiler\"))]\n\n#[macro_export]\n\nmacro_rules! scope_profile {\n\n () => {};\n", "file_path": "rg3d-core/src/profiler.rs", "rank": 49, "score": 177239.4811744279 }, { "content": "pub fn gen_inspect_fn_body(\n\n ty_args: &args::TypeArgs,\n\n field_prefix: FieldPrefix,\n\n field_args: &ast::Fields<args::FieldArgs>,\n\n) -> TokenStream2 {\n\n let owner_name = ty_args.ident.to_string();\n\n\n\n // `inspect` function body, consisting of a sequence of quotes\n\n let mut quotes = Vec::new();\n\n\n\n // collect non-expanible field properties\n\n let props = field_args\n\n .fields\n\n .iter()\n\n .enumerate()\n\n .filter(|(_i, f)| !f.skip && !(f.expand || f.expand_subtree))\n\n .map(|(i, field)| {\n\n self::quote_field_prop(&owner_name, field_prefix, i, field, field_args.style)\n\n });\n\n\n", "file_path": "rg3d-core-derive/src/inspect/utils.rs", "rank": 50, "score": 176075.75139421714 }, { "content": "#[inline]\n\npub fn set_once() {\n\n use std::sync::Once;\n\n static SET_HOOK: Once = Once::new();\n\n SET_HOOK.call_once(|| {\n\n panic::set_hook(Box::new(hook));\n\n });\n\n}\n\n\n", "file_path": "examples/wasm/src/lib.rs", "rank": 51, "score": 173681.14124279135 }, { "content": "/// Generates UV map for given surface data.\n\n///\n\n/// # Performance\n\n///\n\n/// This method utilizes lots of \"brute force\" algorithms, so it is not fast as it\n\n/// could be in ideal case. 
It also allocates some memory for internal needs.\n\npub fn generate_uvs(\n\n data: &mut SurfaceData,\n\n spacing: f32,\n\n) -> Result<SurfaceDataPatch, VertexFetchError> {\n\n let uv_box = generate_uv_box(data);\n\n\n\n let data_id = data.content_hash();\n\n let mut vertex_buffer_mut = data.vertex_buffer.modify();\n\n let mut geometry_buffer_mut = data.geometry_buffer.modify();\n\n let (mut meshes, mut patch) = generate_uv_meshes(\n\n &uv_box,\n\n data_id,\n\n &mut vertex_buffer_mut,\n\n &mut geometry_buffer_mut,\n\n );\n\n drop(geometry_buffer_mut);\n\n\n\n // Step 4. Arrange and scale all meshes on uv map so it fits into [0;1] range.\n\n let area = meshes.iter().fold(0.0, |area, mesh| area + mesh.area());\n\n let square_side = area.sqrt() + spacing * meshes.len() as f32;\n", "file_path": "src/utils/uvgen.rs", "rank": 52, "score": 173681.14124279135 }, { "content": "/// \"Transmutes\" array of any sized type to a slice of bytes.\n\npub fn array_as_u8_slice<T: Sized>(v: &[T]) -> &'_ [u8] {\n\n // SAFETY: It is safe to reinterpret data to read it.\n\n unsafe {\n\n std::slice::from_raw_parts(v.as_ptr() as *const u8, std::mem::size_of::<T>() * v.len())\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 53, "score": 173679.02590266563 }, { "content": "#[wasm_bindgen]\n\npub fn main_js() {\n\n set_once();\n\n\n\n let event_loop = EventLoop::new();\n\n\n\n let window_builder = rg3d::window::WindowBuilder::new()\n\n .with_inner_size(LogicalSize::new(800, 600))\n\n .with_title(\"Example - WASM\")\n\n .with_resizable(true);\n\n\n\n let mut engine = Engine::new(window_builder, &event_loop, true).unwrap();\n\n engine\n\n .renderer\n\n .set_backbuffer_clear_color(Color::opaque(150, 150, 255));\n\n\n\n // Configure resource manager.\n\n engine.resource_manager.state().set_textures_import_options(\n\n TextureImportOptions::default().with_compression(CompressionOptions::NoCompression),\n\n );\n\n\n", "file_path": "examples/wasm/src/lib.rs", "rank": 54, "score": 
171374.84568967548 }, { "content": "/// Generates UVs for a specified mesh.\n\npub fn generate_uvs_mesh(\n\n mesh: &Mesh,\n\n spacing: f32,\n\n) -> Result<Vec<SurfaceDataPatch>, VertexFetchError> {\n\n let last = instant::Instant::now();\n\n\n\n let data_set = mesh.surfaces().iter().map(|s| s.data()).collect::<Vec<_>>();\n\n\n\n let patches = data_set\n\n .into_par_iter()\n\n .map(|data| generate_uvs(&mut data.write().unwrap(), spacing))\n\n .collect::<Result<Vec<SurfaceDataPatch>, VertexFetchError>>()?;\n\n\n\n println!(\"Generate UVs: {:?}\", instant::Instant::now() - last);\n\n\n\n Ok(patches)\n\n}\n", "file_path": "src/utils/uvgen.rs", "rank": 55, "score": 171374.84568967548 }, { "content": "/// Generates a set of UV meshes.\n\npub fn generate_uv_meshes(\n\n uv_box: &UvBox,\n\n data_id: u64,\n\n vertex_buffer_mut: &mut VertexBufferRefMut,\n\n geometry_buffer_mut: &mut GeometryBufferRefMut,\n\n) -> (Vec<UvMesh>, SurfaceDataPatch) {\n\n let mut mesh_patch = SurfaceDataPatch {\n\n data_id,\n\n ..Default::default()\n\n };\n\n\n\n if !vertex_buffer_mut.has_attribute(VertexAttributeUsage::TexCoord1) {\n\n vertex_buffer_mut\n\n .add_attribute(\n\n VertexAttributeDescriptor {\n\n usage: VertexAttributeUsage::TexCoord1,\n\n data_type: VertexAttributeDataType::F32,\n\n size: 2,\n\n divisor: 0,\n\n shader_location: 6, // HACK: GBuffer renderer expects it to be at 6\n", "file_path": "src/utils/uvgen.rs", "rank": 56, "score": 171374.84568967548 }, { "content": "pub fn barycentric_to_world(\n\n bary: (f32, f32, f32),\n\n pa: Vector3<f32>,\n\n pb: Vector3<f32>,\n\n pc: Vector3<f32>,\n\n) -> Vector3<f32> {\n\n pa.scale(bary.0) + pb.scale(bary.1) + pc.scale(bary.2)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 57, "score": 169167.8167583457 }, { "content": "pub fn vec3_to_vec2_by_plane(\n\n plane_class: PlaneClass,\n\n normal: Vector3<f32>,\n\n point: Vector3<f32>,\n\n) -> Vector2<f32> {\n\n match plane_class {\n\n PlaneClass::XY => {\n\n if normal.z < 0.0 
{\n\n Vector2::new(point.y, point.x)\n\n } else {\n\n Vector2::new(point.x, point.y)\n\n }\n\n }\n\n PlaneClass::XZ => {\n\n if normal.y < 0.0 {\n\n Vector2::new(point.x, point.z)\n\n } else {\n\n Vector2::new(point.z, point.x)\n\n }\n\n }\n\n PlaneClass::YZ => {\n\n if normal.x < 0.0 {\n\n Vector2::new(point.z, point.y)\n\n } else {\n\n Vector2::new(point.y, point.z)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 58, "score": 167053.78057800437 }, { "content": "pub fn ray_rect_intersection(\n\n rect: Rect<f32>,\n\n origin: Vector2<f32>,\n\n dir: Vector2<f32>,\n\n) -> Option<IntersectionResult> {\n\n let min = rect.left_top_corner();\n\n let max = rect.right_bottom_corner();\n\n\n\n let (mut tmin, mut tmax) = if dir.x >= 0.0 {\n\n ((min.x - origin.x) / dir.x, (max.x - origin.x) / dir.x)\n\n } else {\n\n ((max.x - origin.x) / dir.x, (min.x - origin.x) / dir.x)\n\n };\n\n\n\n let (tymin, tymax) = if dir.y >= 0.0 {\n\n ((min.y - origin.y) / dir.y, (max.y - origin.y) / dir.y)\n\n } else {\n\n ((max.y - origin.y) / dir.y, (min.y - origin.y) / dir.y)\n\n };\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 59, "score": 167053.78057800437 }, { "content": "pub fn is_point_inside_2d_triangle(\n\n point: Vector2<f32>,\n\n pt_a: Vector2<f32>,\n\n pt_b: Vector2<f32>,\n\n pt_c: Vector2<f32>,\n\n) -> bool {\n\n let ba = pt_b - pt_a;\n\n let ca = pt_c - pt_a;\n\n\n\n let vp = point - pt_a;\n\n\n\n let ba_dot_ba = ba.dot(&ba);\n\n let ca_dot_ba = ca.dot(&ba);\n\n let ca_dot_ca = ca.dot(&ca);\n\n\n\n let dot_02 = ca.dot(&vp);\n\n let dot_12 = ba.dot(&vp);\n\n\n\n let inv_denom = 1.0 / (ca_dot_ca * ba_dot_ba - ca_dot_ba.powi(2));\n\n\n\n // calculate barycentric coordinates\n\n let u = (ba_dot_ba * dot_02 - ca_dot_ba * dot_12) * inv_denom;\n\n let v = (ca_dot_ca * dot_12 - ca_dot_ba * dot_02) * inv_denom;\n\n\n\n (u >= 0.0) && (v >= 0.0) && (u + v < 1.0)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 60, "score": 
167053.78057800437 }, { "content": "pub fn get_barycentric_coords_2d(\n\n p: Vector2<f32>,\n\n a: Vector2<f32>,\n\n b: Vector2<f32>,\n\n c: Vector2<f32>,\n\n) -> (f32, f32, f32) {\n\n let v0 = b - a;\n\n let v1 = c - a;\n\n let v2 = p - a;\n\n\n\n let d00 = v0.dot(&v0);\n\n let d01 = v0.dot(&v1);\n\n let d11 = v1.dot(&v1);\n\n let d20 = v2.dot(&v0);\n\n let d21 = v2.dot(&v1);\n\n let inv_denom = 1.0 / (d00 * d11 - d01.powi(2));\n\n\n\n let v = (d11 * d20 - d01 * d21) * inv_denom;\n\n let w = (d00 * d21 - d01 * d20) * inv_denom;\n\n let u = 1.0 - v - w;\n\n\n\n (u, v, w)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 61, "score": 167053.78057800437 }, { "content": "pub fn get_barycentric_coords(\n\n p: &Vector3<f32>,\n\n a: &Vector3<f32>,\n\n b: &Vector3<f32>,\n\n c: &Vector3<f32>,\n\n) -> (f32, f32, f32) {\n\n let v0 = *b - *a;\n\n let v1 = *c - *a;\n\n let v2 = *p - *a;\n\n\n\n let d00 = v0.dot(&v0);\n\n let d01 = v0.dot(&v1);\n\n let d11 = v1.dot(&v1);\n\n let d20 = v2.dot(&v0);\n\n let d21 = v2.dot(&v1);\n\n let denom = d00 * d11 - d01.powi(2);\n\n\n\n let v = (d11 * d20 - d01 * d21) / denom;\n\n let w = (d00 * d21 - d01 * d20) / denom;\n\n let u = 1.0 - v - w;\n\n\n\n (u, v, w)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 62, "score": 167053.78057800437 }, { "content": "fn save(game: &mut Game) {\n\n // To save a game state all we need to do is to create new instance of Visitor\n\n // and call visit on game instance.\n\n let mut visitor = Visitor::new();\n\n game.visit(\"Game\", &mut visitor).unwrap();\n\n // And call save method.\n\n visitor.save_binary(Path::new(SAVE_FILE)).unwrap();\n\n}\n\n\n", "file_path": "examples/save_load.rs", "rank": 63, "score": 165459.9027394224 }, { "content": "fn get_line_thickness_vector(a: Vector2<f32>, b: Vector2<f32>, thickness: f32) -> Vector2<f32> {\n\n if let Some(dir) = (b - a).try_normalize(f32::EPSILON) {\n\n Vector2::new(dir.y, -dir.x).scale(thickness * 0.5)\n\n } else {\n\n 
Vector2::default()\n\n }\n\n}\n\n\n\nimpl Default for DrawingContext {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl Draw for DrawingContext {\n\n #[inline(always)]\n\n fn push_vertex(&mut self, pos: Vector2<f32>, tex_coord: Vector2<f32>) {\n\n self.vertex_buffer.push(Vertex::new(pos, tex_coord));\n\n }\n\n\n", "file_path": "rg3d-ui/src/draw.rs", "rank": 64, "score": 162276.42803941114 }, { "content": "fn line_thickness_vector(a: Vector2<f32>, b: Vector2<f32>, thickness: f32) -> Vector2<f32> {\n\n if let Some(dir) = (b - a).try_normalize(f32::EPSILON) {\n\n Vector2::new(dir.y, -dir.x).scale(thickness * 0.5)\n\n } else {\n\n Vector2::default()\n\n }\n\n}\n\n\n\nimpl Primitive {\n\n pub fn bounds(&self) -> (Vector2<f32>, Vector2<f32>) {\n\n match self {\n\n Primitive::Triangle { points } => {\n\n let min = points[0]\n\n .per_component_min(&points[1])\n\n .per_component_min(&points[2]);\n\n let max = points[0]\n\n .per_component_max(&points[1])\n\n .per_component_max(&points[2]);\n\n (min, max)\n\n }\n", "file_path": "rg3d-ui/src/vector_image.rs", "rank": 65, "score": 162276.42803941114 }, { "content": "/// `<prefix>field.visit(\"name\", visitor);`\n\npub fn create_field_visits<'a>(\n\n // None or `f` when bindings tuple variants. NOTE: We can't use `prefix: Ident`\n\n prefix: Option<Ident>,\n\n fields: impl Iterator<Item = &'a args::FieldArgs>,\n\n field_style: ast::Style,\n\n) -> Vec<TokenStream2> {\n\n if field_style == ast::Style::Unit {\n\n // `Unit` (struct/enum variant) has no field to visit.\n\n // We won't even enter this region:\n\n return vec![];\n\n }\n\n\n\n let visit_args = fields\n\n .filter(|field| !field.skip)\n\n .enumerate()\n\n .map(|(field_index, field)| {\n\n let (ident, name) = match field_style {\n\n // `NamedFields { a: f32, .. 
}`\n\n ast::Style::Struct => {\n\n let ident = field.ident.as_ref().unwrap_or_else(|| unreachable!());\n", "file_path": "rg3d-core-derive/src/visit/utils.rs", "rank": 66, "score": 161267.41162279714 }, { "content": "fn sort_keys(keys: &mut [CurveKey]) {\n\n keys.sort_by(|a, b| {\n\n if a.location > b.location {\n\n Ordering::Greater\n\n } else if a.location < b.location {\n\n Ordering::Less\n\n } else {\n\n Ordering::Equal\n\n }\n\n });\n\n}\n\n\n\nimpl From<Vec<CurveKey>> for Curve {\n\n fn from(mut keys: Vec<CurveKey>) -> Self {\n\n sort_keys(&mut keys);\n\n Self { keys }\n\n }\n\n}\n\n\n\nimpl Curve {\n", "file_path": "rg3d-core/src/curve.rs", "rank": 67, "score": 159106.17756997977 }, { "content": "#[allow(clippy::manual_map)]\n\npub fn into_any_arc<T: Any + Send + Sync>(\n\n opt: Option<Arc<T>>,\n\n) -> Option<Arc<dyn Any + Send + Sync>> {\n\n match opt {\n\n Some(r) => Some(r),\n\n None => None,\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 68, "score": 156247.04037803682 }, { "content": "/// A panic hook for use with\n\n/// [`std::panic::set_hook`](https://doc.rust-lang.org/nightly/std/panic/fn.set_hook.html)\n\n/// that logs panics into\n\n/// [`console.error`](https://developer.mozilla.org/en-US/docs/Web/API/Console/error).\n\n///\n\n/// On non-wasm targets, prints the panic to `stderr`.\n\npub fn hook(info: &panic::PanicInfo) {\n\n hook_impl(info);\n\n}\n\n\n\n/// Set the `console.error` panic hook the first time this is called. 
Subsequent\n\n/// invocations do nothing.\n", "file_path": "examples/wasm/src/lib.rs", "rank": 69, "score": 154869.0020908082 }, { "content": "#[inline]\n\nfn read_samples(buffer: &mut Vec<f32>, decoder: &mut Decoder, count: usize) -> usize {\n\n buffer.clear();\n\n for _ in 0..count {\n\n if let Some(sample) = decoder.next() {\n\n buffer.push(sample)\n\n } else {\n\n break;\n\n }\n\n }\n\n buffer.len()\n\n}\n\n\n\nimpl StreamingBuffer {\n\n /// Defines amount of samples `per channel` which each streaming buffer will use for internal buffer.\n\n pub const STREAM_SAMPLE_COUNT: usize = 44100;\n\n\n\n /// Creates new streaming buffer using given data source. May fail if data source has unsupported format\n\n /// or it has corrupted data. Length of internal generic buffer cannot be changed but can be fetched from\n\n /// `StreamingBuffer::STREAM_SAMPLE_COUNT`\n\n ///\n", "file_path": "rg3d-sound/src/buffer/streaming.rs", "rank": 70, "score": 154235.77152587552 }, { "content": "struct UiShader {\n\n program: GpuProgram,\n\n wvp_matrix: UniformLocation,\n\n diffuse_texture: UniformLocation,\n\n is_font: UniformLocation,\n\n solid_color: UniformLocation,\n\n brush_type: UniformLocation,\n\n gradient_point_count: UniformLocation,\n\n gradient_colors: UniformLocation,\n\n gradient_stops: UniformLocation,\n\n gradient_origin: UniformLocation,\n\n gradient_end: UniformLocation,\n\n resolution: UniformLocation,\n\n bounds_min: UniformLocation,\n\n bounds_max: UniformLocation,\n\n opacity: UniformLocation,\n\n}\n\n\n\nimpl UiShader {\n\n pub fn new(state: &mut PipelineState) -> Result<Self, FrameworkError> {\n", "file_path": "src/renderer/ui_renderer.rs", "rank": 71, "score": 153465.49497188925 }, { "content": "/// Translates library button state into rg3d-ui button state.\n\npub fn translate_state(state: ElementState) -> ButtonState {\n\n match state {\n\n ElementState::Pressed => ButtonState::Pressed,\n\n ElementState::Released => ButtonState::Released,\n\n }\n\n}\n\n\n", 
"file_path": "src/utils/mod.rs", "rank": 72, "score": 152920.20896939392 }, { "content": "pub fn wrap_angle(angle: f32) -> f32 {\n\n let two_pi = 2.0 * std::f32::consts::PI;\n\n\n\n if angle > 0.0 {\n\n angle % two_pi\n\n } else {\n\n (angle + two_pi) % two_pi\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 73, "score": 152914.310543774 }, { "content": "pub fn get_closest_point_triangles<P: PositionProvider>(\n\n points: &[P],\n\n triangles: &[TriangleDefinition],\n\n triangle_indices: &[u32],\n\n point: Vector3<f32>,\n\n) -> Option<usize> {\n\n let mut closest_sqr_distance = f32::MAX;\n\n let mut closest_index = None;\n\n for triangle_index in triangle_indices {\n\n let triangle = triangles.get(*triangle_index as usize).unwrap();\n\n for point_index in triangle.0.iter() {\n\n let vertex = points.get(*point_index as usize).unwrap();\n\n let sqr_distance = (vertex.position() - point).norm_squared();\n\n if sqr_distance < closest_sqr_distance {\n\n closest_sqr_distance = sqr_distance;\n\n closest_index = Some(*point_index as usize);\n\n }\n\n }\n\n }\n\n closest_index\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 74, "score": 152289.49110062158 }, { "content": "fn is_vorbis_ogg(source: &mut DataSource) -> bool {\n\n let pos = source.seek(SeekFrom::Current(0)).unwrap();\n\n\n\n let is_vorbis = OggStreamReader::new(source.by_ref()).is_ok();\n\n\n\n source.seek(SeekFrom::Start(pos)).unwrap();\n\n\n\n is_vorbis\n\n}\n\n\n\nimpl OggDecoder {\n\n pub fn new(mut source: DataSource) -> Result<Self, DataSource> {\n\n if is_vorbis_ogg(&mut source) {\n\n let mut reader = OggStreamReader::new(source).unwrap();\n\n\n\n let samples = if let Ok(Some(samples)) =\n\n reader.read_dec_packet_generic::<InterleavedSamples<f32>>()\n\n {\n\n samples.samples.into_iter()\n\n } else {\n", "file_path": "rg3d-sound/src/decoder/vorbis.rs", "rank": 75, "score": 152196.71064271044 }, { "content": "/// Converts engine's optional texture \"pointer\" to 
rg3d-ui's.\n\npub fn into_gui_texture(this: Texture) -> draw::SharedTexture {\n\n draw::SharedTexture(this.0.into_inner())\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 76, "score": 152112.14199745393 }, { "content": "pub fn print() -> Result<String, fmt::Error> {\n\n #[cfg(feature = \"enable_profiler\")]\n\n {\n\n let mut buffer = String::new();\n\n PROFILER.lock().unwrap().print(&mut buffer)?;\n\n Ok(buffer)\n\n }\n\n\n\n #[cfg(not(feature = \"enable_profiler\"))]\n\n {\n\n Ok(\"Performance profiling results are not available, because feature 'enable_profiler' wasn't defined!\".to_owned())\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/profiler.rs", "rank": 77, "score": 152106.20468583255 }, { "content": "struct InputController {\n\n move_forward: bool,\n\n move_backward: bool,\n\n move_left: bool,\n\n move_right: bool,\n\n}\n\n\n", "file_path": "examples/2d.rs", "rank": 78, "score": 151528.055579047 }, { "content": "/// Translates keyboard modifiers to rg3d-ui keyboard modifiers.\n\npub fn translate_keyboard_modifiers(modifiers: ModifiersState) -> KeyboardModifiers {\n\n KeyboardModifiers {\n\n alt: modifiers.alt(),\n\n shift: modifiers.shift(),\n\n control: modifiers.ctrl(),\n\n system: modifiers.logo(),\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 79, "score": 151052.46912313515 }, { "content": "/// Translated key code to rg3d-ui key code.\n\npub fn translate_key(key: VirtualKeyCode) -> KeyCode {\n\n match key {\n\n VirtualKeyCode::Key1 => KeyCode::Key1,\n\n VirtualKeyCode::Key2 => KeyCode::Key2,\n\n VirtualKeyCode::Key3 => KeyCode::Key3,\n\n VirtualKeyCode::Key4 => KeyCode::Key4,\n\n VirtualKeyCode::Key5 => KeyCode::Key5,\n\n VirtualKeyCode::Key6 => KeyCode::Key6,\n\n VirtualKeyCode::Key7 => KeyCode::Key7,\n\n VirtualKeyCode::Key8 => KeyCode::Key8,\n\n VirtualKeyCode::Key9 => KeyCode::Key9,\n\n VirtualKeyCode::Key0 => KeyCode::Key0,\n\n VirtualKeyCode::A => KeyCode::A,\n\n VirtualKeyCode::B => KeyCode::B,\n\n VirtualKeyCode::C => 
KeyCode::C,\n\n VirtualKeyCode::D => KeyCode::D,\n\n VirtualKeyCode::E => KeyCode::E,\n\n VirtualKeyCode::F => KeyCode::F,\n\n VirtualKeyCode::G => KeyCode::G,\n\n VirtualKeyCode::H => KeyCode::H,\n", "file_path": "src/utils/mod.rs", "rank": 80, "score": 151052.46912313515 }, { "content": "#[proc_macro_derive(Inspect, attributes(inspect))]\n\npub fn inspect(input: TokenStream) -> TokenStream {\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n TokenStream::from(inspect::impl_inspect(ast))\n\n}\n", "file_path": "rg3d-core-derive/src/lib.rs", "rank": 81, "score": 151046.53181151376 }, { "content": "pub fn make_vec3_container<P: AsRef<str>>(\n\n nodes: &FbxNodeContainer,\n\n container_node: Handle<FbxNode>,\n\n data_name: P,\n\n) -> Result<FbxContainer<Vector3<f32>>, FbxError> {\n\n FbxContainer::new(nodes, container_node, data_name, |attributes| {\n\n let mut normals = Vec::with_capacity(attributes.len() / 3);\n\n for normal in attributes.chunks_exact(3) {\n\n normals.push(Vector3::new(\n\n normal[0].as_f32()?,\n\n normal[1].as_f32()?,\n\n normal[2].as_f32()?,\n\n ));\n\n }\n\n Ok(normals)\n\n })\n\n}\n", "file_path": "src/resource/fbx/scene/mod.rs", "rank": 82, "score": 151046.53181151376 }, { "content": "#[proc_macro_derive(Visit, attributes(visit))]\n\npub fn visit(input: TokenStream) -> TokenStream {\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n TokenStream::from(visit::impl_visit(ast))\n\n}\n\n\n\n/// Implements `Inspect` trait\n\n///\n\n/// User has to import `Inspect` and `PropertyInfo` to use this macro.\n", "file_path": "rg3d-core-derive/src/lib.rs", "rank": 83, "score": 151046.53181151376 }, { "content": "pub fn get_closest_point_triangle_set<P: PositionProvider>(\n\n points: &[P],\n\n triangles: &[TriangleDefinition],\n\n point: Vector3<f32>,\n\n) -> Option<usize> {\n\n let mut closest_sqr_distance = f32::MAX;\n\n let mut closest_index = None;\n\n for triangle in triangles {\n\n for point_index in triangle.0.iter() {\n\n let vertex = 
points.get(*point_index as usize).unwrap();\n\n let sqr_distance = (vertex.position() - point).norm_squared();\n\n if sqr_distance < closest_sqr_distance {\n\n closest_sqr_distance = sqr_distance;\n\n closest_index = Some(*point_index as usize);\n\n }\n\n }\n\n }\n\n closest_index\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 84, "score": 150562.72628058388 }, { "content": "struct Interface {\n\n debug_text: Handle<UiNode>,\n\n yaw: Handle<UiNode>,\n\n scale: Handle<UiNode>,\n\n reset: Handle<UiNode>,\n\n video_modes: Vec<VideoMode>,\n\n resolutions: Handle<UiNode>,\n\n}\n\n\n", "file_path": "examples/ui.rs", "rank": 85, "score": 149453.58902474184 }, { "content": "pub fn err_code_to_string(err_code: c_int) -> String {\n\n unsafe {\n\n let message = CStr::from_ptr(snd_strerror(err_code) as *const _)\n\n .to_bytes()\n\n .to_vec();\n\n String::from_utf8(message).unwrap()\n\n }\n\n}\n\n\n", "file_path": "rg3d-sound/src/device/alsa.rs", "rank": 86, "score": 149251.33512478284 }, { "content": "struct InputController {\n\n rotate_left: bool,\n\n rotate_right: bool,\n\n}\n\n\n", "file_path": "examples/lightmap.rs", "rank": 87, "score": 148871.55008114927 }, { "content": "struct InputController {\n\n rotate_left: bool,\n\n rotate_right: bool,\n\n}\n\n\n", "file_path": "examples/simple.rs", "rank": 88, "score": 148871.55008114927 }, { "content": "struct InputController {\n\n rotate_left: bool,\n\n rotate_right: bool,\n\n}\n\n\n", "file_path": "examples/scene.rs", "rank": 89, "score": 148871.55008114927 }, { "content": "struct InputController {\n\n rotate_left: bool,\n\n rotate_right: bool,\n\n}\n\n\n", "file_path": "examples/navmesh.rs", "rank": 90, "score": 148871.55008114927 }, { "content": "struct InputController {\n\n rotate_left: bool,\n\n rotate_right: bool,\n\n}\n\n\n", "file_path": "examples/terrain.rs", "rank": 91, "score": 148871.55008114927 }, { "content": "struct InputController {\n\n rotate_left: bool,\n\n rotate_right: bool,\n\n}\n\n\n", 
"file_path": "examples/instancing.rs", "rank": 92, "score": 148871.55008114927 }, { "content": "struct InputController {\n\n rotate_left: bool,\n\n rotate_right: bool,\n\n forward: bool,\n\n backward: bool,\n\n}\n\n\n", "file_path": "examples/lod.rs", "rank": 93, "score": 148871.55008114927 }, { "content": "struct InputController {\n\n rotate_left: bool,\n\n rotate_right: bool,\n\n}\n\n\n", "file_path": "examples/async.rs", "rank": 94, "score": 148871.55008114927 }, { "content": "/// Translates window event to rg3d-ui event.\n\npub fn translate_event(event: &WindowEvent) -> Option<OsEvent> {\n\n match event {\n\n WindowEvent::ReceivedCharacter(c) => Some(OsEvent::Character(*c)),\n\n WindowEvent::KeyboardInput { input, .. } => {\n\n input.virtual_keycode.map(|key| OsEvent::KeyboardInput {\n\n button: translate_key(key),\n\n state: translate_state(input.state),\n\n })\n\n }\n\n WindowEvent::CursorMoved { position, .. } => Some(OsEvent::CursorMoved {\n\n position: Vector2::new(position.x as f32, position.y as f32),\n\n }),\n\n WindowEvent::MouseWheel { delta, .. } => match delta {\n\n MouseScrollDelta::LineDelta(x, y) => Some(OsEvent::MouseWheel(*x, *y)),\n\n MouseScrollDelta::PixelDelta(pos) => {\n\n Some(OsEvent::MouseWheel(pos.x as f32, pos.y as f32))\n\n }\n\n },\n\n WindowEvent::MouseInput { state, button, .. 
} => Some(OsEvent::MouseInput {\n\n button: translate_button(*button),\n\n state: translate_state(*state),\n\n }),\n\n &WindowEvent::ModifiersChanged(modifiers) => Some(OsEvent::KeyboardModifiers(\n\n translate_keyboard_modifiers(modifiers),\n\n )),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 95, "score": 148299.54919264212 }, { "content": "pub fn print_hot_path() -> Result<String, fmt::Error> {\n\n #[cfg(feature = \"enable_profiler\")]\n\n {\n\n let mut buffer = String::new();\n\n PROFILER.lock().unwrap().print_hot_path(&mut buffer)?;\n\n Ok(buffer)\n\n }\n\n\n\n #[cfg(not(feature = \"enable_profiler\"))]\n\n {\n\n Ok(\"Performance profiling results are not available, because feature 'enable_profiler' wasn't defined!\".to_owned())\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/profiler.rs", "rank": 96, "score": 148293.57247889644 }, { "content": "struct GameScene {\n\n scene: Scene,\n\n model_handle: Handle<Node>,\n\n walk_animation: Handle<Animation>,\n\n}\n\n\n\nasync fn create_scene(resource_manager: ResourceManager) -> GameScene {\n\n let mut scene = Scene::new();\n\n\n\n // Set ambient light.\n\n scene.ambient_lighting_color = Color::opaque(200, 200, 200);\n\n\n\n // Camera is our eyes in the world - you won't see anything without it.\n\n create_camera(\n\n resource_manager.clone(),\n\n Vector3::new(0.0, 6.0, -12.0),\n\n &mut scene.graph,\n\n )\n\n .await;\n\n\n", "file_path": "examples/ui.rs", "rank": 97, "score": 146845.93279365223 }, { "content": "fn rc_to_raw<T>(rc: &Rc<T>) -> *mut T {\n\n &**rc as *const T as *mut T\n\n}\n\n\n\nimpl<T> Visit for Arc<T>\n\nwhere\n\n T: Visit + Send + Sync + 'static,\n\n{\n\n fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult {\n\n visitor.enter_region(name)?;\n\n\n\n if visitor.reading {\n\n let mut raw = 0u64;\n\n raw.visit(\"Id\", visitor)?;\n\n if raw == 0 {\n\n return Err(VisitError::UnexpectedRcNullIndex);\n\n }\n\n if let Some(ptr) = visitor.arc_map.get(&raw) {\n\n 
if let Ok(res) = Arc::downcast::<T>(ptr.clone()) {\n\n *self = res;\n", "file_path": "rg3d-core/src/visitor.rs", "rank": 98, "score": 146842.15068907844 }, { "content": "fn arc_to_raw<T>(arc: &Arc<T>) -> *mut T {\n\n &**arc as *const T as *mut T\n\n}\n\n\n", "file_path": "rg3d-core/src/visitor.rs", "rank": 99, "score": 146842.15068907844 } ]
Rust
src/resource/model.rs
Jytesh/rg3d
5cc1017f9a9b3e5d461fbb6247675bf0df4d6d00
#![warn(missing_docs)] use crate::{ animation::Animation, core::{ pool::Handle, visitor::{Visit, VisitError, VisitResult, Visitor}, }, engine::resource_manager::ResourceManager, resource::{ fbx::{self, error::FbxError}, Resource, ResourceData, }, scene::{node::Node, Scene}, utils::log::{Log, MessageKind}, }; use std::{ borrow::Cow, path::{Path, PathBuf}, }; #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] #[repr(u32)] pub(in crate) enum NodeMapping { UseNames = 0, UseHandles = 1, } #[derive(Debug)] pub struct ModelData { pub(in crate) path: PathBuf, pub(in crate) mapping: NodeMapping, scene: Scene, } pub type Model = Resource<ModelData, ModelLoadError>; impl Model { pub fn instantiate_geometry(&self, dest_scene: &mut Scene) -> Handle<Node> { let data = self.data_ref(); let (root, old_to_new) = data.scene.graph.copy_node( data.scene.graph.get_root(), &mut dest_scene.graph, &mut |_, _| true, ); dest_scene.graph[root].is_resource_instance_root = true; let mut stack = vec![root]; while let Some(node_handle) = stack.pop() { let node = &mut dest_scene.graph[node_handle]; node.resource = Some(self.clone()); stack.extend_from_slice(node.children()); } for navmesh in data.scene.navmeshes.iter() { dest_scene.navmeshes.add(navmesh.clone()); } std::mem::drop(data); dest_scene.physics.embed_resource( &mut dest_scene.physics_binder, &dest_scene.graph, old_to_new, self.clone(), ); root } pub fn instantiate(&self, dest_scene: &mut Scene) -> ModelInstance { let root = self.instantiate_geometry(dest_scene); ModelInstance { root, animations: self.retarget_animations(root, dest_scene), } } pub fn retarget_animations( &self, root: Handle<Node>, dest_scene: &mut Scene, ) -> Vec<Handle<Animation>> { let data = self.data_ref(); let mut animation_handles = Vec::new(); for ref_anim in data.scene.animations.iter() { let mut anim_copy = ref_anim.clone(); anim_copy.resource = Some(self.clone()); for (i, ref_track) in ref_anim.get_tracks().iter().enumerate() { let ref_node = 
&data.scene.graph[ref_track.get_node()]; let instance_node = dest_scene.graph.find_by_name(root, ref_node.name()); if instance_node.is_none() { Log::writeln( MessageKind::Error, format!( "Failed to retarget animation {:?} for node {}", data.path(), ref_node.name() ), ); } anim_copy.get_tracks_mut()[i].set_node(instance_node); } animation_handles.push(dest_scene.animations.add(anim_copy)); } animation_handles } } impl ResourceData for ModelData { fn path(&self) -> Cow<Path> { Cow::Borrowed(&self.path) } } impl Default for ModelData { fn default() -> Self { Self { path: PathBuf::new(), mapping: NodeMapping::UseNames, scene: Scene::new(), } } } impl Visit for ModelData { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult { visitor.enter_region(name)?; self.path.visit("Path", visitor)?; visitor.leave_region() } } pub struct ModelInstance { pub root: Handle<Node>, pub animations: Vec<Handle<Animation>>, } #[derive(Debug)] pub enum ModelLoadError { Visit(VisitError), NotSupported(String), Fbx(FbxError), } impl From<FbxError> for ModelLoadError { fn from(fbx: FbxError) -> Self { ModelLoadError::Fbx(fbx) } } impl From<VisitError> for ModelLoadError { fn from(e: VisitError) -> Self { ModelLoadError::Visit(e) } } impl ModelData { pub(in crate) async fn load<P: AsRef<Path>>( path: P, resource_manager: ResourceManager, ) -> Result<Self, ModelLoadError> { let extension = path .as_ref() .extension() .unwrap_or_default() .to_string_lossy() .as_ref() .to_lowercase(); let (scene, mapping) = match extension.as_ref() { "fbx" => { let mut scene = Scene::new(); if let Some(filename) = path.as_ref().file_name() { let root = scene.graph.get_root(); scene.graph[root].set_name(filename.to_string_lossy().to_string()); } fbx::load_to_scene(&mut scene, resource_manager, path.as_ref())?; (scene, NodeMapping::UseNames) } "rgs" => ( Scene::from_file(path.as_ref(), resource_manager).await?, NodeMapping::UseHandles, ), _ => { return Err(ModelLoadError::NotSupported(format!( 
"Unsupported model resource format: {}", extension ))) } }; Ok(Self { path: path.as_ref().to_owned(), scene, mapping, }) } pub fn get_scene(&self) -> &Scene { &self.scene } pub fn find_node_by_name(&self, name: &str) -> Handle<Node> { self.scene.graph.find_by_name_from_root(name) } }
#![warn(missing_docs)] use crate::{ animation::Animation, core::{ pool::Handle, visitor::{Visit, VisitError, VisitResult, Visitor}, }, engine::resource_manager::ResourceManager, resource::{ fbx::{self, error::FbxError}, Resource, ResourceData, }, scene::{node::Node, Scene}, utils::log::{Log, MessageKind}, }; use std::{ borrow::Cow, path::{Path, PathBuf}, }; #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] #[repr(u32)] pub(in crate) enum NodeMapping { UseNames = 0, UseHandles = 1, } #[derive(Debug)] pub struct ModelData { pub(in crate) path: PathBuf, pub(in crate) mapping: NodeMapping, scene: Scene, } pub type Model = Resource<ModelData, ModelLoadError>; impl Model { pub fn instantiate_geometry(&self, dest_scene: &mut Scene) -> Handle<Node> { let d
xtension = path .as_ref() .extension() .unwrap_or_default() .to_string_lossy() .as_ref() .to_lowercase(); let (scene, mapping) = match extension.as_ref() { "fbx" => { let mut scene = Scene::new(); if let Some(filename) = path.as_ref().file_name() { let root = scene.graph.get_root(); scene.graph[root].set_name(filename.to_string_lossy().to_string()); } fbx::load_to_scene(&mut scene, resource_manager, path.as_ref())?; (scene, NodeMapping::UseNames) } "rgs" => ( Scene::from_file(path.as_ref(), resource_manager).await?, NodeMapping::UseHandles, ), _ => { return Err(ModelLoadError::NotSupported(format!( "Unsupported model resource format: {}", extension ))) } }; Ok(Self { path: path.as_ref().to_owned(), scene, mapping, }) } pub fn get_scene(&self) -> &Scene { &self.scene } pub fn find_node_by_name(&self, name: &str) -> Handle<Node> { self.scene.graph.find_by_name_from_root(name) } }
ata = self.data_ref(); let (root, old_to_new) = data.scene.graph.copy_node( data.scene.graph.get_root(), &mut dest_scene.graph, &mut |_, _| true, ); dest_scene.graph[root].is_resource_instance_root = true; let mut stack = vec![root]; while let Some(node_handle) = stack.pop() { let node = &mut dest_scene.graph[node_handle]; node.resource = Some(self.clone()); stack.extend_from_slice(node.children()); } for navmesh in data.scene.navmeshes.iter() { dest_scene.navmeshes.add(navmesh.clone()); } std::mem::drop(data); dest_scene.physics.embed_resource( &mut dest_scene.physics_binder, &dest_scene.graph, old_to_new, self.clone(), ); root } pub fn instantiate(&self, dest_scene: &mut Scene) -> ModelInstance { let root = self.instantiate_geometry(dest_scene); ModelInstance { root, animations: self.retarget_animations(root, dest_scene), } } pub fn retarget_animations( &self, root: Handle<Node>, dest_scene: &mut Scene, ) -> Vec<Handle<Animation>> { let data = self.data_ref(); let mut animation_handles = Vec::new(); for ref_anim in data.scene.animations.iter() { let mut anim_copy = ref_anim.clone(); anim_copy.resource = Some(self.clone()); for (i, ref_track) in ref_anim.get_tracks().iter().enumerate() { let ref_node = &data.scene.graph[ref_track.get_node()]; let instance_node = dest_scene.graph.find_by_name(root, ref_node.name()); if instance_node.is_none() { Log::writeln( MessageKind::Error, format!( "Failed to retarget animation {:?} for node {}", data.path(), ref_node.name() ), ); } anim_copy.get_tracks_mut()[i].set_node(instance_node); } animation_handles.push(dest_scene.animations.add(anim_copy)); } animation_handles } } impl ResourceData for ModelData { fn path(&self) -> Cow<Path> { Cow::Borrowed(&self.path) } } impl Default for ModelData { fn default() -> Self { Self { path: PathBuf::new(), mapping: NodeMapping::UseNames, scene: Scene::new(), } } } impl Visit for ModelData { fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult { 
visitor.enter_region(name)?; self.path.visit("Path", visitor)?; visitor.leave_region() } } pub struct ModelInstance { pub root: Handle<Node>, pub animations: Vec<Handle<Animation>>, } #[derive(Debug)] pub enum ModelLoadError { Visit(VisitError), NotSupported(String), Fbx(FbxError), } impl From<FbxError> for ModelLoadError { fn from(fbx: FbxError) -> Self { ModelLoadError::Fbx(fbx) } } impl From<VisitError> for ModelLoadError { fn from(e: VisitError) -> Self { ModelLoadError::Visit(e) } } impl ModelData { pub(in crate) async fn load<P: AsRef<Path>>( path: P, resource_manager: ResourceManager, ) -> Result<Self, ModelLoadError> { let e
random
[ { "content": "/// Tries to load and convert FBX from given path.\n\n///\n\n/// Normally you should never use this method, use resource manager to load models.\n\npub fn load_to_scene<P: AsRef<Path>>(\n\n scene: &mut Scene,\n\n resource_manager: ResourceManager,\n\n path: P,\n\n) -> Result<(), FbxError> {\n\n let start_time = Instant::now();\n\n\n\n Log::writeln(\n\n MessageKind::Information,\n\n format!(\"Trying to load {:?}\", path.as_ref()),\n\n );\n\n\n\n let now = Instant::now();\n\n let fbx = FbxDocument::new(path.as_ref())?;\n\n let parsing_time = now.elapsed().as_millis();\n\n\n\n let now = Instant::now();\n\n let fbx_scene = FbxScene::new(&fbx)?;\n\n let dom_prepare_time = now.elapsed().as_millis();\n\n\n", "file_path": "src/resource/fbx/mod.rs", "rank": 0, "score": 239598.46693377758 }, { "content": "#[cfg(not(feature = \"enable_profiler\"))]\n\npub fn print_hot_path() {\n\n println!(\"Performance profiling results are not available, because feature 'enable_profiler' wasn't defined!\")\n\n}\n\n\n", "file_path": "rg3d-core/src/profiler.rs", "rank": 1, "score": 213922.89836919578 }, { "content": "/// Utility function that replaces back slashes \\ to forward /\n\n/// It replaces slashes only on windows!\n\npub fn replace_slashes<P: AsRef<Path>>(path: P) -> PathBuf {\n\n #[cfg(target_os = \"windows\")]\n\n {\n\n if path.as_ref().is_absolute() {\n\n // Absolute Windows paths are incompatible with other operating systems so\n\n // don't bother here and return existing path as owned.\n\n path.as_ref().to_owned()\n\n } else {\n\n // Replace all \\ to /. 
This is needed because on macos or linux \\ is a valid symbol in\n\n // file name, and not separator (except linux which understand both variants).\n\n let mut os_str = std::ffi::OsString::new();\n\n let count = path.as_ref().components().count();\n\n for (i, component) in path.as_ref().components().enumerate() {\n\n os_str.push(component.as_os_str());\n\n if i != count - 1 {\n\n os_str.push(\"/\");\n\n }\n\n }\n\n PathBuf::from(os_str)\n\n }\n", "file_path": "rg3d-core/src/lib.rs", "rank": 2, "score": 208940.62257705006 }, { "content": "pub trait MessageData: 'static + Debug + Clone + PartialEq {}\n\n\n\n/// Message is basic communication element that is used to deliver information to UI nodes\n\n/// or to user code.\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct UiMessage<M: MessageData, C: Control<M, C>> {\n\n /// Useful flag to check if a message was already handled.\n\n handled: Cell<bool>,\n\n\n\n /// Actual message data. Use pattern matching to get type specific data.\n\n data: UiMessageData<M, C>,\n\n\n\n /// Handle of node that will receive message. 
Please note that all nodes in hierarchy will\n\n /// also receive this message, order is \"up-on-tree\".\n\n destination: Handle<UINode<M, C>>,\n\n\n\n /// Indicates the direction of the message.\n\n ///\n\n /// See [MessageDirection](enum.MessageDirection.html) for details.\n\n direction: MessageDirection,\n", "file_path": "rg3d-ui/src/message.rs", "rank": 3, "score": 196785.9945776297 }, { "content": "pub fn create_scene_async(resource_manager: ResourceManager) -> Arc<Mutex<SceneLoadContext>> {\n\n // Create load context - it will be shared with caller and loader threads.\n\n let context = Arc::new(Mutex::new(SceneLoadContext {\n\n scene_data: None,\n\n message: \"Starting..\".to_string(),\n\n progress: 0.0,\n\n }));\n\n let result = context.clone();\n\n\n\n // Spawn separate thread which will create scene by loading various assets.\n\n std::thread::spawn(move || {\n\n futures::executor::block_on(async move {\n\n let mut scene = Scene::new();\n\n\n\n // Create reverb effect for more natural sound - our player walks in some sort of cathedral,\n\n // so there will be pretty decent echo.\n\n let mut base_effect = BaseEffect::default();\n\n // Make sure it won't be too loud - rg3d-sound doesn't care about energy conservation law, it\n\n // just makes requested calculation.\n\n base_effect.set_gain(0.7);\n", "file_path": "examples/shared/mod.rs", "rank": 4, "score": 184637.6830728355 }, { "content": "pub fn wrapf(mut n: f32, mut min_limit: f32, mut max_limit: f32) -> f32 {\n\n if n >= min_limit && n <= max_limit {\n\n return n;\n\n }\n\n\n\n if max_limit == 0.0 && min_limit == 0.0 {\n\n return 0.0;\n\n }\n\n\n\n max_limit -= min_limit;\n\n\n\n let offset = min_limit;\n\n min_limit = 0.0;\n\n n -= offset;\n\n\n\n let num_of_max = (n / max_limit).abs().floor();\n\n\n\n if n >= max_limit {\n\n n -= num_of_max * max_limit;\n\n } else if n < min_limit {\n\n n += (num_of_max + 1.0) * max_limit;\n\n }\n\n\n\n n + offset\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", 
"rank": 5, "score": 181804.2041155096 }, { "content": "#[cfg(not(feature = \"enable_profiler\"))]\n\npub fn print() {\n\n println!(\"Performance profiling results are not available, because feature 'enable_profiler' wasn't defined!\")\n\n}\n\n\n", "file_path": "rg3d-core/src/profiler.rs", "rank": 6, "score": 179698.25507509612 }, { "content": "pub fn make_vec3_container<P: AsRef<str>>(\n\n nodes: &FbxNodeContainer,\n\n container_node: Handle<FbxNode>,\n\n data_name: P,\n\n) -> Result<FbxContainer<Vector3<f32>>, FbxError> {\n\n FbxContainer::new(nodes, container_node, data_name, |attributes| {\n\n let mut normals = Vec::with_capacity(attributes.len() / 3);\n\n for normal in attributes.chunks_exact(3) {\n\n normals.push(Vector3::new(\n\n normal[0].as_f32()?,\n\n normal[1].as_f32()?,\n\n normal[2].as_f32()?,\n\n ));\n\n }\n\n Ok(normals)\n\n })\n\n}\n", "file_path": "src/resource/fbx/scene/mod.rs", "rank": 7, "score": 176965.67999311662 }, { "content": "#[inline]\n\npub fn type_name_of<T>(_: T) -> &'static str {\n\n std::any::type_name::<T>()\n\n}\n\n\n\n#[cfg(feature = \"enable_profiler\")]\n\n#[macro_export]\n\nmacro_rules! scope_profile {\n\n () => {\n\n let function_name = {\n\n fn scope() {}\n\n $crate::profiler::type_name_of(scope)\n\n };\n\n let _scope_guard = $crate::profiler::ScopeDefinition::new(function_name, line!());\n\n };\n\n}\n\n\n\n#[cfg(not(feature = \"enable_profiler\"))]\n\n#[macro_export]\n\nmacro_rules! 
scope_profile {\n\n () => {};\n", "file_path": "rg3d-core/src/profiler.rs", "rank": 8, "score": 176960.88367800374 }, { "content": "fn arc_to_raw<T>(arc: Arc<T>) -> *mut T {\n\n &*arc as *const T as *mut T\n\n}\n\n\n", "file_path": "rg3d-core/src/visitor.rs", "rank": 9, "score": 172356.80836512952 }, { "content": "fn rc_to_raw<T>(rc: Rc<T>) -> *mut T {\n\n &*rc as *const T as *mut T\n\n}\n\n\n\nimpl<T> Visit for Arc<T>\n\nwhere\n\n T: Default + Visit + Send + Sync + 'static,\n\n{\n\n fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult {\n\n visitor.enter_region(name)?;\n\n\n\n if visitor.reading {\n\n let mut raw = 0u64;\n\n raw.visit(\"Id\", visitor)?;\n\n if raw == 0 {\n\n return Err(VisitError::UnexpectedRcNullIndex);\n\n }\n\n if let Some(ptr) = visitor.arc_map.get(&raw) {\n\n if let Ok(res) = Arc::downcast::<T>(ptr.clone()) {\n\n *self = res;\n", "file_path": "rg3d-core/src/visitor.rs", "rank": 10, "score": 172356.80836512952 }, { "content": "pub fn barycentric_to_world(\n\n bary: (f32, f32, f32),\n\n pa: Vector3<f32>,\n\n pb: Vector3<f32>,\n\n pc: Vector3<f32>,\n\n) -> Vector3<f32> {\n\n pa.scale(bary.0) + pb.scale(bary.1) + pc.scale(bary.2)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 11, "score": 170394.6964902355 }, { "content": "///\n\n/// Triangulates specified polygon.\n\n///\n\npub fn triangulate(vertices: &[Vector3<f32>], out_triangles: &mut Vec<[usize; 3]>) {\n\n out_triangles.clear();\n\n if vertices.len() == 3 {\n\n // Triangulating a triangle?\n\n out_triangles.push([0, 1, 2]);\n\n } else if vertices.len() == 4 {\n\n // Special case for quadrilaterals (much faster than generic)\n\n let mut start_vertex = 0;\n\n for i in 0..4 {\n\n let v = vertices[i];\n\n let v0 = vertices[(i + 3) % 4];\n\n if let Some(left) = (v0 - v).try_normalize(std::f32::EPSILON) {\n\n let v1 = vertices[(i + 2) % 4];\n\n if let Some(diag) = (v1 - v).try_normalize(std::f32::EPSILON) {\n\n let v2 = vertices[(i + 1) % 4];\n\n if let 
Some(right) = (v2 - v).try_normalize(std::f32::EPSILON) {\n\n // Check for concave vertex\n\n let angle = left.dot(&diag).acos() + right.dot(&diag).acos();\n\n if angle > std::f32::consts::PI {\n\n start_vertex = i;\n", "file_path": "rg3d-core/src/math/triangulator.rs", "rank": 12, "score": 166563.7270581963 }, { "content": "pub fn get_barycentric_coords(\n\n p: &Vector3<f32>,\n\n a: &Vector3<f32>,\n\n b: &Vector3<f32>,\n\n c: &Vector3<f32>,\n\n) -> (f32, f32, f32) {\n\n let v0 = *b - *a;\n\n let v1 = *c - *a;\n\n let v2 = *p - *a;\n\n\n\n let d00 = v0.dot(&v0);\n\n let d01 = v0.dot(&v1);\n\n let d11 = v1.dot(&v1);\n\n let d20 = v2.dot(&v0);\n\n let d21 = v2.dot(&v1);\n\n let denom = d00 * d11 - d01.powi(2);\n\n\n\n let v = (d11 * d20 - d01 * d21) / denom;\n\n let w = (d00 * d21 - d01 * d20) / denom;\n\n let u = 1.0 - v - w;\n\n\n\n (u, v, w)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 13, "score": 166193.8240626658 }, { "content": "pub fn get_barycentric_coords_2d(\n\n p: Vector2<f32>,\n\n a: Vector2<f32>,\n\n b: Vector2<f32>,\n\n c: Vector2<f32>,\n\n) -> (f32, f32, f32) {\n\n let v0 = b - a;\n\n let v1 = c - a;\n\n let v2 = p - a;\n\n\n\n let d00 = v0.dot(&v0);\n\n let d01 = v0.dot(&v1);\n\n let d11 = v1.dot(&v1);\n\n let d20 = v2.dot(&v0);\n\n let d21 = v2.dot(&v1);\n\n let inv_denom = 1.0 / (d00 * d11 - d01.powi(2));\n\n\n\n let v = (d11 * d20 - d01 * d21) * inv_denom;\n\n let w = (d00 * d21 - d01 * d20) * inv_denom;\n\n let u = 1.0 - v - w;\n\n\n\n (u, v, w)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 14, "score": 166193.8240626658 }, { "content": "pub fn ray_rect_intersection(\n\n rect: Rect<f32>,\n\n origin: Vector2<f32>,\n\n dir: Vector2<f32>,\n\n) -> Option<IntersectionResult> {\n\n let min = rect.left_top_corner();\n\n let max = rect.right_bottom_corner();\n\n\n\n let (mut tmin, mut tmax) = if dir.x >= 0.0 {\n\n ((min.x - origin.x) / dir.x, (max.x - origin.x) / dir.x)\n\n } else {\n\n ((max.x - origin.x) / dir.x, 
(min.x - origin.x) / dir.x)\n\n };\n\n\n\n let (tymin, tymax) = if dir.y >= 0.0 {\n\n ((min.y - origin.y) / dir.y, (max.y - origin.y) / dir.y)\n\n } else {\n\n ((max.y - origin.y) / dir.y, (min.y - origin.y) / dir.y)\n\n };\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 15, "score": 166193.8240626658 }, { "content": "pub fn vec3_to_vec2_by_plane(\n\n plane_class: PlaneClass,\n\n normal: Vector3<f32>,\n\n point: Vector3<f32>,\n\n) -> Vector2<f32> {\n\n match plane_class {\n\n PlaneClass::XY => {\n\n if normal.z < 0.0 {\n\n Vector2::new(point.y, point.x)\n\n } else {\n\n Vector2::new(point.x, point.y)\n\n }\n\n }\n\n PlaneClass::XZ => {\n\n if normal.y < 0.0 {\n\n Vector2::new(point.x, point.z)\n\n } else {\n\n Vector2::new(point.z, point.x)\n\n }\n\n }\n\n PlaneClass::YZ => {\n\n if normal.x < 0.0 {\n\n Vector2::new(point.z, point.y)\n\n } else {\n\n Vector2::new(point.y, point.z)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 16, "score": 166193.8240626658 }, { "content": "pub fn is_point_inside_2d_triangle(\n\n point: Vector2<f32>,\n\n pt_a: Vector2<f32>,\n\n pt_b: Vector2<f32>,\n\n pt_c: Vector2<f32>,\n\n) -> bool {\n\n let ba = pt_b - pt_a;\n\n let ca = pt_c - pt_a;\n\n\n\n let vp = point - pt_a;\n\n\n\n let ba_dot_ba = ba.dot(&ba);\n\n let ca_dot_ba = ca.dot(&ba);\n\n let ca_dot_ca = ca.dot(&ca);\n\n\n\n let dot_02 = ca.dot(&vp);\n\n let dot_12 = ba.dot(&vp);\n\n\n\n let inv_denom = 1.0 / (ca_dot_ca * ba_dot_ba - ca_dot_ba.powi(2));\n\n\n\n // calculate barycentric coordinates\n\n let u = (ba_dot_ba * dot_02 - ca_dot_ba * dot_12) * inv_denom;\n\n let v = (ca_dot_ca * dot_12 - ca_dot_ba * dot_02) * inv_denom;\n\n\n\n (u >= 0.0) && (v >= 0.0) && (u + v < 1.0)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 17, "score": 166193.8240626658 }, { "content": "fn convert_model(\n\n fbx_scene: &FbxScene,\n\n model: &FbxModel,\n\n resource_manager: ResourceManager,\n\n graph: &mut Graph,\n\n animations: &mut 
AnimationContainer,\n\n animation_handle: Handle<Animation>,\n\n) -> Result<Handle<Node>, FbxError> {\n\n let base = convert_model_to_base(model);\n\n\n\n // Create node with correct kind.\n\n let node_handle = if !model.geoms.is_empty() {\n\n convert_mesh(base, fbx_scene, resource_manager, model, graph)?\n\n } else if model.light.is_some() {\n\n fbx_scene.get(model.light).as_light()?.convert(base, graph)\n\n } else {\n\n base.build(graph)\n\n };\n\n\n\n // Convert animations\n", "file_path": "src/resource/fbx/mod.rs", "rank": 18, "score": 165258.8383677839 }, { "content": "fn convert_model_to_base(model: &FbxModel) -> BaseBuilder {\n\n BaseBuilder::new()\n\n .with_inv_bind_pose_transform(model.inv_bind_transform)\n\n .with_name(model.name.as_str())\n\n .with_local_transform(\n\n TransformBuilder::new()\n\n .with_local_rotation(quat_from_euler(model.rotation))\n\n .with_local_scale(model.scale)\n\n .with_local_position(model.translation)\n\n .with_post_rotation(quat_from_euler(model.post_rotation))\n\n .with_pre_rotation(quat_from_euler(model.pre_rotation))\n\n .with_rotation_offset(model.rotation_offset)\n\n .with_rotation_pivot(model.rotation_pivot)\n\n .with_scaling_offset(model.scaling_offset)\n\n .with_scaling_pivot(model.scaling_pivot)\n\n .build(),\n\n )\n\n}\n\n\n", "file_path": "src/resource/fbx/mod.rs", "rank": 19, "score": 165015.52543294433 }, { "content": "/// A trait for resource data.\n\npub trait ResourceData: 'static + Default + Debug + Visit + Send {\n\n /// Returns path of resource data.\n\n fn path(&self) -> Cow<Path>;\n\n}\n\n\n", "file_path": "src/resource/mod.rs", "rank": 20, "score": 161771.4287768079 }, { "content": "/// Performs hashing of a sized value by interpreting it as raw memory.\n\npub fn hash_as_bytes<T: Sized, H: Hasher>(value: &T, hasher: &mut H) {\n\n unsafe {\n\n hasher.write(std::slice::from_raw_parts(\n\n value as *const T as *const u8,\n\n std::mem::size_of::<T>(),\n\n ))\n\n }\n\n}\n", "file_path": "src/utils/mod.rs", 
"rank": 21, "score": 161297.23821385356 }, { "content": "fn read_tangents(\n\n geom_node_handle: Handle<FbxNode>,\n\n nodes: &FbxNodeContainer,\n\n) -> Result<Option<FbxContainer<Vector3<f32>>>, FbxError> {\n\n if let Ok(layer_element_tangent) = nodes.find(geom_node_handle, \"LayerElementTangent\") {\n\n Ok(Some(scene::make_vec3_container(\n\n nodes,\n\n layer_element_tangent,\n\n \"Tangents\",\n\n )?))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "src/resource/fbx/scene/geometry.rs", "rank": 22, "score": 159837.96518402782 }, { "content": "fn read_normals(\n\n geom_node_handle: Handle<FbxNode>,\n\n nodes: &FbxNodeContainer,\n\n) -> Result<Option<FbxContainer<Vector3<f32>>>, FbxError> {\n\n if let Ok(layer_element_normal) = nodes.find(geom_node_handle, \"LayerElementNormal\") {\n\n Ok(Some(scene::make_vec3_container(\n\n nodes,\n\n layer_element_normal,\n\n \"Normals\",\n\n )?))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "src/resource/fbx/scene/geometry.rs", "rank": 23, "score": 159837.96518402782 }, { "content": "fn read_binormals(\n\n geom_node_handle: Handle<FbxNode>,\n\n nodes: &FbxNodeContainer,\n\n) -> Result<Option<FbxContainer<Vector3<f32>>>, FbxError> {\n\n if let Ok(layer_element_tangent) = nodes.find(geom_node_handle, \"LayerElementBinormal\") {\n\n Ok(Some(scene::make_vec3_container(\n\n nodes,\n\n layer_element_tangent,\n\n \"Binormals\",\n\n )?))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "src/resource/fbx/scene/geometry.rs", "rank": 24, "score": 159837.96518402782 }, { "content": "fn read_uvs(\n\n geom_node_handle: Handle<FbxNode>,\n\n nodes: &FbxNodeContainer,\n\n) -> Result<Option<FbxContainer<Vector2<f32>>>, FbxError> {\n\n if let Ok(layer_element_uv) = nodes.find(geom_node_handle, \"LayerElementUV\") {\n\n Ok(Some(FbxContainer::new(\n\n nodes,\n\n layer_element_uv,\n\n \"UV\",\n\n |attributes| {\n\n let mut uvs = Vec::with_capacity(attributes.len() / 2);\n\n for uv in attributes.chunks_exact(2) {\n\n 
uvs.push(Vector2::new(uv[0].as_f32()?, uv[1].as_f32()?));\n\n }\n\n Ok(uvs)\n\n },\n\n )?))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "src/resource/fbx/scene/geometry.rs", "rank": 25, "score": 159837.96518402782 }, { "content": "fn read_vertices(\n\n geom_node_handle: Handle<FbxNode>,\n\n nodes: &FbxNodeContainer,\n\n) -> Result<Vec<Vector3<f32>>, FbxError> {\n\n let vertices_node_handle = nodes.find(geom_node_handle, \"Vertices\")?;\n\n let vertices_array_node = nodes.get_by_name(vertices_node_handle, \"a\")?;\n\n let mut vertices = Vec::with_capacity(vertices_array_node.attrib_count() / 3);\n\n for vertex in vertices_array_node.attributes().chunks_exact(3) {\n\n vertices.push(Vector3::new(\n\n vertex[0].as_f32()?,\n\n vertex[1].as_f32()?,\n\n vertex[2].as_f32()?,\n\n ));\n\n }\n\n\n\n Ok(vertices)\n\n}\n\n\n", "file_path": "src/resource/fbx/scene/geometry.rs", "rank": 26, "score": 159837.96518402782 }, { "content": "fn read_indices(\n\n geom_node_handle: Handle<FbxNode>,\n\n nodes: &FbxNodeContainer,\n\n) -> Result<Vec<i32>, FbxError> {\n\n let indices_node_handle = nodes.find(geom_node_handle, \"PolygonVertexIndex\")?;\n\n let indices_array_node = nodes.get_by_name(indices_node_handle, \"a\")?;\n\n let mut indices = Vec::with_capacity(indices_array_node.attrib_count());\n\n for index in indices_array_node.attributes() {\n\n indices.push(index.as_i32()?);\n\n }\n\n Ok(indices)\n\n}\n\n\n", "file_path": "src/resource/fbx/scene/geometry.rs", "rank": 27, "score": 159837.96518402782 }, { "content": "fn read_materials(\n\n geom_node_handle: Handle<FbxNode>,\n\n nodes: &FbxNodeContainer,\n\n) -> Result<Option<FbxContainer<i32>>, FbxError> {\n\n if let Ok(layer_element_material_node_handle) =\n\n nodes.find(geom_node_handle, \"LayerElementMaterial\")\n\n {\n\n Ok(Some(FbxContainer::new(\n\n nodes,\n\n layer_element_material_node_handle,\n\n \"Materials\",\n\n |attributes| {\n\n let mut materials = Vec::with_capacity(attributes.len());\n\n for attribute 
in attributes {\n\n materials.push(attribute.as_i32()?);\n\n }\n\n Ok(materials)\n\n },\n\n )?))\n\n } else {\n", "file_path": "src/resource/fbx/scene/geometry.rs", "rank": 28, "score": 159837.96518402782 }, { "content": "pub trait Visit {\n\n fn visit(&mut self, name: &str, visitor: &mut Visitor) -> VisitResult;\n\n}\n\n\n\nimpl Default for Visitor {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl Visitor {\n\n const MAGIC: &'static str = \"RG3D\";\n\n\n\n pub fn new() -> Self {\n\n let mut nodes = Pool::new();\n\n let root = nodes.spawn(Node::new(\"__ROOT__\", Handle::NONE));\n\n Self {\n\n nodes,\n\n rc_map: HashMap::new(),\n\n arc_map: HashMap::new(),\n", "file_path": "rg3d-core/src/visitor.rs", "rank": 29, "score": 159697.12956729977 }, { "content": "fn calculate_hash<T: Hash>(t: &T) -> u64 {\n\n let mut s = DefaultHasher::new();\n\n t.hash(&mut s);\n\n s.finish()\n\n}\n\n\n\nimpl Profiler {\n\n fn enter_scope(&mut self, scope: &mut ScopeMark) {\n\n let parent_scope_mark = *self.scope_stack.last().unwrap();\n\n scope.parent_scope_hash = calculate_hash(&parent_scope_mark);\n\n self.scope_stack.push(*scope);\n\n self.samples.entry(*scope).or_default();\n\n self.samples\n\n .get_mut(&parent_scope_mark)\n\n .unwrap()\n\n .children\n\n .insert(*scope);\n\n }\n\n\n\n fn leave_scope(&mut self, scope: ScopeMark, elapsed: f64) {\n", "file_path": "rg3d-core/src/profiler.rs", "rank": 30, "score": 157810.07257685097 }, { "content": "pub fn read_binary<R>(file: &mut R) -> Result<FbxDocument, FbxError>\n\nwhere\n\n R: Read + Seek,\n\n{\n\n let total_length = file.seek(SeekFrom::End(0))?;\n\n file.seek(SeekFrom::Start(0))?;\n\n\n\n // Ignore all stuff until version.\n\n let mut temp = [0; 23];\n\n file.read_exact(&mut temp)?;\n\n\n\n // Verify version.\n\n let version = file.read_u32::<LittleEndian>()? 
as i32;\n\n\n\n // Anything else should be supported.\n\n if version < 7100 {\n\n return Err(FbxError::UnsupportedVersion(version));\n\n }\n\n\n\n let mut nodes = Pool::new();\n", "file_path": "src/resource/fbx/document/binary.rs", "rank": 31, "score": 157388.47615593363 }, { "content": "pub fn read_ascii<R>(reader: &mut R) -> Result<FbxDocument, FbxError>\n\nwhere\n\n R: Read + Seek,\n\n{\n\n let mut nodes: Pool<FbxNode> = Pool::new();\n\n let root_handle = nodes.spawn(FbxNode {\n\n name: String::from(\"__ROOT__\"),\n\n children: Vec::new(),\n\n parent: Handle::NONE,\n\n attributes: Vec::new(),\n\n });\n\n let mut parent_handle: Handle<FbxNode> = root_handle;\n\n let mut node_handle: Handle<FbxNode> = Handle::NONE;\n\n let mut buffer: Vec<u8> = Vec::new();\n\n let mut name: Vec<u8> = Vec::new();\n\n let mut value: Vec<u8> = Vec::new();\n\n\n\n let buf_len = reader.seek(SeekFrom::End(0))?;\n\n reader.seek(SeekFrom::Start(0))?;\n\n\n", "file_path": "src/resource/fbx/document/ascii.rs", "rank": 32, "score": 157388.47615593363 }, { "content": "fn map_texture(tex: Option<Texture>, rm: ResourceManager) -> Option<Texture> {\n\n if let Some(shallow_texture) = tex {\n\n let shallow_texture = shallow_texture.state();\n\n Some(rm.request_texture(shallow_texture.path()))\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n/// A structure that holds times that specific update step took.\n\n#[derive(Copy, Clone, Default, Debug)]\n\npub struct PerformanceStatistics {\n\n /// A time (in seconds) which was required to update physics.\n\n pub physics_time: f32,\n\n\n\n /// A time (in seconds) which was required to update graph.\n\n pub graph_update_time: f32,\n\n\n\n /// A time (in seconds) which was required to update animations.\n\n pub animations_update_time: f32,\n", "file_path": "src/scene/mod.rs", "rank": 33, "score": 156988.64058102074 }, { "content": "#[allow(dead_code)]\n\npub fn load_with<F>(mut loadfn: F)\n\nwhere\n\n F: FnMut(&'static str) -> *const 
__gl_imports::raw::c_void,\n\n{\n\n ActiveShaderProgram::load_with(&mut loadfn);\n\n ActiveTexture::load_with(&mut loadfn);\n\n AttachShader::load_with(&mut loadfn);\n\n BeginConditionalRender::load_with(&mut loadfn);\n\n BeginQuery::load_with(&mut loadfn);\n\n BeginQueryIndexed::load_with(&mut loadfn);\n\n BeginTransformFeedback::load_with(&mut loadfn);\n\n BindAttribLocation::load_with(&mut loadfn);\n\n BindBuffer::load_with(&mut loadfn);\n\n BindBufferBase::load_with(&mut loadfn);\n\n BindBufferRange::load_with(&mut loadfn);\n\n BindBuffersBase::load_with(&mut loadfn);\n\n BindBuffersRange::load_with(&mut loadfn);\n\n BindFragDataLocation::load_with(&mut loadfn);\n\n BindFragDataLocationIndexed::load_with(&mut loadfn);\n\n BindFramebuffer::load_with(&mut loadfn);\n", "file_path": "src/renderer/framework/gl.rs", "rank": 34, "score": 154451.3707547838 }, { "content": "/// A trait for resource load error.\n\npub trait ResourceLoadError: 'static + Debug + Send + Sync {}\n\n\n\nimpl<T> ResourceLoadError for T where T: 'static + Debug + Send + Sync {}\n\n\n\n/// Resource could be in three possible states:\n\n/// 1. Pending - it is loading.\n\n/// 2. LoadError - an error has occurred during the load.\n\n/// 3. Ok - resource is fully loaded and ready to use.\n\n///\n\n/// Why it is so complex?\n\n/// Short answer: asynchronous loading.\n\n/// Long answer: when you loading a scene you expect it to be loaded as fast as\n\n/// possible, use all available power of the CPU. To achieve that each resource\n\n/// ideally should be loaded on separate core of the CPU, but since this is\n\n/// asynchronous, we must have the ability to track the state of the resource. 
\n\n#[derive(Debug)]\n\npub enum ResourceState<T: ResourceData, E: ResourceLoadError> {\n\n /// Resource is loading from external resource or in the queue to load.\n\n Pending {\n\n /// A path to load resource from.\n", "file_path": "src/resource/mod.rs", "rank": 35, "score": 154344.53812860537 }, { "content": "fn link_child_with_parent_component(\n\n parent: &mut FbxComponent,\n\n child: &mut FbxComponent,\n\n child_handle: Handle<FbxComponent>,\n\n property: String,\n\n) {\n\n match parent {\n\n // Link model with other components\n\n FbxComponent::Model(model) => match child {\n\n FbxComponent::Geometry(_) => model.geoms.push(child_handle),\n\n FbxComponent::Material(_) => model.materials.push(child_handle),\n\n FbxComponent::AnimationCurveNode(_) => model.animation_curve_nodes.push(child_handle),\n\n FbxComponent::Light(_) => model.light = child_handle,\n\n FbxComponent::Model(_) => model.children.push(child_handle),\n\n _ => (),\n\n },\n\n // Link material with textures\n\n FbxComponent::Material(material) => {\n\n if let FbxComponent::Texture(_) = child {\n\n material.textures.push((property, child_handle));\n", "file_path": "src/resource/fbx/scene/mod.rs", "rank": 36, "score": 151176.3846703524 }, { "content": "fn create_scene_async(resource_manager: ResourceManager) -> Arc<Mutex<SceneLoadContext>> {\n\n // Create load context - it will be shared with caller and loader threads.\n\n let context = Arc::new(Mutex::new(SceneLoadContext {\n\n data: None,\n\n message: \"Starting..\".to_string(),\n\n progress: 0.0,\n\n }));\n\n let result = context.clone();\n\n\n\n // Spawn separate thread which will create scene by loading various assets.\n\n std::thread::spawn(move || {\n\n futures::executor::block_on(async move {\n\n let mut scene = Scene::new();\n\n\n\n // It is important to lock context for short period of time so other thread can\n\n // read data from it as soon as possible - not when everything was loaded.\n\n context\n\n .lock()\n\n .unwrap()\n\n 
.report_progress(0.0, \"Creating camera...\");\n", "file_path": "examples/async.rs", "rank": 37, "score": 147498.11183376238 }, { "content": "/// Translates window mouse button into rg3d-ui mouse button.\n\npub fn translate_button(button: crate::event::MouseButton) -> crate::gui::message::MouseButton {\n\n match button {\n\n crate::event::MouseButton::Left => crate::gui::message::MouseButton::Left,\n\n crate::event::MouseButton::Right => crate::gui::message::MouseButton::Right,\n\n crate::event::MouseButton::Middle => crate::gui::message::MouseButton::Middle,\n\n crate::event::MouseButton::Other(i) => crate::gui::message::MouseButton::Other(i),\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 38, "score": 145540.02840533736 }, { "content": "use crate::core::algebra::{Matrix4, Vector3};\n\nuse crate::{\n\n core::pool::Handle,\n\n resource::fbx::{\n\n document::{FbxNode, FbxNodeContainer},\n\n scene::FbxComponent,\n\n },\n\n};\n\n\n\npub struct FbxModel {\n\n pub name: String,\n\n pub pre_rotation: Vector3<f32>,\n\n pub post_rotation: Vector3<f32>,\n\n pub rotation_offset: Vector3<f32>,\n\n pub rotation_pivot: Vector3<f32>,\n\n pub scaling_offset: Vector3<f32>,\n\n pub scaling_pivot: Vector3<f32>,\n\n pub rotation: Vector3<f32>,\n\n pub scale: Vector3<f32>,\n\n pub translation: Vector3<f32>,\n", "file_path": "src/resource/fbx/scene/model.rs", "rank": 39, "score": 144201.41220919508 }, { "content": " pub geometric_translation: Vector3<f32>,\n\n pub geometric_rotation: Vector3<f32>,\n\n pub geometric_scale: Vector3<f32>,\n\n pub inv_bind_transform: Matrix4<f32>,\n\n pub geoms: Vec<Handle<FbxComponent>>,\n\n /// List of handles of materials\n\n pub materials: Vec<Handle<FbxComponent>>,\n\n /// List of handles of animation curve nodes\n\n pub animation_curve_nodes: Vec<Handle<FbxComponent>>,\n\n /// List of handles of children models\n\n pub children: Vec<Handle<FbxComponent>>,\n\n /// Handle to light component\n\n pub light: 
Handle<FbxComponent>,\n\n}\n\n\n\nimpl FbxModel {\n\n pub fn read(\n\n model_node_handle: Handle<FbxNode>,\n\n nodes: &FbxNodeContainer,\n\n ) -> Result<FbxModel, String> {\n", "file_path": "src/resource/fbx/scene/model.rs", "rank": 40, "score": 144183.35573696505 }, { "content": " let mut name = String::from(\"Unnamed\");\n\n\n\n let model_node = nodes.get(model_node_handle);\n\n if let Ok(name_attrib) = model_node.get_attrib(1) {\n\n name = name_attrib.as_string();\n\n }\n\n\n\n // Remove prefix\n\n if name.starts_with(\"Model::\") {\n\n name = name.chars().skip(7).collect();\n\n }\n\n\n\n let mut model = FbxModel {\n\n name,\n\n pre_rotation: Vector3::default(),\n\n post_rotation: Vector3::default(),\n\n rotation_offset: Vector3::default(),\n\n rotation_pivot: Vector3::default(),\n\n scaling_offset: Vector3::default(),\n\n scaling_pivot: Vector3::default(),\n", "file_path": "src/resource/fbx/scene/model.rs", "rank": 41, "score": 144179.0840744139 }, { "content": " \"Lcl Translation\" => model.translation = property_node.get_vec3_at(4)?,\n\n \"Lcl Rotation\" => model.rotation = property_node.get_vec3_at(4)?,\n\n \"Lcl Scaling\" => model.scale = property_node.get_vec3_at(4)?,\n\n \"PreRotation\" => model.pre_rotation = property_node.get_vec3_at(4)?,\n\n \"PostRotation\" => model.post_rotation = property_node.get_vec3_at(4)?,\n\n \"RotationOffset\" => model.rotation_offset = property_node.get_vec3_at(4)?,\n\n \"RotationPivot\" => model.rotation_pivot = property_node.get_vec3_at(4)?,\n\n \"ScalingOffset\" => model.scaling_offset = property_node.get_vec3_at(4)?,\n\n \"ScalingPivot\" => model.scaling_pivot = property_node.get_vec3_at(4)?,\n\n \"GeometricTranslation\" => {\n\n model.geometric_translation = property_node.get_vec3_at(4)?\n\n }\n\n \"GeometricScaling\" => model.geometric_scale = property_node.get_vec3_at(4)?,\n\n \"GeometricRotation\" => model.geometric_rotation = property_node.get_vec3_at(4)?,\n\n _ => (), // Unused properties\n\n }\n\n }\n\n 
Ok(model)\n\n }\n\n}\n", "file_path": "src/resource/fbx/scene/model.rs", "rank": 42, "score": 144174.32934067943 }, { "content": " rotation: Vector3::default(),\n\n scale: Vector3::new(1.0, 1.0, 1.0),\n\n translation: Vector3::default(),\n\n geometric_translation: Vector3::default(),\n\n geometric_rotation: Vector3::default(),\n\n geometric_scale: Vector3::new(1.0, 1.0, 1.0),\n\n inv_bind_transform: Matrix4::identity(),\n\n geoms: Vec::new(),\n\n materials: Vec::new(),\n\n animation_curve_nodes: Vec::new(),\n\n children: Vec::new(),\n\n light: Handle::NONE,\n\n };\n\n\n\n let properties70_node_handle = nodes.find(model_node_handle, \"Properties70\")?;\n\n let properties70_node = nodes.get(properties70_node_handle);\n\n for property_handle in properties70_node.children() {\n\n let property_node = nodes.get(*property_handle);\n\n let name_attrib = property_node.get_attrib(0)?;\n\n match name_attrib.as_string().as_str() {\n", "file_path": "src/resource/fbx/scene/model.rs", "rank": 43, "score": 144170.56213039398 }, { "content": "pub fn fix_shadows_distance(mut quality: QualitySettings) -> QualitySettings {\n\n // Scale distance because game world has different scale.\n\n quality.spot_shadows_distance *= 2.0;\n\n quality.point_shadows_distance *= 2.0;\n\n quality\n\n}\n", "file_path": "examples/shared/mod.rs", "rank": 44, "score": 143965.62149013198 }, { "content": "/// Translates cursor icon from rg3d-ui library to glutin format.\n\npub fn translate_cursor_icon(icon: crate::gui::message::CursorIcon) -> crate::window::CursorIcon {\n\n match icon {\n\n crate::gui::message::CursorIcon::Default => crate::window::CursorIcon::Default,\n\n crate::gui::message::CursorIcon::Crosshair => crate::window::CursorIcon::Crosshair,\n\n crate::gui::message::CursorIcon::Hand => crate::window::CursorIcon::Hand,\n\n crate::gui::message::CursorIcon::Arrow => crate::window::CursorIcon::Arrow,\n\n crate::gui::message::CursorIcon::Move => crate::window::CursorIcon::Move,\n\n 
crate::gui::message::CursorIcon::Text => crate::window::CursorIcon::Text,\n\n crate::gui::message::CursorIcon::Wait => crate::window::CursorIcon::Wait,\n\n crate::gui::message::CursorIcon::Help => crate::window::CursorIcon::Help,\n\n crate::gui::message::CursorIcon::Progress => crate::window::CursorIcon::Progress,\n\n crate::gui::message::CursorIcon::NotAllowed => crate::window::CursorIcon::NotAllowed,\n\n crate::gui::message::CursorIcon::ContextMenu => crate::window::CursorIcon::ContextMenu,\n\n crate::gui::message::CursorIcon::Cell => crate::window::CursorIcon::Cell,\n\n crate::gui::message::CursorIcon::VerticalText => crate::window::CursorIcon::VerticalText,\n\n crate::gui::message::CursorIcon::Alias => crate::window::CursorIcon::Alias,\n\n crate::gui::message::CursorIcon::Copy => crate::window::CursorIcon::Copy,\n\n crate::gui::message::CursorIcon::NoDrop => crate::window::CursorIcon::NoDrop,\n\n crate::gui::message::CursorIcon::Grab => crate::window::CursorIcon::Grab,\n\n crate::gui::message::CursorIcon::Grabbing => crate::window::CursorIcon::Grabbing,\n", "file_path": "src/utils/mod.rs", "rank": 45, "score": 142879.99239659848 }, { "content": "fn is_binary<P: AsRef<Path>>(path: P) -> Result<bool, FbxError> {\n\n let mut file = File::open(path)?;\n\n let mut magic = [0; 18];\n\n file.read_exact(&mut magic)?;\n\n let fbx_magic = b\"Kaydara FBX Binary\";\n\n Ok(magic == *fbx_magic)\n\n}\n\n\n\nimpl FbxDocument {\n\n pub fn new<P: AsRef<Path>>(path: P) -> Result<FbxDocument, FbxError> {\n\n let is_bin = is_binary(path.as_ref())?;\n\n\n\n let mut reader = BufReader::new(File::open(path)?);\n\n\n\n if is_bin {\n\n binary::read_binary(&mut reader)\n\n } else {\n\n ascii::read_ascii(&mut reader)\n\n }\n\n }\n\n\n\n pub fn root(&self) -> Handle<FbxNode> {\n\n self.root\n\n }\n\n\n\n pub fn nodes(&self) -> &FbxNodeContainer {\n\n &self.nodes\n\n }\n\n}\n", "file_path": "src/resource/fbx/document/mod.rs", "rank": 46, "score": 142522.85057731115 }, { "content": "pub fn 
wrap_angle(angle: f32) -> f32 {\n\n let two_pi = 2.0 * std::f32::consts::PI;\n\n\n\n if angle > 0.0 {\n\n angle % two_pi\n\n } else {\n\n (angle + two_pi) % two_pi\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 47, "score": 142511.31361246438 }, { "content": "fn read_attribute<R>(type_code: u8, file: &mut R) -> Result<FbxAttribute, FbxError>\n\nwhere\n\n R: Read,\n\n{\n\n match type_code {\n\n b'f' | b'F' => Ok(FbxAttribute::Float(file.read_f32::<LittleEndian>()?)),\n\n b'd' | b'D' => Ok(FbxAttribute::Double(file.read_f64::<LittleEndian>()?)),\n\n b'l' | b'L' => Ok(FbxAttribute::Long(file.read_i64::<LittleEndian>()?)),\n\n b'i' | b'I' => Ok(FbxAttribute::Integer(file.read_i32::<LittleEndian>()?)),\n\n b'Y' => Ok(FbxAttribute::Integer(i32::from(\n\n file.read_i16::<LittleEndian>()?,\n\n ))),\n\n b'b' | b'C' => Ok(FbxAttribute::Bool(file.read_u8()? != 0)),\n\n _ => Err(FbxError::UnknownAttributeType(type_code)),\n\n }\n\n}\n\n\n", "file_path": "src/resource/fbx/document/binary.rs", "rank": 48, "score": 141660.45514132158 }, { "content": "fn create_ui(ctx: &mut BuildContext) -> Handle<UiNode> {\n\n TextBuilder::new(WidgetBuilder::new()).build(ctx)\n\n}\n\n\n", "file_path": "examples/scene.rs", "rank": 49, "score": 139491.11006353947 }, { "content": "pub fn get_closest_point_triangles<P: PositionProvider>(\n\n points: &[P],\n\n triangles: &[TriangleDefinition],\n\n triangle_indices: &[u32],\n\n point: Vector3<f32>,\n\n) -> Option<usize> {\n\n let mut closest_sqr_distance = std::f32::MAX;\n\n let mut closest_index = None;\n\n for triangle_index in triangle_indices {\n\n let triangle = triangles.get(*triangle_index as usize).unwrap();\n\n for point_index in triangle.0.iter() {\n\n let vertex = points.get(*point_index as usize).unwrap();\n\n let sqr_distance = (vertex.position() - point).norm_squared();\n\n if sqr_distance < closest_sqr_distance {\n\n closest_sqr_distance = sqr_distance;\n\n closest_index = Some(*point_index as usize);\n\n }\n\n 
}\n\n }\n\n closest_index\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 50, "score": 139463.90480895963 }, { "content": "struct GameScene {\n\n scene: Scene,\n\n pivot: Handle<Node>,\n\n}\n\n\n\nasync fn create_scene(resource_manager: ResourceManager) -> GameScene {\n\n let mut scene = Scene::new();\n\n\n\n // Camera is our eyes in the world - you won't see anything without it.\n\n let pivot = BaseBuilder::new()\n\n .with_children(&[create_camera(\n\n resource_manager.clone(),\n\n Vector3::new(0.0, 4.0, -8.0),\n\n &mut scene.graph,\n\n )\n\n .await])\n\n .build(&mut scene.graph);\n\n\n\n // There is no difference between scene created in rusty-editor and any other\n\n // model file, so any scene can be used directly as resource.\n\n resource_manager\n\n .request_model(\"examples/data/test_scene.rgs\")\n\n .await\n\n .unwrap()\n\n .instantiate(&mut scene)\n\n .root;\n\n\n\n GameScene { scene, pivot }\n\n}\n\n\n", "file_path": "examples/scene.rs", "rank": 51, "score": 138412.51573991135 }, { "content": "fn read_array<R>(type_code: u8, file: &mut R) -> Result<Vec<FbxAttribute>, FbxError>\n\nwhere\n\n R: Read,\n\n{\n\n let length = file.read_u32::<LittleEndian>()? as usize;\n\n let encoding = file.read_u32::<LittleEndian>()?;\n\n let compressed_length = file.read_u32::<LittleEndian>()? 
as usize;\n\n let mut array = Vec::new();\n\n\n\n if encoding == 0 {\n\n for _ in 0..length {\n\n array.push(read_attribute(type_code, file)?);\n\n }\n\n } else {\n\n let mut compressed = Vec::with_capacity(compressed_length);\n\n unsafe { compressed.set_len(compressed_length) };\n\n file.read_exact(compressed.as_mut_slice())?;\n\n let decompressed = inflate::inflate_bytes_zlib(&compressed)?;\n\n let mut cursor = Cursor::new(decompressed);\n\n for _ in 0..length {\n\n array.push(read_attribute(type_code, &mut cursor)?);\n\n }\n\n }\n\n\n\n Ok(array)\n\n}\n\n\n", "file_path": "src/resource/fbx/document/binary.rs", "rank": 52, "score": 137551.04373287823 }, { "content": "type ParameterContainer = HashMap<String, Parameter>;\n\n\n", "file_path": "src/animation/machine/mod.rs", "rank": 53, "score": 136482.59962735476 }, { "content": "pub fn get_closest_point_triangle_set<P: PositionProvider>(\n\n points: &[P],\n\n triangles: &[TriangleDefinition],\n\n point: Vector3<f32>,\n\n) -> Option<usize> {\n\n let mut closest_sqr_distance = std::f32::MAX;\n\n let mut closest_index = None;\n\n for triangle in triangles {\n\n for point_index in triangle.0.iter() {\n\n let vertex = points.get(*point_index as usize).unwrap();\n\n let sqr_distance = (vertex.position() - point).norm_squared();\n\n if sqr_distance < closest_sqr_distance {\n\n closest_sqr_distance = sqr_distance;\n\n closest_index = Some(*point_index as usize);\n\n }\n\n }\n\n }\n\n closest_index\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 54, "score": 136364.71019722943 }, { "content": "pub fn lerpf(a: f32, b: f32, t: f32) -> f32 {\n\n a + (b - a) * t\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 55, "score": 134083.6564183018 }, { "content": "#[allow(clippy::useless_let_if_seq)]\n\npub fn classify_plane(normal: Vector3<f32>) -> PlaneClass {\n\n let mut longest = 0.0f32;\n\n let mut class = PlaneClass::XY;\n\n\n\n if normal.x.abs() > longest {\n\n longest = normal.x.abs();\n\n class = 
PlaneClass::YZ;\n\n }\n\n\n\n if normal.y.abs() > longest {\n\n longest = normal.y.abs();\n\n class = PlaneClass::XZ;\n\n }\n\n\n\n if normal.z.abs() > longest {\n\n class = PlaneClass::XY;\n\n }\n\n\n\n class\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 56, "score": 132908.57788313748 }, { "content": "pub fn barycentric_is_inside(bary: (f32, f32, f32)) -> bool {\n\n (bary.0 >= 0.0) && (bary.1 >= 0.0) && (bary.0 + bary.1 < 1.0)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 57, "score": 130652.2316457876 }, { "content": "/// Generates a set of UV meshes.\n\npub fn generate_uv_meshes(\n\n uv_box: &UvBox,\n\n data: &mut SurfaceSharedData,\n\n) -> (Vec<UvMesh>, SurfaceDataPatch) {\n\n let mut mesh_patch = SurfaceDataPatch {\n\n data_id: data.id(),\n\n ..Default::default()\n\n };\n\n\n\n // Step 1. Split vertices at boundary between each face. This step multiplies the\n\n // number of vertices at boundary so we'll get separate texture coordinates at\n\n // seams.\n\n make_seam(\n\n data,\n\n &uv_box.px,\n\n &[&uv_box.nx, &uv_box.py, &uv_box.ny, &uv_box.pz, &uv_box.nz],\n\n &mut mesh_patch,\n\n );\n\n make_seam(\n\n data,\n", "file_path": "src/utils/uvgen.rs", "rank": 58, "score": 130183.82137204715 }, { "content": "pub fn create_ui(ui: &mut BuildContext, screen_size: Vector2<f32>) -> Interface {\n\n let debug_text;\n\n let progress_bar;\n\n let progress_text;\n\n let root = GridBuilder::new(\n\n WidgetBuilder::new()\n\n .with_width(screen_size.x)\n\n .with_height(screen_size.y)\n\n .with_child({\n\n debug_text = TextBuilder::new(WidgetBuilder::new().on_row(0).on_column(0))\n\n .with_wrap(true)\n\n .build(ui);\n\n debug_text\n\n })\n\n .with_child({\n\n progress_bar =\n\n ProgressBarBuilder::new(WidgetBuilder::new().on_row(1).on_column(1)).build(ui);\n\n progress_bar\n\n })\n\n .with_child({\n", "file_path": "examples/shared/mod.rs", "rank": 59, "score": 129532.71771578789 }, { "content": "fn main() {\n\n let event_loop = 
EventLoop::new();\n\n\n\n let window_builder = rg3d::window::WindowBuilder::new()\n\n .with_title(\"Example - Scene\")\n\n .with_resizable(true);\n\n\n\n let mut engine = GameEngine::new(window_builder, &event_loop, true).unwrap();\n\n\n\n // Prepare resource manager - it must be notified where to search textures. When engine\n\n // loads model resource it automatically tries to load textures it uses. But since most\n\n // model formats store absolute paths, we can't use them as direct path to load texture\n\n // instead we telling engine to search textures in given folder.\n\n engine\n\n .resource_manager\n\n .state()\n\n .set_textures_path(\"examples/data\");\n\n\n\n // Create simple user interface that will show some useful info.\n\n let debug_text = create_ui(&mut engine.user_interface.build_ctx());\n", "file_path": "examples/scene.rs", "rank": 60, "score": 129373.27924630529 }, { "content": "/// Generates UV map for given surface data.\n\n///\n\n/// # Performance\n\n///\n\n/// This method utilizes lots of \"brute force\" algorithms, so it is not fast as it\n\n/// could be in ideal case. It also allocates some memory for internal needs.\n\npub fn generate_uvs(data: &mut SurfaceSharedData, spacing: f32) -> SurfaceDataPatch {\n\n let uv_box = generate_uv_box(data);\n\n\n\n let (mut meshes, mut patch) = generate_uv_meshes(&uv_box, data);\n\n\n\n // Step 4. 
Arrange and scale all meshes on uv map so it fits into [0;1] range.\n\n let area = meshes.iter().fold(0.0, |area, mesh| area + mesh.area());\n\n let square_side = area.sqrt() + spacing * meshes.len() as f32;\n\n\n\n meshes.sort_unstable_by(|a, b| b.area().partial_cmp(&a.area()).unwrap());\n\n\n\n let mut rects = Vec::new();\n\n\n\n let twice_spacing = spacing * 2.0;\n\n\n\n // Some empiric coefficient that large enough to make size big enough for all meshes.\n\n // This should be large enough to fit all meshes, but small to prevent losing of space.\n\n // We'll use iterative approach to pack everything as tight as possible: at each iteration\n\n // scale will be increased until packer is able to pack everything.\n\n let mut empiric_scale = 1.1;\n", "file_path": "src/utils/uvgen.rs", "rank": 61, "score": 129020.80657382209 }, { "content": "pub fn clampf(v: f32, min: f32, max: f32) -> f32 {\n\n if v < min {\n\n min\n\n } else if v > max {\n\n max\n\n } else {\n\n v\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 62, "score": 127320.91347633644 }, { "content": "struct GameScene {\n\n scene: Scene,\n\n camera: Handle<Node>,\n\n animations: Vec<Handle<Animation>>,\n\n}\n\n\n\nasync fn create_scene(resource_manager: ResourceManager) -> GameScene {\n\n let mut scene = Scene::new();\n\n\n\n // Camera is our eyes in the world - you won't see anything without it.\n\n let camera = create_camera(\n\n resource_manager.clone(),\n\n Vector3::new(0.0, 32.0, -140.0),\n\n &mut scene.graph,\n\n )\n\n .await;\n\n\n\n // Load model and animation resource in parallel. Is does *not* adds anything to\n\n // our scene - it just loads a resource then can be used later on to instantiate\n\n // models from it on scene. 
Why loading of resource is separated from instantiation?\n", "file_path": "examples/instancing.rs", "rank": 63, "score": 125689.48342832507 }, { "content": "struct GameScene {\n\n scene: Scene,\n\n camera: Handle<Node>,\n\n model_handle: Handle<Node>,\n\n}\n\n\n\nasync fn create_scene(resource_manager: ResourceManager) -> GameScene {\n\n let mut scene = Scene::new();\n\n\n\n // Camera is our eyes in the world - you won't see anything without it.\n\n let camera = create_camera(\n\n resource_manager.clone(),\n\n Vector3::new(0.0, 1.5, -5.0),\n\n &mut scene.graph,\n\n )\n\n .await;\n\n\n\n // Set small z far for the sake of example.\n\n scene.graph[camera].as_camera_mut().set_z_far(32.0);\n\n\n", "file_path": "examples/lod.rs", "rank": 64, "score": 125689.48342832507 }, { "content": "struct GameScene {\n\n scene: Scene,\n\n model_handle: Handle<Node>,\n\n walk_animation: Handle<Animation>,\n\n}\n\n\n\nasync fn create_scene(resource_manager: ResourceManager) -> GameScene {\n\n let mut scene = Scene::new();\n\n\n\n // Camera is our eyes in the world - you won't see anything without it.\n\n create_camera(\n\n resource_manager.clone(),\n\n Vector3::new(0.0, 6.0, -12.0),\n\n &mut scene.graph,\n\n )\n\n .await;\n\n\n\n // Load model resource. Is does *not* adds anything to our scene - it just loads a\n\n // resource then can be used later on to instantiate models from it on scene. Why\n\n // loading of resource is separated from instantiation? 
Because there it is too\n", "file_path": "examples/ui.rs", "rank": 65, "score": 125689.48342832507 }, { "content": "struct GameScene {\n\n scene: Scene,\n\n model_handle: Handle<Node>,\n\n walk_animation: Handle<Animation>,\n\n}\n\n\n", "file_path": "examples/async.rs", "rank": 66, "score": 125689.48342832507 }, { "content": "struct GameScene {\n\n scene: Scene,\n\n agent: Handle<Node>,\n\n cursor: Handle<Node>,\n\n camera: Handle<Node>,\n\n}\n\n\n\nasync fn create_scene(resource_manager: ResourceManager) -> GameScene {\n\n let mut scene = Scene::new();\n\n\n\n // Camera is our eyes in the world - you won't see anything without it.\n\n let camera = create_camera(\n\n resource_manager.clone(),\n\n Vector3::new(4.0, 8.0, 0.0),\n\n &mut scene.graph,\n\n )\n\n .await;\n\n\n\n scene.graph[camera]\n\n .local_transform_mut()\n", "file_path": "examples/navmesh.rs", "rank": 67, "score": 125689.48342832507 }, { "content": "struct InputController {\n\n rotate_left: bool,\n\n rotate_right: bool,\n\n}\n\n\n", "file_path": "examples/scene.rs", "rank": 68, "score": 125689.48342832507 }, { "content": "struct GameScene {\n\n scene: Scene,\n\n root: Handle<Node>,\n\n}\n\n\n", "file_path": "examples/lightmap.rs", "rank": 69, "score": 125689.48342832507 }, { "content": "struct GameScene {\n\n scene: Scene,\n\n model_handle: Handle<Node>,\n\n walk_animation: Handle<Animation>,\n\n}\n\n\n\nasync fn create_scene(resource_manager: ResourceManager) -> GameScene {\n\n let mut scene = Scene::new();\n\n\n\n // Camera is our eyes in the world - you won't see anything without it.\n\n create_camera(\n\n resource_manager.clone(),\n\n Vector3::new(0.0, 6.0, -12.0),\n\n &mut scene.graph,\n\n )\n\n .await;\n\n\n\n // Load model and animation resource in parallel. Is does *not* adds anything to\n\n // our scene - it just loads a resource then can be used later on to instantiate\n\n // models from it on scene. 
Why loading of resource is separated from instantiation?\n", "file_path": "examples/simple.rs", "rank": 70, "score": 125689.48342832507 }, { "content": "pub fn solve_quadratic(a: f32, b: f32, c: f32) -> Option<[f32; 2]> {\n\n let discriminant = b * b - 4.0 * a * c;\n\n if discriminant < 0.0 {\n\n // No real roots\n\n None\n\n } else {\n\n // Dont care if quadratic equation has only one root (discriminant == 0), this is edge-case\n\n // which requires additional branching instructions which is not good for branch-predictor in CPU.\n\n let _2a = 2.0 * a;\n\n let discr_root = discriminant.sqrt();\n\n let r1 = (-b + discr_root) / _2a;\n\n let r2 = (-b - discr_root) / _2a;\n\n Some([r1, r2])\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 71, "score": 124973.0363671525 }, { "content": "struct PointShadowMapShader {\n\n program: GpuProgram,\n\n world_matrix: UniformLocation,\n\n bone_matrices: UniformLocation,\n\n world_view_projection_matrix: UniformLocation,\n\n use_skeletal_animation: UniformLocation,\n\n diffuse_texture: UniformLocation,\n\n light_position: UniformLocation,\n\n}\n\n\n\nimpl PointShadowMapShader {\n\n pub fn new() -> Result<Self, RendererError> {\n\n let fragment_source = include_str!(\"shaders/point_shadow_map_fs.glsl\");\n\n let vertex_source = include_str!(\"shaders/point_shadow_map_vs.glsl\");\n\n let program =\n\n GpuProgram::from_source(\"PointShadowMapShader\", vertex_source, fragment_source)?;\n\n Ok(Self {\n\n world_matrix: program.uniform_location(\"worldMatrix\")?,\n\n bone_matrices: program.uniform_location(\"boneMatrices\")?,\n\n world_view_projection_matrix: program.uniform_location(\"worldViewProjection\")?,\n", "file_path": "src/renderer/shadow_map_renderer.rs", "rank": 72, "score": 124325.96389200166 }, { "content": "struct SpotShadowMapShader {\n\n program: GpuProgram,\n\n bone_matrices: UniformLocation,\n\n world_view_projection_matrix: UniformLocation,\n\n use_skeletal_animation: UniformLocation,\n\n 
diffuse_texture: UniformLocation,\n\n}\n\n\n\nimpl SpotShadowMapShader {\n\n pub fn new() -> Result<Self, RendererError> {\n\n let fragment_source = include_str!(\"shaders/spot_shadow_map_fs.glsl\");\n\n let vertex_source = include_str!(\"shaders/spot_shadow_map_vs.glsl\");\n\n let program =\n\n GpuProgram::from_source(\"SpotShadowMapShader\", vertex_source, fragment_source)?;\n\n Ok(Self {\n\n bone_matrices: program.uniform_location(\"boneMatrices\")?,\n\n world_view_projection_matrix: program.uniform_location(\"worldViewProjection\")?,\n\n use_skeletal_animation: program.uniform_location(\"useSkeletalAnimation\")?,\n\n diffuse_texture: program.uniform_location(\"diffuseTexture\")?,\n\n\n", "file_path": "src/renderer/shadow_map_renderer.rs", "rank": 73, "score": 124325.96389200166 }, { "content": "#[repr(C)]\n\nstruct Vertex {\n\n position: Vector3<f32>,\n\n color: u32,\n\n}\n\n\n\n/// See module docs.\n\npub struct DebugRenderer {\n\n geometry: GeometryBuffer,\n\n vertices: Vec<Vertex>,\n\n line_indices: Vec<[u32; 2]>,\n\n shader: DebugShader,\n\n}\n\n\n\npub(in crate) struct DebugShader {\n\n program: GpuProgram,\n\n wvp_matrix: UniformLocation,\n\n}\n\n\n\nimpl DebugShader {\n\n fn new() -> Result<Self, RendererError> {\n", "file_path": "src/renderer/debug_renderer.rs", "rank": 74, "score": 122651.73835981067 }, { "content": "struct Profiler {\n\n start_time: std::time::Instant,\n\n samples: HashMap<ScopeMark, Sample>,\n\n scope_stack: Vec<ScopeMark>,\n\n}\n\n\n\nconst ENTRY_SCOPE_MARK: ScopeMark = ScopeMark {\n\n parent_scope_hash: 0,\n\n function_name: \"EntryPoint\",\n\n line: 0,\n\n};\n\n\n\nimpl Default for Profiler {\n\n #[inline]\n\n fn default() -> Self {\n\n let entry_sample = Sample {\n\n count: 0,\n\n time: 0.0,\n\n children: HashSet::new(),\n\n };\n\n let mut samples = HashMap::new();\n\n samples.insert(ENTRY_SCOPE_MARK, entry_sample);\n\n Self {\n\n start_time: std::time::Instant::now(),\n\n samples,\n\n scope_stack: vec![ENTRY_SCOPE_MARK],\n\n 
}\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/profiler.rs", "rank": 75, "score": 122298.60648699576 }, { "content": "struct Sample {\n\n count: u64,\n\n time: f64,\n\n children: HashSet<ScopeMark>,\n\n}\n\n\n\nimpl Sample {\n\n pub fn collect(&mut self, time: f64) {\n\n self.time += time;\n\n self.count += 1;\n\n }\n\n}\n\n\n\nimpl Default for Sample {\n\n fn default() -> Self {\n\n Self {\n\n count: 0,\n\n time: 0.0,\n\n children: Default::default(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "rg3d-core/src/profiler.rs", "rank": 76, "score": 122298.60648699576 }, { "content": "///\n\n/// Converts FBX DOM to native engine representation.\n\n///\n\nfn convert(\n\n fbx_scene: &FbxScene,\n\n resource_manager: ResourceManager,\n\n scene: &mut Scene,\n\n) -> Result<(), FbxError> {\n\n let root = scene.graph.get_root();\n\n let animation_handle = scene.animations.add(Animation::default());\n\n let mut fbx_model_to_node_map = HashMap::new();\n\n for (component_handle, component) in fbx_scene.pair_iter() {\n\n if let FbxComponent::Model(model) = component {\n\n let node = convert_model(\n\n fbx_scene,\n\n model,\n\n resource_manager.clone(),\n\n &mut scene.graph,\n\n &mut scene.animations,\n\n animation_handle,\n\n )?;\n\n scene.graph.link_nodes(node, root);\n\n fbx_model_to_node_map.insert(component_handle, node);\n", "file_path": "src/resource/fbx/mod.rs", "rank": 77, "score": 122075.03623406096 }, { "content": "struct SceneLoadContext {\n\n data: Option<GameScene>,\n\n progress_indicator: ProgressIndicator,\n\n cancellation_token: CancellationToken,\n\n start_time: Instant,\n\n generate_lightmap: bool,\n\n}\n\n\n", "file_path": "examples/lightmap.rs", "rank": 78, "score": 121989.2867634688 }, { "content": "struct SceneLoadContext {\n\n data: Option<GameScene>,\n\n message: String,\n\n progress: f32,\n\n}\n\n\n\nimpl SceneLoadContext {\n\n pub fn report_progress(&mut self, progress: f32, message: &str) {\n\n self.progress = progress;\n\n self.message = 
message.to_owned();\n\n println!(\"Loading progress: {}% - {}\", progress * 100.0, message);\n\n }\n\n}\n\n\n", "file_path": "examples/async.rs", "rank": 79, "score": 121989.2867634688 }, { "content": "struct PointShadowCubeMapFace {\n\n face: CubeMapFace,\n\n look: Vector3<f32>,\n\n up: Vector3<f32>,\n\n}\n\n\n\npub(in crate) struct PointShadowMapRenderContext<'a, 'c> {\n\n pub state: &'a mut PipelineState,\n\n pub graph: &'c Graph,\n\n pub light_pos: Vector3<f32>,\n\n pub light_radius: f32,\n\n pub geom_cache: &'a mut GeometryCache,\n\n pub cascade: usize,\n\n pub batch_storage: &'a BatchStorage,\n\n}\n\n\n\nimpl PointShadowMapRenderer {\n\n pub fn new(\n\n state: &mut PipelineState,\n\n size: usize,\n", "file_path": "src/renderer/shadow_map_renderer.rs", "rank": 80, "score": 121819.17755503842 }, { "content": "fn create_scene_async(\n\n resource_manager: ResourceManager,\n\n generate_lightmap: bool,\n\n) -> Arc<Mutex<SceneLoadContext>> {\n\n let progress_indicator = ProgressIndicator::new();\n\n let cancellation_token = CancellationToken::new();\n\n\n\n // Create load context - it will be shared with caller and loader threads.\n\n let context = Arc::new(Mutex::new(SceneLoadContext {\n\n data: None,\n\n progress_indicator: progress_indicator.clone(),\n\n cancellation_token: cancellation_token.clone(),\n\n start_time: Instant::now(),\n\n generate_lightmap,\n\n }));\n\n let result = context.clone();\n\n\n\n // Spawn separate thread which will create scene by loading various assets.\n\n std::thread::spawn(move || {\n\n futures::executor::block_on(async move {\n", "file_path": "examples/lightmap.rs", "rank": 81, "score": 121686.96947116226 }, { "content": "fn read_string<R>(file: &mut R) -> Result<FbxAttribute, FbxError>\n\nwhere\n\n R: Read,\n\n{\n\n let length = file.read_u32::<LittleEndian>()? 
as usize;\n\n let mut raw_string = Vec::with_capacity(length);\n\n unsafe {\n\n raw_string.set_len(length);\n\n };\n\n file.read_exact(raw_string.as_mut_slice())?;\n\n // Find null terminator. It is required because for some reason some strings\n\n // have additional data after null terminator like this: Omni004\\x0\\x1Model, but\n\n // length still more than position of null terminator.\n\n if let Some(null_terminator_pos) = raw_string.iter().position(|c| *c == 0) {\n\n raw_string.truncate(null_terminator_pos);\n\n }\n\n let string = String::from_utf8(raw_string)?;\n\n Ok(FbxAttribute::String(string))\n\n}\n\n\n\nconst VERSION_7500: i32 = 7500;\n\nconst VERSION_7500_NULLREC_SIZE: usize = 25; // in bytes\n\nconst NORMAL_NULLREC_SIZE: usize = 13; // in bytes\n\n\n", "file_path": "src/resource/fbx/document/binary.rs", "rank": 82, "score": 120143.8246440484 }, { "content": "pub fn is_point_inside_triangle(p: &Vector3<f32>, vertices: &[Vector3<f32>; 3]) -> bool {\n\n let ba = vertices[1] - vertices[0];\n\n let ca = vertices[2] - vertices[0];\n\n let vp = *p - vertices[0];\n\n\n\n let ba_dot_ba = ba.dot(&ba);\n\n let ca_dot_ba = ca.dot(&ba);\n\n let ca_dot_ca = ca.dot(&ca);\n\n\n\n let dot02 = ca.dot(&vp);\n\n let dot12 = ba.dot(&vp);\n\n\n\n let inv_denom = 1.0 / (ca_dot_ca * ba_dot_ba - ca_dot_ba.powi(2));\n\n\n\n // Calculate barycentric coordinates\n\n let u = (ba_dot_ba * dot02 - ca_dot_ba * dot12) * inv_denom;\n\n let v = (ca_dot_ca * dot12 - ca_dot_ba * dot02) * inv_denom;\n\n\n\n (u >= 0.0) && (v >= 0.0) && (u + v < 1.0)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 83, "score": 120022.79513181392 }, { "content": "#[derive(Copy, Clone, PartialEq, Eq, Debug)]\n\nenum PathVertexState {\n\n NonVisited,\n\n Open,\n\n Closed,\n\n}\n\n\n\n/// Graph vertex that contains position in world and list of indices of neighbour\n\n/// vertices.\n\n#[derive(Clone, Debug)]\n\npub struct PathVertex {\n\n /// Position in world.\n\n pub position: Vector3<f32>,\n\n 
state: PathVertexState,\n\n g_score: f32,\n\n f_score: f32,\n\n parent: Option<usize>,\n\n neighbours: Vec<u32>,\n\n}\n\n\n\nimpl Default for PathVertex {\n", "file_path": "src/utils/astar.rs", "rank": 84, "score": 119370.53821829092 }, { "content": "#[derive(Default, Clone)]\n\nstruct SurfaceData {\n\n builder: RawMeshBuilder<Vertex>,\n\n skin_data: Vec<VertexWeightSet>,\n\n}\n\n\n", "file_path": "src/resource/fbx/mod.rs", "rank": 85, "score": 118934.30059545938 }, { "content": "struct UnpackedVertex {\n\n // Index of surface this vertex belongs to.\n\n surface: usize,\n\n position: Vector3<f32>,\n\n normal: Vector3<f32>,\n\n tangent: Vector3<f32>,\n\n uv: Vector2<f32>,\n\n // Set of weights for skinning.\n\n weights: Option<VertexWeightSet>,\n\n}\n\n\n\nimpl Into<Vertex> for UnpackedVertex {\n\n fn into(self) -> Vertex {\n\n Vertex {\n\n position: self.position,\n\n tex_coord: self.uv,\n\n // TODO: FBX can contain second texture coordinates so they should be\n\n // extracted when available\n\n second_tex_coord: Default::default(),\n\n normal: self.normal,\n\n tangent: Vector4::new(self.tangent.x, self.tangent.y, self.tangent.z, 1.0),\n\n // Correct values will be assigned in second pass of conversion\n\n // when all nodes will be converted.\n\n bone_weights: Default::default(),\n\n bone_indices: Default::default(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/resource/fbx/mod.rs", "rank": 86, "score": 118928.04968136697 }, { "content": "#[derive(Hash, PartialEq, Eq, Copy, Clone, Debug)]\n\nstruct ScopeMark {\n\n parent_scope_hash: u64,\n\n function_name: &'static str,\n\n line: u32,\n\n}\n\n\n", "file_path": "rg3d-core/src/profiler.rs", "rank": 87, "score": 118877.21099909807 }, { "content": "#[derive(Debug)]\n\nstruct Vertex {\n\n position: Vector2<f32>,\n\n prev: usize,\n\n index: usize,\n\n next: usize,\n\n}\n\n\n", "file_path": "rg3d-core/src/math/triangulator.rs", "rank": 88, "score": 118858.4576090916 }, { "content": "///\n\n/// Linked list of 
vertices\n\n///\n\nstruct Polygon {\n\n vertices: Vec<Vertex>,\n\n head: usize,\n\n tail: usize,\n\n}\n\n\n\nimpl Polygon {\n\n ///\n\n /// Excludes vertex from polygon. Does not remove it from vertices array!\n\n ///\n\n fn remove_vertex(&mut self, index: usize) {\n\n let next_index = self.vertices[index].next;\n\n let prev_index = self.vertices[index].prev;\n\n\n\n let prev = &mut self.vertices[prev_index];\n\n prev.next = next_index;\n\n\n\n let next = &mut self.vertices[next_index];\n\n next.prev = prev_index;\n\n\n", "file_path": "rg3d-core/src/math/triangulator.rs", "rank": 89, "score": 118852.12011870214 }, { "content": "fn convert_vertex(\n\n geom: &FbxGeometry,\n\n geometric_transform: &Matrix4<f32>,\n\n material_index: usize,\n\n index: usize,\n\n index_in_polygon: usize,\n\n skin_data: &[VertexWeightSet],\n\n) -> Result<UnpackedVertex, FbxError> {\n\n let position = *geom.vertices.get(index).ok_or(FbxError::IndexOutOfBounds)?;\n\n\n\n let normal = match geom.normals.as_ref() {\n\n Some(normals) => *normals.get(index, index_in_polygon)?,\n\n None => Vector3::y(),\n\n };\n\n\n\n let tangent = match geom.tangents.as_ref() {\n\n Some(tangents) => *tangents.get(index, index_in_polygon)?,\n\n None => Vector3::y(),\n\n };\n\n\n", "file_path": "src/resource/fbx/mod.rs", "rank": 90, "score": 118630.50749568336 }, { "content": "fn create_surfaces(\n\n fbx_scene: &FbxScene,\n\n data_set: Vec<SurfaceData>,\n\n resource_manager: ResourceManager,\n\n model: &FbxModel,\n\n) -> Result<Vec<Surface>, FbxError> {\n\n let mut surfaces = Vec::new();\n\n\n\n // Create surfaces per material\n\n if model.materials.is_empty() {\n\n assert_eq!(data_set.len(), 1);\n\n let data = data_set.into_iter().next().unwrap();\n\n let mut surface = Surface::new(Arc::new(RwLock::new(SurfaceSharedData::from_raw_mesh(\n\n data.builder.build(),\n\n false,\n\n ))));\n\n surface.vertex_weights = data.skin_data;\n\n surfaces.push(surface);\n\n } else {\n\n assert_eq!(data_set.len(), 
model.materials.len());\n", "file_path": "src/resource/fbx/mod.rs", "rank": 91, "score": 118630.50749568336 }, { "content": "fn convert_mesh(\n\n base: BaseBuilder,\n\n fbx_scene: &FbxScene,\n\n resource_manager: ResourceManager,\n\n model: &FbxModel,\n\n graph: &mut Graph,\n\n) -> Result<Handle<Node>, FbxError> {\n\n let geometric_transform = Matrix4::new_translation(&model.geometric_translation)\n\n * quat_from_euler(model.geometric_rotation).to_homogeneous()\n\n * Matrix4::new_nonuniform_scaling(&model.geometric_scale);\n\n\n\n let mut temp_vertices = Vec::new();\n\n let mut triangles = Vec::new();\n\n\n\n // Array for triangulation needs, it will contain triangle definitions for\n\n // triangulated polygon.\n\n let mut face_triangles = Vec::new();\n\n\n\n let mut mesh_surfaces = Vec::new();\n\n for &geom_handle in &model.geoms {\n", "file_path": "src/resource/fbx/mod.rs", "rank": 92, "score": 118630.50749568336 }, { "content": "fn build_recursive(\n\n nodes: &mut Pool<OctreeNode>,\n\n triangles: &[[Vector3<f32>; 3]],\n\n bounds: AxisAlignedBoundingBox,\n\n indices: Vec<u32>,\n\n split_threshold: usize,\n\n) -> Handle<OctreeNode> {\n\n if indices.len() <= split_threshold {\n\n nodes.spawn(OctreeNode::Leaf { bounds, indices })\n\n } else {\n\n let mut leaves = [Handle::NONE; 8];\n\n let leaf_bounds = split_bounds(bounds);\n\n\n\n for i in 0..8 {\n\n let mut leaf_indices = Vec::new();\n\n\n\n for index in indices.iter() {\n\n let index = *index;\n\n\n\n let triangle_bounds =\n", "file_path": "rg3d-core/src/octree.rs", "rank": 93, "score": 118554.57793301852 }, { "content": "pub fn triangle_area(a: Vector3<f32>, b: Vector3<f32>, c: Vector3<f32>) -> f32 {\n\n (b - a).cross(&(c - a)).norm() * 0.5\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 94, "score": 118010.51679357215 }, { "content": "fn save(game: &mut Game) {\n\n // To save a game state all we need to do is to create new instance of Visitor\n\n // and call visit on game instance.\n\n let mut 
visitor = Visitor::new();\n\n game.visit(\"Game\", &mut visitor).unwrap();\n\n // And call save method.\n\n visitor.save_binary(Path::new(SAVE_FILE)).unwrap();\n\n}\n\n\n", "file_path": "examples/save_load.rs", "rank": 95, "score": 117948.5195851202 }, { "content": "fn load(game: &mut Game) {\n\n if Path::new(SAVE_FILE).exists() {\n\n // Loading a game is even simpler - just 2 lines.\n\n let mut visitor = Visitor::load_binary(SAVE_FILE).unwrap();\n\n game.visit(\"Game\", &mut visitor).unwrap();\n\n }\n\n}\n\n\n", "file_path": "examples/save_load.rs", "rank": 96, "score": 117948.5195851202 }, { "content": "pub fn spherical_to_cartesian(azimuth: f32, elevation: f32, radius: f32) -> Vector3<f32> {\n\n let x = radius * elevation.sin() * azimuth.sin();\n\n let y = radius * elevation.cos();\n\n let z = -radius * elevation.sin() * azimuth.cos();\n\n Vector3::new(x, y, z)\n\n}\n\n\n", "file_path": "rg3d-core/src/math/mod.rs", "rank": 97, "score": 117245.84530244915 }, { "content": "#![warn(missing_docs)]\n\n\n\n//! Resource module contains all structures and method to manage resources.\n\n\n\nuse crate::core::visitor::{Visit, VisitResult, Visitor};\n\nuse std::borrow::Cow;\n\nuse std::ops::{Deref, DerefMut};\n\nuse std::{\n\n fmt::Debug,\n\n future::Future,\n\n path::{Path, PathBuf},\n\n pin::Pin,\n\n sync::{Arc, Mutex, MutexGuard},\n\n task::{Context, Poll, Waker},\n\n};\n\n\n\npub mod fbx;\n\npub mod model;\n\npub mod texture;\n\n\n\n/// A trait for resource data.\n", "file_path": "src/resource/mod.rs", "rank": 99, "score": 57.06252764227223 } ]
Rust
packages/sycamore-macro/src/component/mod.rs
alsuren/sycamore
df16d18ad29933316aa666185e7e2dd7002eb662
use proc_macro2::TokenStream; use quote::{quote, ToTokens}; use syn::parse::{Parse, ParseStream}; use syn::{ Attribute, Block, FnArg, GenericParam, Generics, Ident, Item, ItemFn, Result, ReturnType, Type, Visibility, }; pub struct ComponentFunctionName { pub component_name: Ident, pub generics: Generics, } impl Parse for ComponentFunctionName { fn parse(input: ParseStream) -> Result<Self> { if input.is_empty() { Err(input.error("expected an identifier for the component")) } else { let component_name: Ident = input.parse()?; let generics: Generics = input.parse()?; if let Some(lifetime) = generics.lifetimes().next() { return Err(syn::Error::new_spanned( lifetime, "unexpected lifetime param; put lifetime params on function instead", )); } if let Some(const_param) = generics.const_params().next() { return Err(syn::Error::new_spanned( const_param, "unexpected const generic param; put const generic params on function instead", )); } if generics.type_params().count() != 1 { return Err(syn::Error::new_spanned( generics, "expected a single type param", )); } if !generics .type_params() .next() .unwrap() .bounds .empty_or_trailing() { return Err(syn::Error::new_spanned( generics, "unexpected type bound in generic type", )); } Ok(Self { component_name, generics, }) } } } pub struct ComponentFunction { pub block: Box<Block>, pub props_type: Box<Type>, pub arg: FnArg, pub generics: Generics, pub vis: Visibility, pub attrs: Vec<Attribute>, pub name: Ident, pub return_type: Box<Type>, } impl Parse for ComponentFunction { fn parse(input: ParseStream) -> Result<Self> { let parsed: Item = input.parse()?; match parsed { Item::Fn(func) => { let ItemFn { attrs, vis, sig, block, } = func; if sig.asyncness.is_some() { return Err(syn::Error::new_spanned( sig.asyncness, "async functions can't be components", )); } if sig.constness.is_some() { return Err(syn::Error::new_spanned( sig.constness, "const functions can't be components", )); } if sig.abi.is_some() { return 
Err(syn::Error::new_spanned( sig.abi, "extern functions can't be components", )); } let return_type = match sig.output { ReturnType::Default => { return Err(syn::Error::new_spanned( sig, "function must return `sycamore::view::View`", )) } ReturnType::Type(_, ty) => ty, }; let mut inputs = sig.inputs.into_iter(); let arg: FnArg = inputs.next().unwrap_or_else(|| syn::parse_quote! { _: () }); let props_type = match &arg { FnArg::Typed(arg) => arg.ty.clone(), FnArg::Receiver(arg) => { return Err(syn::Error::new_spanned( arg, "function components can't accept a receiver", )) } }; if inputs.len() > 0 { let params: TokenStream = inputs.map(|it| it.to_token_stream()).collect(); return Err(syn::Error::new_spanned( params, "function should accept at most one parameter for the prop", )); } Ok(Self { block, props_type, arg, generics: sig.generics, vis, attrs, name: sig.ident, return_type, }) } item => Err(syn::Error::new_spanned( item, "`component` attribute can only be applied to functions", )), } } } pub fn component_impl( attr: ComponentFunctionName, component: ComponentFunction, ) -> Result<TokenStream> { let ComponentFunctionName { component_name, generics: generic_node_ty, } = attr; let component_name_str = component_name.to_string(); let generic_node_ty = generic_node_ty.type_params().next().unwrap(); let generic_node: GenericParam = syn::parse_quote! 
{ #generic_node_ty: ::sycamore::generic_node::Html }; let ComponentFunction { block, props_type: _, arg, mut generics, vis, attrs, name, return_type, } = component; let prop_ty = match &arg { FnArg::Receiver(_) => unreachable!(), FnArg::Typed(pat_ty) => &pat_ty.ty, }; let first_generic_param_index = generics .params .iter() .enumerate() .find(|(_, param)| matches!(param, GenericParam::Type(_) | GenericParam::Const(_))) .map(|(i, _)| i); if let Some(first_generic_param_index) = first_generic_param_index { generics .params .insert(first_generic_param_index, generic_node); } else { generics.params.push(generic_node); } let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let phantom_generics = ty_generics .clone() .into_token_stream() .into_iter() .collect::<Vec<_>>(); let phantom_generics_len = phantom_generics.len(); let phantom_generics = phantom_generics .into_iter() .take(phantom_generics_len.saturating_sub(1)) .skip(1) .collect::<TokenStream>(); if name == component_name { return Err(syn::Error::new_spanned( component_name, "the component must not have the same name as the function", )); } let quoted = quote! { #(#attrs)* #vis struct #component_name#generics { #[doc(hidden)] _marker: ::std::marker::PhantomData<(#phantom_generics)>, } impl#impl_generics ::sycamore::component::Component::<#generic_node_ty> for #component_name#ty_generics #where_clause { #[cfg(debug_assertions)] const NAME: &'static ::std::primitive::str = #component_name_str; type Props = #prop_ty; fn __create_component(#arg) -> #return_type{ #block } } }; Ok(quoted) }
use proc_macro2::TokenStream; use quote::{quote, ToTokens}; use syn::parse::{Parse, ParseStream}; use syn::{ Attribute, Block, FnArg, GenericParam, Generics, Ident, Item, ItemFn, Result, ReturnType, Type, Visibility, }; pub struct ComponentFunctionName { pub component_name: Ident, pub generics: Generics, } impl Parse for ComponentFunctionName { fn parse(input: ParseStream) -> Result<Self> { if input.is_empty() { Err(input.error("expected an identifier for the component")) } else { let component_name: Ident = input.parse()?; let generics: Generics = input.parse()?; if let Some(lifetime) = generics.lifetimes().next() { return Err(syn::Error::new_spanned( lifetime, "unexpected lifetime param; put lifetime params on function instead", )); } if let Some(const_param) = generics.const_params().next() { return Err(syn::Error::new_spanned( const_param, "unexpected const generic param; put const generic params on function instead", )); } if generics.type_params().count() != 1 { return Err(syn::Error::new_spanned( generics, "expected a single type param", )); } if !generics .type_params() .next() .unwrap() .bounds .empty_or_trailing() { return Err(syn::Error::new_spanned( generics, "unexpected type bound in generic type", )); } Ok(Self { component_name, generics, }) } } } pub struct ComponentFunction { pub block: Box<Block>, pub props_type: Box<Type>, pub arg: FnArg, pub generics: Generics, pub vis: Visibility, pub attrs: Vec<Attribute>, pub name: Ident, pub return_type: Box<Type>, } impl Parse for ComponentFunction { fn parse(input: ParseStream) -> Result<Self> { let parsed: Item = input.parse()?; match parsed { Item::Fn(func) => { let ItemFn { attrs, vis, sig, block, } = func; if sig.asyncness.is_some() { return Err(syn::Error::new_spanned( sig.asyncness, "async functions can't be components", )); } if sig.constness.is_some() { return Err(syn::Error::new_spanned( sig.constness, "const functions can't be components", )); } if sig.abi.is_some() { return 
Err(syn::Error::new_spanned( sig.abi, "extern functions can't be components", )); } let return_type = match sig.output { ReturnType::Default => { return Err(syn::Error::new_spanned( sig, "function must return `sycamore::view::View`", )) } ReturnType::Type(_, ty) => ty, }; let mut inputs = sig.inputs.into_iter(); let arg: FnArg = inputs.next().unwrap_or_else(|| syn::parse_quote! { _: () }); let props_type = match &arg { FnArg::Typed(arg) => arg.ty.clone(), FnArg::Receiver(arg) => { return Err(syn::Error::new_spanned( arg, "function components can't accept a receiver", )) } }; if inputs.len() > 0 { let params: TokenStream = inputs.map(|it| it.to_token_stream()).collect(); return Err(syn::Error::new_spanned( params, "function should accept at most one parameter for the prop", )); } Ok(Self { block, props_type, arg, generics: sig.generics, vis, attrs, name: sig.ident, return_type, }) } item => Err(syn::Error::new_spanned( item, "`component` attribute can only be applied to functions", )), } } }
pub fn component_impl( attr: ComponentFunctionName, component: ComponentFunction, ) -> Result<TokenStream> { let ComponentFunctionName { component_name, generics: generic_node_ty, } = attr; let component_name_str = component_name.to_string(); let generic_node_ty = generic_node_ty.type_params().next().unwrap(); let generic_node: GenericParam = syn::parse_quote! { #generic_node_ty: ::sycamore::generic_node::Html }; let ComponentFunction { block, props_type: _, arg, mut generics, vis, attrs, name, return_type, } = component; let prop_ty = match &arg { FnArg::Receiver(_) => unreachable!(), FnArg::Typed(pat_ty) => &pat_ty.ty, }; let first_generic_param_index = generics .params .iter() .enumerate() .find(|(_, param)| matches!(param, GenericParam::Type(_) | GenericParam::Const(_))) .map(|(i, _)| i); if let Some(first_generic_param_index) = first_generic_param_index { generics .params .insert(first_generic_param_index, generic_node); } else { generics.params.push(generic_node); } let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let phantom_generics = ty_generics .clone() .into_token_stream() .into_iter() .collect::<Vec<_>>(); let phantom_generics_len = phantom_generics.len(); let phantom_generics = phantom_generics .into_iter() .take(phantom_generics_len.saturating_sub(1)) .skip(1) .collect::<TokenStream>(); if name == component_name { return Err(syn::Error::new_spanned( component_name, "the component must not have the same name as the function", )); } let quoted = quote! { #(#attrs)* #vis struct #component_name#generics { #[doc(hidden)] _marker: ::std::marker::PhantomData<(#phantom_generics)>, } impl#impl_generics ::sycamore::component::Component::<#generic_node_ty> for #component_name#ty_generics #where_clause { #[cfg(debug_assertions)] const NAME: &'static ::std::primitive::str = #component_name_str; type Props = #prop_ty; fn __create_component(#arg) -> #return_type{ #block } } }; Ok(quoted) }
function_block-full_function
[ { "content": "pub fn route_impl(input: DeriveInput) -> syn::Result<TokenStream> {\n\n let mut quoted = TokenStream::new();\n\n let mut err_quoted = TokenStream::new();\n\n let mut has_error_handler = false;\n\n\n\n match &input.data {\n\n syn::Data::Enum(de) => {\n\n let ty_name = &input.ident;\n\n\n\n for variant in &de.variants {\n\n let variant_id = &variant.ident;\n\n\n\n let mut quote_capture_vars = TokenStream::new();\n\n let mut route_path_ast = None;\n\n\n\n let mut is_to_route = false;\n\n\n\n for attr in &variant.attrs {\n\n let attr_name = match attr.path.get_ident() {\n\n Some(ident) => ident.to_string(),\n", "file_path": "packages/sycamore-router-macro/src/route.rs", "rank": 0, "score": 358273.39184251614 }, { "content": "#[proc_macro_attribute]\n\npub fn component(attr: TokenStream, component: TokenStream) -> TokenStream {\n\n let attr = parse_macro_input!(attr as component::ComponentFunctionName);\n\n let component = parse_macro_input!(component as component::ComponentFunction);\n\n\n\n component::component_impl(attr, component)\n\n .unwrap_or_else(|err| err.to_compile_error())\n\n .into()\n\n}\n", "file_path": "packages/sycamore-macro/src/lib.rs", "rank": 1, "score": 254879.1784545763 }, { "content": "#[proc_macro]\n\npub fn node(input: TokenStream) -> TokenStream {\n\n let node = parse_macro_input!(input as view::Element);\n\n\n\n view::node_impl(node).into()\n\n}\n\n\n\n/// A macro for creating components from functions.\n\n///\n\n/// Add this attribute to a `fn` to create a component from that function.\n\n///\n\n/// To learn more about components, see the chapter on\n\n/// [components](https://sycamore-rs.netlify.app/docs/basics/components) in the Sycamore Book.\n", "file_path": "packages/sycamore-macro/src/lib.rs", "rank": 2, "score": 221224.05428181565 }, { "content": "#[proc_macro_derive(Route, attributes(to, not_found))]\n\npub fn route(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n\n\n 
route::route_impl(input)\n\n .unwrap_or_else(|err| err.to_compile_error())\n\n .into()\n\n}\n", "file_path": "packages/sycamore-router-macro/src/lib.rs", "rank": 3, "score": 219031.2446051857 }, { "content": "pub fn view_impl(component: HtmlRoot) -> TokenStream {\n\n component.to_token_stream()\n\n}\n\n\n", "file_path": "packages/sycamore-macro/src/view/mod.rs", "rank": 5, "score": 213034.47560696246 }, { "content": "/// Adds a callback function to the current reactive scope's cleanup.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use sycamore_reactive::*;\n\n///\n\n/// let cleanup_called = Signal::new(false);\n\n///\n\n/// let scope = create_root(cloned!((cleanup_called) => move || {\n\n/// on_cleanup(move || {\n\n/// cleanup_called.set(true);\n\n/// })\n\n/// }));\n\n///\n\n/// assert_eq!(*cleanup_called.get(), false);\n\n///\n\n/// drop(scope);\n\n/// assert_eq!(*cleanup_called.get(), true);\n\n/// ```\n\npub fn on_cleanup(f: impl FnOnce() + 'static) {\n\n SCOPES.with(|scope| {\n\n if scope.borrow().last().is_some() {\n\n scope\n\n .borrow_mut()\n\n .last_mut()\n\n .unwrap_throw()\n\n .add_cleanup(Box::new(f));\n\n } else {\n\n #[cfg(all(target_arch = \"wasm32\", debug_assertions))]\n\n web_sys::console::warn_1(\n\n &\"Cleanup callbacks created outside of a reactive root will never run.\".into(),\n\n );\n\n #[cfg(all(not(target_arch = \"wasm32\"), debug_assertions))]\n\n eprintln!(\n\n \"WARNING: Cleanup callbacks created outside of a reactive root will never run.\"\n\n );\n\n }\n\n });\n\n}\n\n\n", "file_path": "packages/sycamore-reactive/src/effect.rs", "rank": 6, "score": 193341.4777691328 }, { "content": "pub fn bench(c: &mut Criterion) {\n\n c.bench_function(\"reactivity_signals\", |b| {\n\n b.iter(|| {\n\n let state = Signal::new(black_box(0));\n\n\n\n for _i in 0..1000 {\n\n state.set(*state.get() + 1);\n\n }\n\n });\n\n });\n\n\n\n c.bench_function(\"reactivity_effects\", |b| {\n\n b.iter(|| {\n\n let state = Signal::new(black_box(0));\n\n 
create_effect(cloned!((state) => move || {\n\n let double = *state.get() * 2;\n\n black_box(double);\n\n }));\n\n\n\n for _i in 0..1000 {\n", "file_path": "packages/sycamore/benches/reactivity.rs", "rank": 7, "score": 192561.2268448305 }, { "content": "pub fn bench(c: &mut Criterion) {\n\n c.bench_function(\"ssr_small\", |b| {\n\n b.iter(|| {\n\n #[component(App<G>)]\n\n fn app() -> View<G> {\n\n view! {\n\n div(class=\"my-container\") {\n\n p { \"Hello World!\" }\n\n }\n\n }\n\n }\n\n\n\n let _ssr = sycamore::render_to_string(|| view! { App() });\n\n })\n\n });\n\n\n\n c.bench_function(\"ssr_medium\", |b| {\n\n b.iter(|| {\n\n #[component(ListItem<G>)]\n\n fn list_item(value: i32) -> View<G> {\n", "file_path": "packages/sycamore/benches/ssr.rs", "rank": 8, "score": 192561.2268448305 }, { "content": "#[proc_macro]\n\npub fn view(component: TokenStream) -> TokenStream {\n\n let component = parse_macro_input!(component as view::HtmlRoot);\n\n\n\n view::view_impl(component).into()\n\n}\n\n\n\n/// ```\n\n/// use sycamore::prelude::*;\n\n///\n\n/// #[component(MyComponent<G>)]\n\n/// pub fn my_component() -> View<G> {\n\n/// let cool_button: G = node! 
{ button { \"The coolest 😎\" } };\n\n///\n\n/// cool_button.set_property(\"myProperty\", &\"Epic!\".into());\n\n///\n\n/// View::new_node(cool_button)\n\n/// }\n\n/// ```\n", "file_path": "packages/sycamore-macro/src/lib.rs", "rank": 9, "score": 191615.89493000088 }, { "content": "#[component(Item<G>)]\n\npub fn item(todo: Signal<Todo>) -> View<G> {\n\n let app_state = use_context::<AppState>();\n\n\n\n let title = cloned!((todo) => move || todo.get().title.clone());\n\n let completed = create_selector(cloned!((todo) => move || todo.get().completed));\n\n let id = todo.get().id;\n\n\n\n let editing = Signal::new(false);\n\n let input_ref = NodeRef::<G>::new();\n\n let value = Signal::new(\"\".to_string());\n\n\n\n let handle_input = cloned!((value) => move |event: Event| {\n\n let target: HtmlInputElement = event.target().unwrap().unchecked_into();\n\n value.set(target.value());\n\n });\n\n\n\n let toggle_completed = cloned!((todo) => move |_| {\n\n todo.set(Todo {\n\n completed: !todo.get().completed,\n\n ..todo.get().as_ref().clone()\n", "file_path": "examples/todomvc/src/item.rs", "rank": 10, "score": 188369.50468892604 }, { "content": "/// Instantiate a component as a [`View`].\n\n///\n\n/// # Example\n\n/// ```\n\n/// use sycamore::prelude::*;\n\n/// # use sycamore::builder::html::*;\n\n/// #[component(MyComponent<G>)]\n\n/// fn my_component() -> View<G> {\n\n/// h1().text(\"I am a component\").build()\n\n/// }\n\n///\n\n/// // Elsewhere in another component.\n\n/// # fn view<G: Html>() -> View<G> {\n\n/// component::<_, MyComponent<_>>(())\n\n/// # }\n\n/// ```\n\npub fn component<G, C>(props: C::Props) -> View<G>\n\nwhere\n\n G: GenericNode + Html,\n\n C: Component<G>,\n\n{\n\n C::__create_component(props)\n\n}\n\n\n", "file_path": "packages/sycamore/src/builder/agnostic/mod.rs", "rank": 11, "score": 184006.48694134058 }, { "content": "pub fn ident_start(s: &str) -> IResult<&str, &str> {\n\n verify(take(1usize), |c: &str| {\n\n let c = 
c.chars().next().unwrap();\n\n c == '_' || unicode_xid::UnicodeXID::is_xid_start(c)\n\n })(s)\n\n}\n\n\n", "file_path": "packages/sycamore-router-macro/src/parser.rs", "rank": 12, "score": 182523.717879967 }, { "content": "pub fn ident_continue(s: &str) -> IResult<&str, &str> {\n\n verify(take(1usize), |c: &str| {\n\n unicode_xid::UnicodeXID::is_xid_continue(c.chars().next().unwrap())\n\n })(s)\n\n}\n\n\n", "file_path": "packages/sycamore-router-macro/src/parser.rs", "rank": 13, "score": 182523.717879967 }, { "content": "/// Run the passed closure inside an untracked dependency scope.\n\n///\n\n/// This does **NOT** create a new [`ReactiveScope`].\n\n///\n\n/// See also [`StateHandle::get_untracked()`].\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use sycamore_reactive::*;\n\n///\n\n/// let state = Signal::new(1);\n\n///\n\n/// let double = create_memo({\n\n/// let state = state.clone();\n\n/// move || untrack(|| *state.get() * 2)\n\n/// });\n\n///\n\n/// assert_eq!(*double.get(), 2);\n\n///\n\n/// state.set(2);\n\n/// // double value should still be old value because state was untracked\n\n/// assert_eq!(*double.get(), 2);\n\n/// ```\n\npub fn untrack<T>(f: impl FnOnce() -> T) -> T {\n\n let f = Rc::new(RefCell::new(Some(f)));\n\n let g = Rc::clone(&f);\n\n\n\n // Do not panic if running inside destructor.\n\n if let Ok(ret) = LISTENERS.try_with(|listeners| {\n\n let tmp = listeners.take();\n\n\n\n let ret = f.take().unwrap_throw()();\n\n\n\n *listeners.borrow_mut() = tmp;\n\n\n\n ret\n\n }) {\n\n ret\n\n } else {\n\n g.take().unwrap_throw()()\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore-reactive/src/effect.rs", "rank": 14, "score": 182219.08184624556 }, { "content": "pub fn node_impl(html: Element) -> TokenStream {\n\n html.to_token_stream()\n\n}\n", "file_path": "packages/sycamore-macro/src/view/mod.rs", "rank": 15, "score": 180454.02599676646 }, { "content": "#[must_use = \"create_scope returns the reactive scope of the effects created inside this 
scope\"]\n\npub fn create_scope<'a>(callback: impl FnOnce() + 'a) -> ReactiveScope {\n\n _create_child_scope_in(None, Box::new(callback))\n\n}\n\n\n", "file_path": "packages/sycamore-reactive/src/lib.rs", "rank": 16, "score": 180139.17574078968 }, { "content": "#[must_use = \"create_root returns the reactive scope of the effects created inside this scope\"]\n\npub fn create_root<'a>(callback: impl FnOnce() + 'a) -> ReactiveScope {\n\n _create_child_scope_in(None, Box::new(callback))\n\n}\n\n\n", "file_path": "packages/sycamore-reactive/src/lib.rs", "rank": 17, "score": 180139.17574078968 }, { "content": "/// Render a [`View`] under a `parent` node by reusing existing nodes (client side\n\n/// hydration). Alias for [`hydrate_to`] with `parent` being the `<body>` tag.\n\n///\n\n/// For rendering without hydration, use [`render`] instead.\n\n///\n\n/// **TODO**: This method currently deletes existing nodes from DOM and reinserts new\n\n/// created nodes. This will be fixed in a later release.\n\n///\n\n/// _This API requires the following crate features to be activated: `dom`_\n\npub fn hydrate(template: impl FnOnce() -> View<DomNode>) {\n\n let window = web_sys::window().unwrap_throw();\n\n let document = window.document().unwrap_throw();\n\n\n\n hydrate_to(template, &document.body().unwrap_throw());\n\n}\n\n\n", "file_path": "packages/sycamore/src/generic_node/dom_node.rs", "rank": 18, "score": 179371.29120331112 }, { "content": "/// Render a [`View`] into the DOM.\n\n/// Alias for [`render_to`] with `parent` being the `<body>` tag.\n\n///\n\n/// _This API requires the following crate features to be activated: `dom`_\n\npub fn render(template: impl FnOnce() -> View<DomNode>) {\n\n let window = web_sys::window().unwrap_throw();\n\n let document = window.document().unwrap_throw();\n\n\n\n render_to(template, &document.body().unwrap_throw());\n\n}\n\n\n", "file_path": "packages/sycamore/src/generic_node/dom_node.rs", "rank": 19, "score": 179365.3953899047 }, { 
"content": "/// Render a [`View`] into a static [`String`]. Useful\n\n/// for rendering to a string on the server side.\n\n///\n\n/// _This API requires the following crate features to be activated: `ssr`_\n\npub fn render_to_string(template: impl FnOnce() -> View<SsrNode>) -> String {\n\n let mut ret = String::new();\n\n let _scope = create_root(|| {\n\n for node in template().flatten() {\n\n node.write_to_string(&mut ret);\n\n }\n\n });\n\n\n\n ret\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::prelude::*;\n\n\n\n #[test]\n\n fn render_hello_world() {\n\n assert_eq!(\n\n render_to_string(|| view! {\n", "file_path": "packages/sycamore/src/generic_node/ssr_node.rs", "rank": 20, "score": 172504.1483008088 }, { "content": "/// Render a [`View`] under a `parent` node by reusing existing nodes (client side\n\n/// hydration). For rendering under the `<body>` tag, use [`hydrate_to`] instead.\n\n///\n\n/// For rendering without hydration, use [`render`] instead.\n\n///\n\n/// **TODO**: This method currently deletes existing nodes from DOM and reinserts new\n\n/// created nodes. This will be fixed in a later release.\n\n///\n\n/// _This API requires the following crate features to be activated: `dom`_\n\npub fn hydrate_to(template: impl FnOnce() -> View<DomNode>, parent: &Node) {\n\n for child in get_children(parent.unchecked_ref()) {\n\n child.remove();\n\n }\n\n\n\n let scope = create_root(|| {\n\n insert(\n\n &DomNode {\n\n id: Default::default(),\n\n node: parent.clone(),\n\n },\n\n template(),\n\n None,\n\n None, // TODO\n\n false,\n\n );\n\n });\n\n\n\n thread_local! 
{\n\n static GLOBAL_SCOPES: std::cell::RefCell<Vec<ReactiveScope>> = std::cell::RefCell::new(Vec::new());\n\n }\n\n\n\n GLOBAL_SCOPES.with(|global_scopes| global_scopes.borrow_mut().push(scope));\n\n}\n", "file_path": "packages/sycamore/src/generic_node/dom_node.rs", "rank": 21, "score": 169605.51743827903 }, { "content": "/// Render a [`View`] under a `parent` node.\n\n/// For rendering under the `<body>` tag, use [`render`] instead.\n\n///\n\n/// _This API requires the following crate features to be activated: `dom`_\n\npub fn render_to(template: impl FnOnce() -> View<DomNode>, parent: &Node) {\n\n let scope = create_root(|| {\n\n insert(\n\n &DomNode {\n\n id: Default::default(),\n\n node: parent.clone(),\n\n },\n\n template(),\n\n None,\n\n None,\n\n false,\n\n );\n\n });\n\n\n\n thread_local! {\n\n static GLOBAL_SCOPES: std::cell::RefCell<Vec<ReactiveScope>> = std::cell::RefCell::new(Vec::new());\n\n }\n\n\n\n GLOBAL_SCOPES.with(|global_scopes| global_scopes.borrow_mut().push(scope));\n\n}\n\n\n", "file_path": "packages/sycamore/src/generic_node/dom_node.rs", "rank": 22, "score": 169605.2752897476 }, { "content": "#[component(Component<G>)]\n\npub fn component() -> View<G> {\n\n view! 
{\n\n div\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore-macro/tests/view/component-pass.rs", "rank": 23, "score": 168736.64051895888 }, { "content": "#[component(Comp8<G>)]\n\nfn comp8(one: (), two: ()) -> View<G> {\n\n todo!();\n\n}\n\n\n", "file_path": "packages/sycamore-macro/tests/component/component-fail.rs", "rank": 24, "score": 166943.36251542915 }, { "content": "/// Creates a new [`ReactiveScope`] with a context and runs the supplied callback function.\n\npub fn create_context_scope<T: 'static, Out>(value: T, f: impl FnOnce() -> Out) -> Out {\n\n SCOPES.with(|scopes| {\n\n // Create a new ReactiveScope with a context.\n\n let scope = ReactiveScope::new();\n\n scope.0.borrow_mut().context = Some(Box::new(Context { value }));\n\n scopes.borrow_mut().push(scope);\n\n let out = f();\n\n let scope = scopes.borrow_mut().pop().unwrap_throw();\n\n on_cleanup(move || drop(scope));\n\n out\n\n })\n\n}\n", "file_path": "packages/sycamore-reactive/src/context.rs", "rank": 25, "score": 165942.42379478965 }, { "content": "#[component(Portal<G>)]\n\npub fn portal(props: PortalProps<G>) -> View<G> {\n\n let PortalProps { children, selector } = props;\n\n\n\n if G::IS_BROWSER {\n\n let window = web_sys::window().unwrap_throw();\n\n let document = window.document().unwrap_throw();\n\n let container = document\n\n .query_selector(selector)\n\n .unwrap_throw()\n\n .expect_throw(\"could not find element matching selector\");\n\n\n\n let children = children.flatten();\n\n\n\n for child in &children {\n\n container\n\n .append_child(\n\n &<dyn Any>::downcast_ref::<DomNode>(child)\n\n .unwrap_throw()\n\n .inner_element(),\n\n )\n", "file_path": "packages/sycamore/src/portal.rs", "rank": 26, "score": 159765.0615925096 }, { "content": "/// An alternative to [`Signal::new`] that uses a reducer to get the next value.\n\n///\n\n/// It uses a reducer function that takes the previous value and a message and returns the next\n\n/// value.\n\n///\n\n/// Returns a [`ReadSignal`] and a 
dispatch function to send messages to the reducer.\n\n///\n\n/// # Params\n\n/// * `initial` - The initial value of the state.\n\n/// * `reducer` - A function that takes the previous value and a message and returns the next value.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use sycamore_reactive::*;\n\n///\n\n/// enum Msg {\n\n/// Increment,\n\n/// Decrement,\n\n/// }\n\n///\n\n/// let (state, dispatch) = create_reducer(0, |state, msg: Msg| match msg {\n\n/// Msg::Increment => *state + 1,\n\n/// Msg::Decrement => *state - 1,\n\n/// });\n\n///\n\n/// assert_eq!(*state.get(), 0);\n\n/// dispatch(Msg::Increment);\n\n/// assert_eq!(*state.get(), 1);\n\n/// dispatch(Msg::Decrement);\n\n/// assert_eq!(*state.get(), 0);\n\n/// ```\n\npub fn create_reducer<F, Out, Msg>(initial: Out, reduce: F) -> (ReadSignal<Out>, Rc<impl Fn(Msg)>)\n\nwhere\n\n F: Fn(&Out, Msg) -> Out,\n\n{\n\n let memo = Signal::new(initial);\n\n\n\n let dispatcher = {\n\n let memo = memo.clone();\n\n move |msg| {\n\n memo.set(reduce(&memo.get_untracked(), msg));\n\n }\n\n };\n\n\n\n (memo.into_handle(), Rc::new(dispatcher))\n\n}\n\n\n", "file_path": "packages/sycamore-reactive/src/effect.rs", "rank": 27, "score": 159379.82421845023 }, { "content": "/// Parse a Rust identifier. Reference: https://doc.rust-lang.org/reference/identifiers.html\n\nfn ident(i: &str) -> IResult<&str, &str> {\n\n recognize(pair(ident_start, many0(ident_continue)))(i)\n\n}\n\n\n", "file_path": "packages/sycamore-router-macro/src/parser.rs", "rank": 28, "score": 156332.0965930366 }, { "content": "fn param(i: &str) -> IResult<&str, &str> {\n\n take_till(|c| c == '/')(i)\n\n}\n\n\n", "file_path": "packages/sycamore-router-macro/src/parser.rs", "rank": 29, "score": 156326.89533079637 }, { "content": "#[component(NumberDisplayer<G>)]\n\nfn number_displayer(prop: i32) -> View<G> {\n\n view! 
{\n\n p { \"My number is: \" (prop) }\n\n }\n\n}\n\n\n", "file_path": "examples/higher-order-components/src/main.rs", "rank": 30, "score": 156191.88086322855 }, { "content": "/// Reconciles an array of nodes.\n\n///\n\n/// # Params\n\n/// * `parent` - The parent node under which all other nodes are (direct) children.\n\n/// * `a` - The current/existing nodes that are to be diffed.\n\n/// * `b` - The new nodes that are to be inserted. After the reconciliation, all the nodes in `b`\n\n/// should be inserted under `parent`.\n\n///\n\n/// # Panics\n\n/// Panics if `a.is_empty()`. Append nodes instead.\n\npub fn reconcile_fragments<G: GenericNode>(parent: &G, a: &mut [G], b: &[G]) {\n\n debug_assert!(!a.is_empty(), \"a cannot be empty\");\n\n\n\n // Sanity check: make sure all nodes in a are children of parent.\n\n #[cfg(debug_assertions)]\n\n {\n\n for (i, node) in a.iter().enumerate() {\n\n if node.parent_node().as_ref() != Some(parent) {\n\n panic!(\n\n \"node {} in existing nodes Vec is not a child of parent. node = {:#?}\",\n\n i, node\n\n );\n\n }\n\n }\n\n }\n\n\n\n let b_len = b.len();\n\n let mut a_end = a.len();\n\n let mut b_end = b_len;\n\n let mut a_start = 0;\n", "file_path": "packages/sycamore/src/utils/render.rs", "rank": 31, "score": 155867.08489982173 }, { "content": "/// Creates a memoized value from some signals. 
Also know as \"derived stores\".\n\n/// Unlike [`create_memo`], this function will not notify dependents of a change if the output is\n\n/// the same.\n\n///\n\n/// It takes a comparison function to compare the old and new value, which returns `true` if they\n\n/// are the same and `false` otherwise.\n\n///\n\n/// To use the type's [`PartialEq`] implementation instead of a custom function, use\n\n/// [`create_selector`].\n\npub fn create_selector_with<F, Out, C>(mut derived: F, comparator: C) -> ReadSignal<Out>\n\nwhere\n\n F: FnMut() -> Out + 'static,\n\n Out: 'static,\n\n C: Fn(&Out, &Out) -> bool + 'static,\n\n{\n\n let memo = Rc::new(RefCell::new(None::<Signal<Out>>));\n\n\n\n create_effect({\n\n let memo = Rc::clone(&memo);\n\n move || {\n\n if memo.borrow().as_ref().is_some() {\n\n let memo = memo.borrow();\n\n let memo = memo.as_ref().unwrap_throw();\n\n let new_value = derived();\n\n if !comparator(&memo.get_untracked(), &new_value) {\n\n memo.set(new_value);\n\n }\n\n } else {\n\n *memo.borrow_mut() = Some(Signal::new(derived()));\n\n }\n\n }\n\n });\n\n\n\n let memo = memo.borrow();\n\n memo.as_ref().unwrap_throw().handle()\n\n}\n\n\n", "file_path": "packages/sycamore-reactive/src/effect.rs", "rank": 32, "score": 151755.72724529565 }, { "content": "/// Get the value of a context in the current [`ReactiveScope`].\n\n///\n\n/// # Panics\n\n/// This function will `panic!` if the context is not found in the current scope or a parent scope.\n\npub fn use_context<T: Clone + 'static>() -> T {\n\n SCOPES.with(|scopes| {\n\n let scopes = scopes.borrow();\n\n let mut current = scopes.last().map(|s| Rc::clone(&s.0));\n\n match current {\n\n Some(_) => {\n\n while let Some(scope) = &current {\n\n if let Some(context) = &scope.borrow().context {\n\n if let Some(value) = context.get_value().downcast_ref::<T>() {\n\n return value.clone();\n\n }\n\n }\n\n current = current.unwrap_throw().borrow().parent.0.upgrade();\n\n }\n\n panic!(\"context not found for type\")\n\n 
}\n\n None => panic!(\"context not found for type\"),\n\n }\n\n })\n\n}\n\n\n", "file_path": "packages/sycamore-reactive/src/context.rs", "rank": 33, "score": 151203.16500699363 }, { "content": "/// Create a [`View`] from an array of [`View`].\n\n///\n\n/// # Example\n\n/// ```\n\n/// # use sycamore::prelude::*;\n\n/// # use sycamore::builder::html::*;\n\n/// # fn _test<G: GenericNode>() -> View<G> {\n\n/// fragment([\n\n/// div().build(),\n\n/// div().build()\n\n/// ])\n\n/// # }\n\n/// ```\n\npub fn fragment<G, const N: usize>(parts: [View<G>; N]) -> View<G>\n\nwhere\n\n G: GenericNode,\n\n{\n\n View::new_fragment(Vec::from_iter(parts.to_vec()))\n\n}\n\n\n\n/// The main type powering the builder API.\n\n#[derive(Debug)]\n\npub struct NodeBuilder<G>\n\nwhere\n\n G: GenericNode,\n\n{\n\n element: G,\n\n}\n\n\n\nimpl<G> NodeBuilder<G>\n\nwhere\n\n G: GenericNode,\n\n{\n", "file_path": "packages/sycamore/src/builder/agnostic/mod.rs", "rank": 34, "score": 150023.47267937986 }, { "content": "pub fn route(i: &str) -> IResult<&str, RoutePathAst> {\n\n map(separated_list0(tag(\"/\"), segment), |segments| {\n\n let segments = segments\n\n .into_iter()\n\n .filter(|x| !matches!(x, SegmentAst::Param(param) if param.is_empty()))\n\n .collect();\n\n RoutePathAst { segments }\n\n })(i)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use expect_test::{expect, Expect};\n\n\n\n use super::*;\n\n\n\n fn check(input: &str, expect: Expect) {\n\n let actual = format!(\"{:#?}\", route(input).unwrap());\n\n expect.assert_eq(&actual);\n\n }\n", "file_path": "packages/sycamore-router-macro/src/parser.rs", "rank": 35, "score": 144649.47469487353 }, { "content": "#[component(Router<G>)]\n\npub fn router<R, F>(props: RouterProps<R, F, G>) -> View<G>\n\nwhere\n\n R: Route + 'static,\n\n F: FnOnce(ReadSignal<R>) -> View<G> + 'static,\n\n{\n\n let RouterProps {\n\n render,\n\n integration,\n\n _phantom,\n\n } = props;\n\n let render = Rc::new(RefCell::new(Some(render)));\n\n let integration = 
Rc::new(integration);\n\n let base_pathname = base_pathname();\n\n\n\n PATHNAME.with(|pathname| {\n\n assert!(pathname.borrow().is_none());\n\n // Get initial url from window.location.\n\n let path = integration.current_pathname();\n\n let path = path.strip_prefix(&base_pathname).unwrap_or(&path);\n\n *pathname.borrow_mut() = Some(Signal::new(path.to_string()));\n", "file_path": "packages/sycamore-router/src/router.rs", "rank": 36, "score": 143186.03705005132 }, { "content": "#[component(ContextProvider<G>)]\n\npub fn context_provider<T, F>(props: ContextProviderProps<T, F, G>) -> View<G>\n\nwhere\n\n T: 'static,\n\n F: FnOnce() -> View<G>,\n\n{\n\n let ContextProviderProps { value, children } = props;\n\n\n\n create_context_scope(value, children)\n\n}\n\n\n\n#[cfg(all(test, feature = \"ssr\"))]\n\nmod tests {\n\n use super::*;\n\n use sycamore_reactive::use_context;\n\n\n\n #[test]\n\n fn basic_context() {\n\n sycamore::render_to_string(|| {\n\n view! {\n\n ContextProvider(ContextProviderProps {\n", "file_path": "packages/sycamore/src/context.rs", "rank": 37, "score": 141441.95848752774 }, { "content": "#[component(Comp2<G>)]\n\nfn comp2(_props: ()) -> View<G> {\n\n todo!();\n\n}\n\n\n", "file_path": "packages/sycamore-macro/tests/component/component-pass.rs", "rank": 38, "score": 140904.0397895042 }, { "content": "#[component(StaticRouter<G>)]\n\npub fn static_router<R, F>(props: StaticRouterProps<R, F, G>) -> View<G>\n\nwhere\n\n R: Route + 'static,\n\n F: Fn(R) -> View<G> + 'static,\n\n{\n\n let StaticRouterProps {\n\n render,\n\n route,\n\n _phantom,\n\n } = props;\n\n\n\n render(route)\n\n}\n\n\n", "file_path": "packages/sycamore-router/src/router.rs", "rank": 39, "score": 139755.1803994487 }, { "content": "#[component(Indexed<G>)]\n\npub fn indexed<T: 'static, F: 'static>(props: IndexedProps<T, F, G>) -> View<G>\n\nwhere\n\n T: Clone + PartialEq,\n\n F: Fn(T) -> View<G>,\n\n{\n\n let IndexedProps { iterable, template } = props;\n\n\n\n let mut mapped = 
map_indexed(iterable, move |x| template(x.clone()));\n\n View::new_dyn(move || View::new_fragment(mapped()))\n\n}\n", "file_path": "packages/sycamore/src/flow.rs", "rank": 40, "score": 138781.75176836166 }, { "content": "#[component(Content<G>)]\n\npub fn content(\n\n ContentProps {\n\n data: MarkdownPage { html, outline },\n\n sidebar,\n\n }: ContentProps,\n\n) -> View<G> {\n\n let show_sidebar = sidebar.is_some();\n\n\n\n let sidebar_version = sidebar.as_ref().map(|x| x.0.clone());\n\n\n\n view! {\n\n div(class=\"flex w-full\") {\n\n (if show_sidebar {\n\n view! {\n\n div(class=\"flex-none\") {\n\n crate::sidebar::Sidebar(sidebar.clone().unwrap())\n\n }\n\n }\n\n } else {\n\n view! {}\n", "file_path": "website/src/content.rs", "rank": 41, "score": 138268.45483989007 }, { "content": "/// Internal implementation: use dynamic dispatch to reduce code bloat.\n\nfn _create_effect(mut effect: Box<dyn FnMut()>) {\n\n let listener: Rc<RefCell<Option<Listener>>> = Rc::new(RefCell::new(None));\n\n\n\n // Callback for when the effect's dependencies are triggered.\n\n let callback: Rc<RefCell<dyn FnMut()>> = Rc::new(RefCell::new({\n\n let listener = Rc::downgrade(&listener);\n\n move || {\n\n LISTENERS.with(|listeners| {\n\n // Record initial context size to verify that it is the same after.\n\n let initial_context_size = listeners.borrow().len();\n\n\n\n // Upgrade running now to make sure running is valid for the whole duration of\n\n // the effect.\n\n let listener = listener.upgrade().unwrap_throw();\n\n\n\n // Push new reactive scope.\n\n listeners.borrow_mut().push(Rc::downgrade(&listener));\n\n\n\n let mut listener_mut = listener.borrow_mut();\n\n let listener_ref = listener_mut.as_mut().unwrap_throw();\n", "file_path": "packages/sycamore-reactive/src/effect.rs", "rank": 42, "score": 137308.4407504286 }, { "content": "#[component(Comp9<G>)]\n\nstruct AStruct;\n\n\n", "file_path": "packages/sycamore-macro/tests/component/component-fail.rs", "rank": 43, "score": 
136749.29470402503 }, { "content": "fn assert_cleanup_called(f: impl FnOnce()) {\n\n CLEANUP_CALLED.with(|cleanup_called| {\n\n assert!(!cleanup_called.get());\n\n f();\n\n assert!(cleanup_called.get());\n\n\n\n cleanup_called.set(false); // Reset for next test\n\n });\n\n}\n\n\n", "file_path": "packages/sycamore/tests/web/cleanup.rs", "rank": 44, "score": 135692.7154321054 }, { "content": "#[component(EnhancedComponent<G>)]\n\nfn enhanced_component<C: Component<G, Props = i32>>() -> View<G> {\n\n view! {\n\n div(class=\"enhanced-container\") {\n\n p { \"Enhanced container start\" }\n\n C(42)\n\n p { \"Enhanced container end\" }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/higher-order-components/src/main.rs", "rank": 45, "score": 134251.46157729885 }, { "content": "#[wasm_bindgen_test]\n\npub fn test_cleanup_in_root() {\n\n let root = create_root(|| {\n\n on_cleanup(on_cleanup_callback);\n\n });\n\n\n\n assert_cleanup_called(|| {\n\n drop(root);\n\n });\n\n}\n\n\n", "file_path": "packages/sycamore/tests/web/cleanup.rs", "rank": 46, "score": 130870.24489535722 }, { "content": "#[wasm_bindgen_test]\n\npub fn test_cleanup_in_effect() {\n\n let trigger = Signal::new(());\n\n\n\n create_effect(cloned!((trigger) => move || {\n\n trigger.get();\n\n on_cleanup(on_cleanup_callback);\n\n }));\n\n\n\n assert_cleanup_called(|| {\n\n trigger.set(());\n\n });\n\n}\n\n\n", "file_path": "packages/sycamore/tests/web/cleanup.rs", "rank": 47, "score": 130870.24489535722 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n println!(\"cargo:rerun-if-changed=next\");\n\n println!(\"cargo:rerun-if-changed=versioned_docs\");\n\n println!(\"cargo:rerun-if-changed=posts\");\n\n\n\n // Sitemap.\n\n generate_sitemap_xml()?;\n\n\n\n // Markdown files.\n\n build_dir(Path::new(\"./next\"), Path::new(\"docs\"))?;\n\n build_dir(Path::new(\"./versioned_docs\"), Path::new(\"docs\"))?;\n\n build_dir(Path::new(\"./posts\"), 
Path::new(\"posts\"))?;\n\n\n\n // Docs sidebars.\n\n let next_sidebar = fs::read_to_string(\"./next/sidebar.json\")?;\n\n fs::write(\"../website/static/docs/sidebar.json\", next_sidebar)?;\n\n for entry in WalkDir::new(\"./versioned_docs\") {\n\n let entry = entry?;\n\n if entry.path().file_name() == Some(OsStr::new(\"sidebar.json\")) {\n", "file_path": "docs/build.rs", "rank": 48, "score": 128276.27526586986 }, { "content": "#[component(Header<G>)]\n\npub fn header() -> View<G> {\n\n view! {\n\n header(class=\"fixed top-0 z-50 w-full\") {\n\n Nav()\n\n }\n\n }\n\n}\n", "file_path": "website/src/header.rs", "rank": 49, "score": 128004.20327947737 }, { "content": "#[component(Versions<G>)]\n\npub fn versions() -> View<G> {\n\n web_sys::window()\n\n .unwrap()\n\n .document()\n\n .unwrap()\n\n .set_title(\"Versions - Sycamore\");\n\n\n\n let versions = VERSIONS\n\n .iter()\n\n .map(|(name, versioned_docs_link)| {\n\n view! {\n\n li {\n\n h2(class=\"text-2xl font-light\") { (name) }\n\n div(class=\"flex flex-col divide-y dark:divide-gray-500 text-gray-600 dark:text-gray-300\") {\n\n VersionedDocsLinkView((name, versioned_docs_link))\n\n }\n\n }\n\n }\n\n })\n\n .collect();\n", "file_path": "website/src/versions.rs", "rank": 50, "score": 128004.20327947737 }, { "content": "#[component(Index<G>)]\n\npub fn index() -> View<G> {\n\n web_sys::window()\n\n .unwrap()\n\n .document()\n\n .unwrap()\n\n .set_title(\"Sycamore\");\n\n\n\n view! 
{\n\n div(class=\"pb-10\") {\n\n div(class=\"flex flex-col items-center w-full mb-10\") {\n\n h1(class=\"text-5xl font-bold mt-20 mb-5\") {\n\n \"Sycamore\"\n\n }\n\n\n\n p(class=\"mb-5 text-center\") {\n\n \"A reactive library for creating web apps in Rust and WebAssembly\"\n\n }\n\n\n\n // region: badges\n\n div(class=\"mb-7 flex flex-row flex-wrap justify-center gap-1\") {\n", "file_path": "website/src/index.rs", "rank": 51, "score": 128004.20327947737 }, { "content": "pub fn create_child_scope_in<'a>(\n\n parent: Option<&ReactiveScopeWeak>,\n\n callback: impl FnOnce() + 'a,\n\n) -> ReactiveScope {\n\n _create_child_scope_in(parent, Box::new(callback))\n\n}\n\n\n\n/// Creates a new reactive root / scope. Generally, you won't need this method as it is called\n\n/// automatically in `render`.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use sycamore_reactive::*;\n\n///\n\n/// let trigger = Signal::new(());\n\n/// let counter = Signal::new(0);\n\n///\n\n/// let scope = create_root(cloned!((trigger, counter) => move || {\n\n/// create_effect(move || {\n\n/// trigger.get(); // subscribe to trigger\n", "file_path": "packages/sycamore-reactive/src/lib.rs", "rank": 52, "score": 127478.14979043292 }, { "content": "fn dyn_param(i: &str) -> IResult<&str, &str> {\n\n delimited(tag(\"<\"), ident, tag(\">\"))(i)\n\n}\n\n\n", "file_path": "packages/sycamore-router-macro/src/parser.rs", "rank": 53, "score": 126256.59276098566 }, { "content": "#[component(Header<G>)]\n\npub fn header() -> View<G> {\n\n let app_state = use_context::<AppState>();\n\n let value = Signal::new(String::new());\n\n\n\n let handle_submit = cloned!((app_state, value) => move |event: Event| {\n\n let event: KeyboardEvent = event.unchecked_into();\n\n\n\n if event.key() == \"Enter\" {\n\n let mut task = value.get().as_ref().clone();\n\n task = task.trim().to_string();\n\n\n\n if !task.is_empty() {\n\n app_state.add_todo(task);\n\n value.set(\"\".to_string());\n\n }\n\n }\n\n });\n\n\n\n view! 
{\n\n header(class=\"header\") {\n\n h1 { \"todos\" }\n\n input(class=\"new-todo\",\n\n placeholder=\"What needs to be done?\",\n\n bind:value=value,\n\n on:keyup=handle_submit,\n\n )\n\n }\n\n }\n\n}\n", "file_path": "examples/todomvc/src/header.rs", "rank": 54, "score": 126202.08772525155 }, { "content": "#[component(Controls<G>)]\n\npub fn controls() -> View<G> {\n\n let counter = use_context::<Signal<i32>>();\n\n\n\n let increment = cloned!((counter) => move |_| counter.set(*counter.get() + 1));\n\n\n\n let reset = cloned!((counter) => move |_| counter.set(0));\n\n\n\n view! {\n\n button(class=\"increment\", on:click=increment) {\n\n \"Increment\"\n\n }\n\n button(class=\"reset\", on:click=reset) {\n\n \"Reset\"\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/context/src/main.rs", "rank": 55, "score": 126202.08772525155 }, { "content": "#[component(Copyright<G>)]\n\npub fn copyright() -> View<G> {\n\n view! {\n\n footer(class=\"info\") {\n\n p { \"Double click to edit a todo\" }\n\n p {\n\n \"Created by \"\n\n a(href=\"https://github.com/lukechu10\", target=\"_blank\") { \"lukechu10\" }\n\n }\n\n p {\n\n \"Part of \"\n\n a(href=\"http://todomvc.com\") { \"TodoMVC\" }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "examples/todomvc/src/copyright.rs", "rank": 56, "score": 126202.08772525155 }, { "content": "#[component(Footer<G>)]\n\npub fn footer() -> View<G> {\n\n let app_state = use_context::<AppState>();\n\n\n\n let items_text = cloned!((app_state) => move || {\n\n match app_state.todos_left() {\n\n 1 => \"item\",\n\n _ => \"items\"\n\n }\n\n });\n\n\n\n let has_completed_todos = create_selector(cloned!((app_state) => move || {\n\n app_state.todos_left() < app_state.todos.get().len()\n\n }));\n\n\n\n let handle_clear_completed = cloned!((app_state) => move |_| {\n\n app_state.clear_completed()\n\n });\n\n\n\n view! 
{\n\n footer(class=\"footer\") {\n", "file_path": "examples/todomvc/src/footer.rs", "rank": 57, "score": 126202.08772525155 }, { "content": "#[component(List<G>)]\n\npub fn list() -> View<G> {\n\n let app_state = use_context::<AppState>();\n\n let todos_left = create_selector(cloned!((app_state) => move || {\n\n app_state.todos_left()\n\n }));\n\n\n\n let filtered_todos = create_memo(cloned!((app_state) => move || {\n\n app_state.todos.get().iter().filter(|todo| match *app_state.filter.get() {\n\n Filter::All => true,\n\n Filter::Active => !todo.get().completed,\n\n Filter::Completed => todo.get().completed,\n\n }).cloned().collect::<Vec<_>>()\n\n }));\n\n\n\n // We need a separate signal for checked because clicking the checkbox will detach the binding\n\n // between the attribute and the view.\n\n let checked = Signal::new(false);\n\n create_effect(cloned!((checked) => move || {\n\n // Calling checked.set will also update the `checked` property on the input element.\n\n checked.set(*todos_left.get() == 0)\n", "file_path": "examples/todomvc/src/list.rs", "rank": 58, "score": 126202.08772525155 }, { "content": "fn generate_sitemap_xml() -> Result<(), Box<dyn Error>> {\n\n let out_path = Path::new(\"../website/sitemap_index.xml\");\n\n\n\n let mut buf = String::new();\n\n writeln!(buf, r#\"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\"#)?;\n\n writeln!(\n\n buf,\n\n r#\"<urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">\"#\n\n )?;\n\n\n\n write_url(&mut buf, \"\", \"monthly\", \"1.0\")?;\n\n write_url(&mut buf, \"/news\", \"monthly\", \"0.8\")?;\n\n write_url(&mut buf, \"/versions\", \"monthly\", \"0.3\")?;\n\n\n\n // News\n\n generate_sitemap_for_dir(&mut buf, \"/news\", Path::new(\"posts\"), \"yearly\", \"0.8\")?;\n\n\n\n // Docs for master\n\n generate_sitemap_for_dir(&mut buf, \"/docs\", Path::new(\"./next\"), \"weekly\", \"0.5\")?;\n\n\n", "file_path": "docs/build.rs", "rank": 59, "score": 124721.00239940739 }, { "content": "/// Navigates to the 
specified `url`. The url should have the same origin as the app.\n\n///\n\n/// This is useful for imperatively navigating to an url when using an anchor tag (`<a>`) is not\n\n/// possible/suitable (e.g. when submitting a form).\n\n///\n\n/// # Panics\n\n/// This function will `panic!()` if a [`Router`] has not yet been created.\n\npub fn navigate(url: &str) {\n\n PATHNAME.with(|pathname| {\n\n assert!(\n\n pathname.borrow().is_some(),\n\n \"navigate can only be used with a BrowserRouter\"\n\n );\n\n\n\n let pathname = pathname.borrow().clone().unwrap_throw();\n\n let path = url.strip_prefix(&base_pathname()).unwrap_or(url);\n\n pathname.set(path.to_string());\n\n\n\n // Update History API.\n\n let window = web_sys::window().unwrap_throw();\n\n let history = window.history().unwrap_throw();\n\n history\n\n .push_state_with_url(&JsValue::UNDEFINED, \"\", Some(url))\n\n .unwrap_throw();\n\n window.scroll_to_with_x_and_y(0.0, 0.0);\n\n });\n\n}\n\n\n", "file_path": "packages/sycamore-router/src/router.rs", "rank": 60, "score": 124484.02081648918 }, { "content": "#[component(NewsIndex<G>)]\n\npub fn news_index() -> View<G> {\n\n web_sys::window()\n\n .unwrap()\n\n .document()\n\n .unwrap()\n\n .set_title(\"News - Sycamore\");\n\n\n\n let posts = POSTS\n\n .iter()\n\n .map(|(title, subtitle, url)| {\n\n view! 
{\n\n li(class=\"hover:text-yellow-500 transition-colors\") {\n\n a(href=format!(\"/news/{}\", url)) {\n\n h2(class=\"text-2xl font-light\") { (title) }\n\n p(class=\"text-gray-600 dark:text-gray-400\") { (subtitle) }\n\n }\n\n }\n\n }\n\n })\n\n .collect();\n", "file_path": "website/src/news_index.rs", "rank": 61, "score": 124480.53370008957 }, { "content": "/// Runs a callback in a `requestAnimationFrame` loop until the `callback` returns `false`.\n\npub fn loop_raf(task: Task) {\n\n TASKS.with(|tasks| {\n\n if tasks.borrow().is_empty() {\n\n run_tasks();\n\n }\n\n\n\n tasks.borrow_mut().insert(task);\n\n });\n\n}\n", "file_path": "packages/sycamore/src/utils.rs", "rank": 62, "score": 124480.32497814854 }, { "content": "pub fn circ_in(t: f32) -> f32 {\n\n 1.0 - f32::sqrt(1.0 - f32::powi(t, 2))\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 63, "score": 123521.39307363113 }, { "content": "pub fn expo_in(t: f32) -> f32 {\n\n if t.abs() <= f32::EPSILON {\n\n 0.0\n\n } else {\n\n EXP_BASE.powf(10.0 * t - 10.0)\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 64, "score": 123521.39307363113 }, { "content": "pub fn cubic_in(t: f32) -> f32 {\n\n t * t * t\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 65, "score": 123521.39307363113 }, { "content": "pub fn sine_out(t: f32) -> f32 {\n\n f32::sin(t * PI / 2.0)\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 66, "score": 123521.39307363113 }, { "content": "pub fn quint_out(t: f32) -> f32 {\n\n let f = t - 1.0;\n\n f * f * f * f * f + 1.0\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 67, "score": 123521.39307363113 }, { "content": "pub fn bounce_in(t: f32) -> f32 {\n\n 1.0 - bounce_out(1.0 - t)\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 68, "score": 123521.39307363113 }, { "content": "pub fn quad_out(t: f32) -> f32 {\n\n -t * (t - 2.0)\n\n}\n\n\n", "file_path": 
"packages/sycamore/src/easing.rs", "rank": 69, "score": 123521.39307363113 }, { "content": "pub fn sine_in(t: f32) -> f32 {\n\n 1.0 - f32::cos(t * PI / 2.0)\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 70, "score": 123521.39307363113 }, { "content": "pub fn cubic_out(t: f32) -> f32 {\n\n let f = t - 1.0;\n\n f * f * f + 1.0\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 71, "score": 123521.39307363113 }, { "content": "pub fn quint_in(t: f32) -> f32 {\n\n t * t * t * t * t\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 72, "score": 123521.39307363113 }, { "content": "pub fn quart_in(t: f32) -> f32 {\n\n t * t * t * t\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 73, "score": 123521.39307363113 }, { "content": "pub fn circ_out(t: f32) -> f32 {\n\n f32::sqrt(1.0 - f32::powi(t - 1.0, 2).powi(2))\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 74, "score": 123521.39307363113 }, { "content": "pub fn expo_out(t: f32) -> f32 {\n\n if (t - 1.0).abs() <= f32::EPSILON {\n\n 1.0\n\n } else {\n\n 1.0 - EXP_BASE.powf(-10.0 * t)\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 75, "score": 123521.39307363113 }, { "content": "pub fn quart_out(t: f32) -> f32 {\n\n let f = t - 1.0;\n\n f * f * f * (1.0 - t) + 1.0\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 76, "score": 123521.39307363113 }, { "content": "pub fn bounce_out(t: f32) -> f32 {\n\n // TODO: Refactor? 
Code seems like a repetition.\n\n // Further, it is unclear why the numbers here are\n\n // picked.\n\n if t < 1.0 / BOUNCE_GRAVITY {\n\n BOUNCE_AMPLITUDE * t * t\n\n } else if t < 2.0 / BOUNCE_GRAVITY {\n\n let t = t - 1.5 / BOUNCE_GRAVITY;\n\n BOUNCE_AMPLITUDE * t * t + 0.75\n\n } else if t < 2.5 / BOUNCE_GRAVITY {\n\n let t = t - 2.25 / BOUNCE_GRAVITY;\n\n BOUNCE_AMPLITUDE * t * t + 0.9375\n\n } else {\n\n let t = t - 2.625 / BOUNCE_GRAVITY;\n\n BOUNCE_AMPLITUDE * t * t + 0.984375\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 77, "score": 123521.39307363113 }, { "content": "pub fn linear(t: f32) -> f32 {\n\n t\n\n}\n\n\n\n// Quadratic\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 78, "score": 123521.39307363113 }, { "content": "pub fn quad_in(t: f32) -> f32 {\n\n t * t\n\n}\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 79, "score": 123521.39307363113 }, { "content": "/// Function that maps a `Vec` to another `Vec` via a map function. The mapped `Vec` is lazy\n\n/// computed, meaning that it's value will only be updated when requested. Modifications to the\n\n/// input `Vec` are diffed by index to prevent recomputing values that have not changed.\n\n///\n\n/// Generally, it is preferred to use [`map_keyed`] instead when a key function is available.\n\n///\n\n/// This function is the underlying utility behind `Indexed`.\n\n///\n\n/// # Params\n\n/// * `list` - The list to be mapped. 
The list must be a [`ReadSignal`] (obtained from a [`Signal`])\n\n/// and therefore reactive.\n\n/// * `map_fn` - A closure that maps from the input type to the output type.\n\npub fn map_indexed<T, U>(\n\n list: ReadSignal<Vec<T>>,\n\n map_fn: impl Fn(&T) -> U + 'static,\n\n) -> impl FnMut() -> Vec<U>\n\nwhere\n\n T: PartialEq + Clone,\n\n U: Clone + 'static,\n\n{\n\n let parent_scope = current_scope();\n\n\n\n // Previous state used for diffing.\n\n let mut items = Rc::new(Vec::new());\n\n let mapped = Rc::new(RefCell::new(Vec::new()));\n\n let mut scopes = Vec::new();\n\n\n\n move || {\n\n let new_items = list.get(); // Subscribe to list.\n\n untrack(|| {\n\n if new_items.is_empty() {\n\n // Fast path for removing all items.\n", "file_path": "packages/sycamore-reactive/src/iter.rs", "rank": 80, "score": 122854.87604910244 }, { "content": "/// Gets the number of dependencies of the current reactive scope.\n\n///\n\n/// If the function is called outside a reactive scope, it will return `None`.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use sycamore_reactive::*;\n\n///\n\n/// assert_eq!(dependency_count(), None);\n\n///\n\n/// let state = Signal::new(1);\n\n/// create_effect(move || {\n\n/// assert_eq!(dependency_count(), Some(0));\n\n/// state.get();\n\n/// assert_eq!(dependency_count(), Some(1));\n\n/// });\n\n/// ```\n\npub fn dependency_count() -> Option<usize> {\n\n LISTENERS.with(|listeners| {\n\n listeners.borrow().last().map(|last_context| {\n\n last_context\n\n .upgrade()\n\n .expect_throw(\"Running should be valid while inside reactive scope\")\n\n .borrow()\n\n .as_ref()\n\n .unwrap_throw()\n\n .dependencies\n\n .len()\n\n })\n\n })\n\n}\n\n\n", "file_path": "packages/sycamore-reactive/src/effect.rs", "rank": 81, "score": 122842.03740185009 }, { "content": "/// Navigates to the specified `url` without adding a new history entry. Instead, this replaces the\n\n/// current location with the new `url`. 
The url should have the same origin as the app.\n\n///\n\n/// This is useful for imperatively navigating to an url when using an anchor tag (`<a>`) is not\n\n/// possible/suitable (e.g. when submitting a form).\n\n///\n\n/// # Panics\n\n/// This function will `panic!()` if a [`Router`] has not yet been created.\n\npub fn navigate_replace(url: &str) {\n\n PATHNAME.with(|pathname| {\n\n assert!(\n\n pathname.borrow().is_some(),\n\n \"navigate_replace can only be used with a BrowserRouter\"\n\n );\n\n\n\n let pathname = pathname.borrow().clone().unwrap_throw();\n\n let path = url.strip_prefix(&base_pathname()).unwrap_or(url);\n\n pathname.set(path.to_string());\n\n\n\n // Update History API.\n\n let window = web_sys::window().unwrap_throw();\n\n let history = window.history().unwrap_throw();\n\n history\n\n .replace_state_with_url(&JsValue::UNDEFINED, \"\", Some(url))\n\n .unwrap_throw();\n\n window.scroll_to_with_x_and_y(0.0, 0.0);\n\n });\n\n}\n\n\n", "file_path": "packages/sycamore-router/src/router.rs", "rank": 82, "score": 122840.41305246016 }, { "content": "/// Insert a [`GenericNode`] under `parent` at the specified `marker`. If `initial` is `Some(_)`,\n\n/// `initial` will be replaced with the new inserted node.\n\n///\n\n/// # Params\n\n/// * `parent` - The parent node to insert `accessor` under.\n\n/// * `accessor` - The [`View`] to be inserted.\n\n/// * `initial` - An optional initial node that is already inserted into the DOM.\n\n/// * `marker` - An optional marker node. If `marker` is `Some(_)`, `accessor` will be inserted\n\n/// directly before `marker`. If `marker` is `None`, `accessor` will be appended at the end of\n\n/// `parent`.\n\n/// * `multi` - A boolean flag indicating whether the node to be inserted is the only child of\n\n/// `parent`. 
Setting this to `true` will enable certain optimizations when clearing the node.\n\n/// Even if the node to be inserted is the only child of `parent`, `multi` can still be set to\n\n/// `false` but forgoes the optimizations.\n\npub fn insert<G: GenericNode>(\n\n parent: &G,\n\n accessor: View<G>,\n\n initial: Option<View<G>>,\n\n marker: Option<&G>,\n\n multi: bool,\n\n) {\n\n insert_expression(parent, &accessor, initial, marker, false, multi);\n\n}\n\n\n", "file_path": "packages/sycamore/src/utils/render.rs", "rank": 83, "score": 122832.0686493215 }, { "content": "pub fn circ_inout(t: f32) -> f32 {\n\n if t < 0.5 {\n\n (1.0 - f32::sqrt(1.0 - f32::powi(2.0 * t, 2))) / 2.0\n\n } else {\n\n (f32::sqrt(1.0 - f32::powi(-2.0 * t + 2.0, 2)) + 1.0) / 2.0\n\n }\n\n}\n\n\n\n// Exponential\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 84, "score": 121799.90763898703 }, { "content": "pub fn quad_inout(t: f32) -> f32 {\n\n if t < 0.5 {\n\n 2.0 * t * t\n\n } else {\n\n -2.0 * t * t + 4.0 * t - 1.0\n\n }\n\n}\n\n\n\n// Cubic\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 85, "score": 121799.90763898703 }, { "content": "pub fn sine_inout(t: f32) -> f32 {\n\n -(f32::cos(PI * t) - 1.0) / 2.0\n\n}\n\n\n\n// Bounce\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 86, "score": 121799.90763898703 }, { "content": "pub fn cubic_inout(t: f32) -> f32 {\n\n if t < 0.5 {\n\n 4.0 * t * t * t\n\n } else {\n\n let f = 2.0 * t - 2.0;\n\n 0.5 * f * f * f + 1.0\n\n }\n\n}\n\n\n\n// Quartic\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 87, "score": 121799.90763898703 }, { "content": "pub fn quart_inout(t: f32) -> f32 {\n\n if t < 0.5 {\n\n 8.0 * t * t * t * t\n\n } else {\n\n let f = t - 1.0;\n\n -8.0 * f * f * f * f + 1.0\n\n }\n\n}\n\n\n\n// Quintic\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 88, "score": 121799.90763898703 }, { "content": "pub fn expo_inout(t: f32) -> f32 {\n\n if t.abs() <= f32::EPSILON 
{\n\n 0.0\n\n } else if (t - 1.0) <= f32::EPSILON {\n\n 1.0\n\n } else if t <= 0.5 {\n\n f32::powf(EXP_BASE, 20.0 * t - 10.0) / 2.0\n\n } else {\n\n 1.0 + f32::powf(EXP_BASE, -20.0 * t + 10.0) / -2.0\n\n }\n\n}\n\n\n\n// Sine\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 89, "score": 121799.90763898703 }, { "content": "pub fn bounce_inout(t: f32) -> f32 {\n\n if t < 0.5 {\n\n (1.0 - bounce_out(1.0 - 2.0 * t)) / 2.0\n\n } else {\n\n (1.0 + bounce_out(-1.0 + 2.0 * t)) / 2.0\n\n }\n\n}\n\n\n\n// TODO: add more easing functions\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n macro_rules! test_start_at_0 {\n\n ($($ease_fn:ident),*) => {\n\n paste::paste! {\n\n $(\n\n #[test]\n\n fn [<test_ease_ $ease_fn _starts_at_0>]() {\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 90, "score": 121799.90763898703 }, { "content": "pub fn quint_inout(t: f32) -> f32 {\n\n if t < 0.5 {\n\n 16.0 * t * t * t * t * t\n\n } else {\n\n let f = (2.0 * t) - 2.0;\n\n 0.5 * f * f * f * f * f + 1.0\n\n }\n\n}\n\n\n\n// Circular\n\n\n", "file_path": "packages/sycamore/src/easing.rs", "rank": 91, "score": 121799.90763898703 }, { "content": "/// Cleans the children specified by `current` from `parent`.\n\n///\n\n/// # Params\n\n/// * `parent` - The parent node from which to clean the children.\n\n/// * `current` - A [`Vec`] of [`GenericNode`]s that are to be removed.\n\n/// * `marker` - If `marker` is `None`, all the nodes from `parent` are removed regardless of\n\n/// `current`. 
This behavior will likely change in the future.\n\n/// * `replacement` - An optional replacement node for the removed nodes.\n\npub fn clean_children<G: GenericNode>(\n\n parent: &G,\n\n current: Vec<G>,\n\n _marker: Option<&G>,\n\n replacement: Option<&G>,\n\n multi: bool,\n\n) {\n\n if !multi {\n\n parent.update_inner_text(\"\");\n\n if let Some(replacement) = replacement {\n\n parent.append_child(replacement);\n\n }\n\n return;\n\n }\n\n\n\n for node in current {\n\n if node.parent_node().as_ref() == Some(parent) {\n\n if let Some(replacement) = replacement {\n\n parent.replace_child(&node, replacement);\n\n } else {\n\n parent.remove_child(&node);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "packages/sycamore/src/utils/render.rs", "rank": 92, "score": 121257.35230081374 }, { "content": "/// Normalizes a `Vec<Template<G>>` into a `Vec<G>`.\n\n///\n\n/// Returns whether the normalized `Vec<G>` is dynamic (and should be rendered in an effect).\n\n///\n\n/// # Params\n\n/// * `v` - The [`Vec`] to write the output to.\n\n/// * `fragment` - The `Vec<Template<G>>` to normalize.\n\n/// * `unwrap` - If `true`, unwraps the `fragment` without setting `dynamic` to true. 
In most cases,\n\n/// this should be `false`.\n\npub fn normalize_incoming_fragment<G: GenericNode>(\n\n v: &mut Vec<View<G>>,\n\n fragment: &[View<G>],\n\n unwrap: bool,\n\n) -> bool {\n\n let mut dynamic = false;\n\n\n\n for template in fragment {\n\n match &template.inner {\n\n ViewType::Node(_) => v.push(template.clone()),\n\n ViewType::Dyn(f) if unwrap => {\n\n let mut value = f.get().as_ref().clone();\n\n while let ViewType::Dyn(f) = &value.inner {\n\n value = f.get().as_ref().clone();\n\n }\n\n let fragment: Rc<Box<[View<G>]>> = match &value.inner {\n\n ViewType::Node(_) => Rc::new(Box::new([value])),\n\n ViewType::Fragment(fragment) => Rc::clone(fragment),\n\n _ => unreachable!(),\n\n };\n", "file_path": "packages/sycamore/src/utils/render.rs", "rank": 93, "score": 119755.57024454788 }, { "content": "/// Returns a [`ReactiveScopeWeak`] handle to the current reactive scope or `None` if outside of a\n\n/// reactive scope.\n\npub fn current_scope() -> Option<ReactiveScopeWeak> {\n\n SCOPES.with(|scope| {\n\n scope\n\n .borrow()\n\n .last()\n\n .map(|last_context| last_context.downgrade())\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::cloned;\n\n\n\n #[test]\n\n fn effects() {\n\n let state = Signal::new(0);\n\n\n\n let double = Signal::new(-1);\n\n\n", "file_path": "packages/sycamore-reactive/src/effect.rs", "rank": 94, "score": 119748.76599907178 }, { "content": "/// Function that maps a `Vec` to another `Vec` via a map function. The mapped `Vec` is lazy\n\n/// computed, meaning that it's value will only be updated when requested. Modifications to the\n\n/// input `Vec` are diffed using keys to prevent recomputing values that have not changed.\n\n///\n\n/// This function is the underlying utility behind `Keyed`.\n\n///\n\n/// # Params\n\n/// * `list` - The list to be mapped. 
The list must be a [`ReadSignal`] (obtained from a [`Signal`])\n\n/// and therefore reactive.\n\n/// * `map_fn` - A closure that maps from the input type to the output type.\n\n/// * `key_fn` - A closure that returns an _unique_ key to each entry.\n\n///\n\n/// _Credits: Based on TypeScript implementation in <https://github.com/solidjs/solid>_\n\npub fn map_keyed<T, K, U>(\n\n list: ReadSignal<Vec<T>>,\n\n map_fn: impl Fn(&T) -> U + 'static,\n\n key_fn: impl Fn(&T) -> K + 'static,\n\n) -> impl FnMut() -> Vec<U>\n\nwhere\n\n T: Eq + Clone,\n\n K: Eq + Hash,\n\n U: Clone + 'static,\n\n{\n\n let parent_scope = current_scope();\n\n\n\n // Previous state used for diffing.\n\n let mut items = Rc::new(Vec::new());\n\n let mapped = Rc::new(RefCell::new(Vec::new()));\n\n let mut scopes: Vec<Option<Rc<ReactiveScope>>> = Vec::new();\n\n\n\n move || {\n\n let new_items = list.get(); // Subscribe to list.\n\n untrack(|| {\n", "file_path": "packages/sycamore-reactive/src/iter.rs", "rank": 95, "score": 118603.19592601797 }, { "content": "/// A wrapper around [`wasm_bindgen_futures::spawn_local`] that extends the current reactive scope\n\n/// that it is called in.\n\n///\n\n/// If the scope is dropped by the time the future is spawned, the callback will not be called.\n\n///\n\n/// If not on `wasm32` target arch, this function is a no-op.\n\n///\n\n/// # Panics\n\n/// This function panics if called outside of a reactive scope.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use sycamore::futures::spawn_local_in_scope;\n\n/// use sycamore::prelude::*;\n\n///\n\n/// create_root(|| {\n\n/// // Inside reactive scope.\n\n/// spawn_local_in_scope(async {\n\n/// // Still inside reactive scope.\n\n/// });\n\n/// });\n\n/// ```\n\npub fn spawn_local_in_scope<F>(future: F)\n\nwhere\n\n F: Future<Output = ()> + 'static,\n\n{\n\n if cfg!(target_arch = \"wasm32\") {\n\n if let Some(scope) = current_scope() {\n\n spawn_local(async move {\n\n scope.extend_future(future).await;\n\n });\n\n } else {\n\n 
panic!(\"spawn_local_in_scope called outside of reactive scope\");\n\n }\n\n }\n\n}\n", "file_path": "packages/sycamore/src/futures.rs", "rank": 96, "score": 118588.34519771469 }, { "content": "#[inline]\n\npub fn create_effect<F>(effect: F)\n\nwhere\n\n F: FnMut() + 'static,\n\n{\n\n _create_effect(Box::new(effect));\n\n}\n\n\n", "file_path": "packages/sycamore-reactive/src/effect.rs", "rank": 97, "score": 118578.10882450888 }, { "content": "#[component(TodoFilter<G>)]\n\npub fn todo_filter(filter: Filter) -> View<G> {\n\n let app_state = use_context::<AppState>();\n\n let selected = cloned!((app_state) => move || filter == *app_state.filter.get());\n\n let set_filter = cloned!((app_state) => move |filter| {\n\n app_state.filter.set(filter)\n\n });\n\n\n\n view! {\n\n li {\n\n a(\n\n class=if selected() { \"selected\" } else { \"\" },\n\n href=filter.url(),\n\n on:click=move |_| set_filter(filter),\n\n ) {\n\n (format!(\"{:?}\", filter))\n\n }\n\n }\n\n }\n\n}\n", "file_path": "examples/todomvc/src/filter.rs", "rank": 98, "score": 116182.60948432265 }, { "content": "/// Trait that is implemented by components. Should not be implemented manually. Use the\n\n/// [`component`](sycamore_macro::component) macro instead.\n\npub trait Component<G: GenericNode> {\n\n /// The name of the component (for use in debug mode). In release mode, this will default to\n\n /// `\"UnnamedComponent\"`\n\n const NAME: &'static str = \"UnnamedComponent\";\n\n /// The type of the properties passed to the component.\n\n type Props;\n\n\n\n /// Create a new component with an instance of the properties.\n\n ///\n\n /// The double underscores (`__`) are to prevent conflicts with other trait methods. This is\n\n /// because we cannot use fully qualified syntax here because it prevents type inference.\n\n fn __create_component(props: Self::Props) -> View<G>;\n\n}\n", "file_path": "packages/sycamore/src/component.rs", "rank": 99, "score": 115554.54286773891 } ]
Rust
starky/src/constraint_consumer.rs
mfaulk/plonky2
2cedd1b02a718d19115560647ba1f741eab83260
use std::marker::PhantomData; use plonky2::field::extension_field::Extendable; use plonky2::field::packed_field::PackedField; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::iop::target::Target; use plonky2::plonk::circuit_builder::CircuitBuilder; pub struct ConstraintConsumer<P: PackedField> { alphas: Vec<P::Scalar>, pub constraint_accs: Vec<P>, z_last: P, lagrange_basis_first: P, lagrange_basis_last: P, } impl<P: PackedField> ConstraintConsumer<P> { pub fn new( alphas: Vec<P::Scalar>, z_last: P, lagrange_basis_first: P, lagrange_basis_last: P, ) -> Self { Self { constraint_accs: vec![P::ZEROS; alphas.len()], alphas, z_last, lagrange_basis_first, lagrange_basis_last, } } pub fn accumulators(self) -> Vec<P::Scalar> { self.constraint_accs .into_iter() .map(|acc| acc.as_slice()[0]) .collect() } pub fn constraint_transition(&mut self, constraint: P) { self.constraint(constraint * self.z_last); } pub fn constraint(&mut self, constraint: P) { for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) { *acc *= alpha; *acc += constraint; } } pub fn constraint_first_row(&mut self, constraint: P) { self.constraint(constraint * self.lagrange_basis_first); } pub fn constraint_last_row(&mut self, constraint: P) { self.constraint(constraint * self.lagrange_basis_last); } } pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: usize> { alphas: Vec<Target>, constraint_accs: Vec<ExtensionTarget<D>>, z_last: ExtensionTarget<D>, lagrange_basis_first: ExtensionTarget<D>, lagrange_basis_last: ExtensionTarget<D>, _phantom: PhantomData<F>, } impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F, D> { pub fn new( zero: ExtensionTarget<D>, alphas: Vec<Target>, z_last: ExtensionTarget<D>, lagrange_basis_first: ExtensionTarget<D>, lagrange_basis_last: ExtensionTarget<D>, ) -> Self { Self { constraint_accs: vec![zero; alphas.len()], alphas, z_last, lagrange_basis_first, 
lagrange_basis_last, _phantom: Default::default(), } } pub fn accumulators(self) -> Vec<ExtensionTarget<D>> { self.constraint_accs } pub fn constraint_transition( &mut self, builder: &mut CircuitBuilder<F, D>, constraint: ExtensionTarget<D>, ) { let filtered_constraint = builder.mul_extension(constraint, self.z_last); self.constraint(builder, filtered_constraint); } pub fn constraint( &mut self, builder: &mut CircuitBuilder<F, D>, constraint: ExtensionTarget<D>, ) { for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) { *acc = builder.scalar_mul_add_extension(alpha, *acc, constraint); } } pub fn constraint_first_row( &mut self, builder: &mut CircuitBuilder<F, D>, constraint: ExtensionTarget<D>, ) { let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_first); self.constraint(builder, filtered_constraint); } pub fn constraint_last_row( &mut self, builder: &mut CircuitBuilder<F, D>, constraint: ExtensionTarget<D>, ) { let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_last); self.constraint(builder, filtered_constraint); } }
use std::marker::PhantomData; use plonky2::field::extension_field::Extendable; use plonky2::field::packed_field::PackedField; use plonky2::hash::hash_types::RichField; use plonky2::iop::ext_target::ExtensionTarget; use plonky2::iop::target::Target; use plonky2::plonk::circuit_builder::CircuitBuilder; pub struct ConstraintConsumer<P: PackedField> { alphas: Vec<P::Scalar>, pub constraint_accs: Vec<P>, z_last: P, lagrange_basis_first: P, lagrange_basis_last: P, } impl<P: PackedField> ConstraintConsumer<P> { pub fn new( alphas: Vec<P::Scalar>, z_last: P, lagrange_basis_first: P, lagrange_basis_last: P, ) -> Self { Self { constraint_accs: vec![P::ZEROS; alphas.len()], alphas, z_last, lagrange_basis_first, lagrange_basis_last, } } pub fn accumulators(self) -> Vec<P::Scalar> { self.constraint_accs .into_iter() .map(|acc| acc.as_slice()[0]) .collect() } pub fn constraint_transition(&mut self, constraint: P) { self.constraint(constraint * self.z_last); } pub fn c
pub fn constraint_first_row(&mut self, constraint: P) { self.constraint(constraint * self.lagrange_basis_first); } pub fn constraint_last_row(&mut self, constraint: P) { self.constraint(constraint * self.lagrange_basis_last); } } pub struct RecursiveConstraintConsumer<F: RichField + Extendable<D>, const D: usize> { alphas: Vec<Target>, constraint_accs: Vec<ExtensionTarget<D>>, z_last: ExtensionTarget<D>, lagrange_basis_first: ExtensionTarget<D>, lagrange_basis_last: ExtensionTarget<D>, _phantom: PhantomData<F>, } impl<F: RichField + Extendable<D>, const D: usize> RecursiveConstraintConsumer<F, D> { pub fn new( zero: ExtensionTarget<D>, alphas: Vec<Target>, z_last: ExtensionTarget<D>, lagrange_basis_first: ExtensionTarget<D>, lagrange_basis_last: ExtensionTarget<D>, ) -> Self { Self { constraint_accs: vec![zero; alphas.len()], alphas, z_last, lagrange_basis_first, lagrange_basis_last, _phantom: Default::default(), } } pub fn accumulators(self) -> Vec<ExtensionTarget<D>> { self.constraint_accs } pub fn constraint_transition( &mut self, builder: &mut CircuitBuilder<F, D>, constraint: ExtensionTarget<D>, ) { let filtered_constraint = builder.mul_extension(constraint, self.z_last); self.constraint(builder, filtered_constraint); } pub fn constraint( &mut self, builder: &mut CircuitBuilder<F, D>, constraint: ExtensionTarget<D>, ) { for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) { *acc = builder.scalar_mul_add_extension(alpha, *acc, constraint); } } pub fn constraint_first_row( &mut self, builder: &mut CircuitBuilder<F, D>, constraint: ExtensionTarget<D>, ) { let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_first); self.constraint(builder, filtered_constraint); } pub fn constraint_last_row( &mut self, builder: &mut CircuitBuilder<F, D>, constraint: ExtensionTarget<D>, ) { let filtered_constraint = builder.mul_extension(constraint, self.lagrange_basis_last); self.constraint(builder, filtered_constraint); } }
onstraint(&mut self, constraint: P) { for (&alpha, acc) in self.alphas.iter().zip(&mut self.constraint_accs) { *acc *= alpha; *acc += constraint; } }
function_block-function_prefixed
[]
Rust
starchart/src/action/result.rs
starlite-project/starchart
c0f8cd6774596d45bf9a603aa3a37aa5dcde5d3a
use std::{ fmt::{Debug, Display, Formatter, Result as FmtResult}, hint::unreachable_unchecked, iter::FromIterator, }; use crate::Entry; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[must_use = "an ActionResult should be asserted"] pub enum ActionResult<R> { Create, SingleRead(Option<R>), MultiRead(Vec<R>), Update, Delete(bool), } impl<R> ActionResult<R> { pub const fn is_create(&self) -> bool { matches!(self, Self::Create) } pub const fn is_single_read(&self) -> bool { matches!(self, Self::SingleRead(_)) } pub const fn is_multi_read(&self) -> bool { matches!(self, Self::MultiRead(_)) } pub const fn is_read(&self) -> bool { self.is_single_read() || self.is_multi_read() } pub const fn is_update(&self) -> bool { matches!(self, Self::Update) } pub const fn is_delete(&self) -> bool { matches!(self, Self::Delete(_)) } } impl<R: Entry> ActionResult<R> { #[track_caller] #[inline] pub fn unwrap_create(self) { assert!( self.is_create(), "called `ActionResult::create` on a `{}` value", self ); } #[inline] #[track_caller] pub unsafe fn unwrap_create_unchecked(self) { debug_assert!(self.is_create()); if let Self::Create = self { } else { unreachable_unchecked() } } #[track_caller] #[inline] pub fn unwrap_single_read(self) -> Option<R> { if let Self::SingleRead(v) = self { v } else { panic!("called `ActionResult::single_read` on a `{}` value", self); } } #[track_caller] #[inline] pub unsafe fn unwrap_single_read_unchecked(self) -> Option<R> { debug_assert!(self.is_single_read()); if let Self::SingleRead(v) = self { v } else { unreachable_unchecked() } } #[track_caller] #[inline] pub fn unwrap_multi_read<I: FromIterator<R>>(self) -> I { if let Self::MultiRead(v) = self { v.into_iter().collect() } else { panic!("called `ActionResult::multi_read` on a `{}` value", self) } } pub unsafe fn unwrap_multi_read_unchecked<I: FromIterator<R>>(self) -> I { debug_assert!(self.is_multi_read()); if let Self::MultiRead(v) = self { v.into_iter().collect() } else { 
unreachable_unchecked() } } #[track_caller] #[inline] pub fn unwrap_update(self) { assert!( self.is_update(), "called `ActionResult::update` on a `{}` value", self ); } pub unsafe fn unwrap_update_unchecked(self) { debug_assert!(self.is_update()); if let Self::Update = self { } else { unreachable_unchecked() } } #[track_caller] #[inline] pub fn unwrap_delete(self) -> bool { if let Self::Delete(b) = self { b } else { panic!("called `ActionResult::delete` on a `{}` value", self) } } pub unsafe fn unwrap_delete_unchecked(self) -> bool { debug_assert!(self.is_delete()); if let Self::Delete(b) = self { b } else { unreachable_unchecked() } } } impl<R> Display for ActionResult<R> { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { match self { Self::Create => f.write_str("Create"), Self::SingleRead(_) | Self::MultiRead(_) => f.write_str("Read"), Self::Update => f.write_str("Update"), Self::Delete(_) => f.write_str("Delete"), } } }
use std::{ fmt::{Debug, Display, Formatter, Result as FmtResult}, hint::unreachable_unchecked, iter::FromIterator, }; use crate::Entry; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] #[must_use = "an ActionResult should be asserted"] pub enum ActionResult<R> { Create, SingleRead(Option<R>), MultiRead(Vec<R>), Update, Delete(bool), } impl<R> ActionResult<R> { pub const fn is_create(&self) -> bool { matches!(self, Self::Create) } pub const fn is_single_read(&self) -> bool { matches!(self, Self::SingleRead(_)) } pub const fn is_multi_read(&self) -> bool { matches!(self, Self::MultiRead(_)) } pub const fn is_read(&self) -> bool { self.is_single_read() || self.is_multi_read() } pub const fn is_update(&self) -> bool { matches!(self, Self::Update) } pub const fn is_delete(&self) -> bool { matches!(self, Self::Delete(_)) } } impl<R: Entry> ActionResult<R> { #[track_caller] #[inline] pub fn unwrap_create(self) { assert!( self.is_create(), "called `ActionResult::create` on a `{}` value", self ); } #[inline] #[track_caller] pub unsafe fn unwrap_create_unchecked(self) { debug_assert!(self.is_create()); if let Self::Create = self { } else { unreachable_unchecked() } } #[track_caller] #[inline] pub fn unwrap_single_read(self) -> Option<R> { if let Self::SingleRead(v) = self { v } else { panic!("called `ActionResult::single_read` on a `{}` value", self); } } #[track_caller] #[inline]
#[track_caller] #[inline] pub fn unwrap_multi_read<I: FromIterator<R>>(self) -> I { if let Self::MultiRead(v) = self { v.into_iter().collect() } else { panic!("called `ActionResult::multi_read` on a `{}` value", self) } } pub unsafe fn unwrap_multi_read_unchecked<I: FromIterator<R>>(self) -> I { debug_assert!(self.is_multi_read()); if let Self::MultiRead(v) = self { v.into_iter().collect() } else { unreachable_unchecked() } } #[track_caller] #[inline] pub fn unwrap_update(self) { assert!( self.is_update(), "called `ActionResult::update` on a `{}` value", self ); } pub unsafe fn unwrap_update_unchecked(self) { debug_assert!(self.is_update()); if let Self::Update = self { } else { unreachable_unchecked() } } #[track_caller] #[inline] pub fn unwrap_delete(self) -> bool { if let Self::Delete(b) = self { b } else { panic!("called `ActionResult::delete` on a `{}` value", self) } } pub unsafe fn unwrap_delete_unchecked(self) -> bool { debug_assert!(self.is_delete()); if let Self::Delete(b) = self { b } else { unreachable_unchecked() } } } impl<R> Display for ActionResult<R> { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { match self { Self::Create => f.write_str("Create"), Self::SingleRead(_) | Self::MultiRead(_) => f.write_str("Read"), Self::Update => f.write_str("Update"), Self::Delete(_) => f.write_str("Delete"), } } }
pub unsafe fn unwrap_single_read_unchecked(self) -> Option<R> { debug_assert!(self.is_single_read()); if let Self::SingleRead(v) = self { v } else { unreachable_unchecked() } }
function_block-full_function
[ { "content": "#[cfg(not(feature = \"metadata\"))]\n\npub fn is_metadata(_: &str) -> bool {\n\n\tfalse\n\n}\n\n\n\npub unsafe trait InnerUnwrap<T> {\n\n\tunsafe fn inner_unwrap(self) -> T;\n\n}\n\n\n\n#[cfg(not(has_unwrap_unchecked))]\n\nunsafe impl<T> InnerUnwrap<T> for Option<T> {\n\n\t#[inline]\n\n\t#[track_caller]\n\n\tunsafe fn inner_unwrap(self) -> T {\n\n\t\tdebug_assert!(self.is_some());\n\n\t\tself.map_or_else(|| unreachable_unchecked(), |v| v)\n\n\t}\n\n}\n\n\n\n#[cfg(has_unwrap_unchecked)]\n\nunsafe impl<T> InnerUnwrap<T> for Option<T> {\n", "file_path": "starchart/src/util.rs", "rank": 0, "score": 122014.81896596614 }, { "content": "#[cfg(feature = \"metadata\")]\n\npub fn is_metadata(key: &str) -> bool {\n\n\tkey == crate::METADATA_KEY\n\n}\n\n\n", "file_path": "starchart/src/util.rs", "rank": 1, "score": 119027.09812816109 }, { "content": "/// A marker trait for use within the [`Starchart`].\n\n///\n\n/// This signifies that the type can be stored within a [`Starchart`].\n\n///\n\n/// [`Starchart`]: crate::Starchart\n\npub trait Entry: Clone + Serialize + DeserializeOwned + Debug + Default + Send + Sync {}\n\n\n\nimpl<T: Clone + Serialize + DeserializeOwned + Debug + Default + Send + Sync> Entry for T {}\n\n\n", "file_path": "starchart/src/entry.rs", "rank": 2, "score": 99746.55154388733 }, { "content": "/// An indexable entry, used for any [`Entry`] that can be indexed by a [`Key`] that it owns.\n\npub trait IndexEntry: Entry {\n\n\t/// The [`Key`] type to index by.\n\n\ttype Key: Key;\n\n\n\n\t/// Returns the valid key for the database to index from.\n\n\tfn key(&self) -> &Self::Key;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse std::fmt::{Debug, Display, Formatter, Result as FmtResult};\n\n\n\n\tuse serde::{de::DeserializeOwned, Deserialize, Serialize};\n\n\tuse static_assertions::assert_impl_all;\n\n\n\n\tuse super::{Entry, Key};\n\n\n\n\t#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)]\n\n\tstruct Settings {\n\n\t\tid: 
u32,\n", "file_path": "starchart/src/entry.rs", "rank": 3, "score": 83316.42971696638 }, { "content": "fn main() -> Result<(), Box<dyn Error + 'static>> {\n\n\tlet ac = AutoCfg::new()?;\n\n\tlet version_data = version_meta()?;\n\n\tif let Channel::Nightly = version_data.channel {\n\n\t\tif ac.probe_rustc_version(1, 57) {\n\n\t\t\temit(\"docsrs\");\n\n\t\t}\n\n\t}\n\n\n\n\tif ac.probe_expression(\"std::result::Result::<(), ()>::unwrap_unchecked\")\n\n\t\t&& ac.probe_expression(\"std::option::Option::<()>::unwrap_unchecked\")\n\n\t{\n\n\t\temit(\"has_unwrap_unchecked\");\n\n\t}\n\n\n\n\tOk(())\n\n}\n", "file_path": "build.rs", "rank": 4, "score": 80366.3482552683 }, { "content": "fn main() -> Result<(), Box<dyn Error + 'static>> {\n\n\tlet ac = AutoCfg::new()?;\n\n\tlet version_data = version_meta()?;\n\n\tif let Channel::Nightly = version_data.channel {\n\n\t\tif ac.probe_rustc_version(1, 57) {\n\n\t\t\temit(\"docsrs\");\n\n\t\t}\n\n\t}\n\n\n\n\tif ac.probe_expression(\"std::result::Result::<(), ()>::unwrap_unchecked\")\n\n\t\t&& ac.probe_expression(\"std::option::Option::<()>::unwrap_unchecked\")\n\n\t{\n\n\t\temit(\"has_unwrap_unchecked\");\n\n\t}\n\n\n\n\tOk(())\n\n}\n", "file_path": "starchart/build.rs", "rank": 5, "score": 78383.28780022784 }, { "content": "fn main() -> Result<(), Box<dyn Error + 'static>> {\n\n\tlet ac = AutoCfg::new()?;\n\n\tlet version_data = version_meta()?;\n\n\tif let Channel::Nightly = version_data.channel {\n\n\t\tif ac.probe_rustc_version(1, 57) {\n\n\t\t\temit(\"docsrs\");\n\n\t\t}\n\n\t}\n\n\n\n\tif ac.probe_expression(\"std::result::Result::<(), ()>::unwrap_unchecked\")\n\n\t\t&& ac.probe_expression(\"std::option::Option::<()>::unwrap_unchecked\")\n\n\t{\n\n\t\temit(\"has_unwrap_unchecked\");\n\n\t}\n\n\n\n\tOk(())\n\n}\n", "file_path": "starchart-backends/build.rs", "rank": 6, "score": 76532.403023812 }, { "content": "/// The key trait to be implemented on [`Entry`] to allow an easy way to get keys.\n\npub trait Key {\n\n\t/// The 
method to transform a [`Key`] into a value.\n\n\tfn to_key(&self) -> String;\n\n}\n\n\n\nimpl<T: ToString> Key for T {\n\n\tfn to_key(&self) -> String {\n\n\t\tself.to_string()\n\n\t}\n\n}\n\n\n", "file_path": "starchart/src/entry.rs", "rank": 7, "score": 75279.33754320188 }, { "content": "fn parse(input: &DeriveInput) -> Result<TokenStream> {\n\n\tlet ident = input.ident.clone();\n\n\n\n\tlet data = match &input.data {\n\n\t\tData::Struct(st) => st,\n\n\t\t_ => {\n\n\t\t\treturn Err(Error::new_spanned(\n\n\t\t\t\t&input,\n\n\t\t\t\t\"IndexEntry can only be derived on structs\",\n\n\t\t\t))\n\n\t\t}\n\n\t};\n\n\n\n\tlet named_fields = match data.fields {\n\n\t\tFields::Named(ref named) => &named.named,\n\n\t\t_ => {\n\n\t\t\treturn Err(Error::new_spanned(\n\n\t\t\t\t&data.fields,\n\n\t\t\t\t\"IndexEntry can only be derived on a struct with named fields\",\n\n\t\t\t))\n", "file_path": "starchart-derive/src/lib.rs", "rank": 8, "score": 74750.69951602965 }, { "content": "#[proc_macro_derive(IndexEntry, attributes(key))]\n\npub fn derive_entity(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n\tlet input = parse_macro_input!(input as DeriveInput);\n\n\tparse(&input)\n\n\t\t.unwrap_or_else(|err| err.to_compile_error())\n\n\t\t.into()\n\n}\n\n\n", "file_path": "starchart-derive/src/lib.rs", "rank": 9, "score": 62996.85605334873 }, { "content": "#[derive(Deserialize)]\n\n#[serde(field_identifier, rename_all = \"lowercase\")]\n\nenum ActionField {\n\n\tType,\n\n\tTarget,\n\n\tTable,\n\n}\n\n\n", "file_path": "starchart/src/action/dynamic.rs", "rank": 10, "score": 48856.67073727489 }, { "content": "\tpub trait Sealed {}\n\n\n\n\timpl Sealed for CreateOperation {}\n\n\timpl Sealed for ReadOperation {}\n\n\timpl Sealed for UpdateOperation {}\n\n\timpl Sealed for DeleteOperation {}\n\n\timpl Sealed for TableTarget {}\n\n\timpl Sealed for EntryTarget {}\n\n\timpl<'a, S: Entry, C: CrudOperation, T: OperationTarget> Sealed for Action<'a, S, C, T> 
{}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse std::fmt::Debug;\n\n\n\n\tuse serde::{Deserialize, Serialize};\n\n\tuse static_assertions::assert_impl_all;\n\n\n\n\tuse super::{\n\n\t\tCreateOperation, DeleteOperation, EntryTarget, ReadOperation, TableTarget, UpdateOperation,\n", "file_path": "starchart/src/action/impl.rs", "rank": 11, "score": 42995.71140630776 }, { "content": "/// The backend to be used to manage data.\n\npub trait Backend: Send + Sync {\n\n\t/// The [`Error`] type that the backend will report up.\n\n\t///\n\n\t/// [`Error`]: std::error::Error\n\n\ttype Error: Send + Sync + StdError + 'static;\n\n\n\n\t/// An optional initialization function, useful for making connections to the database.\n\n\t///\n\n\t/// The default impl does nothing\n\n\tfn init(&self) -> InitFuture<'_, Self::Error> {\n\n\t\tok(()).boxed()\n\n\t}\n\n\n\n\t/// An optional shutdown function, useful for disconnecting from databases gracefully.\n\n\t///\n\n\t/// The default impl does nothing\n\n\t///\n\n\t/// # Safety\n\n\t///\n\n\t/// This should not fail, as it's ran upon dropping the [`Starchart`],\n", "file_path": "starchart/src/backend/mod.rs", "rank": 12, "score": 38998.5948486214 }, { "content": "/// A sealed marker trait for helping an [`Action`] represent what type of target the\n\n/// operation will cover.\n\n///\n\n/// [`Action`]: crate::action::Action\n\npub trait OperationTarget: private::Sealed {\n\n\t#[doc(hidden)]\n\n\tfn target() -> TargetKind;\n\n}\n\n\n\nimpl OperationTarget for TableTarget {\n\n\tfn target() -> TargetKind {\n\n\t\tTargetKind::Table\n\n\t}\n\n}\n\n\n\nimpl OperationTarget for EntryTarget {\n\n\tfn target() -> TargetKind {\n\n\t\tTargetKind::Entry\n\n\t}\n\n}\n\n\n\nmod private {\n\n\tuse super::{\n\n\t\tCreateOperation, CrudOperation, DeleteOperation, EntryTarget, OperationTarget,\n\n\t\tReadOperation, TableTarget, UpdateOperation,\n\n\t};\n\n\tuse crate::{Action, Entry};\n\n\n", "file_path": "starchart/src/action/impl.rs", "rank": 13, 
"score": 38068.02154802303 }, { "content": "/// A sealed marker trait for helping an [`Action`] represent what type of operation will occur.\n\n///\n\n/// [`Action`]: crate::action::Action\n\npub trait CrudOperation: private::Sealed {\n\n\t#[doc(hidden)]\n\n\tfn kind() -> ActionKind;\n\n}\n\n\n\nimpl CrudOperation for CreateOperation {\n\n\tfn kind() -> ActionKind {\n\n\t\tActionKind::Create\n\n\t}\n\n}\n\n\n\nimpl CrudOperation for ReadOperation {\n\n\tfn kind() -> ActionKind {\n\n\t\tActionKind::Read\n\n\t}\n\n}\n\n\n\nimpl CrudOperation for UpdateOperation {\n\n\tfn kind() -> ActionKind {\n\n\t\tActionKind::Update\n", "file_path": "starchart/src/action/impl.rs", "rank": 14, "score": 38068.02154802303 }, { "content": "#[cfg(feature = \"fs\")]\n\npub trait Transcoder: Send + Sync {\n\n\t/// Serializes a value into a [`Vec<u8>`] for writing to a file.\n\n\t///\n\n\t/// # Errors\n\n\t///\n\n\t/// Any errors from the transcoder should use [`FsError::serde`] to return properly.\n\n\tfn serialize_value<T: Entry>(&self, value: &T) -> Result<Vec<u8>, FsError>;\n\n\n\n\t/// Deserializes data into the provided type.\n\n\t///\n\n\t/// # Errors\n\n\t///\n\n\t/// Any errors from the transcoder should use [`FsError::serde`] to return properly.\n\n\tfn deserialize_data<T: Entry, R: Read>(&self, rdr: R) -> Result<T, FsError>;\n\n}\n\n\n\n/// The transcoders for the [`FsBackend`].\n\npub mod transcoders {\n\n\t#[cfg(feature = \"binary\")]\n\n\tpub use super::binary::{BinaryFormat, BinaryTranscoder};\n", "file_path": "starchart-backends/src/fs/mod.rs", "rank": 15, "score": 38068.02154802303 }, { "content": "fn get_id_field(fields: &[Field]) -> Option<&Field> {\n\n\tfor field in fields {\n\n\t\tif field.attrs.iter().any(|attr| attr.path.is_ident(KEY_IDENT)) {\n\n\t\t\treturn Some(field);\n\n\t\t}\n\n\t}\n\n\n\n\tfor field in fields {\n\n\t\tif field\n\n\t\t\t.ident\n\n\t\t\t.as_ref()\n\n\t\t\t.map_or(false, |ident| ident == KEY_IDENT || ident == ID_IDENT)\n\n\t\t{\n\n\t\t\treturn 
Some(field);\n\n\t\t}\n\n\t}\n\n\n\n\tNone\n\n}\n", "file_path": "starchart-derive/src/lib.rs", "rank": 16, "score": 36858.102040284066 }, { "content": "\t\tname: String,\n\n\t}\n\n\n\n\t#[derive(Debug, Clone)]\n\n\tstruct Keyable {\n\n\t\tinner: String,\n\n\t}\n\n\n\n\timpl Display for Keyable {\n\n\t\tfn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n\n\t\t\tDisplay::fmt(&self.inner, f)\n\n\t\t}\n\n\t}\n\n\n\n\tassert_impl_all!(\n\n\t\tSettings: Clone,\n\n\t\tDebug,\n\n\t\tDefault,\n\n\t\tDeserializeOwned,\n\n\t\tEntry,\n", "file_path": "starchart/src/entry.rs", "rank": 17, "score": 33743.512629720426 }, { "content": "use std::fmt::Debug;\n\n\n\nuse serde::{de::DeserializeOwned, Serialize};\n\n\n\n/// The key trait to be implemented on [`Entry`] to allow an easy way to get keys.\n", "file_path": "starchart/src/entry.rs", "rank": 18, "score": 33735.17439405328 }, { "content": "\t\tSerialize\n\n\t);\n\n\n\n\t#[test]\n\n\tfn to_key() {\n\n\t\tlet keyable = Keyable {\n\n\t\t\tinner: \"12345\".to_owned(),\n\n\t\t};\n\n\n\n\t\tassert_eq!(keyable.to_key(), \"12345\".to_owned());\n\n\t}\n\n}\n", "file_path": "starchart/src/entry.rs", "rank": 19, "score": 33731.97629507764 }, { "content": "use std::fmt::{Display, Formatter, Result as FmtResult};\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// The target of the [`CRUD`] operation.\n\n///\n\n/// [`CRUD`]: https://en.wikipedia.org/wiki/Create,_read,_update_and_delete\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]\n\n#[must_use = \"getting target information has no side effects\"]\n\npub enum TargetKind {\n\n\t/// The operation will be performed on a table.\n\n\tTable,\n\n\t/// The operation will be performed on a single entry.\n\n\tEntry,\n\n}\n\n\n\nimpl Default for TargetKind {\n\n\tfn default() -> Self {\n\n\t\tSelf::Entry\n\n\t}\n", "file_path": "starchart/src/action/target.rs", "rank": 34, "score": 33.169399149091234 }, { "content": "use 
std::fmt::Debug;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse super::{ActionKind, TargetKind};\n\n\n\n/// Marker type for a Create operation.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]\n\n#[non_exhaustive]\n\npub struct CreateOperation;\n\n\n\n/// Marker type for a Read operation.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]\n\n#[non_exhaustive]\n\npub struct ReadOperation;\n\n\n\n/// Marker type for an Update operation.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]\n\n#[non_exhaustive]\n\npub struct UpdateOperation;\n\n\n\n/// Marker type for a Delete operation.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]\n\n#[non_exhaustive]\n\npub struct DeleteOperation;\n\n\n\n/// A sealed marker trait for helping an [`Action`] represent what type of operation will occur.\n\n///\n\n/// [`Action`]: crate::action::Action\n", "file_path": "starchart/src/action/impl.rs", "rank": 35, "score": 26.271965066401496 }, { "content": "}\n\n\n\nimpl Display for TargetKind {\n\n\tfn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n\n\t\tmatch self {\n\n\t\t\tSelf::Table => f.write_str(\"Table\"),\n\n\t\t\tSelf::Entry => f.write_str(\"Entry\"),\n\n\t\t}\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse std::fmt::{Debug, Display};\n\n\n\n\tuse serde::{Deserialize, Serialize};\n\n\tuse static_assertions::assert_impl_all;\n\n\n\n\tuse super::TargetKind;\n\n\n", "file_path": "starchart/src/action/target.rs", "rank": 36, "score": 24.124290560228154 }, { "content": "use std::fmt::{Display, Formatter, Result as FmtResult};\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// The type of [`CRUD`] action to perform\n\n///\n\n/// [`CRUD`]: https://en.wikipedia.org/wiki/Create,_read,_update_and_delete\n\n#[must_use = \"getting the information on what action will be performed has no side 
effects\"]\n\n#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]\n\npub enum ActionKind {\n\n\t/// Signifies that the operation will be a Create.\n\n\t///\n\n\t/// This locks the database and allows no other reads or writes until it is complete.\n\n\tCreate,\n\n\t/// Signifies that the operation will be a Read.\n\n\t///\n\n\t/// This allows multiple different readers, but doesn't allow writing until all Reads are complete.\n\n\tRead,\n\n\t/// Signifies that the operation will be an Update.\n\n\t///\n", "file_path": "starchart/src/action/kind.rs", "rank": 37, "score": 22.595993894568608 }, { "content": "\t}\n\n}\n\n\n\nimpl CrudOperation for DeleteOperation {\n\n\tfn kind() -> ActionKind {\n\n\t\tActionKind::Delete\n\n\t}\n\n}\n\n\n\n/// Marker type for a table operation.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]\n\n#[non_exhaustive]\n\npub struct TableTarget;\n\n\n\n/// Marker type for an entry operation.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]\n\n#[non_exhaustive]\n\npub struct EntryTarget;\n\n\n", "file_path": "starchart/src/action/impl.rs", "rank": 38, "score": 20.267851115647456 }, { "content": "//! A memory based backend. Useful for debugging or applications\n\n//! 
who only need to store data at runtime.\n\n\n\nuse std::{\n\n\tcollections::hash_map::RandomState,\n\n\terror::Error,\n\n\tfmt::{Debug, Display, Formatter, Result as FmtResult},\n\n\thash::BuildHasher,\n\n\titer::FromIterator,\n\n};\n\n\n\nuse dashmap::DashMap;\n\nuse futures_util::{\n\n\tfuture::{err, ok},\n\n\tFutureExt,\n\n};\n\nuse serde_value::{to_value, DeserializerError, SerializerError, Value};\n\nuse starchart::{\n\n\tbackend::{\n\n\t\tfutures::{\n", "file_path": "starchart-backends/src/memory.rs", "rank": 39, "score": 20.13701377569245 }, { "content": "const METADATA_KEY: &str = \"__metadata__\";\n\n\n\nuse std::result::Result as StdResult;\n\n\n\npub mod action;\n\nmod atomics;\n\npub mod backend;\n\nmod entry;\n\npub mod error;\n\nmod starchart;\n\n#[cfg(not(tarpaulin_include))]\n\nmod util;\n\n\n\n#[doc(inline)]\n\npub use self::{\n\n\taction::Action,\n\n\tentry::{Entry, IndexEntry, Key},\n\n\terror::Error,\n\n\tstarchart::Starchart,\n\n};\n\n\n\n/// A type alias for a [`Result`] that wraps around [`Error`].\n\npub type Result<T, E = Error> = StdResult<T, E>;\n\n\n\n/// The helper derive macro for easily implementing [`IndexEntry`].\n\n#[cfg(feature = \"derive\")]\n\npub use starchart_derive::IndexEntry;\n", "file_path": "starchart/src/lib.rs", "rank": 40, "score": 19.876791114101795 }, { "content": "use std::io::Read;\n\n\n\nuse starchart::Entry;\n\n\n\nuse super::{FsError, Transcoder};\n\n\n\n/// A transcoder for the YAML format.\n\n#[derive(Debug, Default, Clone, Copy)]\n\n#[cfg(feature = \"yaml\")]\n\n#[non_exhaustive]\n\n#[must_use = \"transcoders do nothing by themselves\"]\n\npub struct YamlTranscoder;\n\n\n\nimpl YamlTranscoder {\n\n\t/// Creates a new [`YamlTranscoder`].\n\n\tpub const fn new() -> Self {\n\n\t\tSelf\n\n\t}\n\n}\n\n\n", "file_path": "starchart-backends/src/fs/yaml.rs", "rank": 41, "score": 19.629842485044417 }, { "content": "\tfn default() -> Self {\n\n\t\tSelf::Read\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse 
std::fmt::{Debug, Display};\n\n\n\n\tuse serde::{Deserialize, Serialize};\n\n\tuse static_assertions::assert_impl_all;\n\n\n\n\tuse super::ActionKind;\n\n\n\n\tassert_impl_all!(\n\n\t\tActionKind: Clone,\n\n\t\tCopy,\n\n\t\tDebug,\n\n\t\tDefault,\n\n\t\tDeserialize<'static>,\n", "file_path": "starchart/src/action/kind.rs", "rank": 42, "score": 18.97098504065039 }, { "content": "\t\tDisplay,\n\n\t\tPartialEq,\n\n\t\tSend,\n\n\t\tSerialize,\n\n\t\tSync\n\n\t);\n\n\n\n\t#[test]\n\n\tfn default() {\n\n\t\tassert_eq!(ActionKind::default(), ActionKind::Read);\n\n\t}\n\n\n\n\t#[test]\n\n\tfn display() {\n\n\t\tassert_eq!(ActionKind::Create.to_string(), \"Create\");\n\n\t\tassert_eq!(ActionKind::Read.to_string(), \"Read\");\n\n\t\tassert_eq!(ActionKind::Update.to_string(), \"Update\");\n\n\t\tassert_eq!(ActionKind::Delete.to_string(), \"Delete\");\n\n\t}\n\n}\n", "file_path": "starchart/src/action/kind.rs", "rank": 43, "score": 18.20659689365592 }, { "content": "//! The different errors within the crate.\n\n\n\nuse std::{\n\n\terror::Error as StdError,\n\n\tfmt::{Display, Formatter, Result as FmtResult},\n\n};\n\n\n\n#[doc(inline)]\n\npub use crate::action::{\n\n\tActionError, ActionErrorType, ActionRunError, ActionRunErrorType, ActionValidationError,\n\n\tActionValidationErrorType,\n\n};\n\n\n\n// NOTE: This error shouldn't be used anywhere inside this crate, it's only meant for end users as an ease of use\n\n// error struct.\n\n\n\n/// An error that occurred within the crate.\n\n#[derive(Debug)]\n\npub struct Error {\n\n\tsource: Option<Box<dyn StdError + Send + Sync>>,\n", "file_path": "starchart/src/error.rs", "rank": 44, "score": 18.17151808226323 }, { "content": "\t\tassert_eq!(CreateOperation::kind(), ActionKind::Create);\n\n\t\tassert_eq!(ReadOperation::kind(), ActionKind::Read);\n\n\t\tassert_eq!(UpdateOperation::kind(), ActionKind::Update);\n\n\t\tassert_eq!(DeleteOperation::kind(), ActionKind::Delete);\n\n\t}\n\n\n\n\t#[test]\n\n\tfn target() 
{\n\n\t\tassert_eq!(TableTarget::target(), TargetKind::Table);\n\n\t\tassert_eq!(EntryTarget::target(), TargetKind::Entry);\n\n\t}\n\n}\n", "file_path": "starchart/src/action/impl.rs", "rank": 45, "score": 17.86393205440589 }, { "content": "\t\tself.inner.update_entry(chart)\n\n\t}\n\n}\n\n\n\nimpl<'a, S: Entry> DeleteEntryAction<'a, S> {\n\n\t/// Validates and runs a [`DeleteEntryAction`].\n\n\t///\n\n\t/// # Errors\n\n\t///\n\n\t/// This returns an error if [`Self::validate_table`] or [`Self::validate_key`] fails, or if any of the [`Backend`] methods fail.\n\n\tpub fn run_delete_entry<B: Backend>(\n\n\t\tself,\n\n\t\tgateway: &'a Starchart<B>,\n\n\t) -> impl Future<Output = Result<bool, ActionError>> + 'a {\n\n\t\tself.inner.delete_entry(gateway)\n\n\t}\n\n}\n\n\n\nimpl<'a, S: Entry> CreateTableAction<'a, S> {\n\n\t/// Validates and runs a [`CreateTableAction`].\n", "file_path": "starchart/src/action/mod.rs", "rank": 46, "score": 17.842077516486828 }, { "content": "use std::io::Read;\n\n\n\nuse starchart::Entry;\n\n\n\nuse super::{transcoders::TranscoderFormat, FsError, Transcoder};\n\n\n\n/// A transcoder for the TOML format.\n\n#[derive(Debug, Default, Clone, Copy)]\n\n#[cfg(feature = \"toml\")]\n\n#[must_use = \"transcoders do nothing by themselves\"]\n\npub struct TomlTranscoder(TranscoderFormat);\n\n\n\nimpl TomlTranscoder {\n\n\t/// Creates a new [`TomlTranscoder`], optionally using pretty printing.\n\n\tpub const fn new(format: TranscoderFormat) -> Self {\n\n\t\tSelf(format)\n\n\t}\n\n\n\n\t/// Returns whether or not this transcoder uses pretty formatting.\n\n\t#[must_use]\n", "file_path": "starchart-backends/src/fs/toml.rs", "rank": 47, "score": 17.694808074574446 }, { "content": "use std::io::Read;\n\n\n\nuse starchart::Entry;\n\n\n\nuse super::{transcoders::TranscoderFormat, FsError, Transcoder};\n\n\n\n/// A transcoder for the JSON format.\n\n#[derive(Debug, Default, Clone, Copy)]\n\n#[cfg(feature = \"json\")]\n\n#[must_use = \"transcoders do nothing 
by themselves\"]\n\npub struct JsonTranscoder(TranscoderFormat);\n\n\n\nimpl JsonTranscoder {\n\n\t/// Creates a new [`JsonTranscoder`], optionally using pretty printing.\n\n\tpub const fn new(format: TranscoderFormat) -> Self {\n\n\t\tSelf(format)\n\n\t}\n\n\n\n\t/// Returns whether or not this transcoder uses pretty formatting.\n\n\t#[must_use]\n", "file_path": "starchart-backends/src/fs/json.rs", "rank": 48, "score": 17.694808074574446 }, { "content": "\tassert_impl_all!(\n\n\t\tTargetKind: Clone,\n\n\t\tCopy,\n\n\t\tDebug,\n\n\t\tDefault,\n\n\t\tDeserialize<'static>,\n\n\t\tDisplay,\n\n\t\tPartialEq,\n\n\t\tSend,\n\n\t\tSerialize,\n\n\t\tSync\n\n\t);\n\n\n\n\t#[test]\n\n\tfn default() {\n\n\t\tassert_eq!(TargetKind::default(), TargetKind::Entry);\n\n\t}\n\n\n\n\t#[test]\n\n\tfn display() {\n\n\t\tassert_eq!(TargetKind::Entry.to_string(), \"Entry\");\n\n\t\tassert_eq!(TargetKind::Table.to_string(), \"Table\");\n\n\t}\n\n}\n", "file_path": "starchart/src/action/target.rs", "rank": 49, "score": 17.678852853376842 }, { "content": "\t}\n\n\n\n\t/// Creates a new error from a backend.\n\n\t#[must_use]\n\n\tpub fn backend(e: Option<Box<dyn StdError + Send + Sync>>) -> Self {\n\n\t\tSelf {\n\n\t\t\tsource: e,\n\n\t\t\tkind: ErrorType::Backend,\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Display for Error {\n\n\tfn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n\n\t\tmatch &self.kind {\n\n\t\t\tErrorType::Backend => f.write_str(\"an error occurred within a backend\"),\n\n\t\t\tErrorType::ActionRun => f.write_str(\"an error occurred running an action\"),\n\n\t\t\tErrorType::ActionValidation => f.write_str(\"an action is invalid\"),\n\n\t\t}\n\n\t}\n", "file_path": "starchart/src/error.rs", "rank": 50, "score": 16.974297222273922 }, { "content": "\tpub const fn is_pretty(self) -> bool {\n\n\t\tmatches!(self.0, TranscoderFormat::Pretty)\n\n\t}\n\n\n\n\t/// Returns whether or not this transcoder uses standard formatting.\n\n\t#[must_use]\n\n\tpub const fn is_standard(self) -> bool 
{\n\n\t\t!self.is_pretty()\n\n\t}\n\n\n\n\t/// Create a new [`JsonTranscoder`] with prettier file formatting.\n\n\tpub const fn pretty() -> Self {\n\n\t\tSelf::new(TranscoderFormat::Pretty)\n\n\t}\n\n\n\n\t/// Creates a new [`JsonTranscoder`] with standard file formatting.\n\n\tpub const fn standard() -> Self {\n\n\t\tSelf::new(TranscoderFormat::Standard)\n\n\t}\n\n}\n", "file_path": "starchart-backends/src/fs/json.rs", "rank": 51, "score": 16.887222968242256 }, { "content": "\tpub const fn is_pretty(self) -> bool {\n\n\t\tmatches!(self.0, TranscoderFormat::Pretty)\n\n\t}\n\n\n\n\t/// Returns whether or not this transcoder uses standard formatting.\n\n\t#[must_use]\n\n\tpub const fn is_standard(self) -> bool {\n\n\t\t!self.is_pretty()\n\n\t}\n\n\n\n\t/// Create a new [`TomlTranscoder`] with prettier file formatting.\n\n\tpub const fn pretty() -> Self {\n\n\t\tSelf::new(TranscoderFormat::Pretty)\n\n\t}\n\n\n\n\t/// Creates a new [`TomlTranscoder`] with standard file formatting.\n\n\tpub const fn standard() -> Self {\n\n\t\tSelf::new(TranscoderFormat::Standard)\n\n\t}\n\n}\n", "file_path": "starchart-backends/src/fs/toml.rs", "rank": 52, "score": 16.887222968242252 }, { "content": "\t/// This locks the database and allows no other reads or writes until it is complete.\n\n\tUpdate,\n\n\t/// Signifies that the operation will be a Delete.\n\n\t///\n\n\t/// This locks the database and allows no other reads or writes until it is complete.\n\n\tDelete,\n\n}\n\n\n\nimpl Display for ActionKind {\n\n\tfn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n\n\t\tmatch self {\n\n\t\t\tSelf::Create => f.write_str(\"Create\"),\n\n\t\t\tSelf::Read => f.write_str(\"Read\"),\n\n\t\t\tSelf::Update => f.write_str(\"Update\"),\n\n\t\t\tSelf::Delete => f.write_str(\"Delete\"),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Default for ActionKind {\n", "file_path": "starchart/src/action/kind.rs", "rank": 53, "score": 16.792081439178087 }, { "content": "impl Transcoder for YamlTranscoder {\n\n\tfn 
serialize_value<T: Entry>(&self, value: &T) -> Result<Vec<u8>, FsError> {\n\n\t\tOk(serde_yaml::to_vec(value)?)\n\n\t}\n\n\n\n\tfn deserialize_data<T: Entry, R: Read>(&self, rdr: R) -> Result<T, FsError> {\n\n\t\tOk(serde_yaml::from_reader(rdr)?)\n\n\t}\n\n}\n\n\n\n#[cfg(all(test, not(miri)))]\n\nmod tests {\n\n\tuse std::{fmt::Debug, fs};\n\n\n\n\tuse starchart::backend::Backend;\n\n\tuse static_assertions::assert_impl_all;\n\n\n\n\tuse crate::{\n\n\t\tfs::{transcoders::YamlTranscoder, FsBackend, FsError},\n\n\t\ttesting::{TestPath, TestSettings, TEST_GUARD},\n", "file_path": "starchart-backends/src/fs/yaml.rs", "rank": 54, "score": 16.62061012918509 }, { "content": "\t/// # Errors\n\n\t///\n\n\t/// This will raise an error if any of the static run methods in [`Action`] fail, as it uses those internally.\n\n\tpub async fn run<B: Backend>(\n\n\t\tself,\n\n\t\tchart: &Starchart<B>,\n\n\t) -> Result<ActionResult<S>, ActionError> {\n\n\t\tmatch (self.kind(), self.target()) {\n\n\t\t\t(ActionKind::Create, TargetKind::Entry) => {\n\n\t\t\t\tlet stat = self.as_static::<CreateOperation, EntryTarget>()?;\n\n\t\t\t\tstat.run_create_entry(chart).await?;\n\n\t\t\t\tOk(ActionResult::Create)\n\n\t\t\t}\n\n\t\t\t(ActionKind::Read, TargetKind::Entry) => {\n\n\t\t\t\tlet stat = self.as_static::<ReadOperation, EntryTarget>()?;\n\n\t\t\t\tlet ret = stat.run_read_entry(chart).await?;\n\n\t\t\t\tOk(ActionResult::SingleRead(ret))\n\n\t\t\t}\n\n\t\t\t(ActionKind::Update, TargetKind::Entry) => {\n\n\t\t\t\tlet stat = self.as_static::<UpdateOperation, EntryTarget>()?;\n", "file_path": "starchart/src/action/dynamic.rs", "rank": 55, "score": 16.32460734597774 }, { "content": "use std::{\n\n\terror::Error,\n\n\tfmt::{Display, Formatter, Result as FmtResult},\n\n\tio::Error as IoError,\n\n\tpath::PathBuf,\n\n};\n\n\n\n/// An error occurred from the [`FsBackend`] or one of it's [`Transcoders`].\n\n///\n\n/// [`FsBackend`]: super::FsBackend\n\n/// [`Transcoders`]: 
super::Transcoder\n\n#[derive(Debug)]\n\n#[cfg(feature = \"fs\")]\n\npub struct FsError {\n\n\tpub(super) source: Option<Box<dyn Error + Send + Sync>>,\n\n\tpub(super) kind: FsErrorType,\n\n}\n\n\n\nimpl FsError {\n\n\t/// Creates an error from a [`Transcoder`].\n", "file_path": "starchart-backends/src/fs/error.rs", "rank": 56, "score": 16.246028069797102 }, { "content": "use std::{\n\n\tfmt::{Formatter, Result as FmtResult},\n\n\tmarker::PhantomData,\n\n};\n\n\n\nuse serde::{\n\n\tde::{Error as DeError, MapAccess, SeqAccess, Visitor},\n\n\tser::SerializeStruct,\n\n\tDeserialize, Deserializer, Serialize, Serializer,\n\n};\n\n\n\nuse super::{\n\n\tActionError, ActionKind, ActionResult, ActionValidationError, ActionValidationErrorType,\n\n\tCrudOperation, InnerAction, OperationTarget, TargetKind,\n\n};\n\nuse crate::{\n\n\taction::{\n\n\t\tCreateOperation, DeleteOperation, EntryTarget, ReadOperation, TableTarget, UpdateOperation,\n\n\t},\n\n\tbackend::Backend,\n", "file_path": "starchart/src/action/dynamic.rs", "rank": 57, "score": 16.225461889151273 }, { "content": "\n\n\t/// Validates that both the key and data have been set.\n\n\t///\n\n\t/// # Errors\n\n\t///\n\n\t/// This errors if both the [`Self::set_key`] and [`Self::set_data`] (or [`Self::set_entry`]) has not been called.\n\n\tpub fn validate_entry(&self) -> Result<(), ActionValidationError> {\n\n\t\tself.inner.validate_entry()\n\n\t}\n\n}\n\n\n\nimpl<'a, S: IndexEntry, C: CrudOperation> Action<'a, S, C, EntryTarget> {\n\n\t/// Sets the [`Entry`] and [`Key`] that this [`Action`] will act over.\n\n\tpub fn set_entry(&mut self, entity: &'a S) -> &mut Self {\n\n\t\tself.set_key(entity.key()).set_data(entity)\n\n\t}\n\n}\n\n\n\nimpl<'a, S: Entry, C: CrudOperation, T: OperationTarget> Debug for Action<'a, S, C, T> {\n\n\tfn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n", "file_path": "starchart/src/action/mod.rs", "rank": 58, "score": 16.167506146800218 }, { "content": "\t///\n\n\t/// This returns an error 
if [`Self::validate_table`] or [`Self::validate_key`] fails, or if any of the [`Backend`] methods fail.\n\n\tpub fn run_read_entry<B: Backend>(\n\n\t\tself,\n\n\t\tgateway: &'a Starchart<B>,\n\n\t) -> impl Future<Output = Result<Option<S>, ActionError>> + 'a {\n\n\t\tself.inner.read_entry(gateway)\n\n\t}\n\n}\n\n\n\nimpl<'a, S: Entry> UpdateEntryAction<'a, S> {\n\n\t/// Validates and runs a [`UpdateEntryAction`].\n\n\t///\n\n\t/// # Errors\n\n\t///\n\n\t/// This returns an error if [`Self::validate_table`] or [`Self::validate_entry`] fails, or if any of the [`Backend`] methods fail.\n\n\tpub fn run_update_entry<B: Backend>(\n\n\t\tself,\n\n\t\tchart: &'a Starchart<B>,\n\n\t) -> impl Future<Output = Result<(), ActionError>> + 'a {\n", "file_path": "starchart/src/action/mod.rs", "rank": 59, "score": 16.056493561539593 }, { "content": "\t\ttable: &'a str,\n\n\t\tid: &'a str,\n\n\t\tvalue: &'a S,\n\n\t) -> EnsureFuture<'a, Self::Error>\n\n\twhere\n\n\t\tS: Entry,\n\n\t{\n\n\t\tasync move {\n\n\t\t\tif !self.has(table, id).await? 
{\n\n\t\t\t\tself.create(table, id, value).await?;\n\n\t\t\t}\n\n\n\n\t\t\tOk(())\n\n\t\t}\n\n\t\t.boxed()\n\n\t}\n\n\n\n\t/// Updates an existing entry in a table.\n\n\tfn update<'a, S>(\n\n\t\t&'a self,\n", "file_path": "starchart/src/backend/mod.rs", "rank": 60, "score": 15.97220979463848 }, { "content": "\n\n\t/// Checks whether the transcoder uses the [`CBOR`] format.\n\n\t///\n\n\t/// [`CBOR`]: serde_cbor\n\n\t#[must_use]\n\n\tpub const fn is_cbor(self) -> bool {\n\n\t\tmatches!(self.format(), BinaryFormat::Cbor)\n\n\t}\n\n}\n\n\n\nimpl Transcoder for BinaryTranscoder {\n\n\tfn serialize_value<T: Entry>(&self, value: &T) -> Result<Vec<u8>, FsError> {\n\n\t\tmatch self.format() {\n\n\t\t\tBinaryFormat::Bincode => Ok(serde_bincode::serialize(value)?),\n\n\t\t\tBinaryFormat::Cbor => Ok(serde_cbor::to_vec(value)?),\n\n\t\t}\n\n\t}\n\n\n\n\tfn deserialize_data<T: Entry, R: Read>(&self, rdr: R) -> Result<T, FsError> {\n\n\t\tmatch self.format() {\n", "file_path": "starchart-backends/src/fs/binary.rs", "rank": 61, "score": 15.83152438391227 }, { "content": "\t}\n\n}\n\n\n\nimpl<S: BuildHasher + Clone> MemoryBackend<S> {\n\n\t/// Creates a new [`MemoryBackend`] with the specified hasher.\n\n\tpub fn with_hasher(hasher: S) -> Self {\n\n\t\tSelf::with_capacity_and_hasher(0, hasher)\n\n\t}\n\n\n\n\t/// Creates a new [`MemoryBackend`] with the specified capacity and hasher.\n\n\tpub fn with_capacity_and_hasher(cap: usize, hasher: S) -> Self {\n\n\t\tSelf {\n\n\t\t\ttables: DashMap::with_capacity_and_hasher(cap, hasher),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl<S: BuildHasher + Clone> Debug for MemoryBackend<S> {\n\n\tfn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n\n\t\tf.debug_struct(\"MemoryBackend\")\n", "file_path": "starchart-backends/src/memory.rs", "rank": 62, "score": 15.804639261383963 }, { "content": "use crate::{\n\n\tbackend::Backend,\n\n\tutil::{is_metadata, InnerUnwrap},\n\n\tEntry, IndexEntry, Key, Starchart,\n\n};\n\n\n\n/// A type alias for an [`Action`] 
with [`CreateOperation`] and [`EntryTarget`] as the parameters.\n\npub type CreateEntryAction<'a, S> = Action<'a, S, CreateOperation, EntryTarget>;\n\n\n\n/// A type alias for an [`Action`] with [`ReadOperation`] and [`EntryTarget`] as the parameters.\n\npub type ReadEntryAction<'a, S> = Action<'a, S, ReadOperation, EntryTarget>;\n\n\n\n/// A type alias for an [`Action`] with [`UpdateOperation`] and [`EntryTarget`] as the parameters.\n\npub type UpdateEntryAction<'a, S> = Action<'a, S, UpdateOperation, EntryTarget>;\n\n\n\n/// A type alias for an [`Action`] with [`DeleteOperation`] and [`EntryTarget`] as the parameters.\n\npub type DeleteEntryAction<'a, S> = Action<'a, S, DeleteOperation, EntryTarget>;\n\n\n\n/// A type alias for an [`Action`] with [`CreateOperation`] and [`TableTarget`] as the parameters.\n\npub type CreateTableAction<'a, S> = Action<'a, S, CreateOperation, TableTarget>;\n", "file_path": "starchart/src/action/mod.rs", "rank": 63, "score": 15.70373440817529 }, { "content": "//! Futures for [`Backend`] functions to return, for easier documentation.\n\n//!\n\n//! 
[`Backend`]: crate::backend::Backend\n\nuse std::{future::Future, pin::Pin};\n\n\n\n#[cfg(doc)]\n\nuse crate::backend::Backend;\n\n\n\n/// The future returned from [`Backend::init`].\n\npub type InitFuture<'a, E> = PinBoxFuture<'a, Result<(), E>>;\n\n\n\n/// The future returned from [`Backend::shutdown`].\n\npub type ShutdownFuture<'a> = PinBoxFuture<'a>;\n\n\n\n/// The future returned from [`Backend::has_table`].\n\npub type HasTableFuture<'a, E> = PinBoxFuture<'a, Result<bool, E>>;\n\n\n\n/// The future returned from [`Backend::create_table`].\n\npub type CreateTableFuture<'a, E> = PinBoxFuture<'a, Result<(), E>>;\n\n\n", "file_path": "starchart/src/backend/futures.rs", "rank": 64, "score": 15.612136451415608 }, { "content": "use std::{\n\n\terror::Error,\n\n\tfmt::{Debug, Display, Formatter, Result as FmtResult},\n\n};\n\n\n\n/// A general [`Action`] error.\n\n///\n\n/// [`Action`]: super::Action\n\n#[derive(Debug)]\n\npub struct ActionError {\n\n\tsource: Option<Box<dyn Error + Send + Sync>>,\n\n\tkind: ActionErrorType,\n\n}\n\n\n\nimpl ActionError {\n\n\t/// Immutable reference to the type of error that occurred.\n\n\t#[must_use = \"retrieving the type has no effect if left unused\"]\n\n\tpub const fn kind(&self) -> &ActionErrorType {\n\n\t\t&self.kind\n\n\t}\n", "file_path": "starchart/src/action/error.rs", "rank": 65, "score": 15.557465292574207 }, { "content": "\t///\n\n\t/// [`bincode`]: serde_bincode\n\n\tpub const fn bincode() -> Self {\n\n\t\tSelf::new(BinaryFormat::Bincode)\n\n\t}\n\n\n\n\t/// Creates a [`BinaryTranscoder`] using [`Cbor`] formatting.\n\n\t///\n\n\t/// [`Cbor`]: serde_cbor\n\n\tpub const fn cbor() -> Self {\n\n\t\tSelf::new(BinaryFormat::Cbor)\n\n\t}\n\n\n\n\t/// Checks whether the transcoder uses the [`Bincode`] format.\n\n\t///\n\n\t/// [`Bincode`]: serde_bincode\n\n\t#[must_use]\n\n\tpub const fn is_bincode(self) -> bool {\n\n\t\tmatches!(self.format(), BinaryFormat::Bincode)\n\n\t}\n", "file_path": 
"starchart-backends/src/fs/binary.rs", "rank": 66, "score": 15.448692440041453 }, { "content": "}\n\n\n\n/// A transcoder for multiple binary formats.\n\n#[derive(Debug, Clone, Copy)]\n\n#[cfg(feature = \"binary\")]\n\n#[must_use = \"transcoders do nothing by themselves\"]\n\npub struct BinaryTranscoder(BinaryFormat);\n\n\n\nimpl BinaryTranscoder {\n\n\t/// Creates a new [`BinaryTranscoder`].\n\n\tpub const fn new(format: BinaryFormat) -> Self {\n\n\t\tSelf(format)\n\n\t}\n\n\n\n\t/// Returns the binary format being used by the transcoder.\n\n\tpub const fn format(self) -> BinaryFormat {\n\n\t\tself.0\n\n\t}\n\n\n\n\t/// Creates a [`BinaryTranscoder`] using [`bincode`] formatting.\n", "file_path": "starchart-backends/src/fs/binary.rs", "rank": 67, "score": 15.24095785723847 }, { "content": "//! The backend that fetches and provides data for the [`Starchart`].\n\n//!\n\n//! [`Starchart`]: crate::Starchart\n\n\n\nuse std::{error::Error as StdError, iter::FromIterator};\n\n\n\nuse futures_util::{\n\n\tfuture::{join_all, ok, ready},\n\n\tFutureExt,\n\n};\n\n\n\nuse self::futures::{\n\n\tCreateFuture, CreateTableFuture, DeleteFuture, DeleteTableFuture, EnsureFuture,\n\n\tEnsureTableFuture, GetAllFuture, GetFuture, GetKeysFuture, HasFuture, HasTableFuture,\n\n\tInitFuture, ShutdownFuture, UpdateFuture,\n\n};\n\nuse crate::Entry;\n\n\n\npub mod futures;\n\n\n\n/// The backend to be used to manage data.\n", "file_path": "starchart/src/backend/mod.rs", "rank": 68, "score": 15.003841397758192 }, { "content": "\tpub fn validate_data(&self) -> Result<(), ActionValidationError> {\n\n\t\tif self.data.is_none() {\n\n\t\t\treturn Err(ActionValidationError {\n\n\t\t\t\tsource: None,\n\n\t\t\t\tkind: ActionValidationErrorType::Data,\n\n\t\t\t});\n\n\t\t}\n\n\n\n\t\tOk(())\n\n\t}\n\n\n\n\t/// Validates that both the key and data have been set.\n\n\t///\n\n\t/// # Errors\n\n\t///\n\n\t/// This errors if both the [`Self::set_key`] and [`Self::set_data`] (or [`Self::set_entry`]) has not 
been called.\n\n\tpub fn validate_entry(&self) -> Result<(), ActionValidationError> {\n\n\t\tself.validate_key()?;\n\n\t\tself.validate_data()\n\n\t}\n", "file_path": "starchart/src/action/dynamic.rs", "rank": 69, "score": 14.795042834159032 }, { "content": "// Action run impls\n\n\n\nimpl<'a, S: Entry> CreateEntryAction<'a, S> {\n\n\t/// Validates and runs a [`CreateEntryAction`].\n\n\t///\n\n\t/// # Errors\n\n\t///\n\n\t/// This returns an error if [`Self::validate_table`] or [`Self::validate_entry`] fails, or if any of the [`Backend`] methods fail.\n\n\tpub fn run_create_entry<B: Backend>(\n\n\t\tself,\n\n\t\tchart: &'a Starchart<B>,\n\n\t) -> impl Future<Output = Result<(), ActionError>> + 'a {\n\n\t\tself.inner.create_entry(chart)\n\n\t}\n\n}\n\n\n\nimpl<'a, S: Entry> ReadEntryAction<'a, S> {\n\n\t/// Validates and runs a [`ReadEntryAction`].\n\n\t///\n\n\t/// # Errors\n", "file_path": "starchart/src/action/mod.rs", "rank": 70, "score": 14.748228490772881 }, { "content": "\tDeserialization,\n\n}\n\n\n\n/// A memory-based backend, uses a [`DashMap`] of [`Value`]s\n\n/// to represent data.\n\n#[cfg(feature = \"memory\")]\n\n#[must_use = \"a memory backend does nothing on it's own\"]\n\npub struct MemoryBackend<S = RandomState> {\n\n\ttables: DashMap<String, DashMap<String, Value, S>, S>,\n\n}\n\n\n\nimpl MemoryBackend<RandomState> {\n\n\t/// Creates a new [`MemoryBackend`].\n\n\tpub fn new() -> Self {\n\n\t\tSelf::with_capacity_and_hasher(0, RandomState::default())\n\n\t}\n\n\n\n\t/// Creates a new [`MemoryBackend`] with the specified capacity.\n\n\tpub fn with_capacity(cap: usize) -> Self {\n\n\t\tSelf::with_capacity_and_hasher(cap, RandomState::default())\n", "file_path": "starchart-backends/src/memory.rs", "rank": 71, "score": 14.739977676582152 }, { "content": "\tbackend::{\n\n\t\tfutures::{\n\n\t\t\tCreateFuture, CreateTableFuture, DeleteFuture, DeleteTableFuture, GetFuture,\n\n\t\t\tGetKeysFuture, HasFuture, HasTableFuture, InitFuture, 
UpdateFuture,\n\n\t\t},\n\n\t\tBackend,\n\n\t},\n\n\tEntry,\n\n};\n\nuse tokio::fs;\n\n\n\npub use self::error::{FsError, FsErrorType};\n\n\n\n/// An fs-based backend for the starchart crate.\n\n#[derive(Debug, Clone)]\n\n#[cfg(feature = \"fs\")]\n\npub struct FsBackend<T> {\n\n\ttranscoder: T,\n\n\textension: String,\n\n\tbase_directory: PathBuf,\n", "file_path": "starchart-backends/src/fs/mod.rs", "rank": 72, "score": 14.640822520243477 }, { "content": "\n\nimpl Transcoder for JsonTranscoder {\n\n\tfn serialize_value<T: Entry>(&self, value: &T) -> Result<Vec<u8>, FsError> {\n\n\t\tif self.is_pretty() {\n\n\t\t\tOk(serde_json::to_vec_pretty(value)?)\n\n\t\t} else {\n\n\t\t\tOk(serde_json::to_vec(value)?)\n\n\t\t}\n\n\t}\n\n\n\n\tfn deserialize_data<T: Entry, R: Read>(&self, rdr: R) -> Result<T, FsError> {\n\n\t\tOk(serde_json::from_reader(rdr)?)\n\n\t}\n\n}\n\n\n\n#[cfg(all(test, not(miri)))]\n\nmod tests {\n\n\tuse std::{fmt::Debug, fs};\n\n\n\n\tuse starchart::backend::Backend;\n", "file_path": "starchart-backends/src/fs/json.rs", "rank": 73, "score": 14.591349146918143 }, { "content": "use futures_util::future::ok;\n\nuse futures_util::Future;\n\n\n\n#[doc(hidden)]\n\npub use self::error::{\n\n\tActionError, ActionErrorType, ActionRunError, ActionRunErrorType, ActionValidationError,\n\n\tActionValidationErrorType,\n\n};\n\npub use self::{\n\n\tdynamic::DynamicAction,\n\n\tkind::ActionKind,\n\n\tr#impl::{\n\n\t\tCreateOperation, CrudOperation, DeleteOperation, EntryTarget, OperationTarget,\n\n\t\tReadOperation, TableTarget, UpdateOperation,\n\n\t},\n\n\tresult::ActionResult,\n\n\ttarget::TargetKind,\n\n};\n\n#[cfg(feature = \"metadata\")]\n\nuse crate::METADATA_KEY;\n", "file_path": "starchart/src/action/mod.rs", "rank": 74, "score": 14.584662827057983 }, { "content": "\t) -> GetAllFuture<'a, I, Self::Error>\n\n\twhere\n\n\t\tD: Entry,\n\n\t\tI: FromIterator<D>,\n\n\t{\n\n\t\tasync move {\n\n\t\t\tself.tables.get(table).map_or_else(\n\n\t\t\t\t|| 
Ok(None.into_iter().collect::<I>()),\n\n\t\t\t\t|table| {\n\n\t\t\t\t\ttable\n\n\t\t\t\t\t\t.clone()\n\n\t\t\t\t\t\t.into_iter()\n\n\t\t\t\t\t\t.filter_map(|(key, value)| {\n\n\t\t\t\t\t\t\tif entries.contains(&key.as_str()) {\n\n\t\t\t\t\t\t\t\tSome(value.deserialize_into().map_err(MemoryError::from))\n\n\t\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\t\tNone\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t})\n\n\t\t\t\t\t\t.collect::<Result<I, Self::Error>>()\n", "file_path": "starchart-backends/src/memory.rs", "rank": 75, "score": 14.375735306560697 }, { "content": "/// [`CRUD`]: https://en.wikipedia.org/wiki/Create,_read,_update_and_delete\n\n/// [`Starchart`]: crate::Starchart\n\n#[derive(Clone)]\n\n#[must_use = \"an action alone has no side effects\"]\n\npub struct Action<'a, S, C, T> {\n\n\tpub(crate) inner: InnerAction<'a, S>,\n\n\tkind: PhantomData<C>,\n\n\ttarget: PhantomData<T>,\n\n}\n\n\n\nimpl<'a, S, C, T> Action<'a, S, C, T> {\n\n\t/// Creates a new [`Action`] with the specified operation.\n\n\tpub const fn new() -> Self {\n\n\t\tSelf {\n\n\t\t\tinner: InnerAction::new(),\n\n\t\t\tkind: PhantomData,\n\n\t\t\ttarget: PhantomData,\n\n\t\t}\n\n\t}\n\n\n", "file_path": "starchart/src/action/mod.rs", "rank": 76, "score": 14.358287561034551 }, { "content": "\tutil::InnerUnwrap,\n\n\tAction, Entry, IndexEntry, Key, Starchart,\n\n};\n\n\n\n/// A dynamic [`Action`] for when certain parameters aren't known until runtime.\n\n#[derive(Clone)]\n\n#[must_use = \"an action alone has no side effects\"]\n\npub struct DynamicAction<S: ?Sized> {\n\n\tpub(super) key: Option<String>,\n\n\tpub(super) data: Option<Box<S>>,\n\n\tpub(super) table: Option<String>,\n\n\tpub(super) kind: ActionKind,\n\n\tpub(super) target: TargetKind,\n\n}\n\n\n\nimpl<S: ?Sized> DynamicAction<S> {\n\n\t/// Creates a new action of the specified type and target.\n\n\tpub const fn new(kind: ActionKind, target: TargetKind) -> Self {\n\n\t\tSelf {\n\n\t\t\tkey: None,\n", "file_path": "starchart/src/action/dynamic.rs", 
"rank": 77, "score": 14.297953668048269 }, { "content": "\t\tExclusiveGuard(inner)\n\n\t}\n\n}\n\n\n\nimpl Default for Guard {\n\n\tfn default() -> Self {\n\n\t\tSelf::new()\n\n\t}\n\n}\n\n\n\n// implementing send doesn't matter bc we're not actually editing the value, just using it for a locking mechanism\n\npub struct SharedGuard<'a>(RwLockReadGuard<'a, ()>);\n\n\n\nunsafe impl<'a> Send for SharedGuard<'a> {}\n\n\n\npub struct ExclusiveGuard<'a>(RwLockWriteGuard<'a, ()>);\n\n\n\nunsafe impl<'a> Send for ExclusiveGuard<'a> {}\n", "file_path": "starchart/src/atomics.rs", "rank": 78, "score": 13.924707479902402 }, { "content": "\t#[cfg(feature = \"json\")]\n\n\tpub use super::json::JsonTranscoder;\n\n\t#[cfg(feature = \"toml\")]\n\n\tpub use super::toml::TomlTranscoder;\n\n\t#[cfg(feature = \"yaml\")]\n\n\tpub use super::yaml::YamlTranscoder;\n\n\n\n\t/// Transcoder formats for supported transcoders to use.\n\n\t#[cfg(any(feature = \"toml\", feature = \"json\"))]\n\n\t#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\n\tpub enum TranscoderFormat {\n\n\t\t/// Standard formatting, this is the default.\n\n\t\tStandard,\n\n\t\t/// Prettier formatting, this uses more file space but is also more human-readable.\n\n\t\tPretty,\n\n\t}\n\n\n\n\t#[cfg(any(feature = \"toml\", feature = \"json\"))]\n\n\timpl Default for TranscoderFormat {\n\n\t\tfn default() -> Self {\n", "file_path": "starchart-backends/src/fs/mod.rs", "rank": 79, "score": 13.728626554711312 }, { "content": "\tfn get<'a, D>(&'a self, table: &'a str, id: &'a str) -> GetFuture<'a, D, Self::Error>\n\n\twhere\n\n\t\tD: Entry;\n\n\n\n\t/// Checks if an entry exists in a table.\n\n\tfn has<'a>(&'a self, table: &'a str, id: &'a str) -> HasFuture<'a, Self::Error>;\n\n\n\n\t/// Inserts a new entry into a table.\n\n\tfn create<'a, S>(\n\n\t\t&'a self,\n\n\t\ttable: &'a str,\n\n\t\tid: &'a str,\n\n\t\tvalue: &'a S,\n\n\t) -> CreateFuture<'a, Self::Error>\n\n\twhere\n\n\t\tS: Entry;\n\n\n\n\t/// Ensures a value exists in the 
table.\n\n\tfn ensure<'a, S>(\n\n\t\t&'a self,\n", "file_path": "starchart/src/backend/mod.rs", "rank": 80, "score": 13.636826747675746 }, { "content": "\t}\n\n}\n\n\n\n#[cfg(all(test, not(miri)))]\n\nmod tests {\n\n\tuse std::fmt::Debug;\n\n\n\n\tuse fxhash::FxBuildHasher;\n\n\tuse starchart::backend::Backend;\n\n\tuse static_assertions::assert_impl_all;\n\n\n\n\tuse super::{MemoryBackend, MemoryError};\n\n\tuse crate::testing::TestSettings;\n\n\n\n\tassert_impl_all!(MemoryBackend: Backend, Clone, Debug, Default, Send, Sync);\n\n\n\n\t#[tokio::test]\n\n\tasync fn table_methods() -> Result<(), MemoryError> {\n\n\t\tlet backend = MemoryBackend::with_hasher(FxBuildHasher::default());\n\n\n", "file_path": "starchart-backends/src/memory.rs", "rank": 81, "score": 13.535281686954235 }, { "content": "use std::io::Read;\n\n\n\nuse starchart::Entry;\n\n\n\nuse super::{FsError, Transcoder};\n\n\n\n/// Format types for the [`BinaryTranscoder`].\n\n#[derive(Debug, Clone, Copy)]\n\n#[cfg(feature = \"binary\")]\n\n#[non_exhaustive]\n\n#[must_use = \"binary formats do nothing on their own\"]\n\npub enum BinaryFormat {\n\n\t/// The [`Bincode`] format.\n\n\t///\n\n\t/// [`Bincode`]: serde_bincode\n\n\tBincode,\n\n\t/// The [`CBOR`] format.\n\n\t///\n\n\t/// [`CBOR`]: serde_cbor\n\n\tCbor,\n", "file_path": "starchart-backends/src/fs/binary.rs", "rank": 82, "score": 13.274595364509688 }, { "content": "\t\t}\n\n\t\tOk(Action {\n\n\t\t\tinner: InnerAction {\n\n\t\t\t\tdata: self.data.as_deref(),\n\n\t\t\t\tkey: self.key.clone(),\n\n\t\t\t\ttable: self.table.as_deref(),\n\n\t\t\t},\n\n\t\t\tkind: PhantomData,\n\n\t\t\ttarget: PhantomData,\n\n\t\t})\n\n\t}\n\n}\n\n\n\nimpl<S: IndexEntry + ?Sized> DynamicAction<S> {\n\n\t/// Sets both a key and a value to run the action with.\n\n\tpub fn set_entry(&mut self, entry: S) -> &mut Self {\n\n\t\tself.set_key(entry.key()).set_entry(entry)\n\n\t}\n\n}\n\n\n", "file_path": "starchart/src/action/dynamic.rs", "rank": 83, "score": 13.154708706294734 
}, { "content": "\t\ttable: &'a str,\n\n\t\tid: &'a str,\n\n\t\tvalue: &'a S,\n\n\t) -> UpdateFuture<'a, Self::Error>\n\n\twhere\n\n\t\tS: Entry;\n\n\n\n\t/// Deletes an entry from a table.\n\n\tfn delete<'a>(&'a self, table: &'a str, id: &'a str) -> DeleteFuture<'a, Self::Error>;\n\n}\n", "file_path": "starchart/src/backend/mod.rs", "rank": 84, "score": 13.131053647262899 }, { "content": "\n\nimpl Transcoder for TomlTranscoder {\n\n\tfn serialize_value<T: Entry>(&self, value: &T) -> Result<Vec<u8>, FsError> {\n\n\t\tif self.is_pretty() {\n\n\t\t\tOk(serde_toml::to_string_pretty(value).map(String::into_bytes)?)\n\n\t\t} else {\n\n\t\t\tOk(serde_toml::to_vec(value)?)\n\n\t\t}\n\n\t}\n\n\n\n\tfn deserialize_data<T: Entry, R: Read>(&self, mut rdr: R) -> Result<T, FsError> {\n\n\t\tlet mut output = String::new();\n\n\t\trdr.read_to_string(&mut output)?;\n\n\t\tOk(serde_toml::from_str(&output)?)\n\n\t}\n\n}\n\n\n\n#[cfg(all(test, not(miri)))]\n\nmod tests {\n\n\tuse std::{fmt::Debug, fs};\n", "file_path": "starchart-backends/src/fs/toml.rs", "rank": 85, "score": 12.983974807634631 }, { "content": "\t\t\t\tstat.run_update_entry(chart).await?;\n\n\t\t\t\tOk(ActionResult::Update)\n\n\t\t\t}\n\n\t\t\t(ActionKind::Delete, TargetKind::Entry) => {\n\n\t\t\t\tlet stat = self.as_static::<DeleteOperation, EntryTarget>()?;\n\n\t\t\t\tlet ret = stat.run_delete_entry(chart).await?;\n\n\t\t\t\tOk(ActionResult::Delete(ret))\n\n\t\t\t}\n\n\t\t\t(ActionKind::Create, TargetKind::Table) => {\n\n\t\t\t\tlet stat = self.as_static::<CreateOperation, TableTarget>()?;\n\n\t\t\t\tstat.run_create_table(chart).await?;\n\n\t\t\t\tOk(ActionResult::Create)\n\n\t\t\t}\n\n\t\t\t(ActionKind::Read, TargetKind::Table) => {\n\n\t\t\t\tlet stat = self.as_static::<ReadOperation, TableTarget>()?;\n\n\t\t\t\tlet ret = stat.run_read_table(chart).await?;\n\n\t\t\t\tOk(ActionResult::MultiRead(ret))\n\n\t\t\t}\n\n\t\t\t(ActionKind::Update, TargetKind::Table) => panic!(\"updating tables is 
unsupported\"),\n\n\t\t\t(ActionKind::Delete, TargetKind::Table) => {\n", "file_path": "starchart/src/action/dynamic.rs", "rank": 86, "score": 12.832872923888964 }, { "content": "\n\n\t\tself\n\n\t}\n\n\n\n\t/// Sets the key for the action.\n\n\t///\n\n\t/// Users should prefer to call [`Self::set_entry`] over this, as setting the\n\n\t/// entry will automatically call this.\n\n\t///\n\n\t/// This is unused on [`TargetKind::Table`] actions.\n\n\tpub fn set_key<K: Key>(&mut self, key: &K) -> &mut Self {\n\n\t\tself.key.replace(key.to_key());\n\n\n\n\t\tself\n\n\t}\n\n\n\n\t/// Sets the data for the action.\n\n\t///\n\n\t/// This is unused on [`TargetKind::Table`] actions.\n\n\tpub fn set_data(&mut self, data: S) -> &mut Self {\n", "file_path": "starchart/src/action/dynamic.rs", "rank": 87, "score": 12.807186972685189 }, { "content": "\t///\n\n\t/// # Errors\n\n\t///\n\n\t/// This returns an error if [`Self::validate_table`] fails, or if any of the [`Backend`] methods fail.\n\n\tpub fn run_create_table<B: Backend>(\n\n\t\tself,\n\n\t\tgateway: &'a Starchart<B>,\n\n\t) -> impl Future<Output = Result<(), ActionError>> + 'a {\n\n\t\tself.inner.create_table(gateway)\n\n\t}\n\n}\n\n\n\nimpl<'a, S: Entry> ReadTableAction<'a, S> {\n\n\t/// Validates and runs a [`ReadTableAction`].\n\n\t///\n\n\t/// # Errors\n\n\t///\n\n\t/// This returns an error if [`Self::validate_table`] fails, or if any of the [`Backend`] methods fail.\n\n\tpub fn run_read_table<B: Backend, I>(\n\n\t\tself,\n", "file_path": "starchart/src/action/mod.rs", "rank": 88, "score": 12.750109865151861 }, { "content": "\t\tself // coverage:ignore-line\n\n\t}\n\n\n\n\t/// Validate that the key has been set.\n\n\t///\n\n\t/// # Errors\n\n\t///\n\n\t/// Errors if [`Self::set_key`] has not yet been called.\n\n\tpub fn validate_key(&self) -> Result<(), ActionValidationError> {\n\n\t\tself.inner.validate_key()\n\n\t}\n\n\n\n\t/// Validates that the data has been set.\n\n\t///\n\n\t/// # Errors\n\n\t///\n\n\t/// Errors 
if [`Self::set_data`] has not yet been called.\n\n\tpub fn validate_data(&self) -> Result<(), ActionValidationError> {\n\n\t\tself.inner.validate_data()\n\n\t}\n", "file_path": "starchart/src/action/mod.rs", "rank": 89, "score": 12.660720974553106 }, { "content": "//! The action structs for CRUD operations.\n\n\n\n// TODO: Add overwrite option.\n\n\n\nmod dynamic;\n\nmod error;\n\nmod r#impl;\n\nmod kind;\n\nmod result;\n\nmod target;\n\n\n\n#[cfg(feature = \"metadata\")]\n\nuse std::any::type_name;\n\nuse std::{\n\n\tfmt::{Debug, Formatter, Result as FmtResult},\n\n\titer::FromIterator,\n\n\tmarker::PhantomData,\n\n};\n\n\n\n#[cfg(not(feature = \"metadata\"))]\n", "file_path": "starchart/src/action/mod.rs", "rank": 90, "score": 12.637438565051093 }, { "content": "\t\tE: Entry,\n\n\t{\n\n\t\tif let Some(table) = self.tables.get(table) {\n\n\t\t\tlet serialized = match to_value(value) {\n\n\t\t\t\tOk(v) => v,\n\n\t\t\t\tErr(e) => return err(e.into()).boxed(),\n\n\t\t\t};\n\n\n\n\t\t\ttable.insert(id.to_owned(), serialized);\n\n\t\t}\n\n\n\n\t\tok(()).boxed()\n\n\t}\n\n\n\n\tfn update<'a, E>(\n\n\t\t&'a self,\n\n\t\ttable: &'a str,\n\n\t\tid: &'a str,\n\n\t\tvalue: &'a E,\n\n\t) -> UpdateFuture<'a, Self::Error>\n", "file_path": "starchart-backends/src/memory.rs", "rank": 91, "score": 12.628961913593116 }, { "content": "//! 
The base structure to use for starchart.\n\n\n\nuse std::{ops::Deref, sync::Arc};\n\n\n\nuse futures_executor::block_on;\n\n\n\nuse crate::{atomics::Guard, backend::Backend};\n\n\n\n/// The base structure for managing data.\n\n///\n\n/// The inner data is wrapped in an [`Arc`], so cloning\n\n/// is cheap and will allow multiple accesses to the data.\n\n#[derive(Debug, Default)]\n\npub struct Starchart<B: Backend> {\n\n\tbackend: Arc<B>,\n\n\tpub(crate) guard: Arc<Guard>,\n\n}\n\n\n\nimpl<B: Backend> Starchart<B> {\n\n\t/// Creates a new [`Starchart`], and initializes the [`Backend`].\n", "file_path": "starchart/src/starchart.rs", "rank": 92, "score": 12.610540743277756 }, { "content": "\t#[allow(clippy::inline_always)]\n\n\t#[inline(always)]\n\n\t#[track_caller]\n\n\tunsafe fn inner_unwrap(self) -> T {\n\n\t\tself.unwrap_unchecked()\n\n\t}\n\n}\n\n\n\n#[cfg(not(has_unwrap_unchecked))]\n\nunsafe impl<T, E> InnerUnwrap<T> for Result<T, E> {\n\n\t#[inline]\n\n\t#[track_caller]\n\n\tunsafe fn inner_unwrap(self) -> T {\n\n\t\tdebug_assert!(self.is_ok());\n\n\t\tif let Ok(v) = self {\n\n\t\t\tv\n\n\t\t} else {\n\n\t\t\tunreachable_unchecked()\n\n\t\t}\n\n\t}\n", "file_path": "starchart/src/util.rs", "rank": 93, "score": 12.56413301107484 }, { "content": "\n\n\t\tlet serialized = match self.transcoder().serialize_value(value) {\n\n\t\t\tOk(v) => v,\n\n\t\t\tErr(e) => return err(e).boxed(),\n\n\t\t};\n\n\n\n\t\tfs::write(path, serialized)\n\n\t\t\t.map(|res| res.map_err(Into::into))\n\n\t\t\t.boxed()\n\n\t}\n\n\n\n\tfn update<'a, S>(\n\n\t\t&'a self,\n\n\t\ttable: &'a str,\n\n\t\tid: &'a str,\n\n\t\tvalue: &'a S,\n\n\t) -> UpdateFuture<'a, Self::Error>\n\n\twhere\n\n\t\tS: Entry,\n\n\t{\n", "file_path": "starchart-backends/src/fs/mod.rs", "rank": 94, "score": 12.528373161347671 }, { "content": "\t///\n\n\t/// # Errors\n\n\t///\n\n\t/// Any errors that [`Backend::init`] can raise.\n\n\tpub async fn new(backend: B) -> Result<Self, B::Error> 
{\n\n\t\tbackend.init().await?;\n\n\t\tOk(Self {\n\n\t\t\tbackend: Arc::new(backend),\n\n\t\t\tguard: Arc::default(),\n\n\t\t})\n\n\t}\n\n}\n\n\n\nimpl<B: Backend> Clone for Starchart<B> {\n\n\tfn clone(&self) -> Self {\n\n\t\tSelf {\n\n\t\t\tbackend: self.backend.clone(),\n\n\t\t\tguard: self.guard.clone(),\n\n\t\t}\n\n\t}\n", "file_path": "starchart/src/starchart.rs", "rank": 95, "score": 12.48173500974048 }, { "content": "\t\tassert_eq!(\n\n\t\t\tbackend.get::<TestSettings>(\"table\", \"1\").await?,\n\n\t\t\tSome(settings)\n\n\t\t);\n\n\n\n\t\tbackend.delete(\"table\", \"1\").await?;\n\n\n\n\t\tassert_eq!(backend.get::<TestSettings>(\"table\", \"1\").await?, None);\n\n\n\n\t\tOk(())\n\n\t}\n\n\n\n\t#[tokio::test]\n\n\tasync fn update_and_delete_pretty() -> Result<(), FsError> {\n\n\t\tlet _lock = TEST_GUARD.lock().await;\n\n\t\tlet path = TestPath::new(\"update_and_delete_pretty\", \"toml\");\n\n\t\tlet backend = FsBackend::new(TomlTranscoder::pretty(), \"toml\".to_owned(), &path)?;\n\n\n\n\t\tbackend.init().await?;\n\n\t\tbackend.create_table(\"table\").await?;\n", "file_path": "starchart-backends/src/fs/toml.rs", "rank": 96, "score": 12.462576637271471 }, { "content": "// Entry helpers\n\nimpl<'a, S: Entry, C: CrudOperation> Action<'a, S, C, EntryTarget> {\n\n\t/// Sets the key for the action.\n\n\t///\n\n\t/// Users should prefer to call [`Self::set_entry`] over this, as setting the\n\n\t/// entry will automatically call this.\n\n\t///\n\n\t/// This is unused on [`TargetKind::Table`] actions.\n\n\tpub fn set_key<K: Key>(&mut self, key: &K) -> &mut Self {\n\n\t\tself.inner.key.replace(key.to_key());\n\n\n\n\t\tself // coverage:ignore-line\n\n\t}\n\n\n\n\t/// Sets the data for the action.\n\n\t///\n\n\t/// This is unused on [`TargetKind::Table`] actions.\n\n\tpub fn set_data(&mut self, entity: &'a S) -> &mut Self {\n\n\t\tself.inner.data.replace(entity);\n\n\n", "file_path": "starchart/src/action/mod.rs", "rank": 97, "score": 12.456296216708289 }, { "content": 
"\t\tasync move {\n\n\t\t\tif !self.has_table(table).await? {\n\n\t\t\t\tself.create_table(table).await?;\n\n\t\t\t}\n\n\n\n\t\t\tOk(())\n\n\t\t}\n\n\t\t.boxed()\n\n\t}\n\n\n\n\t/// Gets all entries that match a predicate, to get all entries, use [`get_keys`] first.\n\n\t///\n\n\t/// [`get_keys`]: Self::get_keys\n\n\tfn get_all<'a, D, I>(\n\n\t\t&'a self,\n\n\t\ttable: &'a str,\n\n\t\tentries: &'a [&'a str],\n\n\t) -> GetAllFuture<'a, I, Self::Error>\n\n\twhere\n\n\t\tD: Entry,\n", "file_path": "starchart/src/backend/mod.rs", "rank": 98, "score": 12.11439557556566 }, { "content": "\n\n\t#[tokio::test]\n\n\tasync fn update_and_delete() -> Result<(), FsError> {\n\n\t\tlet _lock = TEST_GUARD.lock().await;\n\n\t\tlet path = TestPath::new(\"update_and_delete\", \"json\");\n\n\t\tlet backend = FsBackend::new(JsonTranscoder::default(), \"json\".to_owned(), &path)?;\n\n\n\n\t\tbackend.init().await?;\n\n\t\tbackend.create_table(\"table\").await?;\n\n\n\n\t\tlet mut settings = TestSettings::default();\n\n\n\n\t\tbackend.create(\"table\", \"1\", &settings).await?;\n\n\n\n\t\tsettings.opt = None;\n\n\n\n\t\tbackend.update(\"table\", \"1\", &settings).await?;\n\n\n\n\t\tassert_eq!(\n\n\t\t\tbackend.get::<TestSettings>(\"table\", \"1\").await?,\n", "file_path": "starchart-backends/src/fs/json.rs", "rank": 99, "score": 11.959151527498515 } ]
Rust
src/connectivity/bluetooth/profiles/bt-hfp-audio-gateway/src/peer/procedure/nrec.rs
fabio-d/fuchsia-stardock
e57f5d1cf015fe2294fc2a5aea704842294318d2
use super::{Procedure, ProcedureError, ProcedureMarker, ProcedureRequest}; use crate::peer::{service_level_connection::SlcState, slc_request::SlcRequest, update::AgUpdate}; use at_commands as at; #[derive(Debug, PartialEq, Clone, Copy)] enum State { Start, SetRequest, Terminated, } impl State { fn transition(&mut self) { match *self { Self::Start => *self = Self::SetRequest, Self::SetRequest => *self = Self::Terminated, Self::Terminated => *self = Self::Terminated, } } } #[derive(Debug)] pub struct NrecProcedure { state: State, } impl NrecProcedure { pub fn new() -> Self { Self { state: State::Start } } } impl Procedure for NrecProcedure { fn marker(&self) -> ProcedureMarker { ProcedureMarker::Nrec } fn hf_update(&mut self, update: at::Command, _state: &mut SlcState) -> ProcedureRequest { match (self.state, update) { (State::Start, at::Command::Nrec { nrec: enable }) => { self.state.transition(); let response = Box::new(Into::into); SlcRequest::SetNrec { enable, response }.into() } (_, update) => ProcedureRequest::Error(ProcedureError::UnexpectedHf(update)), } } fn ag_update(&mut self, update: AgUpdate, _state: &mut SlcState) -> ProcedureRequest { match (self.state, update) { (State::SetRequest, update @ AgUpdate::Ok) | (State::SetRequest, update @ AgUpdate::Error) => { self.state.transition(); update.into() } (_, update) => ProcedureRequest::Error(ProcedureError::UnexpectedAg(update)), } } fn is_terminated(&self) -> bool { self.state == State::Terminated } } #[cfg(test)] mod tests { use super::*; use assert_matches::assert_matches; #[test] fn state_transitions() { let mut state = State::Start; state.transition(); assert_eq!(state, State::SetRequest); state.transition(); assert_eq!(state, State::Terminated); state.transition(); assert_eq!(state, State::Terminated); } #[test] fn correct_marker() { let marker = NrecProcedure::new().marker(); assert_eq!(marker, ProcedureMarker::Nrec); } #[test] fn is_terminated_in_terminated_state() { let mut proc = 
NrecProcedure::new(); assert!(!proc.is_terminated()); proc.state = State::SetRequest; assert!(!proc.is_terminated()); proc.state = State::Terminated; assert!(proc.is_terminated()); } #[test] fn unexpected_hf_update_returns_error() { let mut proc = NrecProcedure::new(); let mut state = SlcState::default(); let random_hf = at::Command::CindRead {}; assert_matches!( proc.hf_update(random_hf, &mut state), ProcedureRequest::Error(ProcedureError::UnexpectedHf(_)) ); } #[test] fn unexpected_ag_update_returns_error() { let mut proc = NrecProcedure::new(); let mut state = SlcState::default(); let random_ag = AgUpdate::ThreeWaySupport; assert_matches!( proc.ag_update(random_ag, &mut state), ProcedureRequest::Error(ProcedureError::UnexpectedAg(_)) ); } #[test] fn updates_produce_expected_requests() { let mut proc = NrecProcedure::new(); let mut state = SlcState::default(); let req = proc.hf_update(at::Command::Nrec { nrec: true }, &mut state); let update = match req { ProcedureRequest::Request(SlcRequest::SetNrec { enable: true, response }) => { response(Ok(())) } x => panic!("Unexpected message: {:?}", x), }; let req = proc.ag_update(update, &mut state); assert_matches!( req, ProcedureRequest::SendMessages(resp) if resp == vec![at::Response::Ok] ); assert!(proc.is_terminated()); assert_matches!( proc.hf_update(at::Command::Nrec { nrec: true }, &mut state), ProcedureRequest::Error(ProcedureError::UnexpectedHf(_)) ); assert_matches!( proc.ag_update(AgUpdate::Ok, &mut state), ProcedureRequest::Error(ProcedureError::UnexpectedAg(_)) ); } }
use super::{Procedure, ProcedureError, ProcedureMarker, ProcedureRequest}; use crate::peer::{service_level_connection::SlcState, slc_request::SlcRequest, update::AgUpdate}; use at_commands as at; #[derive(Debug, PartialEq, Clone, Copy)] enum State { Start, SetRequest, Terminated, } impl State { fn transition(&mut self) { match *self { Self::Start => *self = Self::SetRequest, Self::SetRequest => *self = Self::Terminated, Self::Terminated => *self = Self::Terminated, } } } #[derive(Debug)] pub struct NrecProcedure { state: State, } impl NrecProcedure { pub fn new() -> Self { Self { state: State::Start } } } impl Procedure for NrecProcedure { fn marker(&self) -> ProcedureMarker { ProcedureMarker::Nrec } fn hf_update(&mut self, update: at::Command, _state: &mut SlcState) -> ProcedureRequest { match (self.state, update) { (State::Start, at::Command::Nrec { nrec: enable }) => { self.state.transition(); let response = Box::new(Into::into); SlcRequest::SetNrec { enable, response }.into() } (_, update) => ProcedureRequest::Error(ProcedureError::UnexpectedHf(update)), } } fn ag_update(&mut self, update: AgUpdate, _state: &mut SlcState) -> ProcedureRequest { match (self.state, update) { (State::SetRequest, update @ AgUpdate::Ok) | (State::SetRequest, update @ AgUpdate::Error) => { self.state.transition(); update.into() } (_, update) => ProcedureRequest::Error(ProcedureError::UnexpectedAg(update)), } } fn is_terminated(&self) -> bool { self.state == State::Terminated } } #[cfg(test)] mod tests { use super::*; use assert_matches::assert_matches; #[test] fn state_transitions() { let mut state = State::Start; state.transition(); assert_eq!(state, State::SetRequest); state.transition(); assert_eq!(state, State::Terminated); state.transition(); assert_eq!(state, State::Terminated); } #[test] fn correct_marker() { let marker = NrecProcedure::new().marker(); assert_eq!(marker, ProcedureMarker::Nrec); } #[test] fn is_terminated_in_terminated_state() { let mut proc = 
NrecProcedure::new(); assert!(!proc.is_terminated()); proc.state = State::SetRequest; assert!(!proc.is_terminated()); proc.state = State::Terminated; assert!(proc.is_terminated()); } #[test] fn unexpected_hf_update_returns_error() { let mut proc = NrecProcedure::new(); let mut state = SlcState::default(); let random_hf = at::Command::CindRead {}; assert_matches!( proc.hf_update(random_hf, &mut state)
eRequest::Error(ProcedureError::UnexpectedAg(_)) ); } #[test] fn updates_produce_expected_requests() { let mut proc = NrecProcedure::new(); let mut state = SlcState::default(); let req = proc.hf_update(at::Command::Nrec { nrec: true }, &mut state); let update = match req { ProcedureRequest::Request(SlcRequest::SetNrec { enable: true, response }) => { response(Ok(())) } x => panic!("Unexpected message: {:?}", x), }; let req = proc.ag_update(update, &mut state); assert_matches!( req, ProcedureRequest::SendMessages(resp) if resp == vec![at::Response::Ok] ); assert!(proc.is_terminated()); assert_matches!( proc.hf_update(at::Command::Nrec { nrec: true }, &mut state), ProcedureRequest::Error(ProcedureError::UnexpectedHf(_)) ); assert_matches!( proc.ag_update(AgUpdate::Ok, &mut state), ProcedureRequest::Error(ProcedureError::UnexpectedAg(_)) ); } }
, ProcedureRequest::Error(ProcedureError::UnexpectedHf(_)) ); } #[test] fn unexpected_ag_update_returns_error() { let mut proc = NrecProcedure::new(); let mut state = SlcState::default(); let random_ag = AgUpdate::ThreeWaySupport; assert_matches!( proc.ag_update(random_ag, &mut state), Procedur
random
[]
Rust
src/sstable/writer.rs
ikatson/rust-sstb
8f2996d4e330dd1e98a16154e5bb38c6f7b81576
use std::convert::TryFrom; use std::fs::File; use std::io::BufWriter; use std::io::{Seek, SeekFrom, Write}; use std::path::Path; use bincode; use bloomfilter::Bloom; use super::compress_ctx_writer::*; use super::compression; use super::ondisk_format::*; use super::options::*; use super::poswriter::PosWriter; use super::result::Result; use super::types::*; pub trait RawSSTableWriter { fn set(&mut self, key: &[u8], value: &[u8]) -> Result<()>; fn close(self) -> Result<()>; } pub struct SSTableWriterV2 { file: PosWriter<Box<dyn CompressionContextWriter<PosWriter<BufWriter<File>>>>>, meta: MetaV3_0, meta_start: u64, data_start: u64, flush_every: usize, sparse_index: Vec<(Vec<u8>, u64)>, bloom: Bloom<[u8]>, } impl SSTableWriterV2 { pub fn new<P: AsRef<Path>>(path: P) -> Result<Self> { Self::new_with_options(path, &WriteOptions::default()) } pub fn new_with_options<P: AsRef<Path>>(path: P, options: &WriteOptions) -> Result<Self> { let file = File::create(path)?; let mut writer = PosWriter::new(BufWriter::new(file), 0); writer.write_all(MAGIC)?; bincode::serialize_into(&mut writer, &VERSION_30)?; let meta_start = writer.current_offset() as u64; let mut meta = MetaV3_0::default(); meta.compression = options.compression; bincode::serialize_into(&mut writer, &meta)?; let data_start = writer.current_offset() as u64; let file: Box<dyn CompressionContextWriter<PosWriter<BufWriter<File>>>> = match options.compression { Compression::None => Box::new(UncompressedWriter::new(writer)), Compression::Zlib => Box::new(CompressionContextWriterImpl::new( writer, compression::ZlibCompressorFactory::new(None), )), Compression::Snappy => Box::new(CompressionContextWriterImpl::new( writer, compression::SnappyCompressorFactory::new(), )), }; Ok(Self { file: PosWriter::new(file, data_start), meta, meta_start, data_start, flush_every: options.flush_every, sparse_index: Vec::new(), bloom: Bloom::new( options.bloom.bitmap_size as usize, options.bloom.items_count, ), }) } pub fn finish(self) -> 
Result<()> { match self { SSTableWriterV2 { file, mut meta, meta_start, data_start, sparse_index, bloom, .. } => { let mut writer = file.into_inner(); let index_start = self.data_start + writer.reset_compression_context()? as u64; for (key, offset) in sparse_index.into_iter() { KVOffset::new(key.len(), offset)?.serialize_into(&mut writer)?; writer.write_all(&key)?; } let bloom_start = self.data_start + writer.reset_compression_context()? as u64; writer.write_all(&bloom.bitmap())?; let end = self.data_start + writer.reset_compression_context()? as u64; meta.finished = true; meta.index_len = bloom_start - index_start; meta.data_len = index_start - data_start; meta.bloom_len = end - bloom_start; meta.bloom.bitmap_bytes = u32::try_from(bloom.number_of_bits() / 8)?; meta.bloom.k_num = bloom.number_of_hash_functions(); meta.bloom.sip_keys = bloom.sip_keys(); let mut writer = writer.into_inner()?.into_inner(); writer.seek(SeekFrom::Start(meta_start as u64))?; bincode::serialize_into(&mut writer, &meta)?; Ok(()) } } } } impl RawSSTableWriter for SSTableWriterV2 { #[allow(clippy::collapsible_if)] fn set(&mut self, key: &[u8], value: &[u8]) -> Result<()> { let approx_msg_len = key.len() + 5 + value.len(); if self.meta.items == 0 { self.sparse_index.push((key.to_owned(), self.data_start)); } else { if self.file.current_offset() + approx_msg_len as u64 >= self.flush_every as u64 { let total_offset = self.data_start + self.file.get_mut().reset_compression_context()? as u64; self.file.reset_offset(0); self.sparse_index .push((key.to_owned(), total_offset as u64)); } } self.bloom.set(key); KVLength::new(key.len(), value.len())?.serialize_into(&mut self.file)?; self.file.write_all(key)?; self.file.write_all(value)?; self.meta.items += 1; Ok(()) } fn close(self) -> Result<()> { self.finish() } }
use std::convert::TryFrom; use std::fs::File; use std::io::BufWriter; use std::io::{Seek, SeekFrom, Write}; use std::path::Path; use bincode; use bloomfilter::Bloom; use super::compress_ctx_writer::*; use super::compression; use super::ondisk_format::*; use super::options::*; use super::poswriter::PosWriter; use super::result::Result; use super::types::*; pub trait RawSSTableWriter { fn set(&mut self, key: &[u8], value: &[u8]) -> Result<()>; fn close(self) -> Result<()>; } pub struct SSTableWriterV2 { file: PosWriter<Box<dyn CompressionContextWriter<PosWriter<BufWriter<File>>>>>, meta: MetaV3_0, meta_start: u64, data_start: u64, flush_every: usize, sparse_index: Vec<(Vec<u8>, u64)>, bloom: Bloom<[u8]>, } impl SSTableWriterV2 { pub fn new<P: AsRef<Path>>(path: P) -> Result<Self> { Self::new_with_options(path, &WriteOptions::default()) } pub fn new_with_options<P: AsRef<Path>>(path: P, options: &WriteOptions) -> Result<Self> { let file = File::create(path)?; let mut writer = PosWriter::new(BufWriter::new(file), 0); writer.write_all(MAGIC)?; bincode::serialize_into(&mut writer, &VERSION_30)?; let meta_start = writer.current_offset() as u64; let mut meta = MetaV3_0::default(); meta.compression = options.compression; bincode::serialize_into(&mut writer, &meta)?; let data_start = writer.current_offset() as u64; let file: Box<dyn CompressionContextWriter<PosWriter<BufWriter<File>>>> = match options.compression { Compression::None => Box::new(UncompressedWriter::new(writer)), Compression::Zlib => Box::new(CompressionContextWriterImpl::new( writer, compression::ZlibCompressorFactory::new(None), )), Compression::Snappy => Box::new(CompressionContextWriterImpl::new( writer, compression::SnappyCompressorFactory::new(), )), }; Ok(Self { file: PosWriter::new(file, data_start), meta, meta_start, data_start, flush_every: options.flush_every, sparse_index: Vec::new(), bloom: Bloom::new( options.bloom.bitmap_size as usize, options.bloom.items_count, ), }) }
} impl RawSSTableWriter for SSTableWriterV2 { #[allow(clippy::collapsible_if)] fn set(&mut self, key: &[u8], value: &[u8]) -> Result<()> { let approx_msg_len = key.len() + 5 + value.len(); if self.meta.items == 0 { self.sparse_index.push((key.to_owned(), self.data_start)); } else { if self.file.current_offset() + approx_msg_len as u64 >= self.flush_every as u64 { let total_offset = self.data_start + self.file.get_mut().reset_compression_context()? as u64; self.file.reset_offset(0); self.sparse_index .push((key.to_owned(), total_offset as u64)); } } self.bloom.set(key); KVLength::new(key.len(), value.len())?.serialize_into(&mut self.file)?; self.file.write_all(key)?; self.file.write_all(value)?; self.meta.items += 1; Ok(()) } fn close(self) -> Result<()> { self.finish() } }
pub fn finish(self) -> Result<()> { match self { SSTableWriterV2 { file, mut meta, meta_start, data_start, sparse_index, bloom, .. } => { let mut writer = file.into_inner(); let index_start = self.data_start + writer.reset_compression_context()? as u64; for (key, offset) in sparse_index.into_iter() { KVOffset::new(key.len(), offset)?.serialize_into(&mut writer)?; writer.write_all(&key)?; } let bloom_start = self.data_start + writer.reset_compression_context()? as u64; writer.write_all(&bloom.bitmap())?; let end = self.data_start + writer.reset_compression_context()? as u64; meta.finished = true; meta.index_len = bloom_start - index_start; meta.data_len = index_start - data_start; meta.bloom_len = end - bloom_start; meta.bloom.bitmap_bytes = u32::try_from(bloom.number_of_bits() / 8)?; meta.bloom.k_num = bloom.number_of_hash_functions(); meta.bloom.sip_keys = bloom.sip_keys(); let mut writer = writer.into_inner()?.into_inner(); writer.seek(SeekFrom::Start(meta_start as u64))?; bincode::serialize_into(&mut writer, &meta)?; Ok(()) } } }
function_block-full_function
[ { "content": "/// Find the key in the chunk by scanning sequentially.\n\n///\n\n/// This assumes the chunk was fetched from disk and has V1 ondisk format.\n\n///\n\n/// Returns the start and end index of the value.\n\n///\n\n/// TODO: this probably belongs in \"ondisk\" for version V1.\n\npub fn find_value_offset_v2(buf: &[u8], key: &[u8]) -> Result<Option<(usize, usize)>> {\n\n macro_rules! buf_get {\n\n ($x:expr) => {{\n\n buf.get($x).ok_or(INVALID_DATA)?\n\n }};\n\n }\n\n\n\n let kvlen_encoded_size = KVLength::encoded_size();\n\n\n\n let mut offset = 0;\n\n while offset < buf.len() {\n\n let kvlength = bincode::deserialize::<KVLength>(&buf)?;\n\n let (start_key, cursor) = {\n\n let key_start = offset + kvlen_encoded_size;\n\n let key_end = key_start + kvlength.key_length as usize;\n\n (buf_get!(key_start..key_end), key_end)\n\n };\n\n\n\n let (start, end) = {\n\n let value_end = cursor + kvlength.value_length as usize;\n", "file_path": "src/sstable/ondisk_format.rs", "rank": 0, "score": 226046.14298968075 }, { "content": "/// Read the bloom filter from a reader.\n\nfn read_bloom<R: Read>(mut reader: R, config: &BloomV3_0) -> Result<Bloom<[u8]>> {\n\n let len_bytes = usize::try_from(config.bitmap_bytes)?;\n\n // I don't think there's a way not to do this allocation.\n\n let mut buf = vec![0u8; len_bytes];\n\n reader.read_exact(&mut buf)?;\n\n Ok(Bloom::from_existing(\n\n &buf,\n\n config.bitmap_bytes as u64 * 8,\n\n config.k_num,\n\n config.sip_keys,\n\n ))\n\n}\n\n\n", "file_path": "src/sstable/reader.rs", "rank": 1, "score": 154394.13033039993 }, { "content": "fn pread_exact(fd: RawFd, mut offset: u64, length: u64) -> Result<Vec<u8>> {\n\n // if this was mmaped, there will be no truncation.\n\n #[allow(clippy::cast_possible_truncation)]\n\n let mut buf = vec![0_u8; length as usize];\n\n let mut remaining = length;\n\n while remaining > 0 {\n\n let size = pread(fd, &mut buf, i64::try_from(offset)?)? 
as u64;\n\n if size == 0 {\n\n return Err(error::INVALID_DATA);\n\n }\n\n remaining -= size;\n\n offset += size;\n\n }\n\n Ok(buf)\n\n}\n\n\n", "file_path": "src/sstable/concurrent_page_cache.rs", "rank": 2, "score": 149888.55622072855 }, { "content": "// Read metadata of any format (only V1 is supported now) from a reader.\n\n// This will fail if the file is not a valid sstable.\n\nfn read_metadata<B: Read + Seek>(mut file: B) -> Result<MetaResult> {\n\n file.seek(SeekFrom::Start(0))?;\n\n let mut reader = posreader::PosReader::new(BufReader::new(file), 0);\n\n let mut buf = [0; MAGIC.len()];\n\n if reader.read(&mut buf)? != MAGIC.len() {\n\n return Err(Error::InvalidData(\"not an sstable\"));\n\n }\n\n if buf != MAGIC {\n\n return Err(Error::InvalidData(\"not an sstable\"));\n\n }\n\n let version: Version = bincode::deserialize_from(&mut reader)?;\n\n let meta = match version {\n\n VERSION_30 => {\n\n let meta: MetaV3_0 = bincode::deserialize_from(&mut reader)?;\n\n MetaData::V3_0(meta)\n\n }\n\n _ => return Err(Error::UnsupportedVersion(version)),\n\n };\n\n\n\n let offset = reader.current_offset();\n\n let mut file = reader.into_inner().into_inner();\n\n file.seek(SeekFrom::Start(offset as u64))?;\n\n\n\n Ok(MetaResult { meta, offset })\n\n}\n\n\n", "file_path": "src/sstable/reader.rs", "rank": 3, "score": 149503.60750560224 }, { "content": "/// A writer that maybe compresses the input.\n\n///\n\n/// The difference with a regular writer, is that if you call reset_compression_context()\n\n/// all compression state will be reset and flushed, and the offset in the underlying\n\n/// writer will be returned.\n\npub trait CompressionContextWriter<I: Write>: Write {\n\n /// Reset and flush compression state.\n\n ///\n\n /// It must be possible to read from the returned offset with a newly\n\n /// created decompressor.\n\n ///\n\n /// Returns number of bytes written so far, i.e. 
relative offset\n\n /// from the creation of Self.\n\n fn reset_compression_context(&mut self) -> Result<usize>;\n\n fn into_inner(self: Box<Self>) -> Result<I>;\n\n}\n\n\n\npub struct UncompressedWriter<W> {\n\n writer: PosWriter<W>,\n\n}\n\n\n\nimpl<W> UncompressedWriter<W> {\n\n pub fn new(writer: W) -> Self {\n\n UncompressedWriter {\n\n writer: PosWriter::new(writer, 0),\n", "file_path": "src/sstable/compress_ctx_writer.rs", "rank": 4, "score": 149128.62453828414 }, { "content": "/// A convenience function to write a btree map to a file.\n\n///\n\n///\n\n/// Example:\n\n/// ```\n\n/// use std::collections::BTreeMap;\n\n/// use sstb::sstable::{write_btree_map, WriteOptions};\n\n///\n\n/// let mut map = BTreeMap::new();\n\n/// let filename = \"/tmp/some-sstable\";\n\n/// let write_options = WriteOptions::default();\n\n///\n\n/// map.insert(b\"foo\", b\"some foo\");\n\n/// map.insert(b\"bar\", b\"some bar\");\n\n/// write_btree_map(&map, filename, Some(write_options)).unwrap();\n\n/// ```\n\npub fn write_btree_map<K: AsRef<[u8]>, V: AsRef<[u8]>, P: AsRef<Path>>(\n\n map: &BTreeMap<K, V>,\n\n filename: P,\n\n options: Option<WriteOptions>,\n\n) -> Result<()> {\n\n let options = options.unwrap_or_default();\n\n let mut writer = writer::SSTableWriterV2::new_with_options(filename, &options)?;\n\n\n\n for (key, value) in map.iter() {\n\n writer.set(key.as_ref(), value.as_ref())?;\n\n }\n\n writer.close()?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::collections::BTreeMap;\n\n\n", "file_path": "src/sstable/mod.rs", "rank": 5, "score": 143596.56735375128 }, { "content": "pub trait Compressor<W: Write>: Write {\n\n fn into_inner(self) -> Result<W>;\n\n}\n\n\n", "file_path": "src/sstable/compression.rs", "rank": 6, "score": 126900.99306293768 }, { "content": "pub trait CompressorFactory<W: Write, C: Compressor<W>> {\n\n fn from_writer(&self, writer: W) -> C;\n\n}\n\n\n", "file_path": "src/sstable/compression.rs", "rank": 8, "score": 
100192.30168054218 }, { "content": "struct MetaResult {\n\n meta: MetaData,\n\n offset: usize,\n\n}\n\n\n", "file_path": "src/sstable/reader.rs", "rank": 9, "score": 99681.86247595647 }, { "content": "#[cfg(test)]\n\npub fn get_current_pid_rss() -> usize {\n\n let pid = format!(\"{}\", std::process::id());\n\n let out = std::process::Command::new(\"ps\")\n\n .args(&[\"-p\", &pid, \"-o\", \"rss\"])\n\n .output()\n\n .unwrap();\n\n let out = String::from_utf8(out.stdout).unwrap();\n\n let pid_line = out.lines().nth(1).unwrap();\n\n pid_line.trim().parse::<usize>().unwrap()\n\n}\n", "file_path": "src/utils/mod.rs", "rank": 10, "score": 99084.9410414474 }, { "content": "/// Find the potential start and end offsets of the key.\n\n/// This will be used later to fetch the chunk from the page cache.\n\nfn find_bounds<K, T>(map: &BTreeMap<K, T>, key: &[u8], end_default: T) -> Option<(T, T)>\n\nwhere\n\n K: Borrow<[u8]> + std::cmp::Ord,\n\n T: Copy,\n\n{\n\n use std::ops::Bound;\n\n\n\n let start = {\n\n let mut iter_left = map.range::<[u8], _>((Bound::Unbounded, Bound::Included(key)));\n\n let closest_left = iter_left.next_back();\n\n match closest_left {\n\n Some((_, offset)) => *offset,\n\n None => return None,\n\n }\n\n };\n\n\n\n let end = {\n\n let mut iter_right = map.range::<[u8], _>((Bound::Excluded(key), Bound::Unbounded));\n\n let closest_right = iter_right.next();\n\n match closest_right {\n\n Some((_, offset)) => *offset,\n\n None => end_default,\n\n }\n\n };\n\n Some((start, end))\n\n}\n\n\n", "file_path": "src/sstable/reader.rs", "rank": 11, "score": 98152.77994097568 }, { "content": "pub trait Uncompress {\n\n fn uncompress(&self, buf: &[u8]) -> Result<Vec<u8>>;\n\n}\n\n\n\n/// ZLIB\n\npub struct ZlibCompressorFactory<W: Write> {\n\n compression: flate2::Compression,\n\n marker: std::marker::PhantomData<W>,\n\n}\n\n\n\npub struct ZlibCompressor<W: Write> {\n\n inner: flate2::write::ZlibEncoder<W>,\n\n}\n\n\n\npub struct ZlibUncompress {}\n\n\n\nimpl<W: Write> 
ZlibCompressor<W> {\n\n pub fn new(writer: W, compression: flate2::Compression) -> Self {\n\n Self {\n\n inner: flate2::write::ZlibEncoder::new(writer, compression),\n", "file_path": "src/sstable/compression.rs", "rank": 12, "score": 87699.504242712 }, { "content": "/// PageCache is something that can get byte chunks of a given length, given an offset.\n\n///\n\n/// This is used for 2 purposes: reading from disk, and optionally uncompressing the\n\n/// chunk that was read from disk.\n\n///\n\n/// If compression is used, there are 2 PageCache objects used - one to read from disk\n\n/// (or mmap buffer), and another to uncompress that and cache the result.\n\n/// In the latter case, the outer PageCache wraps the inner one.\n\n///\n\n/// Note, that this cannot be used concurrently, note the &mut self. For concurrent use,\n\n/// more complicated concurrent cache can be used, from another file.\n\npub trait PageCache {\n\n fn get_chunk(&mut self, offset: u64, length: u64) -> Result<&[u8]>;\n\n}\n\n\n\n/// This is used to read from the mmap'ed region. 
It's a mere proxy to the slice.\n\npub struct StaticBufCache {\n\n buf: &'static [u8],\n\n}\n\n\n\nimpl StaticBufCache {\n\n pub fn new(buf: &'static [u8]) -> Self {\n\n Self { buf }\n\n }\n\n pub fn get_buf(&self) -> &'static [u8] {\n\n self.buf\n\n }\n\n}\n\n\n\nimpl PageCache for StaticBufCache {\n\n fn get_chunk(&mut self, offset: u64, length: u64) -> Result<&[u8]> {\n", "file_path": "src/sstable/page_cache.rs", "rank": 13, "score": 83456.5879590894 }, { "content": "pub trait ConcurrentPageCache {\n\n fn get_chunk(&self, offset: u64, length: u64) -> Result<Bytes>;\n\n}\n\n\n\nimpl ConcurrentPageCache for page_cache::StaticBufCache {\n\n fn get_chunk(&self, offset: u64, length: u64) -> Result<Bytes> {\n\n // if this was mmaped, there will be no truncation.\n\n #[allow(clippy::cast_possible_truncation)]\n\n self.get_buf()\n\n .get(offset as usize..(offset + length) as usize)\n\n .map(Bytes::from_static)\n\n .ok_or(error::INVALID_DATA)\n\n }\n\n}\n\n\n\npub struct FileBackedPageCache {\n\n file: File,\n\n caches: ConcurrentLRUCache,\n\n}\n\n\n", "file_path": "src/sstable/concurrent_page_cache.rs", "rank": 14, "score": 79819.41174252215 }, { "content": "fn compare_with_others(c: &mut Criterion) {\n\n let size = 100_000;\n\n use rocksdb::{DBCompressionType, Options, DB};\n\n let path = \"/tmp/sstb\";\n\n let rocks_path = \"/tmp/rocksdb-rust-lsm\";\n\n\n\n let bench = |group: &mut BenchmarkGroup<_>, state: &TestState, suffix: f32, threads| {\n\n let pool = rayon::ThreadPoolBuilder::new()\n\n .num_threads(threads)\n\n .build()\n\n .unwrap();\n\n\n\n for (name, opts) in [\n\n (\"rocksdb,mmap-reads,no-compression\", {\n\n let mut opts = Options::default();\n\n opts.set_compression_type(DBCompressionType::None);\n\n opts.set_allow_mmap_reads(true);\n\n opts.create_if_missing(true);\n\n opts\n\n }),\n", "file_path": "benches/sstable_implementations.rs", "rank": 15, "score": 78872.30058336235 }, { "content": "fn criterion_benchmark(c: &mut Criterion) {\n\n let 
make_write_opts = |compression, flush| {\n\n WriteOptions::default()\n\n .compression(compression)\n\n .flush_every(flush)\n\n .clone()\n\n };\n\n let filename = \"/tmp/sstable\";\n\n let variants = vec![\n\n (\n\n \"mmap,compress=none,flush=4096,nocache\",\n\n make_write_opts(Compression::None, 4096),\n\n ReadOptions::default().cache(None).use_mmap(true).clone(),\n\n ),\n\n (\n\n \"mmap,compress=none,flush=4096,nocache,use_bloom=false\",\n\n make_write_opts(Compression::None, 4096),\n\n ReadOptions::default()\n\n .cache(None)\n\n .use_mmap(true)\n", "file_path": "benches/sstable_implementations.rs", "rank": 16, "score": 78872.30058336235 }, { "content": "/// An efficient way to deserialize and NOT fail when the reader is at EOF right\n\n/// from the start, without any allocations.\n\npub fn deserialize_from_eof_is_ok<T: serde::de::DeserializeOwned, R: Read>(\n\n reader: R,\n\n) -> Result<Option<T>> {\n\n let mut pr = PosReader::new(reader, 0);\n\n let result = bincode::deserialize_from::<_, T>(&mut pr);\n\n match result {\n\n Ok(val) => Ok(Some(val)),\n\n Err(e) => match &*e {\n\n bincode::ErrorKind::Io(ioe) => {\n\n if ioe.kind() == std::io::ErrorKind::UnexpectedEof && pr.current_offset() == 0 {\n\n // This is actually fine and we hit EOF right away.\n\n return Ok(None);\n\n }\n\n Err(e.into())\n\n }\n\n _ => Err(e.into()),\n\n },\n\n }\n\n}\n", "file_path": "src/sstable/utils.rs", "rank": 17, "score": 60056.949478944414 }, { "content": "/// An object that can find the potential start and end offsets of the key.\n\n///\n\n/// A trait is used instead of a struct cause we have multiple implementations,\n\n/// owning and not owning.\n\ntrait Index {\n\n fn find_bounds(&self, key: &[u8], end_default: u64) -> Option<(u64, u64)>;\n\n}\n\n\n", "file_path": "src/sstable/reader.rs", "rank": 18, "score": 47266.146714013434 }, { "content": "struct KV {\n\n key: Vec<u8>,\n\n is_present: bool,\n\n}\n\n\n", "file_path": "benches/sstable_implementations.rs", "rank": 19, 
"score": 47255.12218825896 }, { "content": "/// An index that is used with Mmap blocks.\n\nstruct MemIndex {\n\n index: BTreeMap<&'static [u8], u64>,\n\n}\n\n\n\nimpl MemIndex {\n\n fn from_static_buf(buf: &'static [u8], expected_len: u64) -> Result<Self> {\n\n // Build the index from mmap here.\n\n let mut index = BTreeMap::new();\n\n let mut index_data = &buf[..];\n\n if index_data.len() as u64 != expected_len {\n\n return Err(Error::InvalidData(\"invalid index length\"));\n\n }\n\n\n\n let kvoffset_encoded_size = KVOffset::encoded_size();\n\n\n\n while !index_data.is_empty() {\n\n let kvoffset = bincode::deserialize::<KVOffset>(\n\n index_data\n\n .get(..kvoffset_encoded_size)\n\n .ok_or(INVALID_DATA)?,\n", "file_path": "src/sstable/reader.rs", "rank": 20, "score": 46047.24577421584 }, { "content": "struct Inner {\n\n value: RwLock<Option<Bytes>>,\n\n}\n\n\n\nimpl Inner {\n\n fn new() -> Self {\n\n Self {\n\n value: RwLock::new(None),\n\n }\n\n }\n\n fn get_or_insert<F>(&self, func: F) -> Result<Bytes>\n\n where\n\n F: Fn() -> Result<Bytes>,\n\n {\n\n {\n\n let g = self.value.read();\n\n if let Some(bytes) = g.as_ref() {\n\n return Ok(bytes.clone());\n\n }\n\n }\n", "file_path": "src/sstable/concurrent_lru.rs", "rank": 21, "score": 46044.11570010401 }, { "content": "struct OwnedIndex {\n\n index: BTreeMap<Vec<u8>, u64>,\n\n}\n\n\n\nimpl OwnedIndex {\n\n fn from_reader<R: Read>(mut reader: R) -> Result<Self> {\n\n let mut index = BTreeMap::new();\n\n\n\n loop {\n\n let kvoffset = KVOffset::deserialize_from_eof_is_ok(&mut reader)?;\n\n let kvoffset = match kvoffset {\n\n Some(kvoffset) => kvoffset,\n\n None => break,\n\n };\n\n let mut key = vec![0; kvoffset.key_length as usize];\n\n reader.read_exact(&mut key)?;\n\n index.insert(key, kvoffset.offset);\n\n }\n\n Ok(Self { index })\n\n }\n", "file_path": "src/sstable/reader.rs", "rank": 22, "score": 46044.11570010401 }, { "content": "struct TestState {\n\n sorted_iter: SortedBytesIterator,\n\n shuffled: 
Vec<KV>,\n\n}\n\n\n\nimpl TestState {\n\n fn new(len: usize, limit: usize, percent_missing: f32) -> Self {\n\n let mut it = SortedBytesIterator::new(len, limit).unwrap();\n\n let shuffled = {\n\n let mut shuffled: Vec<KV> = Vec::with_capacity(limit * 2);\n\n let mut small_rng = SmallRng::from_seed(*b\"seedseedseedseed\");\n\n let missing_threshold = u32::max_value() as f64 * percent_missing as f64;\n\n let missing_threshold = if missing_threshold > u32::max_value() as f64 {\n\n u32::max_value()\n\n } else if missing_threshold < 0. {\n\n 0\n\n } else {\n\n missing_threshold as u32\n\n };\n\n while let Some(value) = it.next() {\n", "file_path": "benches/sstable_implementations.rs", "rank": 23, "score": 46044.11570010401 }, { "content": "struct InnerReader {\n\n index: Box<dyn Index>,\n\n // This is just to hold an mmap reference to be dropped in the end.\n\n _mmap: Option<memmap::Mmap>,\n\n page_cache: Box<dyn page_cache::PageCache>,\n\n meta: MetaV3_0,\n\n data_start: u64,\n\n use_bloom_default: bool,\n\n bloom: Bloom<[u8]>,\n\n}\n\n\n\nimpl InnerReader {\n\n pub fn new(\n\n mut file: File,\n\n data_start: u64,\n\n meta: MetaResult,\n\n opts: &ReadOptions,\n\n ) -> Result<Self> {\n\n #[allow(clippy::infallible_destructuring_match)]\n\n let meta = match meta.meta {\n", "file_path": "src/sstable/reader.rs", "rank": 24, "score": 46044.11570010401 }, { "content": "struct ConcurrentInnerReader {\n\n index: Box<dyn Index + Sync + Send>,\n\n // This is just to hold an mmap reference to be dropped in the end.\n\n _mmap: Option<memmap::Mmap>,\n\n page_cache: Box<dyn concurrent_page_cache::ConcurrentPageCache + Sync + Send>,\n\n meta: MetaV3_0,\n\n data_start: u64,\n\n use_bloom_default: bool,\n\n bloom: Bloom<[u8]>,\n\n}\n\n\n\nimpl ConcurrentInnerReader {\n\n pub fn new(\n\n mut file: File,\n\n data_start: u64,\n\n meta: MetaResult,\n\n opts: &ReadOptions,\n\n ) -> Result<Self> {\n\n #[allow(clippy::infallible_destructuring_match)]\n\n let meta = match meta.meta {\n", 
"file_path": "src/sstable/reader.rs", "rank": 25, "score": 44933.85874728962 }, { "content": "fn default_criterion() -> Criterion {\n\n Criterion::default().sample_size(10)\n\n}\n\n\n\ncriterion_group! {\n\n name = sstable;\n\n config = default_criterion();\n\n targets = criterion_benchmark, compare_with_others\n\n}\n\n\n\ncriterion_main!(sstable);\n", "file_path": "benches/sstable_implementations.rs", "rank": 26, "score": 42295.093529176964 }, { "content": "use super::error::Error;\n\n\n\npub type Result<T> = core::result::Result<T, Error>;\n", "file_path": "src/sstable/result.rs", "rank": 27, "score": 29317.43946709647 }, { "content": "/// Options for writing sstables.\n\n#[derive(Debug, Copy, Clone)]\n\npub struct WriteOptions {\n\n /// Compression to use. The default is None.\n\n pub compression: Compression,\n\n /// How often to store the records in the index.\n\n pub flush_every: usize,\n\n /// Options for the bloom filter.\n\n pub bloom: BloomConfig,\n\n}\n\n\n\nimpl WriteOptions {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n pub fn compression(&mut self, compression: Compression) -> &mut Self {\n\n self.compression = compression;\n\n self\n\n }\n\n pub fn flush_every(&mut self, flush_every: usize) -> &mut Self {\n", "file_path": "src/sstable/options.rs", "rank": 28, "score": 29171.070798337314 }, { "content": "}\n\n\n\nimpl ReadOptions {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n pub fn cache(&mut self, cache: Option<ReadCache>) -> &mut Self {\n\n self.cache = cache;\n\n self\n\n }\n\n pub fn use_mmap(&mut self, use_mmap: bool) -> &mut Self {\n\n self.use_mmap = use_mmap;\n\n self\n\n }\n\n pub fn use_bloom(&mut self, use_bloom: bool) -> &mut Self {\n\n self.use_bloom = use_bloom;\n\n self\n\n }\n\n pub fn thread_buckets(&mut self, thread_buckets: Option<usize>) -> &mut Self {\n\n self.thread_buckets = thread_buckets;\n", "file_path": "src/sstable/options.rs", "rank": 29, "score": 29169.921276512807 }, { "content": " pub use_bloom: 
bool,\n\n}\n\n\n\nimpl GetOptions {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n pub fn use_bloom(&mut self, use_bloom: bool) -> &mut Self {\n\n self.use_bloom = use_bloom;\n\n self\n\n }\n\n}\n\n\n\nimpl Default for GetOptions {\n\n fn default() -> Self {\n\n Self { use_bloom: true }\n\n }\n\n}\n", "file_path": "src/sstable/options.rs", "rank": 30, "score": 29169.4164442337 }, { "content": " self.flush_every = flush_every;\n\n self\n\n }\n\n pub fn bloom(&mut self, bloom: BloomConfig) -> &mut Self {\n\n self.bloom = bloom;\n\n self\n\n }\n\n}\n\n\n\nimpl Default for WriteOptions {\n\n fn default() -> Self {\n\n WriteOptions {\n\n compression: Compression::None,\n\n flush_every: 4096,\n\n bloom: BloomConfig::default(),\n\n }\n\n }\n\n}\n\n\n\n/// Configures the caches for reading.\n", "file_path": "src/sstable/options.rs", "rank": 31, "score": 29166.11537678911 }, { "content": "impl Default for ReadCache {\n\n fn default() -> Self {\n\n Self::Unbounded\n\n }\n\n}\n\n\n\n/// Options for reading sstables.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct ReadOptions {\n\n /// The caching strategy to use.\n\n pub cache: Option<ReadCache>,\n\n /// If mmap can be used for reading the sstable from disk.\n\n pub use_mmap: bool,\n\n /// How many buckets to split the caches into for efficient\n\n /// thread-safe access.\n\n pub thread_buckets: Option<usize>,\n\n // Set if you want to use bloom filters during lookups.\n\n // This has a performance penalty for positive lookups,\n\n // but if you have a lot of maybe-negative, it should make things faster.\n\n pub use_bloom: bool,\n", "file_path": "src/sstable/options.rs", "rank": 32, "score": 29163.56029398526 }, { "content": "use super::types::Compression;\n\n\n\nuse lru::LruCache;\n\n\n\n// The configuration for the bloom filter.\n\n#[derive(Debug, Copy, Clone)]\n\npub struct BloomConfig {\n\n pub bitmap_size: u32,\n\n pub items_count: usize,\n\n}\n\n\n\nimpl Default for BloomConfig {\n\n fn default() -> Self {\n\n 
return Self {\n\n bitmap_size: 1_000_000,\n\n items_count: 1_000_000,\n\n };\n\n }\n\n}\n\n\n", "file_path": "src/sstable/options.rs", "rank": 33, "score": 29162.833521798046 }, { "content": " self\n\n }\n\n}\n\n\n\nimpl Default for ReadOptions {\n\n fn default() -> Self {\n\n Self {\n\n cache: Some(ReadCache::default()),\n\n use_mmap: true,\n\n thread_buckets: Some(num_cpus::get()),\n\n use_bloom: true,\n\n }\n\n }\n\n}\n\n\n\n/// Options for \"get\" method.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct GetOptions {\n\n /// Set this if you want to use the bloom filter to speed\n\n /// up negative lookups at a cost for positive lookup.\n", "file_path": "src/sstable/options.rs", "rank": 34, "score": 29161.85172921269 }, { "content": "#[derive(Copy, Clone, Debug)]\n\npub enum ReadCache {\n\n // How many chunks(blocks) to store in LRU.\n\n Blocks(usize),\n\n // Unbounded cache, the default.\n\n Unbounded,\n\n}\n\n\n\nimpl ReadCache {\n\n pub fn lru<K, V>(&self) -> LruCache<K, V>\n\n where\n\n K: std::cmp::Eq + std::hash::Hash,\n\n {\n\n match self {\n\n Self::Blocks(b) => LruCache::new(*b),\n\n Self::Unbounded => LruCache::unbounded(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/sstable/options.rs", "rank": 35, "score": 29157.38783519341 }, { "content": "enum MetaData {\n\n V3_0(MetaV3_0),\n\n}\n\n\n", "file_path": "src/sstable/reader.rs", "rank": 44, "score": 26873.747280632982 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<W: Write> Write for UncompressedWriter<W> {\n\n fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {\n\n self.writer.write(buf)\n\n }\n\n\n\n fn flush(&mut self) -> std::io::Result<()> {\n\n self.writer.flush()\n\n }\n\n}\n\n\n\nimpl<W: Write> CompressionContextWriter<W> for UncompressedWriter<W> {\n\n fn reset_compression_context(&mut self) -> Result<usize> {\n\n Ok(usize::try_from(self.writer.current_offset())?)\n\n }\n\n fn into_inner(self: Box<Self>) -> Result<W> {\n\n Ok(self.writer.into_inner())\n", "file_path": 
"src/sstable/compress_ctx_writer.rs", "rank": 45, "score": 26448.86784154131 }, { "content": " factory,\n\n _w: std::marker::PhantomData {},\n\n }\n\n }\n\n fn get_mut_compressor(&mut self) -> Result<&mut C> {\n\n Ok(self.compressor.as_mut().ok_or(COMPRESSOR_MISSING)?)\n\n }\n\n}\n\n\n\nimpl<F, C, W> Write for CompressionContextWriterImpl<F, C, W>\n\nwhere\n\n F: CompressorFactory<PosWriter<W>, C>,\n\n W: Write,\n\n C: Compressor<PosWriter<W>>,\n\n{\n\n fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {\n\n self.get_mut_compressor().unwrap().write(buf)\n\n }\n\n\n\n fn flush(&mut self) -> std::io::Result<()> {\n", "file_path": "src/sstable/compress_ctx_writer.rs", "rank": 46, "score": 26444.436398103826 }, { "content": " self.get_mut_compressor().unwrap().flush()\n\n }\n\n}\n\n\n\nimpl<F, C, W> CompressionContextWriter<W> for CompressionContextWriterImpl<F, C, W>\n\nwhere\n\n F: CompressorFactory<PosWriter<W>, C>,\n\n W: Write,\n\n C: Compressor<PosWriter<W>>,\n\n{\n\n fn reset_compression_context(&mut self) -> Result<usize> {\n\n let enc = self.compressor.take().ok_or(COMPRESSOR_MISSING)?;\n\n let pos_writer = enc.into_inner()?;\n\n let offset = pos_writer.current_offset();\n\n self.compressor\n\n .replace(self.factory.from_writer(pos_writer));\n\n Ok(usize::try_from(offset)?)\n\n }\n\n fn into_inner(mut self: Box<Self>) -> Result<W> {\n\n let enc = self.compressor.take().ok_or(COMPRESSOR_MISSING)?;\n\n Ok(enc.into_inner()?.into_inner())\n\n }\n\n}\n", "file_path": "src/sstable/compress_ctx_writer.rs", "rank": 47, "score": 26441.998195117983 }, { "content": " }\n\n}\n\n\n\n/// A version of CompressionContextWriter that knows\n\n/// how to create new compressors (encoders) from a factory.\n\npub struct CompressionContextWriterImpl<F, C, W> {\n\n factory: F,\n\n compressor: Option<C>,\n\n _w: std::marker::PhantomData<W>,\n\n}\n\n\n\nimpl<F, C, W> CompressionContextWriterImpl<F, C, W>\n\nwhere\n\n F: CompressorFactory<PosWriter<W>, C>,\n\n W: Write,\n\n C: 
Compressor<PosWriter<W>>,\n\n{\n\n pub fn new(writer: W, factory: F) -> Self {\n\n Self {\n\n compressor: Some(factory.from_writer(PosWriter::new(writer, 0))),\n", "file_path": "src/sstable/compress_ctx_writer.rs", "rank": 48, "score": 26440.81215867089 }, { "content": "use std::io::Write;\n\n\n\nuse super::compression::*;\n\nuse super::poswriter::PosWriter;\n\nuse super::{Error, Result};\n\nuse std::convert::TryFrom;\n\n\n\nconst COMPRESSOR_MISSING: Error = Error::ProgrammingError(\"compressor missing\");\n\n\n\n/// A writer that maybe compresses the input.\n\n///\n\n/// The difference with a regular writer, is that if you call reset_compression_context()\n\n/// all compression state will be reset and flushed, and the offset in the underlying\n\n/// writer will be returned.\n", "file_path": "src/sstable/compress_ctx_writer.rs", "rank": 49, "score": 26436.94059188318 }, { "content": " bloom: Bloom<[u8]>,\n\n}\n\n\n\nimpl MmapUncompressedSSTableReader {\n\n /// Construct a new mmap reader from a file with default options.\n\n ///\n\n /// `new_with_options` has more details.\n\n pub fn new<P: AsRef<Path>>(filename: P) -> Result<Self> {\n\n Self::new_with_options(filename, &ReadOptions::default())\n\n }\n\n\n\n /// Construct a new mmap reader from a file.\n\n ///\n\n /// All options except \"use_bloom\" are ignored.\n\n ///\n\n /// Returns `Error::CantUseCompressedFileWithMultiThreadedMmap` if you try to open a compressed file with it.\n\n pub fn new_with_options<P: AsRef<Path>>(filename: P, opts: &ReadOptions) -> Result<Self> {\n\n let mut file = File::open(filename)?;\n\n let meta = read_metadata(&mut file)?;\n\n let data_start = meta.offset as u64;\n", "file_path": "src/sstable/reader.rs", "rank": 50, "score": 28.265756371482908 }, { "content": " &mut self,\n\n key: &[u8],\n\n options: Option<GetOptions>,\n\n ) -> Result<Option<&[u8]>> {\n\n let use_bloom = options\n\n .map(|o| o.use_bloom)\n\n .unwrap_or(self.use_bloom_default);\n\n if use_bloom && 
!self.bloom.check(key) {\n\n return Ok(None);\n\n }\n\n let index_start = self.data_start + self.meta.data_len as u64;\n\n let (offset, right_bound) = match self.index.find_bounds(key, index_start) {\n\n Some(v) => v,\n\n None => return Ok(None),\n\n };\n\n\n\n let chunk = self.page_cache.get_chunk(offset, right_bound - offset)?;\n\n Ok(find_value_offset_v2(chunk, key)?.map(|(start, end)| &chunk[start..end]))\n\n }\n\n\n\n fn get(&mut self, key: &[u8]) -> Result<Option<&[u8]>> {\n\n self.get_with_options(key, None)\n\n }\n\n}\n\n\n", "file_path": "src/sstable/reader.rs", "rank": 51, "score": 25.886616275571 }, { "content": " pub key_length: KeyLength,\n\n pub value_length: ValueLength,\n\n}\n\n\n\nimpl KVLength {\n\n pub fn new(k: usize, v: usize) -> Result<Self> {\n\n Ok(Self {\n\n key_length: KeyLength::try_from(k).map_err(|_| Error::KeyTooLong(k))?,\n\n value_length: ValueLength::try_from(v).map_err(|_| Error::ValueTooLong(v))?,\n\n })\n\n }\n\n pub const fn encoded_size() -> usize {\n\n // can't use sizeof Self as bincode has no padding while the struct might.\n\n size_of::<KeyLength>() + size_of::<ValueLength>()\n\n }\n\n pub fn serialize_into<W: Write>(&self, w: W) -> Result<()> {\n\n Ok(bincode::serialize_into(w, self)?)\n\n }\n\n}\n\n\n", "file_path": "src/sstable/ondisk_format.rs", "rank": 52, "score": 25.67458738077324 }, { "content": " &mut self.w\n\n }\n\n pub fn into_inner(self) -> W {\n\n self.w\n\n }\n\n}\n\n\n\nimpl<W: Write> Write for PosWriter<W> {\n\n fn write(&mut self, buf: &[u8]) -> Result<usize> {\n\n let l = self.w.write(buf)?;\n\n self.offset += l as u64;\n\n Ok(l)\n\n }\n\n\n\n fn flush(&mut self) -> Result<()> {\n\n self.w.flush()\n\n }\n\n}\n", "file_path": "src/sstable/poswriter.rs", "rank": 53, "score": 24.40421104581772 }, { "content": " })\n\n }\n\n\n\n pub fn get<'a>(&'a self, key: &[u8]) -> Result<Option<&'a [u8]>> {\n\n self.get_with_options(key, None)\n\n }\n\n\n\n /// Get a key from the sstable with options.\n\n pub fn 
get_with_options<'a>(\n\n &'a self,\n\n key: &[u8],\n\n options: Option<GetOptions>,\n\n ) -> Result<Option<&'a [u8]>> {\n\n let use_bloom = options\n\n .map(|o| o.use_bloom)\n\n .unwrap_or(self.use_bloom_default);\n\n if use_bloom && !self.bloom.check(key) {\n\n return Ok(None);\n\n }\n\n let (offset, right_bound) = match self.index.find_bounds(key, self.index_start) {\n", "file_path": "src/sstable/reader.rs", "rank": 54, "score": 24.16316382256028 }, { "content": " page_cache: uncompressed_cache,\n\n data_start,\n\n meta,\n\n bloom,\n\n use_bloom_default: opts.use_bloom,\n\n })\n\n }\n\n\n\n fn get(&self, key: &[u8]) -> Result<Option<Bytes>> {\n\n self.get_with_options(key, None)\n\n }\n\n\n\n fn get_with_options(&self, key: &[u8], options: Option<GetOptions>) -> Result<Option<Bytes>> {\n\n let use_bloom = options\n\n .map(|o| o.use_bloom)\n\n .unwrap_or(self.use_bloom_default);\n\n if use_bloom && !self.bloom.check(key) {\n\n return Ok(None);\n\n }\n\n let index_start = self.data_start + self.meta.data_len as u64;\n", "file_path": "src/sstable/reader.rs", "rank": 55, "score": 23.126617968781535 }, { "content": "/// If your data is uncompressed, you probably better use `MmapUncompressedSSTableReader`,\n\n/// which is a lot simpler wait-free implementation.\n\n///\n\n/// However mmap's one superiority needs to be confirmed in benchmarks. 
There are benchmarks,\n\n/// but conclusions are TBD.\n\npub struct ConcurrentSSTableReader {\n\n inner: ConcurrentInnerReader,\n\n}\n\n\n\nimpl ConcurrentSSTableReader {\n\n pub fn new<P: AsRef<Path>>(filename: P) -> Result<Self> {\n\n Self::new_with_options(filename, &ReadOptions::default())\n\n }\n\n\n\n pub fn new_with_options<P: AsRef<Path>>(filename: P, opts: &ReadOptions) -> Result<Self> {\n\n let mut file = File::open(filename)?;\n\n let meta = read_metadata(&mut file)?;\n\n let data_start = meta.offset as u64;\n\n let inner = ConcurrentInnerReader::new(file, data_start, meta, opts)?;\n\n Ok(Self { inner })\n", "file_path": "src/sstable/reader.rs", "rank": 56, "score": 22.31213204213309 }, { "content": "use std::io::{Result, Write};\n\n\n\n// PosWriter is a Writer that remembers the position and can report it at any time.\n\n#[derive(Debug)]\n\npub struct PosWriter<W> {\n\n w: W,\n\n offset: u64,\n\n}\n\n\n\nimpl<W> PosWriter<W> {\n\n pub fn new(w: W, offset: u64) -> Self {\n\n PosWriter { w, offset }\n\n }\n\n pub fn current_offset(&self) -> u64 {\n\n self.offset\n\n }\n\n pub fn reset_offset(&mut self, offset: u64) {\n\n self.offset = offset;\n\n }\n\n pub fn get_mut(&mut self) -> &mut W {\n", "file_path": "src/sstable/poswriter.rs", "rank": 57, "score": 21.979836047161676 }, { "content": "#[derive(Serialize, Deserialize, Debug, Default)]\n\npub struct KVOffset {\n\n pub key_length: KeyLength,\n\n pub offset: Offset,\n\n}\n\n\n\nimpl KVOffset {\n\n pub fn new(k: usize, offset: Offset) -> Result<Self> {\n\n Ok(Self {\n\n key_length: KeyLength::try_from(k).map_err(|_| Error::KeyTooLong(k))?,\n\n offset,\n\n })\n\n }\n\n pub const fn encoded_size() -> usize {\n\n // can't use sizeof Self as bincode has no padding while the struct might.\n\n size_of::<KeyLength>() + size_of::<Offset>()\n\n }\n\n pub fn deserialize_from_eof_is_ok<R: Read>(r: R) -> Result<Option<Self>> {\n\n Ok(deserialize_from_eof_is_ok(r)?)\n\n }\n", "file_path": 
"src/sstable/ondisk_format.rs", "rank": 58, "score": 20.866977872849258 }, { "content": " let (offset, right_bound) = match self.index.find_bounds(key, index_start) {\n\n Some(v) => v,\n\n None => return Ok(None),\n\n };\n\n\n\n let chunk: Bytes = self.page_cache.get_chunk(offset, right_bound - offset)?;\n\n if let Some((start, end)) = find_value_offset_v2(&chunk, key)? {\n\n Ok(Some(chunk.slice(start..end)))\n\n } else {\n\n Ok(None)\n\n }\n\n }\n\n}\n\n\n\nimpl SSTableReader {\n\n pub fn new<P: AsRef<Path>>(filename: P) -> Result<Self> {\n\n Self::new_with_options(filename, &ReadOptions::default())\n\n }\n\n\n\n pub fn new_with_options<P: AsRef<Path>>(filename: P, opts: &ReadOptions) -> Result<Self> {\n", "file_path": "src/sstable/reader.rs", "rank": 59, "score": 20.69295158330976 }, { "content": "impl ConcurrentLRUCache {\n\n pub fn new(shards: usize, cache: Option<ReadCache>) -> Self {\n\n Self {\n\n caches: cache.map(|cache| (0..shards).map(|_| Mutex::new(cache.lru())).collect()),\n\n }\n\n }\n\n\n\n /// Get or insert the value into the cache. 
The inserted value is computed\n\n /// using the provided callback.\n\n ///\n\n /// The shard-level lock is NOT held during the computation.\n\n /// During the computation the chunk-level lock is held, so only threads contending\n\n /// on the specific chunk will get blocked.\n\n pub fn get_or_insert<F>(&self, offset: u64, func: F) -> Result<Bytes>\n\n where\n\n F: Fn() -> Result<Bytes>,\n\n {\n\n let caches = match self.caches.as_ref() {\n\n Some(caches) => caches,\n\n None => return func(),\n", "file_path": "src/sstable/concurrent_lru.rs", "rank": 60, "score": 19.767907705463713 }, { "content": "\n\npub use writer::RawSSTableWriter;\n\npub use writer::SSTableWriterV2;\n\n\n\npub use error::{Error, INVALID_DATA};\n\npub use options::*;\n\npub use result::Result;\n\npub use types::*;\n\n\n\n/// A convenience function to write a btree map to a file.\n\n///\n\n///\n\n/// Example:\n\n/// ```\n\n/// use std::collections::BTreeMap;\n\n/// use sstb::sstable::{write_btree_map, WriteOptions};\n\n///\n\n/// let mut map = BTreeMap::new();\n\n/// let filename = \"/tmp/some-sstable\";\n\n/// let write_options = WriteOptions::default();\n\n///\n\n/// map.insert(b\"foo\", b\"some foo\");\n\n/// map.insert(b\"bar\", b\"some bar\");\n\n/// write_btree_map(&map, filename, Some(write_options)).unwrap();\n\n/// ```\n", "file_path": "src/sstable/mod.rs", "rank": 61, "score": 19.73856072019538 }, { "content": " let mut file = File::open(filename)?;\n\n let meta = read_metadata(&mut file)?;\n\n let data_start = meta.offset as u64;\n\n let inner = InnerReader::new(file, data_start, meta, opts)?;\n\n Ok(SSTableReader { inner })\n\n }\n\n pub fn get(&mut self, key: &[u8]) -> Result<Option<&[u8]>> {\n\n self.inner.get(key)\n\n }\n\n}\n\n\n\n/// A reader that can be used efficiently from multiple threads.\n\n///\n\n/// There is internal mutability inside. 
The LRU caches are sharded into multiple locks.\n\n///\n\n/// You get `Bytes` references in return instead of slices, so that atomic reference counting\n\n/// can happen behind the scenes for properly tracking chunks still in-use.\n\n///\n\n/// If you want to use this with multiple threads just put it into an `Arc`.\n\n///\n", "file_path": "src/sstable/reader.rs", "rank": 62, "score": 19.419992799539443 }, { "content": " pub fn serialize_into<W: Write>(&self, w: W) -> Result<()> {\n\n Ok(bincode::serialize_into(w, self)?)\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Default, Debug)]\n\npub struct BloomV3_0 {\n\n pub bitmap_bytes: u32,\n\n pub k_num: u32,\n\n pub sip_keys: [(u64, u64); 2],\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Default, Debug)]\n\npub struct MetaV3_0 {\n\n pub data_len: u64,\n\n pub index_len: u64,\n\n pub bloom_len: u64,\n\n pub items: u64,\n\n pub compression: Compression,\n\n // updating this field is done as the last step.\n", "file_path": "src/sstable/ondisk_format.rs", "rank": 63, "score": 18.455937086991035 }, { "content": " fn write_sstable(&self, filename: &str, write_opts: &WriteOptions) -> Result<()> {\n\n let mut iter = self.sorted_iter.clone();\n\n\n\n let mut writer = writer::SSTableWriterV2::new_with_options(filename, write_opts)?;\n\n\n\n while let Some(key) = iter.next() {\n\n writer.set(key, key)?;\n\n }\n\n\n\n writer.finish()\n\n }\n\n}\n\n\n", "file_path": "benches/sstable_implementations.rs", "rank": 64, "score": 18.448286385294864 }, { "content": "impl FileBackedPageCache {\n\n pub fn new(file: File, cache: Option<ReadCache>, count: usize) -> Self {\n\n Self {\n\n file,\n\n caches: ConcurrentLRUCache::new(count, cache),\n\n }\n\n }\n\n fn read_chunk(&self, offset: u64, length: u64) -> Result<Bytes> {\n\n let buf = pread_exact(self.file.as_raw_fd(), offset, length)?;\n\n Ok(Bytes::from(buf))\n\n }\n\n}\n\n\n\nimpl ConcurrentPageCache for FileBackedPageCache {\n\n fn get_chunk(&self, offset: u64, length: u64) -> 
Result<Bytes> {\n\n self.caches\n\n .get_or_insert(offset, || self.read_chunk(offset, length))\n\n }\n\n}\n\n\n", "file_path": "src/sstable/concurrent_page_cache.rs", "rank": 65, "score": 17.594867750089104 }, { "content": " let mut g = self.value.write();\n\n match g.as_mut() {\n\n Some(bytes) => Ok(bytes.clone()),\n\n None => {\n\n let value = func()?;\n\n g.replace(value.clone());\n\n Ok(value)\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// An LRU cache of Bytes that can be used by multiple threads\n\n/// concurrently.\n\n///\n\n/// Works by sharding the single-threaded LRUCache into multiple shards.\n\npub struct ConcurrentLRUCache {\n\n caches: Option<Vec<Mutex<lru::LruCache<u64, Arc<Inner>>>>>,\n\n}\n\n\n", "file_path": "src/sstable/concurrent_lru.rs", "rank": 66, "score": 17.423328700884436 }, { "content": "\n\n/// The resulting sstable files MUST have this prefix.\n\npub const MAGIC: &[u8] = b\"\\x80LSM\";\n\npub type KeyLength = u16;\n\npub type ValueLength = u32;\n\npub type Offset = u64;\n\n\n\nuse super::error::{Error, INVALID_DATA};\n\nuse super::result::Result;\n\nuse super::types::Compression;\n\nuse super::utils::deserialize_from_eof_is_ok;\n\nuse core::mem::size_of;\n\nuse std::cmp::{Ord, Ordering};\n\nuse std::convert::TryFrom;\n\nuse std::io::{Read, Write};\n\n\n\npub use super::types::Version;\n\n\n\n#[derive(Serialize, Deserialize, Debug, Default)]\n\npub struct KVLength {\n", "file_path": "src/sstable/ondisk_format.rs", "rank": 67, "score": 17.401556512341926 }, { "content": " }\n\n}\n\n\n\npub struct SnappyUncompress {}\n\n\n\nimpl<W: Write> Write for SnappyCompressor<W> {\n\n fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {\n\n self.inner.write(buf)\n\n }\n\n\n\n fn flush(&mut self) -> std::io::Result<()> {\n\n self.inner.flush()\n\n }\n\n}\n\n\n\nimpl<W: Write> Compressor<W> for SnappyCompressor<W> {\n\n fn into_inner(self) -> Result<W> {\n\n self.inner.into_inner().map_err(|e| {\n\n let kind = e.error().kind();\n\n let io = 
std::io::Error::from(kind);\n", "file_path": "src/sstable/compression.rs", "rank": 68, "score": 16.891109958773672 }, { "content": "impl<W: Write> ZlibCompressorFactory<W> {\n\n pub fn new(compression: Option<flate2::Compression>) -> Self {\n\n ZlibCompressorFactory {\n\n compression: compression.unwrap_or_default(),\n\n marker: std::marker::PhantomData {},\n\n }\n\n }\n\n}\n\n\n\nimpl<W: Write> CompressorFactory<W, ZlibCompressor<W>> for ZlibCompressorFactory<W> {\n\n fn from_writer(&self, writer: W) -> ZlibCompressor<W> {\n\n ZlibCompressor::new(writer, self.compression)\n\n }\n\n}\n\n\n\nimpl Uncompress for ZlibUncompress {\n\n fn uncompress(&self, buf: &[u8]) -> Result<Vec<u8>> {\n\n let mut dec = flate2::read::ZlibDecoder::new(Cursor::new(buf));\n\n // TODO: buf.len() here is a bad heuristic. Need the real number, this can be pulled during\n\n // compression.\n", "file_path": "src/sstable/compression.rs", "rank": 69, "score": 16.44995242391304 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl<W: Write> Write for ZlibCompressor<W> {\n\n fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {\n\n self.inner.write(buf)\n\n }\n\n\n\n fn flush(&mut self) -> std::io::Result<()> {\n\n self.inner.flush()\n\n }\n\n}\n\n\n\nimpl<W: Write> Compressor<W> for ZlibCompressor<W> {\n\n fn into_inner(self) -> Result<W> {\n\n Ok(self.inner.finish()?)\n\n }\n\n}\n\n\n", "file_path": "src/sstable/compression.rs", "rank": 70, "score": 16.38221438332255 }, { "content": " }\n\n}\n\n\n\nimpl<R: Read + Seek> PageCache for ReadPageCache<R> {\n\n fn get_chunk(&mut self, offset: u64, length: u64) -> Result<&[u8]> {\n\n match self.cache.get(&offset) {\n\n Some(bytes) => Ok(unsafe { &*(bytes as &[u8] as *const [u8]) }),\n\n None => {\n\n let mut buf = vec![0; usize::try_from(length)?];\n\n // TODO: this can use pread instead of 2 syscalls.\n\n self.reader.seek(SeekFrom::Start(offset))?;\n\n self.reader.read_exact(&mut buf)?;\n\n self.cache.put(offset, buf);\n\n 
Ok(self.cache.get(&offset).unwrap())\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// A cache that wraps another one, uncompresses the inner cache's results and\n", "file_path": "src/sstable/page_cache.rs", "rank": 71, "score": 16.297011162694112 }, { "content": " let mut buf = Vec::with_capacity(buf.len());\n\n dec.read_to_end(&mut buf)?;\n\n Ok(buf)\n\n }\n\n}\n\n\n\n/// Snappy\n\npub struct SnappyCompressorFactory<W: Write> {\n\n marker: std::marker::PhantomData<W>,\n\n}\n\n\n\npub struct SnappyCompressor<W: Write> {\n\n inner: snap::Writer<W>,\n\n}\n\n\n\nimpl<W: Write> SnappyCompressor<W> {\n\n pub fn new(writer: W) -> Self {\n\n Self {\n\n inner: snap::Writer::new(writer),\n\n }\n", "file_path": "src/sstable/compression.rs", "rank": 72, "score": 16.19597659375784 }, { "content": "/// store the uncompressed chunks in the LRU cache inside.\n\npub struct WrappedCache<PC, U> {\n\n inner: PC,\n\n cache: LruCache<u64, Vec<u8>>,\n\n uncompress: U,\n\n}\n\n\n\nimpl<PC, U> WrappedCache<PC, U> {\n\n pub fn new(inner: PC, uncompress: U, cache: ReadCache) -> Self {\n\n Self {\n\n inner,\n\n cache: cache.lru(),\n\n uncompress,\n\n }\n\n }\n\n}\n\n\n\nimpl PageCache for Box<dyn PageCache> {\n\n fn get_chunk(&mut self, offset: u64, length: u64) -> Result<&[u8]> {\n\n self.as_mut().get_chunk(offset, length)\n", "file_path": "src/sstable/page_cache.rs", "rank": 73, "score": 16.092943945948868 }, { "content": "}\n\n\n\nimpl Index for OwnedIndex {\n\n fn find_bounds(&self, key: &[u8], end_default: u64) -> Option<(u64, u64)> {\n\n find_bounds(&self.index, key, end_default)\n\n }\n\n}\n\n\n\n/// The default single-threaded reader for sstables.\n\n///\n\n/// As the get() method takes a mutable reference, you will not be able to use this in\n\n/// multiple threads.\n\npub struct SSTableReader {\n\n inner: InnerReader,\n\n}\n\n\n", "file_path": "src/sstable/reader.rs", "rank": 74, "score": 16.018747183955707 }, { "content": " }\n\n pub fn get(&self, key: &[u8]) -> Result<Option<Bytes>> {\n\n 
self.inner.get(key)\n\n }\n\n}\n\n\n\n/// A multi-threaded reader that only works with fully uncompressed data.\n\n///\n\n/// There is no locking happening inside, there is no internal mutability either.\n\n/// Everything just relies on the OS page cache to work, so if you are ok with storing\n\n/// uncompressed sstables, this reader the way to go.\n\n///\n\n/// If you try to use it with a compressed sstable it will return `Error::CantUseCompressedFileWithMultiThreadedMmap`\n\n///\n\n/// If you want to use this with multiple threads just put it into an Arc without Mutex'es.\n\npub struct MmapUncompressedSSTableReader {\n\n index_start: u64,\n\n mmap: memmap::Mmap,\n\n index: MemIndex,\n\n use_bloom_default: bool,\n", "file_path": "src/sstable/reader.rs", "rank": 75, "score": 15.900397033309257 }, { "content": " )?;\n\n let key_end = kvoffset_encoded_size + kvoffset.key_length as usize;\n\n let key = index_data\n\n .get(kvoffset_encoded_size..key_end)\n\n .ok_or(INVALID_DATA)?;\n\n let key: &'static [u8] = unsafe { &*(key as *const _) };\n\n index.insert(key, kvoffset.offset);\n\n if index_data.len() == key_end {\n\n break;\n\n }\n\n index_data = &index_data[key_end..];\n\n }\n\n\n\n Ok(Self { index })\n\n }\n\n}\n\n\n\nimpl Index for MemIndex {\n\n fn find_bounds(&self, key: &[u8], end_default: u64) -> Option<(u64, u64)> {\n\n find_bounds(&self.index, key, end_default)\n\n }\n\n}\n\n\n", "file_path": "src/sstable/reader.rs", "rank": 76, "score": 15.640957214003643 }, { "content": " let (index, bloom): (Box<dyn Index>, Bloom<[u8]>) = match meta.compression {\n\n Compression::None => match mmap_buf {\n\n Some(mmap) => {\n\n let index = Box::new(MemIndex::from_static_buf(\n\n // if it was mmaped, it won't truncate\n\n #[allow(clippy::cast_possible_truncation)]\n\n &mmap\n\n .get(index_start as usize..index_end as usize)\n\n .ok_or(INVALID_DATA)?,\n\n meta.index_len,\n\n )?);\n\n file.seek(SeekFrom::Start(index_end))?;\n\n let bloom = read_bloom((&mut 
file).take(meta.bloom_len), &meta.bloom)?;\n\n (index, bloom)\n\n }\n\n None => {\n\n let index =\n\n Box::new(OwnedIndex::from_reader((&mut file).take(meta.index_len))?);\n\n let bloom = read_bloom((&mut file).take(meta.bloom_len), &meta.bloom)?;\n\n (index, bloom)\n", "file_path": "src/sstable/reader.rs", "rank": 77, "score": 15.306541972616742 }, { "content": " let (index, bloom): (Box<dyn Index + Send + Sync>, Bloom<[u8]>) = match meta.compression {\n\n Compression::None => match mmap_buf {\n\n Some(mmap) => {\n\n let index = Box::new(MemIndex::from_static_buf(\n\n // if it was mmaped, it won't truncate\n\n #[allow(clippy::cast_possible_truncation)]\n\n &mmap\n\n .get(index_start as usize..index_end as usize)\n\n .ok_or(INVALID_DATA)?,\n\n meta.index_len,\n\n )?);\n\n\n\n file.seek(SeekFrom::Start(index_end))?;\n\n let bloom = read_bloom((&mut file).take(meta.bloom_len), &meta.bloom)?;\n\n (index, bloom)\n\n }\n\n None => {\n\n let index =\n\n Box::new(OwnedIndex::from_reader((&mut file).take(meta.index_len))?);\n\n let bloom = read_bloom((&mut file).take(meta.bloom_len), &meta.bloom)?;\n", "file_path": "src/sstable/reader.rs", "rank": 78, "score": 15.247720799513399 }, { "content": " };\n\n\n\n // if it was mmaped, it won't truncate\n\n #[allow(clippy::cast_possible_truncation)]\n\n let index = MemIndex::from_static_buf(\n\n &mmap_buf\n\n .get(index_start as usize..index_end as usize)\n\n .ok_or(INVALID_DATA)?,\n\n meta.index_len,\n\n )?;\n\n\n\n file.seek(SeekFrom::Start(index_end))?;\n\n let bloom = read_bloom((&mut file).take(meta.bloom_len), &meta.bloom)?;\n\n\n\n Ok(Self {\n\n mmap,\n\n index,\n\n index_start,\n\n bloom,\n\n use_bloom_default: opts.use_bloom,\n", "file_path": "src/sstable/reader.rs", "rank": 79, "score": 14.990643180320419 }, { "content": "/// ];\n\n/// for expected_value in expected.into_iter() {\n\n/// assert_eq!(iter.next(), Some(*expected_value as &[u8]));\n\n/// }\n\n///\n\n/// assert_eq!(iter.next(), None);\n\n/// 
assert_eq!(iter.next(), None);\n\n/// ```\n\npub struct SortedBytesIterator {\n\n buf: Vec<u8>,\n\n // points to the element being made larger.\n\n current: usize,\n\n first: u8,\n\n last: u8,\n\n counter: usize,\n\n limit: usize,\n\n}\n\n\n\nimpl Clone for SortedBytesIterator {\n\n fn clone(&self) -> Self {\n", "file_path": "src/utils/sorted_bytes_iterator.rs", "rank": 80, "score": 14.640129925423217 }, { "content": " // if this was mmaped, there will be no truncation.\n\n #[allow(clippy::cast_possible_truncation)]\n\n self.buf\n\n .get(offset as usize..(offset + length) as usize)\n\n .ok_or(error::INVALID_DATA)\n\n }\n\n}\n\n\n\n/// This is used to read from a file (or any seek'able reader).\n\npub struct ReadPageCache<R> {\n\n reader: R,\n\n cache: LruCache<u64, Vec<u8>>,\n\n}\n\n\n\nimpl<R> ReadPageCache<R> {\n\n pub fn new(reader: R, cache: ReadCache) -> Self {\n\n Self {\n\n reader,\n\n cache: cache.lru(),\n\n }\n", "file_path": "src/sstable/page_cache.rs", "rank": 81, "score": 14.262206457751208 }, { "content": " /// Reset the state of the iterator to initial.\n\n pub fn reset(&mut self) {\n\n for v in self.buf.iter_mut() {\n\n *v = self.first;\n\n }\n\n self.current = self.buf.len();\n\n self.counter = 0;\n\n }\n\n\n\n /// Customize the bytes being returned.\n\n pub fn new_first_last(length: usize, first: u8, last: u8, limit: usize) -> Result<Self> {\n\n if length == 0 {\n\n return Err(Error::ProgrammingError(\"length should be greater than 0\"));\n\n }\n\n if last <= first {\n\n return Err(Error::ProgrammingError(\"expected last > first\"));\n\n }\n\n let buf = vec![first; length];\n\n Ok(Self {\n\n buf,\n", "file_path": "src/utils/sorted_bytes_iterator.rs", "rank": 82, "score": 14.215214492864822 }, { "content": "pub struct WrappedCache<PC, U> {\n\n inner: PC,\n\n caches: ConcurrentLRUCache,\n\n uncompress: U,\n\n}\n\n\n\nimpl<PC, U> WrappedCache<PC, U> {\n\n pub fn new(inner: PC, uncompress: U, cache: Option<ReadCache>, count: usize) -> Self {\n\n Self 
{\n\n inner,\n\n caches: ConcurrentLRUCache::new(count, cache),\n\n uncompress,\n\n }\n\n }\n\n}\n\n\n\nimpl ConcurrentPageCache for Box<dyn ConcurrentPageCache + Send + Sync> {\n\n fn get_chunk(&self, offset: u64, length: u64) -> Result<Bytes> {\n\n self.as_ref().get_chunk(offset, length)\n\n }\n", "file_path": "src/sstable/concurrent_page_cache.rs", "rank": 83, "score": 14.010830500308675 }, { "content": "use std::io::{Read, Result};\n\n\n\n/// PosReader is a reader that remembers the position read and can\n\n/// report it back at any time.\n\n#[derive(Debug)]\n\npub struct PosReader<R> {\n\n r: R,\n\n offset: usize,\n\n}\n\n\n\nimpl<R> PosReader<R> {\n\n pub fn new(r: R, offset: usize) -> Self {\n\n PosReader { r, offset }\n\n }\n\n pub fn current_offset(&self) -> usize {\n\n self.offset\n\n }\n\n pub fn into_inner(self) -> R {\n\n self.r\n\n }\n", "file_path": "src/sstable/posreader.rs", "rank": 84, "score": 13.973469217330237 }, { "content": "\n\n #[allow(clippy::infallible_destructuring_match)]\n\n let meta = match meta.meta {\n\n MetaData::V3_0(meta) => meta,\n\n };\n\n\n\n if meta.compression != Compression::None {\n\n return Err(Error::CantUseCompressedFileWithMultiThreadedMmap);\n\n }\n\n\n\n let index_start = data_start + (meta.data_len as u64);\n\n let index_end = index_start + meta.index_len;\n\n\n\n file.seek(SeekFrom::Start(index_start))?;\n\n let mmap = unsafe { memmap::Mmap::map(&file) }?;\n\n let mmap_buf = {\n\n let buf = &mmap as &[u8];\n\n let buf = buf as *const [u8];\n\n let buf: &'static [u8] = unsafe { &*buf };\n\n buf\n", "file_path": "src/sstable/reader.rs", "rank": 85, "score": 13.781856593175547 }, { "content": "}\n\n\n\nimpl<R: Read> Read for PosReader<R> {\n\n fn read(&mut self, buf: &mut [u8]) -> Result<usize> {\n\n let l = self.r.read(buf)?;\n\n self.offset += l;\n\n Ok(l)\n\n }\n\n}\n", "file_path": "src/sstable/posreader.rs", "rank": 86, "score": 13.689256277634836 }, { "content": "use super::Version;\n\n\n\npub const 
INVALID_DATA: Error = Error::InvalidData(\"corrupt SStable or bug\");\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Io(std::io::Error),\n\n CantUseCompressedFileWithMultiThreadedMmap,\n\n ProgrammingError(&'static str),\n\n InvalidData(&'static str),\n\n UnsupportedVersion(Version),\n\n Bincode(bincode::Error),\n\n Utf8Error(std::str::Utf8Error),\n\n KeyTooLong(usize),\n\n ValueTooLong(usize),\n\n StdStringFromUtf8Error(std::string::FromUtf8Error),\n\n TryFromInt(std::num::TryFromIntError),\n\n NixError(nix::Error),\n\n}\n\n\n", "file_path": "src/sstable/error.rs", "rank": 87, "score": 13.634737157348521 }, { "content": " Error::from(io)\n\n })\n\n }\n\n}\n\n\n\nimpl<W: Write> SnappyCompressorFactory<W> {\n\n pub fn new() -> Self {\n\n Self {\n\n marker: std::marker::PhantomData {},\n\n }\n\n }\n\n}\n\n\n\nimpl<W: Write> CompressorFactory<W, SnappyCompressor<W>> for SnappyCompressorFactory<W> {\n\n fn from_writer(&self, writer: W) -> SnappyCompressor<W> {\n\n SnappyCompressor::new(writer)\n\n }\n\n}\n\n\n\nimpl Uncompress for SnappyUncompress {\n", "file_path": "src/sstable/compression.rs", "rank": 88, "score": 13.619349732298687 }, { "content": "//! Ondisk format structs, serialized with \"bincode\".\n\n//!\n\n//! Ondisk format has the following preamble.\n\n//!\n\n//! | MAGIC: [u8; 4] | version: struct{u16, u16} |\n\n//!\n\n//! Then depending on version the rest of the file is structured.\n\n//! Full v1 format looks like this:\n\n//!\n\n//! | MAGIC: [u8; 4] | Version_1_0: [1u16, 0u16] | Meta_V1_0 | DATA | INDEX_DATA |\n\n//!\n\n//! V1 data has the following layout\n\n//!\n\n//! | KVLength | key: [u8] | value: [u8] |\n\n//!\n\n//! V1 index data has the following layout\n\n//!\n\n//! | KVOffset | key: [u8] | offset: Offset |\n\n\n\nuse serde::{Deserialize, Serialize};\n", "file_path": "src/sstable/ondisk_format.rs", "rank": 89, "score": 12.833384736149398 }, { "content": "//!\n\n//! ```\n\n//! use sstb::*;\n\n//! use std::collections::BTreeMap;\n\n//!\n\n//! 
let filename = \"/tmp/example-sstable\";\n\n//! let mut map = BTreeMap::new();\n\n//! map.insert(b\"foo\", b\"some foo\");\n\n//! map.insert(b\"bar\", b\"some bar\");\n\n//!\n\n//! write_btree_map(&map, filename, None).unwrap();\n\n//!\n\n//! let mut reader =\n\n//! SSTableReader::new_with_options(filename, &ReadOptions::default())\n\n//! .unwrap();\n\n\n\n//! assert_eq!(reader.get(b\"foo\").unwrap(), Some(b\"some foo\" as &[u8]));\n\n//! assert_eq!(reader.get(b\"bar\").unwrap(), Some(b\"some bar\" as &[u8]));\n\n//! assert_eq!(reader.get(b\"foobar\").unwrap(), None);\n\n//! ```\n\n\n\npub mod sstable;\n\npub mod utils;\n\n\n\npub use sstable::*;\n\n\n\n#[cfg(test)]\n\nmod tests {}\n", "file_path": "src/lib.rs", "rank": 90, "score": 12.756074265414359 }, { "content": " }\n\n}\n\n\n\nimpl<PC, U> PageCache for WrappedCache<PC, U>\n\nwhere\n\n U: Uncompress,\n\n PC: PageCache,\n\n{\n\n fn get_chunk(&mut self, offset: u64, length: u64) -> Result<&[u8]> {\n\n match self.cache.get(&offset) {\n\n Some(bytes) => Ok(unsafe { &*(bytes as &[u8] as *const [u8]) }),\n\n None => {\n\n let inner_chunk = self.inner.get_chunk(offset, length)?;\n\n let buf = self.uncompress.uncompress(inner_chunk)?;\n\n self.cache.put(offset, buf);\n\n Ok(self.cache.get(&offset).unwrap())\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/sstable/page_cache.rs", "rank": 91, "score": 12.426204465262298 }, { "content": " );\n\n },\n\n );\n\n\n\n for (prefix, write_opts, read_opts) in variants.iter() {\n\n state.write_sstable(filename, &write_opts).unwrap();\n\n\n\n group.bench_function(BenchmarkId::new(*prefix, threads), |b| {\n\n b.iter_batched(\n\n || ConcurrentSSTableReader::new_with_options(filename, &read_opts).unwrap(),\n\n |reader| {\n\n pool.install(|| {\n\n state.get_shuffled_input_ref().par_iter().for_each(|kv| {\n\n let KV { key, is_present } = &kv;\n\n let key = key as &[u8];\n\n let value = reader.get(key).unwrap();\n\n if *is_present {\n\n assert_eq!(value.as_deref(), Some(key));\n\n } else 
{\n\n assert_eq!(value, None);\n", "file_path": "benches/sstable_implementations.rs", "rank": 92, "score": 12.100472128454792 }, { "content": " group.bench_function(BenchmarkId::new(*prefix, *size), |b| {\n\n b.iter_batched(\n\n || SSTableReader::new_with_options(filename, &read_opts).unwrap(),\n\n |mut reader| {\n\n for kv in state.get_shuffled_input() {\n\n let KV { key, is_present } = &kv;\n\n let key = key as &[u8];\n\n let value = reader.get(key).unwrap();\n\n if *is_present {\n\n assert_eq!(value, Some(key));\n\n } else {\n\n assert_eq!(value, None);\n\n }\n\n }\n\n },\n\n BatchSize::LargeInput,\n\n );\n\n });\n\n }\n\n }\n", "file_path": "benches/sstable_implementations.rs", "rank": 93, "score": 11.991764878083828 }, { "content": "mod compress_ctx_writer;\n\nmod compression;\n\nmod concurrent_lru;\n\nmod concurrent_page_cache;\n\nmod error;\n\nmod ondisk_format;\n\nmod options;\n\nmod page_cache;\n\nmod posreader;\n\nmod poswriter;\n\nmod result;\n\nmod types;\n\nmod utils;\n\n\n\npub mod reader;\n\npub mod writer;\n\n\n\npub use reader::ConcurrentSSTableReader;\n\npub use reader::MmapUncompressedSSTableReader;\n\npub use reader::SSTableReader;\n", "file_path": "src/sstable/mod.rs", "rank": 94, "score": 11.96769643622501 }, { "content": "use std::path::Path;\n\n\n\nuse bincode;\n\nuse memmap;\n\nuse num_cpus;\n\n\n\nuse bloomfilter::Bloom;\n\nuse bytes::Bytes;\n\n\n\nuse super::error::INVALID_DATA;\n\nuse super::ondisk_format::*;\n\nuse super::options::*;\n\nuse super::types::*;\n\nuse super::{compression, concurrent_page_cache, page_cache, posreader, Error, Result};\n\n\n", "file_path": "src/sstable/reader.rs", "rank": 95, "score": 11.88445495001495 }, { "content": " |reader| {\n\n for kv in state.get_shuffled_input() {\n\n let KV { key, is_present } = &kv;\n\n let key = key as &[u8];\n\n let value = reader.get(key).unwrap();\n\n if *is_present {\n\n assert_eq!(value, Some(key));\n\n } else {\n\n assert_eq!(value, None);\n\n }\n\n }\n\n },\n\n 
BatchSize::LargeInput,\n\n );\n\n },\n\n );\n\n\n\n for (prefix, write_opts, read_opts) in variants.iter() {\n\n state.write_sstable(filename, &write_opts).unwrap();\n\n\n", "file_path": "benches/sstable_implementations.rs", "rank": 96, "score": 11.876882402126888 }, { "content": "# TODO ([x] means done)\n\n- [ ] Prettify and publish benchmark results in the readme. For now one can \"cargo bench\" and look at the reports.\n\n- [x] cache=none does not work. It uses unbounded cache as default which is incorrect.\n\n- [-] open-source\n\n - [x] write README with badges\n\n - [ ] Travis tests etc\n\n- [ ] backtraces in errors\n\n- [ ] range queries\n\n- [x] bloom filters on disk\n\n - they slowed things down by 25% though! but it works\n\n- [ ] writing \"flush_every\"'s default should depend on the default compression.\n\n- [ ] read cache size configurable both for page cache and for uncompressed cache\n\n- [ ] read cache size should be in bytes, not blocks\n\n- [ ] cache cannot be explicitly disabled in some places\n\n- [ ] add length to encoded bits\n\n- [ ] indexes as separate files\n\n in this case don't need to maintain the index in memory while writing\n\n- [ ] remove as much as possible unsafe and unwrap\n\n - [ ] Mmap can be put into an Arc, to remove unsafe static buffer casts. 
This should not matter at runtime.\n\n- [ ] the index can store the number of items and uncompressed length (in case the file is compressed)\n\n - the uncompressed length can be used when allocating memory for uncompressed chunks\n\n - the number of items in the chunk can be used for HashMap capacity IF we get back the \"Block\" structure which helps not scan the whole table every time.\n\n - there's a space tradeoff here, so maybe it's all not worth it\n\n- [ ] consider getting back the \"Block\" trait and its implementations\n\n - this will help not scan through the chunk on each get()\n\n - however, there are costs\n\n - need to allocate HashMaps for lookups\n\n - if length is not known, might even reallocate\n\n - messes up the concurrency as the hashmap becomes the contention point\n\n - an RWLock might help for the majority of the cases\n\n - even if all this is implemented, it's totally not guaranteed that it's going to be faster in the end.\n\n- [x] u16 keys and u32 values, not u64, for saving space\n\n- [x] mmap with no compression is already multi-threaded, but the API does not\n\n reflect that\n\n- [x] zlib bounded and unbounded performs the same in benchmarks\n\n- [x] analyze all casts from u64 to usize\n\n - [x] clippy actually has a lint for it in pedantic\n\n- [x] multi-threading\n\n- [x] compression is all over the place\n\n- [x] files and imports are all over the place, reorganize\n\n- [x] fail if keys or values are too long (> u32_max)\n\n- [x] byte keys\n\n - [x] also \"memchr\" is very slow, better to use offsets\n", "file_path": "TODO.md", "rank": 97, "score": 11.767948544841154 }, { "content": "//! 
Traits and implementations of various compressors and decompressors.\n\n\n\nuse super::Result;\n\n\n\nuse super::Error;\n\nuse snap;\n\nuse std::io::{Cursor, Read, Write};\n\n\n", "file_path": "src/sstable/compression.rs", "rank": 98, "score": 11.568370828017864 }, { "content": " Compression::Snappy => {\n\n let dec = compression::SnappyUncompress {};\n\n let cache = opts.cache.clone().unwrap_or_default();\n\n let wrapped = page_cache::WrappedCache::new(pc, dec, cache);\n\n Box::new(wrapped)\n\n }\n\n };\n\n\n\n Ok(Self {\n\n _mmap: mmap,\n\n index,\n\n page_cache: uncompressed_cache,\n\n data_start,\n\n meta,\n\n bloom,\n\n use_bloom_default: opts.use_bloom,\n\n })\n\n }\n\n\n\n fn get_with_options(\n", "file_path": "src/sstable/reader.rs", "rank": 99, "score": 11.553275137549367 } ]
Rust
fifteen_min/src/isochrone.rs
balbok0/abstreet
3af15fefdb2772c83864c08724318418da8190a9
use std::collections::{HashMap, HashSet}; use abstutil::MultiMap; use geom::{Duration, Polygon}; use map_gui::tools::Grid; use map_model::{ connectivity, AmenityType, BuildingID, BuildingType, LaneType, Map, Path, PathConstraints, PathRequest, }; use widgetry::{Color, Drawable, EventCtx, GeomBatch}; use crate::App; pub struct Isochrone { pub start: BuildingID, pub options: Options, pub draw: Drawable, pub time_to_reach_building: HashMap<BuildingID, Duration>, pub amenities_reachable: MultiMap<AmenityType, BuildingID>, pub population: usize, pub onstreet_parking_spots: usize, } #[derive(Clone)] pub enum Options { Walking(connectivity::WalkingOptions), Biking, } impl Options { pub fn time_to_reach_building( self, map: &Map, start: BuildingID, ) -> HashMap<BuildingID, Duration> { match self { Options::Walking(opts) => { connectivity::all_walking_costs_from(map, start, Duration::minutes(15), opts) } Options::Biking => connectivity::all_vehicle_costs_from( map, start, Duration::minutes(15), PathConstraints::Bike, ), } } } impl Isochrone { pub fn new(ctx: &mut EventCtx, app: &App, start: BuildingID, options: Options) -> Isochrone { let time_to_reach_building = options.clone().time_to_reach_building(&app.map, start); let mut amenities_reachable = MultiMap::new(); let mut population = 0; let mut all_roads = HashSet::new(); for b in time_to_reach_building.keys() { let bldg = app.map.get_b(*b); for amenity in &bldg.amenities { if let Some(category) = AmenityType::categorize(&amenity.amenity_type) { amenities_reachable.insert(category, bldg.id); } } match bldg.bldg_type { BuildingType::Residential { num_residents, .. 
} | BuildingType::ResidentialCommercial(num_residents, _) => { population += num_residents; } _ => {} } all_roads.insert(app.map.get_l(bldg.sidewalk_pos.lane()).parent); } let mut onstreet_parking_spots = 0; for r in all_roads { let r = app.map.get_r(r); for (l, _, lt) in r.lanes_ltr() { if lt == LaneType::Parking { onstreet_parking_spots += app.map.get_l(l).number_parking_spots(app.map.get_config()); } } } let mut i = Isochrone { start, options, draw: Drawable::empty(ctx), time_to_reach_building, amenities_reachable, population, onstreet_parking_spots, }; i.draw = i.draw_isochrone(app).upload(ctx); i } pub fn path_to(&self, map: &Map, to: BuildingID) -> Option<Path> { if !self.time_to_reach_building.contains_key(&to) { return None; } let req = PathRequest::between_buildings( map, self.start, to, match self.options { Options::Walking(_) => PathConstraints::Pedestrian, Options::Biking => PathConstraints::Bike, }, )?; map.pathfind(req).ok() } pub fn draw_isochrone(&self, app: &App) -> GeomBatch { let bounds = app.map.get_bounds(); let resolution_m = 100.0; let mut grid: Grid<f64> = Grid::new( (bounds.width() / resolution_m).ceil() as usize, (bounds.height() / resolution_m).ceil() as usize, 0.0, ); for (b, cost) in &self.time_to_reach_building { let pt = app.map.get_b(*b).polygon.center(); let idx = grid.idx( ((pt.x() - bounds.min_x) / resolution_m) as usize, ((pt.y() - bounds.min_y) / resolution_m) as usize, ); grid.data[idx] = cost.inner_seconds(); } let thresholds = vec![ 0.1, Duration::minutes(5).inner_seconds(), Duration::minutes(10).inner_seconds(), Duration::minutes(15).inner_seconds(), ]; let colors = vec![ Color::GREEN.alpha(0.5), Color::ORANGE.alpha(0.5), Color::RED.alpha(0.5), ]; let smooth = false; let c = contour::ContourBuilder::new(grid.width as u32, grid.height as u32, smooth); let mut batch = GeomBatch::new(); for (feature, color) in c .contours(&grid.data, &thresholds) .unwrap() .into_iter() .zip(colors) { match feature.geometry.unwrap().value { 
geojson::Value::MultiPolygon(polygons) => { for p in polygons { if let Ok(poly) = Polygon::from_geojson(&p) { batch.push(color, poly.scale(resolution_m)); } } } _ => unreachable!(), } } batch } }
use std::collections::{HashMap, HashSet}; use abstutil::MultiMap; use geom::{Duration, Polygon}; use map_gui::tools::Grid; use map_model::{ connectivity, AmenityType, BuildingID, BuildingType, LaneType, Map, Path, PathConstraints, PathRequest, }; use widgetry::{Color, Drawable, EventCtx, GeomBatch}; use crate::App; pub struct Isochrone { pub start: BuildingID, pub options: Options, pub draw: Drawable, pub time_to_reach_building: HashMap<BuildingID, Duration>, pub amenities_reachable: MultiMap<AmenityType, BuildingID>, pub population: usize, pub onstreet_parking_spots: usize, } #[derive(Clone)] pub enum Options { Walking(connectivity::WalkingOptions), Biking, } impl Options { pub fn time_to_reach_building( self, map: &Map, start: BuildingID, ) -> HashMap<BuildingID, Duration> { match self { Options::Walking(opts) => { connectivity::all_walking_costs_from(map, start, Duration::minutes(15), opts) } Options::Biking => connectivity::all_vehicle_costs_from( map, start, Duration::minutes(15), PathConstraints::Bike, ), } } } impl Isochrone {
pub fn path_to(&self, map: &Map, to: BuildingID) -> Option<Path> { if !self.time_to_reach_building.contains_key(&to) { return None; } let req = PathRequest::between_buildings( map, self.start, to, match self.options { Options::Walking(_) => PathConstraints::Pedestrian, Options::Biking => PathConstraints::Bike, }, )?; map.pathfind(req).ok() } pub fn draw_isochrone(&self, app: &App) -> GeomBatch { let bounds = app.map.get_bounds(); let resolution_m = 100.0; let mut grid: Grid<f64> = Grid::new( (bounds.width() / resolution_m).ceil() as usize, (bounds.height() / resolution_m).ceil() as usize, 0.0, ); for (b, cost) in &self.time_to_reach_building { let pt = app.map.get_b(*b).polygon.center(); let idx = grid.idx( ((pt.x() - bounds.min_x) / resolution_m) as usize, ((pt.y() - bounds.min_y) / resolution_m) as usize, ); grid.data[idx] = cost.inner_seconds(); } let thresholds = vec![ 0.1, Duration::minutes(5).inner_seconds(), Duration::minutes(10).inner_seconds(), Duration::minutes(15).inner_seconds(), ]; let colors = vec![ Color::GREEN.alpha(0.5), Color::ORANGE.alpha(0.5), Color::RED.alpha(0.5), ]; let smooth = false; let c = contour::ContourBuilder::new(grid.width as u32, grid.height as u32, smooth); let mut batch = GeomBatch::new(); for (feature, color) in c .contours(&grid.data, &thresholds) .unwrap() .into_iter() .zip(colors) { match feature.geometry.unwrap().value { geojson::Value::MultiPolygon(polygons) => { for p in polygons { if let Ok(poly) = Polygon::from_geojson(&p) { batch.push(color, poly.scale(resolution_m)); } } } _ => unreachable!(), } } batch } }
pub fn new(ctx: &mut EventCtx, app: &App, start: BuildingID, options: Options) -> Isochrone { let time_to_reach_building = options.clone().time_to_reach_building(&app.map, start); let mut amenities_reachable = MultiMap::new(); let mut population = 0; let mut all_roads = HashSet::new(); for b in time_to_reach_building.keys() { let bldg = app.map.get_b(*b); for amenity in &bldg.amenities { if let Some(category) = AmenityType::categorize(&amenity.amenity_type) { amenities_reachable.insert(category, bldg.id); } } match bldg.bldg_type { BuildingType::Residential { num_residents, .. } | BuildingType::ResidentialCommercial(num_residents, _) => { population += num_residents; } _ => {} } all_roads.insert(app.map.get_l(bldg.sidewalk_pos.lane()).parent); } let mut onstreet_parking_spots = 0; for r in all_roads { let r = app.map.get_r(r); for (l, _, lt) in r.lanes_ltr() { if lt == LaneType::Parking { onstreet_parking_spots += app.map.get_l(l).number_parking_spots(app.map.get_config()); } } } let mut i = Isochrone { start, options, draw: Drawable::empty(ctx), time_to_reach_building, amenities_reachable, population, onstreet_parking_spots, }; i.draw = i.draw_isochrone(app).upload(ctx); i }
function_block-full_function
[ { "content": "pub fn pathfind(req: PathRequest, params: &RoutingParams, map: &Map) -> Option<(Path, Duration)> {\n\n if req.constraints == PathConstraints::Pedestrian {\n\n pathfind_walking(req, map)\n\n } else {\n\n let graph = build_graph_for_vehicles(map, req.constraints);\n\n calc_path(graph, req, params, map)\n\n }\n\n}\n\n\n", "file_path": "map_model/src/pathfind/dijkstra.rs", "rank": 0, "score": 400765.943363308 }, { "content": "fn pathfind_walking(req: PathRequest, map: &Map) -> Option<(Path, Duration)> {\n\n if req.start.lane() == req.end.lane() {\n\n return Some(one_step_walking_path(req, map));\n\n }\n\n\n\n let graph = build_graph_for_pedestrians(map);\n\n\n\n let closest_start = WalkingNode::closest(req.start, map);\n\n let closest_end = WalkingNode::closest(req.end, map);\n\n let (cost, nodes) = petgraph::algo::astar(\n\n &graph,\n\n closest_start,\n\n |end| end == closest_end,\n\n |(_, _, cost)| *cost,\n\n |_| Duration::ZERO,\n\n )?;\n\n let steps = walking_path_to_steps(nodes, map);\n\n Some((Path::new(map, steps, req, Vec::new()), cost))\n\n}\n", "file_path": "map_model/src/pathfind/dijkstra.rs", "rank": 1, "score": 372943.4484057581 }, { "content": "// TODO Do we even need this at all?\n\npub fn one_step_walking_path(req: PathRequest, map: &Map) -> (Path, Duration) {\n\n // Weird case, but it can happen for walking from a building path to a bus stop that're\n\n // actually at the same spot.\n\n let steps = if req.start.dist_along() == req.end.dist_along() {\n\n vec![PathStep::Lane(req.start.lane())]\n\n } else if req.start.dist_along() < req.end.dist_along() {\n\n vec![PathStep::Lane(req.start.lane())]\n\n } else {\n\n vec![PathStep::ContraflowLane(req.start.lane())]\n\n };\n\n let mut cost = (req.start.dist_along() - req.end.dist_along()).abs()\n\n / Traversable::Lane(req.start.lane()).max_speed_along(\n\n Some(crate::MAX_WALKING_SPEED),\n\n PathConstraints::Pedestrian,\n\n map,\n\n );\n\n if map.get_l(req.start.lane()).is_shoulder() {\n\n cost 
= 2.0 * cost;\n\n }\n\n (Path::new(map, steps, req, Vec::new()), cost)\n\n}\n", "file_path": "map_model/src/pathfind/walking.rs", "rank": 2, "score": 371019.7673763352 }, { "content": "/// Fill in empty space between one-way roads.\n\npub fn find_medians(map: &Map) -> Vec<Polygon> {\n\n // TODO Needs more work\n\n if true {\n\n return Vec::new();\n\n }\n\n\n\n let mut candidates = Vec::new();\n\n for r in map.all_roads() {\n\n if r.osm_tags.is(\"dual_carriageway\", \"yes\") {\n\n // TODO Always to the left? Maybe driving side matters; test in southbank too\n\n let lanes_ltr = r.lanes_ltr();\n\n candidates.push(lanes_ltr[0].0);\n\n }\n\n }\n\n\n\n let mut visited = BTreeSet::new();\n\n let mut polygons = Vec::new();\n\n for start in candidates {\n\n if visited.contains(&start) {\n\n continue;\n\n }\n\n if let Some((poly, lanes)) = map.get_l(start).trace_around_block(map) {\n\n polygons.push(poly);\n\n visited.extend(lanes);\n\n }\n\n }\n\n\n\n polygons\n\n}\n", "file_path": "map_model/src/make/medians.rs", "rank": 3, "score": 314307.5938521245 }, { "content": "/// Calculate the strongly connected components (SCC) of the part of the map accessible by\n\n/// constraints (ie, the graph of sidewalks or driving+bike lanes). The largest component is the\n\n/// \"main\" graph; the rest is disconnected. 
Returns (lanes in the largest \"main\" component, all\n\n/// other disconnected lanes)\n\npub fn find_scc(map: &Map, constraints: PathConstraints) -> (HashSet<LaneID>, HashSet<LaneID>) {\n\n let mut graph = DiGraphMap::new();\n\n for turn in map.all_turns().values() {\n\n if constraints.can_use(map.get_l(turn.id.src), map)\n\n && constraints.can_use(map.get_l(turn.id.dst), map)\n\n {\n\n graph.add_edge(turn.id.src, turn.id.dst, 1);\n\n }\n\n }\n\n let components = petgraph::algo::kosaraju_scc(&graph);\n\n if components.is_empty() {\n\n return (HashSet::new(), HashSet::new());\n\n }\n\n let largest_group: HashSet<LaneID> = components\n\n .into_iter()\n\n .max_by_key(|c| c.len())\n\n .unwrap()\n\n .into_iter()\n\n .collect();\n\n let disconnected = map\n", "file_path": "map_model/src/connectivity/mod.rs", "rank": 4, "score": 306670.85593047534 }, { "content": "// TODO Temporarily public for debugging.\n\npub fn calculate_corners(i: &Intersection, map: &Map) -> Vec<Polygon> {\n\n if i.is_footway(map) {\n\n return Vec::new();\n\n }\n\n\n\n let mut corners = Vec::new();\n\n\n\n for turn in map.get_turns_in_intersection(i.id) {\n\n if turn.turn_type == TurnType::SharedSidewalkCorner {\n\n // Avoid double-rendering\n\n if map.get_l(turn.id.src).dst_i != i.id {\n\n continue;\n\n }\n\n let l1 = map.get_l(turn.id.src);\n\n let l2 = map.get_l(turn.id.dst);\n\n\n\n // Special case for dead-ends: just thicken the geometry.\n\n if i.roads.len() == 1 {\n\n corners.push(turn.geom.make_polygons(l1.width.min(l2.width)));\n\n continue;\n", "file_path": "map_gui/src/render/intersection.rs", "rank": 5, "score": 298167.0278163951 }, { "content": "pub fn walking_path_to_steps(path: Vec<WalkingNode>, map: &Map) -> Vec<PathStep> {\n\n let mut steps: Vec<PathStep> = Vec::new();\n\n\n\n for pair in path.windows(2) {\n\n let (r1, l1_endpt) = match pair[0] {\n\n WalkingNode::SidewalkEndpoint(r, endpt) => (r, endpt),\n\n WalkingNode::RideBus(_) => unreachable!(),\n\n WalkingNode::LeaveMap(_) => 
unreachable!(),\n\n };\n\n let r2 = match pair[1] {\n\n WalkingNode::SidewalkEndpoint(r, _) => r,\n\n WalkingNode::RideBus(_) => unreachable!(),\n\n WalkingNode::LeaveMap(_) => unreachable!(),\n\n };\n\n\n\n let l1 = r1.must_get_sidewalk(map);\n\n let l2 = r2.must_get_sidewalk(map);\n\n\n\n if l1 == l2 {\n\n if l1_endpt {\n", "file_path": "map_model/src/pathfind/walking.rs", "rank": 6, "score": 297974.0853288801 }, { "content": "/// Heavily penalize crossing into an access-restricted zone that doesn't allow this mode.\n\npub fn zone_cost(mvmnt: MovementID, constraints: PathConstraints, map: &Map) -> Duration {\n\n // Detect when we cross into a new zone that doesn't allow constraints.\n\n if map\n\n .get_r(mvmnt.from.id)\n\n .access_restrictions\n\n .allow_through_traffic\n\n .contains(constraints)\n\n && !map\n\n .get_r(mvmnt.to.id)\n\n .access_restrictions\n\n .allow_through_traffic\n\n .contains(constraints)\n\n {\n\n // This should be high enough to achieve the desired effect of somebody not entering\n\n // the zone unless absolutely necessary. Someone would violate that and cut through anyway\n\n // only when the alternative route would take more than 3 hours longer!\n\n Duration::hours(3)\n\n } else {\n\n Duration::ZERO\n\n }\n", "file_path": "map_model/src/pathfind/mod.rs", "rank": 7, "score": 287987.4559570607 }, { "content": "pub fn round(cost: Duration) -> usize {\n\n // Round up! 0 cost edges are ignored\n\n (cost.inner_seconds().round() as usize).max(1)\n\n}\n", "file_path": "map_model/src/pathfind/ch.rs", "rank": 8, "score": 275394.8920086846 }, { "content": "/// Create a traffic signal which has a stage that is: protected straight, protected right,\n\n/// unprotected left, unprotected right on red. Followed by a variable stage that has protected\n\n/// left, unprotected right on red. 
With a last stage that is all-walk and variable.\n\n/// In some degenerate cases, usually with one or more one-way, this can reduce to stage per road.\n\n/// In some rare cases, usually with an alleyway, oncoming lanes can't both be protected left turns.\n\n/// In such cases the stage is split into two stages with each having a protected and yeild turn.\n\npub fn make_traffic_signal(map: &Map, id: IntersectionID) -> Option<ControlTrafficSignal> {\n\n // Try to create the stages, this returns a unoptimized signal, which is then optimized.\n\n if let Some(ts) = make_signal(map, id) {\n\n return optimize(ts);\n\n }\n\n None\n\n}\n\n\n", "file_path": "map_model/src/make/traffic_signals/lagging_green.rs", "rank": 9, "score": 271667.4035031707 }, { "content": "pub fn apply_parking(map: &mut RawMap, opts: &Options, timer: &mut Timer) {\n\n match opts.onstreet_parking {\n\n OnstreetParking::JustOSM => {}\n\n OnstreetParking::Blockface(ref path) => {\n\n use_parking_hints(map, path.clone(), timer);\n\n }\n\n OnstreetParking::SomeAdditionalWhereNoData { pct } => {\n\n let pct = pct as i64;\n\n for (id, r) in map.roads.iter_mut() {\n\n // The 20m minimum is a rough heuristic.\n\n if r.osm_tags.contains_key(osm::INFERRED_PARKING)\n\n && r.osm_tags\n\n .is_any(osm::HIGHWAY, vec![\"residential\", \"tertiary\"])\n\n && !r.osm_tags.is(\"foot\", \"no\")\n\n && id.osm_way_id.0 % 100 <= pct\n\n && PolyLine::unchecked_new(r.center_points.clone()).length()\n\n >= Distance::meters(20.0)\n\n {\n\n if r.osm_tags.is(\"oneway\", \"yes\") {\n\n r.osm_tags.remove(osm::PARKING_BOTH);\n", "file_path": "convert_osm/src/parking.rs", "rank": 10, "score": 269661.3271609946 }, { "content": "/// Extract the map and scenario name from a path. 
Crashes if the input is strange.\n\npub fn parse_scenario_path(path: &str) -> (MapName, String) {\n\n // TODO regex\n\n let parts = path.split(\"/\").collect::<Vec<_>>();\n\n let country = parts[parts.len() - 5];\n\n let city = parts[parts.len() - 4];\n\n let map = parts[parts.len() - 2];\n\n let scenario = basename(parts[parts.len() - 1]);\n\n let map_name = MapName::new(country, city, map);\n\n (map_name, scenario)\n\n}\n\n\n\n// Player data (Players edit this)\n\n\n", "file_path": "abstio/src/abst_paths.rs", "rank": 11, "score": 268395.3108036621 }, { "content": "// Adjust the path to start on the building's border, not center\n\nfn trim_path(poly: &Polygon, path: Line) -> Line {\n\n for bldg_line in poly.points().windows(2) {\n\n if let Some(l1) = Line::new(bldg_line[0], bldg_line[1]) {\n\n if let Some(hit) = l1.intersection(&path) {\n\n if let Some(l2) = Line::new(hit, path.pt2()) {\n\n return l2;\n\n }\n\n }\n\n }\n\n }\n\n // Just give up\n\n path\n\n}\n\n\n", "file_path": "map_model/src/make/buildings.rs", "rank": 12, "score": 266515.22076968837 }, { "content": "pub fn build_graph_for_pedestrians(map: &Map) -> DiGraphMap<WalkingNode, Duration> {\n\n let max_speed = Some(crate::MAX_WALKING_SPEED);\n\n let mut graph: DiGraphMap<WalkingNode, Duration> = DiGraphMap::new();\n\n for l in map.all_lanes() {\n\n if l.is_walkable() {\n\n let cost = l.length()\n\n / Traversable::Lane(l.id).max_speed_along(\n\n max_speed,\n\n PathConstraints::Pedestrian,\n\n map,\n\n );\n\n let n1 = WalkingNode::SidewalkEndpoint(l.get_directed_parent(), true);\n\n let n2 = WalkingNode::SidewalkEndpoint(l.get_directed_parent(), false);\n\n graph.add_edge(n1, n2, cost);\n\n graph.add_edge(n2, n1, cost);\n\n\n\n for turn in map.get_turns_for(l.id, PathConstraints::Pedestrian) {\n\n graph.add_edge(\n\n WalkingNode::SidewalkEndpoint(\n\n l.get_directed_parent(),\n", "file_path": "map_model/src/pathfind/dijkstra.rs", "rank": 13, "score": 264618.32638200244 }, { "content": "fn 
sidewalk_to_bike(sidewalk_pos: Position, map: &Map) -> Option<(Position, Position)> {\n\n let lane = map.get_parent(sidewalk_pos.lane()).find_closest_lane(\n\n sidewalk_pos.lane(),\n\n |l| !l.biking_blackhole && PathConstraints::Bike.can_use(l, map),\n\n map,\n\n )?;\n\n // No buffer needed\n\n Some((sidewalk_pos.equiv_pos(lane, map), sidewalk_pos))\n\n}\n\n\n\n/// Businesses are categorized into one of these types.\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\npub enum AmenityType {\n\n Groceries,\n\n Food,\n\n Bar,\n\n Medical,\n\n Religious,\n\n Education,\n\n Financial,\n", "file_path": "map_model/src/objects/building.rs", "rank": 14, "score": 263706.4068986103 }, { "content": "// Adjust the path to start on the building's border, not center\n\nfn trim_path(poly: &Polygon, path: Line) -> Line {\n\n for bldg_line in poly.points().windows(2) {\n\n if let Some(l1) = Line::new(bldg_line[0], bldg_line[1]) {\n\n if let Some(hit) = l1.intersection(&path) {\n\n if let Some(l2) = Line::new(hit, path.pt2()) {\n\n return l2;\n\n }\n\n }\n\n }\n\n }\n\n // Just give up\n\n path\n\n}\n\n\n", "file_path": "map_model/src/make/parking_lots.rs", "rank": 15, "score": 262363.77172604925 }, { "content": "pub fn extract_osm(map: &mut RawMap, opts: &Options, timer: &mut Timer) -> OsmExtract {\n\n let mut doc = crate::reader::read(&opts.osm_input, &map.gps_bounds, timer).unwrap();\n\n\n\n // Use this to quickly test overrides to some ways before upstreaming in OSM.\n\n if false {\n\n let ways: BTreeSet<WayID> = abstio::read_json(\"osm_ways.json\".to_string(), timer);\n\n for id in ways {\n\n doc.ways\n\n .get_mut(&id)\n\n .unwrap()\n\n .tags\n\n .insert(\"junction\", \"intersection\");\n\n }\n\n }\n\n\n\n // TODO Hacks to override OSM data. 
There's no problem upstream, but we want to accomplish\n\n // various things for A/B Street.\n\n if let Some(way) = doc.ways.get_mut(&WayID(881403608)) {\n\n // https://www.openstreetmap.org/way/881403608 is a roundabout that keeps causing gridlock\n\n way.tags.insert(\"highway\", \"construction\");\n", "file_path": "convert_osm/src/extract.rs", "rank": 16, "score": 258026.52754004212 }, { "content": "fn prebake(map: &Map, scenario: Scenario, opts: Option<SimOptions>, timer: &mut Timer) {\n\n timer.start(format!(\n\n \"prebake for {} / {}\",\n\n scenario.map_name.describe(),\n\n scenario.scenario_name\n\n ));\n\n\n\n let opts = opts.unwrap_or_else(|| {\n\n let mut opts = SimOptions::new(\"prebaked\");\n\n opts.alerts = AlertHandler::Silence;\n\n opts\n\n });\n\n let mut sim = Sim::new(&map, opts);\n\n // Bit of an abuse of this, but just need to fix the rng seed.\n\n let mut rng = SimFlags::for_test(\"prebaked\").make_rng();\n\n scenario.instantiate(&mut sim, &map, &mut rng, timer);\n\n sim.timed_step(\n\n &map,\n\n sim.get_end_of_day() - Time::START_OF_DAY,\n\n &mut None,\n", "file_path": "game/src/challenges/prebake.rs", "rank": 17, "score": 256469.8645143385 }, { "content": "/// Starting from one building, calculate the cost to all others. If a destination isn't reachable,\n\n/// it won't be included in the results. 
Ignore results greater than the time_limit away.\n\n///\n\n/// If the start building is on the shoulder of a road and `!opts.allow_shoulders`, then the\n\n/// results will always be empty.\n\npub fn all_walking_costs_from(\n\n map: &Map,\n\n start: BuildingID,\n\n time_limit: Duration,\n\n opts: WalkingOptions,\n\n) -> HashMap<BuildingID, Duration> {\n\n let start_lane = map.get_l(map.get_b(start).sidewalk_pos.lane());\n\n if start_lane.lane_type == LaneType::Shoulder && !opts.allow_shoulders {\n\n return HashMap::new();\n\n }\n\n\n\n let mut queue: BinaryHeap<Item> = BinaryHeap::new();\n\n queue.push(Item {\n\n cost: Duration::ZERO,\n\n node: WalkingNode::closest(map.get_b(start).sidewalk_pos, map),\n\n });\n\n\n\n let mut cost_per_node: HashMap<WalkingNode, Duration> = HashMap::new();\n\n while let Some(current) = queue.pop() {\n\n if cost_per_node.contains_key(&current.node) {\n", "file_path": "map_model/src/connectivity/walking.rs", "rank": 18, "score": 255566.3236784251 }, { "content": "/// Starting from one building, calculate the cost to all others. If a destination isn't reachable,\n\n/// it won't be included in the results. Ignore results greater than the time_limit away.\n\npub fn all_vehicle_costs_from(\n\n map: &Map,\n\n start: BuildingID,\n\n time_limit: Duration,\n\n constraints: PathConstraints,\n\n) -> HashMap<BuildingID, Duration> {\n\n assert!(constraints != PathConstraints::Pedestrian);\n\n let mut results = HashMap::new();\n\n\n\n // TODO We have a graph of DirectedRoadIDs, but mapping a building to one isn't\n\n // straightforward. 
In the common case it'll be fine, but some buildings are isolated from the\n\n // graph by some sidewalks.\n\n let mut bldg_to_road = HashMap::new();\n\n for b in map.all_buildings() {\n\n if constraints == PathConstraints::Car {\n\n if let Some((pos, _)) = b.driving_connection(map) {\n\n bldg_to_road.insert(b.id, map.get_l(pos.lane()).get_directed_parent());\n\n }\n\n } else if constraints == PathConstraints::Bike {\n\n if let Some((pos, _)) = b.biking_connection(map) {\n", "file_path": "map_model/src/connectivity/mod.rs", "rank": 19, "score": 255560.48429935137 }, { "content": "pub fn path_raw_map(name: &MapName) -> String {\n\n path(format!(\n\n \"input/{}/{}/raw_maps/{}.bin\",\n\n name.city.country, name.city.city, name.map\n\n ))\n\n}\n\n\n", "file_path": "abstio/src/abst_paths.rs", "rank": 20, "score": 250995.60975151445 }, { "content": "/// Return the cost of a single path, and also a mapping from every directed road to the cost of\n\n/// getting there from the same start. This can be used to understand why an alternative route\n\n/// wasn't chosen.\n\npub fn debug_vehicle_costs(\n\n req: PathRequest,\n\n map: &Map,\n\n) -> Option<(Duration, HashMap<DirectedRoadID, Duration>)> {\n\n // TODO Support this\n\n if req.constraints == PathConstraints::Pedestrian {\n\n return None;\n\n }\n\n\n\n let (_, cost) = crate::pathfind::dijkstra::pathfind(req.clone(), map.routing_params(), map)?;\n\n\n\n let graph = build_graph_for_vehicles(map, req.constraints);\n\n let road_costs = petgraph::algo::dijkstra(\n\n &graph,\n\n map.get_l(req.start.lane()).get_directed_parent(),\n\n None,\n\n |(_, _, mvmnt)| {\n\n vehicle_cost(\n\n mvmnt.from,\n\n *mvmnt,\n\n req.constraints,\n\n map.routing_params(),\n\n map,\n\n ) + zone_cost(*mvmnt, req.constraints, map)\n\n },\n\n );\n\n\n\n Some((cost, road_costs))\n\n}\n", "file_path": "map_model/src/connectivity/mod.rs", "rank": 21, "score": 250281.8352254677 }, { "content": "/// Also returns a list of labeled polygons for 
debugging.\n\n///\n\n/// Ideally, the resulting polygon should exist entirely within the thick bands around all original\n\n/// roads -- it just carves up part of that space, doesn't reach past it. But that's not always true\n\n/// yet.\n\npub fn intersection_polygon(\n\n i: &Intersection,\n\n roads: &mut BTreeMap<OriginalRoad, Road>,\n\n) -> Result<(Polygon, Vec<(String, Polygon)>)> {\n\n if i.roads.is_empty() {\n\n panic!(\"{} has no roads\", i.id);\n\n }\n\n\n\n // Turn all of the incident roads into two PolyLines (the \"forwards\" and \"backwards\" borders of\n\n // the road, if the roads were oriented to both be incoming to the intersection), both ending\n\n // at the intersection\n\n // TODO Maybe express the two incoming PolyLines as the \"right\" and \"left\"\n\n let mut lines: Vec<(OriginalRoad, Pt2D, PolyLine, PolyLine)> = Vec::new();\n\n // This is guaranteed to get set, since i.roads is non-empty\n\n let mut intersection_center = Pt2D::new(0.0, 0.0);\n\n for id in &i.roads {\n\n let r = &roads[id];\n\n\n\n let pl = if r.src_i == i.id {\n\n r.trimmed_center_pts.reversed()\n", "file_path": "map_model/src/make/initial/geometry.rs", "rank": 22, "score": 250209.49325407043 }, { "content": "/// Transform a sequence of roads representing a path into the current lane-based path, by picking\n\n/// particular lanes and turns to use.\n\npub fn path_v2_to_v1(\n\n req: PathRequest,\n\n road_steps: Vec<DirectedRoadID>,\n\n uber_turns_v2: Vec<UberTurnV2>,\n\n map: &Map,\n\n) -> Result<Path> {\n\n // This is a somewhat brute-force method: run Dijkstra's algorithm on a graph of lanes and\n\n // turns, but only build the graph along the path of roads we've already found. This handles\n\n // arbitrary lookahead needed, and forces use of the original start/end lanes requested.\n\n //\n\n // Eventually we'll directly return road-based paths. 
Most callers will actually just use that\n\n // directly, and mainly the simulation will need to expand to specific lanes, but it'll do so\n\n // dynamically/lazily to account for current traffic conditions.\n\n let mut graph = petgraph::graphmap::DiGraphMap::new();\n\n for pair in road_steps.windows(2) {\n\n for src in pair[0].lanes(req.constraints, map) {\n\n for dst in pair[1].lanes(req.constraints, map) {\n\n let turn = TurnID {\n\n parent: map.get_l(src).dst_i,\n\n src,\n", "file_path": "map_model/src/pathfind/v2.rs", "rank": 23, "score": 250197.25577988217 }, { "content": "pub fn distribute_residents(map: &mut map_model::Map, timer: &mut Timer) {\n\n for shape in abstio::read_binary::<ExtraShapes>(\n\n \"data/input/de/berlin/planning_areas.bin\".to_string(),\n\n timer,\n\n )\n\n .shapes\n\n {\n\n let pts = map.get_gps_bounds().convert(&shape.points);\n\n if pts\n\n .iter()\n\n .all(|pt| !map.get_boundary_polygon().contains_pt(*pt))\n\n {\n\n continue;\n\n }\n\n let region = Ring::must_new(pts).to_polygon();\n\n // Deterministically seed using the planning area's ID.\n\n let mut rng =\n\n XorShiftRng::seed_from_u64(shape.attributes[\"spatial_name\"].parse::<u64>().unwrap());\n\n\n\n for (home, n) in popdat::distribute_population_to_homes(\n", "file_path": "importer/src/berlin.rs", "rank": 24, "score": 250152.17990445186 }, { "content": "/// Run the contents of a .osm through the full map importer with default options.\n\nfn import_map(path: String) -> Map {\n\n let mut timer = Timer::new(\"convert synthetic map\");\n\n let raw = convert_osm::convert(\n\n convert_osm::Options {\n\n name: MapName::new(\"zz\", \"oneshot\", &abstutil::basename(&path)),\n\n osm_input: path,\n\n clip: None,\n\n map_config: map_model::MapConfig {\n\n driving_side: map_model::DrivingSide::Right,\n\n bikes_can_use_bus_lanes: true,\n\n inferred_sidewalks: true,\n\n street_parking_spot_length: Distance::meters(8.0),\n\n },\n\n onstreet_parking: convert_osm::OnstreetParking::JustOSM,\n\n 
public_offstreet_parking: convert_osm::PublicOffstreetParking::None,\n\n private_offstreet_parking: convert_osm::PrivateOffstreetParking::FixedPerBldg(0),\n\n include_railroads: true,\n\n extra_buildings: None,\n\n },\n\n &mut timer,\n\n );\n\n let map = Map::create_from_raw(raw, true, true, &mut timer);\n\n map\n\n}\n\n\n", "file_path": "tests/src/main.rs", "rank": 25, "score": 249939.81911922386 }, { "content": "fn load_study_area(map: &Map) -> Result<Polygon> {\n\n let bytes = abstio::slurp_file(abstio::path(format!(\n\n \"system/study_areas/{}.geojson\",\n\n map.get_name().city.city.replace(\"_\", \"-\")\n\n )))?;\n\n let raw_string = std::str::from_utf8(&bytes)?;\n\n let geojson = raw_string.parse::<geojson::GeoJson>()?;\n\n\n\n if let geojson::GeoJson::FeatureCollection(collection) = geojson {\n\n for feature in collection.features {\n\n if let Some(geom) = feature.geometry {\n\n if let geojson::Value::Polygon(raw_pts) = geom.value {\n\n return parse_polygon(raw_pts, map.get_gps_bounds());\n\n }\n\n }\n\n }\n\n }\n\n bail!(\"no study area\");\n\n}\n", "file_path": "importer/src/uk.rs", "rank": 26, "score": 247888.16785310418 }, { "content": "/// Simple second-pass after generating all signals. Find pairs of traffic signals very close to\n\n/// each other with 2 stages each, see if the primary movement of the first stages lead to each\n\n/// other, and flip the order of stages if not. 
This is often wrong when the most common movement is\n\n/// actually turning left then going straight (near Mercer for example), but not sure how we could\n\n/// know that without demand data.\n\npub fn synchronize(map: &mut Map) {\n\n let mut seen = HashSet::new();\n\n let mut pairs = Vec::new();\n\n let handmapped = traffic_signal_data::load_all_data().unwrap();\n\n for i in map.all_intersections() {\n\n if !i.is_traffic_signal() || seen.contains(&i.id) || handmapped.contains_key(&i.orig_id.0) {\n\n continue;\n\n }\n\n if let Some(list) = IntersectionCluster::autodetect(i.id, map) {\n\n let list = list.into_iter().collect::<Vec<_>>();\n\n if list.len() == 2\n\n && map.get_traffic_signal(list[0]).stages.len() == 2\n\n && map.get_traffic_signal(list[1]).stages.len() == 2\n\n {\n\n pairs.push((list[0], list[1]));\n\n seen.insert(list[0]);\n\n seen.insert(list[1]);\n\n }\n\n }\n\n }\n", "file_path": "map_model/src/make/traffic_signals/mod.rs", "rank": 27, "score": 246826.65498767985 }, { "content": "/// This import from GTFS:\n\n/// - is specific to Seattle, whose files don't seem to match https://developers.google.com/transit/gtfs/reference\n\n/// - is probably wrong\n\npub fn add_gtfs_schedules(map: &mut Map) {\n\n let city = CityName::seattle();\n\n // https://www.openstreetmap.org/relation/8616968 as an example, mapping to\n\n // https://kingcounty.gov/depts/transportation/metro/schedules-maps/route/048.aspx\n\n\n\n let mut trip_marker_to_route: BTreeMap<String, BusRouteID> = BTreeMap::new();\n\n for br in map.all_bus_routes() {\n\n if let Some(ref m) = br.gtfs_trip_marker {\n\n // Dunno what the :0 thing is\n\n trip_marker_to_route.insert(m.split(\":\").next().unwrap().to_string(), br.id);\n\n }\n\n }\n\n\n\n // Each route has a bunch of trips throughout the day\n\n let mut trip_marker_to_trips: MultiMap<String, String> = MultiMap::new();\n\n for rec in\n\n csv::Reader::from_reader(File::open(city.input_path(\"google_transit/trips.txt\")).unwrap())\n\n 
.deserialize()\n\n {\n\n let rec: TripRecord = rec.unwrap();\n", "file_path": "importer/src/seattle.rs", "rank": 28, "score": 246352.49542127497 }, { "content": "fn make_elevation(ctx: &EventCtx, color: Color, walking: bool, path: &Path, map: &Map) -> Widget {\n\n let mut pts: Vec<(Distance, Distance)> = Vec::new();\n\n let mut dist = Distance::ZERO;\n\n for step in path.get_steps() {\n\n if let PathStep::Turn(t) = step {\n\n pts.push((dist, map.get_i(t.parent).elevation));\n\n }\n\n dist += step.as_traversable().length(map);\n\n }\n\n // TODO Show roughly where we are in the trip; use distance covered by current path for this\n\n LinePlot::new(\n\n ctx,\n\n vec![Series {\n\n label: if walking {\n\n \"Elevation for walking\"\n\n } else {\n\n \"Elevation for biking\"\n\n }\n\n .to_string(),\n\n color,\n\n pts,\n\n }],\n\n PlotOptions::fixed(),\n\n )\n\n}\n\n\n", "file_path": "game/src/info/trip.rs", "rank": 29, "score": 245796.994557516 }, { "content": "pub fn draw_stage_number(\n\n app: &dyn AppLike,\n\n prerender: &Prerender,\n\n i: IntersectionID,\n\n idx: usize,\n\n batch: &mut GeomBatch,\n\n) {\n\n let radius = Distance::meters(1.0);\n\n let center = app.map().get_i(i).polygon.polylabel();\n\n batch.push(\n\n Color::hex(\"#5B5B5B\"),\n\n Circle::new(center, radius).to_polygon(),\n\n );\n\n batch.append(\n\n Text::from(Line(format!(\"{}\", idx + 1)).fg(Color::WHITE))\n\n .render_autocropped(prerender)\n\n .scale(0.075)\n\n .centered_on(center),\n\n );\n\n}\n\n\n", "file_path": "map_gui/src/render/traffic_signal.rs", "rank": 30, "score": 245246.8589101408 }, { "content": "pub fn draw_signal_stage(\n\n prerender: &Prerender,\n\n stage: &Stage,\n\n idx: usize,\n\n i: IntersectionID,\n\n time_left: Option<Duration>,\n\n batch: &mut GeomBatch,\n\n app: &dyn AppLike,\n\n signal_style: TrafficSignalStyle,\n\n) {\n\n let signal = app.map().get_traffic_signal(i);\n\n\n\n match signal_style {\n\n TrafficSignalStyle::BAP => {\n\n let mut dont_walk = BTreeSet::new();\n\n 
let mut crossed_roads = BTreeSet::new();\n\n for m in signal.movements.keys() {\n\n if m.crosswalk {\n\n dont_walk.insert(m);\n\n // TODO This is incorrect; some crosswalks hop over intermediate roads. How do\n", "file_path": "map_gui/src/render/traffic_signal.rs", "rank": 31, "score": 245246.8589101408 }, { "content": "/// Match OSM buildings to parcels, scraping the number of housing units.\n\n// TODO It's expensive to load the huge zoning_parcels.bin file for every map.\n\npub fn match_parcels_to_buildings(map: &mut Map, shapes: &ExtraShapes, timer: &mut Timer) {\n\n let mut parcels_with_housing: Vec<(Polygon, usize)> = Vec::new();\n\n // TODO We should refactor something like FindClosest, but for polygon containment\n\n // The quadtree's ID is just an index into parcels_with_housing.\n\n let mut quadtree: QuadTree<usize> = QuadTree::default(map.get_bounds().as_bbox());\n\n timer.start_iter(\"index all parcels\", shapes.shapes.len());\n\n for shape in &shapes.shapes {\n\n timer.next();\n\n if let Some(units) = shape\n\n .attributes\n\n .get(\"EXIST_UNITS\")\n\n .and_then(|x| x.parse::<usize>().ok())\n\n {\n\n if let Some(ring) = map\n\n .get_gps_bounds()\n\n .try_convert(&shape.points)\n\n .and_then(|pts| Ring::new(pts).ok())\n\n {\n\n let polygon = ring.to_polygon();\n\n quadtree\n", "file_path": "importer/src/seattle.rs", "rank": 32, "score": 243990.80849942862 }, { "content": "fn floodfill(map: &Map, start: RoadID) -> Zone {\n\n let match_constraints = map.get_r(start).access_restrictions.clone();\n\n let merge_zones = map.get_edits().merge_zones;\n\n let mut queue = vec![start];\n\n let mut members = BTreeSet::new();\n\n let mut borders = BTreeSet::new();\n\n while !queue.is_empty() {\n\n let current = queue.pop().unwrap();\n\n if members.contains(&current) {\n\n continue;\n\n }\n\n members.insert(current);\n\n for r in map.get_next_roads(current) {\n\n let r = map.get_r(r);\n\n if r.access_restrictions == match_constraints && merge_zones {\n\n 
queue.push(r.id);\n\n } else {\n\n borders.insert(map.get_r(current).common_endpt(r));\n\n }\n\n }\n\n }\n\n assert!(!members.is_empty());\n\n assert!(!borders.is_empty());\n\n Zone {\n\n members,\n\n borders,\n\n restrictions: match_constraints,\n\n }\n\n}\n", "file_path": "map_model/src/objects/zone.rs", "rank": 33, "score": 243290.08227670705 }, { "content": "pub fn unzoomed_agent_radius(vt: Option<VehicleType>) -> Distance {\n\n // Lane thickness is a little hard to see, so double it. Most of the time, the circles don't\n\n // leak out of the road too much.\n\n if vt.is_some() {\n\n 4.0 * NORMAL_LANE_THICKNESS\n\n } else {\n\n 4.0 * SIDEWALK_THICKNESS\n\n }\n\n}\n\n\n", "file_path": "map_gui/src/render/mod.rs", "rank": 34, "score": 242420.86215273026 }, { "content": "pub fn angle_from_arrow_keys(ctx: &EventCtx) -> Option<Angle> {\n\n let mut x: f64 = 0.0;\n\n let mut y: f64 = 0.0;\n\n if ctx.is_key_down(Key::LeftArrow) || ctx.is_key_down(Key::A) {\n\n x -= 1.0;\n\n }\n\n if ctx.is_key_down(Key::RightArrow) || ctx.is_key_down(Key::D) {\n\n x += 1.0;\n\n }\n\n if ctx.is_key_down(Key::UpArrow) || ctx.is_key_down(Key::W) {\n\n y -= 1.0;\n\n }\n\n if ctx.is_key_down(Key::DownArrow) || ctx.is_key_down(Key::S) {\n\n y += 1.0;\n\n }\n\n\n\n if x == 0.0 && y == 0.0 {\n\n return None;\n\n }\n\n Some(Angle::new_rads(y.atan2(x)))\n\n}\n", "file_path": "santa/src/controls.rs", "rank": 35, "score": 242033.1265257657 }, { "content": "pub fn convert(opts: Options, timer: &mut abstutil::Timer) -> RawMap {\n\n let mut map = RawMap::blank(opts.name.clone());\n\n if let Some(ref path) = opts.clip {\n\n let pts = LonLat::read_osmosis_polygon(path).unwrap();\n\n let gps_bounds = GPSBounds::from(pts.clone());\n\n map.boundary_polygon = Ring::must_new(gps_bounds.convert(&pts)).to_polygon();\n\n map.gps_bounds = gps_bounds;\n\n }\n\n\n\n let extract = extract::extract_osm(&mut map, &opts, timer);\n\n let (amenities, pt_to_road) = split_ways::split_up_roads(&mut map, extract, 
timer);\n\n clip::clip_map(&mut map, timer);\n\n\n\n // Need to do a first pass of removing cul-de-sacs here, or we wind up with loop PolyLines when\n\n // doing the parking hint matching.\n\n abstutil::retain_btreemap(&mut map.roads, |r, _| r.i1 != r.i2);\n\n\n\n let all_routes = map.bus_routes.drain(..).collect::<Vec<_>>();\n\n let mut routes = Vec::new();\n\n for route in all_routes {\n", "file_path": "convert_osm/src/lib.rs", "rank": 36, "score": 241787.3171255681 }, { "content": "pub fn path_all_edits(name: &MapName) -> String {\n\n path(format!(\n\n \"player/edits/{}/{}/{}\",\n\n name.city.country, name.city.city, name.map\n\n ))\n\n}\n\n\n", "file_path": "abstio/src/abst_paths.rs", "rank": 37, "score": 241413.18282118926 }, { "content": "pub fn path_all_scenarios(name: &MapName) -> String {\n\n path(format!(\n\n \"system/{}/{}/scenarios/{}\",\n\n name.city.country, name.city.city, name.map\n\n ))\n\n}\n\n\n", "file_path": "abstio/src/abst_paths.rs", "rank": 38, "score": 241413.18282118926 }, { "content": "pub fn parent_path(path: &str) -> String {\n\n format!(\"{}\", std::path::Path::new(path).parent().unwrap().display())\n\n}\n", "file_path": "abstutil/src/utils.rs", "rank": 39, "score": 240896.42655667395 }, { "content": "fn validate_zones(map: &Map, steps: &Vec<PathStep>, req: &PathRequest) {\n\n let z1 = map.get_parent(req.start.lane()).get_zone(map);\n\n let z2 = map.get_parent(req.end.lane()).get_zone(map);\n\n\n\n for step in steps {\n\n if let PathStep::Turn(t) = step {\n\n if map\n\n .get_parent(t.src)\n\n .access_restrictions\n\n .allow_through_traffic\n\n .contains(req.constraints)\n\n && !map\n\n .get_parent(t.dst)\n\n .access_restrictions\n\n .allow_through_traffic\n\n .contains(req.constraints)\n\n {\n\n // Entering our destination zone is fine\n\n let into_zone = map.get_parent(t.dst).get_zone(map);\n\n if into_zone != z1 && into_zone != z2 {\n", "file_path": "map_model/src/pathfind/v1.rs", "rank": 40, "score": 237764.9934293487 }, { 
"content": "fn use_parking_hints(map: &mut RawMap, path: String, timer: &mut Timer) {\n\n timer.start(\"apply parking hints\");\n\n let shapes: ExtraShapes = abstio::read_binary(path, timer);\n\n\n\n // Match shapes with the nearest road + direction (true for forwards)\n\n let mut closest: FindClosest<(OriginalRoad, bool)> =\n\n FindClosest::new(&map.gps_bounds.to_bounds());\n\n for (id, r) in &map.roads {\n\n if r.is_light_rail() || r.is_footway() || r.is_service() {\n\n continue;\n\n }\n\n let center = PolyLine::must_new(r.center_points.clone());\n\n closest.add(\n\n (*id, true),\n\n center.must_shift_right(DIRECTED_ROAD_THICKNESS).points(),\n\n );\n\n closest.add(\n\n (*id, false),\n\n center.must_shift_left(DIRECTED_ROAD_THICKNESS).points(),\n\n );\n", "file_path": "convert_osm/src/parking.rs", "rank": 41, "score": 237156.87832594218 }, { "content": "fn use_offstreet_parking(map: &mut RawMap, path: String, timer: &mut Timer) {\n\n timer.start(\"match offstreet parking points\");\n\n let shapes: ExtraShapes = abstio::read_binary(path, timer);\n\n\n\n let mut closest: FindClosest<osm::OsmID> = FindClosest::new(&map.gps_bounds.to_bounds());\n\n for (id, b) in &map.buildings {\n\n closest.add(*id, b.polygon.points());\n\n }\n\n\n\n // TODO Another function just to use ?. 
Try blocks would rock.\n\n let mut handle_shape: Box<dyn FnMut(kml::ExtraShape) -> Option<()>> = Box::new(|s| {\n\n assert_eq!(s.points.len(), 1);\n\n let pt = s.points[0].to_pt(&map.gps_bounds);\n\n let (id, _) = closest.closest_pt(pt, Distance::meters(50.0))?;\n\n // TODO Handle parking lots.\n\n if !map.buildings[&id].polygon.contains_pt(pt) {\n\n return None;\n\n }\n\n let name = s.attributes.get(\"DEA_FACILITY_NAME\")?.to_string();\n\n let num_stalls = s.attributes.get(\"DEA_STALLS\")?.parse::<usize>().ok()?;\n", "file_path": "convert_osm/src/parking.rs", "rank": 42, "score": 237156.87832594218 }, { "content": "pub fn path_camera_state(name: &MapName) -> String {\n\n path(format!(\n\n \"player/camera_state/{}/{}/{}.json\",\n\n name.city.country, name.city.city, name.map\n\n ))\n\n}\n\n\n", "file_path": "abstio/src/abst_paths.rs", "rank": 43, "score": 237056.05265370163 }, { "content": "fn draw_unwalkable_roads(ctx: &mut EventCtx, app: &App, opts: &Options) -> Drawable {\n\n let allow_shoulders = match opts {\n\n Options::Walking(ref opts) => opts.allow_shoulders,\n\n Options::Biking => {\n\n return Drawable::empty(ctx);\n\n }\n\n };\n\n\n\n let mut batch = GeomBatch::new();\n\n 'ROADS: for road in app.map.all_roads() {\n\n if road.is_light_rail() {\n\n continue;\n\n }\n\n for (_, _, lt) in road.lanes_ltr() {\n\n if lt == LaneType::Sidewalk || (lt == LaneType::Shoulder && allow_shoulders) {\n\n continue 'ROADS;\n\n }\n\n }\n\n // TODO Skip highways\n\n batch.push(Color::BLUE.alpha(0.5), road.get_thick_polygon(&app.map));\n\n }\n\n ctx.upload(batch)\n\n}\n", "file_path": "fifteen_min/src/viewer.rs", "rank": 44, "score": 234695.56363938164 }, { "content": "// (Epicenter, entire shape)\n\nfn cluster_jams(map: &Map, problems: Vec<(IntersectionID, Time)>) -> Vec<(Polygon, Polygon)> {\n\n let mut jams: Vec<Jam> = Vec::new();\n\n // The delay itself doesn't matter, as long as they're sorted.\n\n for (i, _) in problems {\n\n // Is this connected to an existing 
problem?\n\n if let Some(ref mut jam) = jams.iter_mut().find(|j| j.adjacent_to(map, i)) {\n\n jam.members.insert(i);\n\n } else {\n\n jams.push(Jam {\n\n epicenter: i,\n\n members: btreeset! { i },\n\n });\n\n }\n\n }\n\n\n\n jams.into_iter()\n\n .map(|jam| {\n\n (\n\n map.get_i(jam.epicenter).polygon.clone(),\n\n Polygon::convex_hull(jam.all_polygons(map)),\n", "file_path": "game/src/layer/traffic.rs", "rank": 45, "score": 234235.57992002412 }, { "content": "/// Exports a single road to Streetmix's format, returns the filename\n\npub fn export(r: RoadID, map: &Map) -> String {\n\n let path = format!(\"streetmix_export_{}.json\", r.0);\n\n let street = road(r, map);\n\n abstio::write_json(path.clone(), &street);\n\n path\n\n}\n\n\n", "file_path": "game/src/debug/streetmix.rs", "rank": 46, "score": 233125.5533390871 }, { "content": "fn calculate_turn_markings(map: &Map, lane: &Lane) -> Vec<Polygon> {\n\n if lane.length() < Distance::meters(7.0) {\n\n return Vec::new();\n\n }\n\n\n\n // Does this lane connect to every other possible outbound lane of the same type, excluding\n\n // U-turns to the same road? 
If so, then there's nothing unexpected to communicate.\n\n let i = map.get_i(lane.dst_i);\n\n if i.outgoing_lanes.iter().all(|l| {\n\n let l = map.get_l(*l);\n\n l.lane_type != lane.lane_type\n\n || l.parent == lane.parent\n\n || map\n\n .maybe_get_t(TurnID {\n\n parent: i.id,\n\n src: lane.id,\n\n dst: l.id,\n\n })\n\n .is_some()\n\n }) {\n", "file_path": "map_gui/src/render/lane.rs", "rank": 47, "score": 232617.9587049513 }, { "content": "fn calculate_parking_lines(lane: &Lane, map: &Map) -> Vec<Polygon> {\n\n // meters, but the dims get annoying below to remove\n\n let leg_length = Distance::meters(1.0);\n\n\n\n let mut result = Vec::new();\n\n let num_spots = lane.number_parking_spots(map.get_config());\n\n if num_spots > 0 {\n\n for idx in 0..=num_spots {\n\n let (pt, lane_angle) = lane\n\n .lane_center_pts\n\n .must_dist_along(map.get_config().street_parking_spot_length * (1.0 + idx as f64));\n\n let perp_angle = if map.get_config().driving_side == DrivingSide::Right {\n\n lane_angle.rotate_degs(270.0)\n\n } else {\n\n lane_angle.rotate_degs(90.0)\n\n };\n\n // Find the outside of the lane. 
Actually, shift inside a little bit, since the line\n\n // will have thickness, but shouldn't really intersect the adjacent line\n\n // when drawn.\n\n let t_pt = pt.project_away(lane.width * 0.4, perp_angle);\n", "file_path": "map_gui/src/render/lane.rs", "rank": 48, "score": 232617.9587049513 }, { "content": "pub fn adjust_private_parking(map: &mut Map, scenario: &Scenario) {\n\n for (b, count) in scenario.count_parked_cars_per_bldg().consume() {\n\n map.hack_override_offstreet_spots_individ(b, count);\n\n }\n\n map.save();\n\n}\n\n\n", "file_path": "importer/src/seattle.rs", "rank": 49, "score": 229895.29587238882 }, { "content": "/// Generate all driving and walking turns at an intersection, accounting for OSM turn restrictions.\n\npub fn make_all_turns(map: &Map, i: &Intersection) -> Vec<Turn> {\n\n let mut raw_turns: Vec<Turn> = Vec::new();\n\n raw_turns.extend(make_vehicle_turns(i, map));\n\n raw_turns.extend(crate::make::walking_turns::filter_turns(\n\n crate::make::walking_turns::make_walking_turns(map, i),\n\n map,\n\n i,\n\n ));\n\n let unique_turns = ensure_unique(raw_turns);\n\n\n\n let mut final_turns: Vec<Turn> = Vec::new();\n\n let mut filtered_turns: HashMap<LaneID, Vec<Turn>> = HashMap::new();\n\n for turn in unique_turns {\n\n if !does_turn_pass_restrictions(&turn, i, map) {\n\n continue;\n\n }\n\n\n\n if is_turn_allowed(&turn, map) {\n\n final_turns.push(turn);\n\n } else {\n", "file_path": "map_model/src/make/turns.rs", "rank": 50, "score": 229822.55238638795 }, { "content": "pub fn make_crosswalk(batch: &mut GeomBatch, turn: &Turn, map: &Map, cs: &ColorScheme) {\n\n if make_rainbow_crosswalk(batch, turn, map) {\n\n return;\n\n }\n\n\n\n // This size also looks better for shoulders\n\n let width = SIDEWALK_THICKNESS;\n\n // Start at least width out to not hit sidewalk corners. Also account for the thickness of the\n\n // crosswalk line itself. 
Center the lines inside these two boundaries.\n\n let boundary = width;\n\n let tile_every = width * 0.6;\n\n let line = {\n\n // The middle line in the crosswalk geometry is the main crossing line.\n\n let pts = turn.geom.points();\n\n if pts.len() < 3 {\n\n println!(\n\n \"Not rendering crosswalk for {}; its geometry was squished earlier\",\n\n turn.id\n\n );\n\n return;\n", "file_path": "map_gui/src/render/intersection.rs", "rank": 51, "score": 228504.44073011997 }, { "content": "fn degenerate(map: &Map, i: IntersectionID) -> Option<ControlTrafficSignal> {\n\n let roads = map.get_i(i).get_sorted_incoming_roads(map);\n\n if roads.len() != 2 {\n\n return None;\n\n }\n\n let (r1, r2) = (roads[0], roads[1]);\n\n\n\n let mut ts = new(i, map);\n\n make_stages(\n\n &mut ts,\n\n map.config.driving_side,\n\n vec![vec![(vec![r1, r2], TurnType::Straight, PROTECTED)]],\n\n );\n\n Some(ts)\n\n}\n\n\n", "file_path": "map_model/src/make/traffic_signals/mod.rs", "rank": 52, "score": 227278.1902782637 }, { "content": "pub fn draw_occupants(details: &mut Details, app: &App, id: BuildingID, focus: Option<PersonID>) {\n\n // TODO Lots of fun ideas here. Have a deterministic simulation based on building ID and time\n\n // to have people \"realistically\" move around. 
Draw little floor plans.\n\n\n\n let mut ppl = app.primary.sim.bldg_to_people(id);\n\n let num_rows_cols = (ppl.len() as f64).sqrt().ceil() as usize;\n\n\n\n let ped_len = SIDEWALK_THICKNESS.inner_meters() / 2.0;\n\n let separation = ped_len * 1.5;\n\n\n\n let total_width_height = (num_rows_cols as f64) * (ped_len + separation);\n\n let top_left = app\n\n .primary\n\n .map\n\n .get_b(id)\n\n .label_center\n\n .offset(-total_width_height / 2.0, -total_width_height / 2.0);\n\n\n\n // TODO Current thing is inefficient and can easily wind up outside the building.\n\n\n", "file_path": "game/src/info/building.rs", "rank": 53, "score": 227250.9165431084 }, { "content": "pub fn path<I: AsRef<str>>(p: I) -> String {\n\n let p = p.as_ref();\n\n if p.starts_with(\"player/\") {\n\n format!(\"{}/{}\", *ROOT_PLAYER_DIR, p)\n\n } else {\n\n format!(\"{}/{}\", *ROOT_DIR, p)\n\n }\n\n}\n\n\n\n/// A single city is identified using this.\n\n#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]\n\npub struct CityName {\n\n /// A two letter lowercase country code, from https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2.\n\n /// To represent imaginary/test cities, use the code `zz`.\n\n pub country: String,\n\n /// The name of the city, in filename-friendly form -- for example, \"tel_aviv\".\n\n pub city: String,\n\n}\n\n\n\nimpl CityName {\n", "file_path": "abstio/src/abst_paths.rs", "rank": 54, "score": 226521.4906780455 }, { "content": "fn half_signal(map: &Map, i: IntersectionID) -> Option<ControlTrafficSignal> {\n\n if map.get_i(i).roads.len() != 2 {\n\n return None;\n\n }\n\n\n\n let mut ts = new(i, map);\n\n let mut vehicle_stage = Stage::new();\n\n let mut ped_stage = Stage::new();\n\n for (id, movement) in &ts.movements {\n\n if id.crosswalk {\n\n ped_stage.edit_movement(movement, TurnPriority::Protected);\n\n } else {\n\n vehicle_stage.edit_movement(movement, TurnPriority::Protected);\n\n }\n\n }\n\n vehicle_stage.stage_type = 
StageType::Fixed(Duration::minutes(1));\n\n ped_stage.stage_type = StageType::Fixed(Duration::seconds(10.0));\n\n\n\n ts.stages = vec![vehicle_stage, ped_stage];\n\n Some(ts)\n\n}\n\n\n", "file_path": "map_model/src/make/traffic_signals/mod.rs", "rank": 55, "score": 224799.74396705895 }, { "content": "fn three_way(map: &Map, i: IntersectionID) -> Option<ControlTrafficSignal> {\n\n let roads = map.get_i(i).get_sorted_incoming_roads(map);\n\n if roads.len() != 3 {\n\n return None;\n\n }\n\n let mut ts = new(i, map);\n\n\n\n // Picture a T intersection. Use turn angles to figure out the \"main\" two roads.\n\n let straight = ts\n\n .movements\n\n .values()\n\n .find(|g| g.turn_type == TurnType::Straight)?;\n\n let (north, south) = (straight.id.from.id, straight.id.to.id);\n\n let east = roads\n\n .into_iter()\n\n .find(|r| *r != north && *r != south)\n\n .unwrap();\n\n\n\n // Two-stage with no protected lefts, right turn on red, turning cars yield to peds\n\n make_stages(\n", "file_path": "map_model/src/make/traffic_signals/mod.rs", "rank": 56, "score": 224799.74396705895 }, { "content": "// TODO This assumes the lanes change direction only at one point. 
A two-way cycletrack right at\n\n// the border will look a bit off.\n\nfn calculate_border_arrows(i: &Intersection, r: &Road, map: &Map) -> Vec<Polygon> {\n\n let mut result = Vec::new();\n\n\n\n let mut width_fwd = Distance::ZERO;\n\n let mut width_back = Distance::ZERO;\n\n for (l, dir, _) in r.lanes_ltr() {\n\n if dir == Direction::Fwd {\n\n width_fwd += map.get_l(l).width;\n\n } else {\n\n width_back += map.get_l(l).width;\n\n }\n\n }\n\n let center = r.get_dir_change_pl(map);\n\n\n\n // These arrows should point from the void to the road\n\n if !i.outgoing_lanes.is_empty() {\n\n let (line, width) = if r.dst_i == i.id {\n\n (\n\n center.last_line().shift_left(width_back / 2.0).reverse(),\n\n width_back,\n", "file_path": "map_gui/src/render/intersection.rs", "rank": 57, "score": 224404.6524147736 }, { "content": "/// Generate Crosswalk and SharedSidewalkCorner (places where two sidewalks directly meet) turns\n\npub fn make_walking_turns(map: &Map, i: &Intersection) -> Vec<Turn> {\n\n if i.is_footway(map) {\n\n return make_footway_turns(map, i);\n\n }\n\n\n\n let driving_side = map.config.driving_side;\n\n let all_roads = map.all_roads();\n\n let lanes = map.all_lanes();\n\n\n\n let roads: Vec<&Road> = i\n\n .get_roads_sorted_by_incoming_angle(all_roads)\n\n .into_iter()\n\n .map(|id| &all_roads[id.0])\n\n .collect();\n\n let mut result: Vec<Turn> = Vec::new();\n\n\n\n // I'm a bit confused when to do -1 and +1 honestly, but this works in practice. 
Angle sorting\n\n // may be a little backwards.\n\n let idx_offset = if driving_side == DrivingSide::Right {\n\n -1\n", "file_path": "map_model/src/make/walking_turns.rs", "rank": 58, "score": 224315.99005702144 }, { "content": "// Transforms all zones into the map's coordinate space, no matter how far out-of-bounds they are.\n\nfn parse_zones(gps_bounds: &GPSBounds, path: String) -> Result<HashMap<String, Polygon>> {\n\n let mut zones = HashMap::new();\n\n\n\n let bytes = abstio::slurp_file(path)?;\n\n let raw_string = std::str::from_utf8(&bytes)?;\n\n let geojson = raw_string.parse::<geojson::GeoJson>()?;\n\n\n\n if let geojson::GeoJson::FeatureCollection(collection) = geojson {\n\n for feature in collection.features {\n\n let zone = feature\n\n .property(\"geo_code\")\n\n .and_then(|x| x.as_str())\n\n .ok_or_else(|| anyhow!(\"no geo_code\"))?\n\n .to_string();\n\n if let Some(geom) = feature.geometry {\n\n if let geojson::Value::MultiPolygon(mut raw_polygons) = geom.value {\n\n if raw_polygons.len() != 1 {\n\n // We'll just one of them arbitrarily\n\n warn!(\n\n \"Zone {} has a multipolygon with {} members\",\n", "file_path": "importer/src/uk.rs", "rank": 59, "score": 223298.75830567756 }, { "content": "/// When the PermanentMapEdits format changes, add a transformation here to automatically convert\n\n/// edits written with the old format.\n\n///\n\n/// This problem is often solved with something like protocol buffers, but the resulting proto\n\n/// usually winds up with permanent legacy fields, unless the changes are purely additive. For\n\n/// example, protobufs wouldn't have helped with the fix_intersection_ids problem. Explicit\n\n/// transformation is easier!\n\npub fn upgrade(mut value: Value, map: &Map) -> Result<PermanentMapEdits> {\n\n // c46a74f10f4f1976a48aa8642ac11717d74b262c added an explicit version field. 
There are a few\n\n // changes before that.\n\n if value.get(\"version\").is_none() {\n\n // I don't remember the previous schema change before this. If someone files a bug and has\n\n // an older file, can add support for it then.\n\n fix_offset(&mut value);\n\n fix_intersection_ids(&mut value);\n\n\n\n value\n\n .as_object_mut()\n\n .unwrap()\n\n .insert(\"version\".to_string(), Value::Number(0.into()));\n\n }\n\n if value[\"version\"] == Value::Number(0.into()) {\n\n fix_road_direction(&mut value);\n\n value\n\n .as_object_mut()\n\n .unwrap()\n\n .insert(\"version\".to_string(), Value::Number(1.into()));\n", "file_path": "map_model/src/edits/compat.rs", "rank": 60, "score": 222597.67700528004 }, { "content": "fn make_controls(ctx: &mut EventCtx, app: &App, opts: &Options, legend: Option<Widget>) -> Panel {\n\n let (total_ppl, ppl_in_bldg, ppl_off_map) = app.primary.sim.num_ppl();\n\n\n\n let mut col = vec![\n\n header(\n\n ctx,\n\n &format!(\"Population: {}\", prettyprint_usize(total_ppl)),\n\n ),\n\n Widget::row(vec![\n\n Widget::row(vec![\n\n Image::from_path(\"system/assets/tools/home.svg\").into_widget(ctx),\n\n Line(prettyprint_usize(ppl_in_bldg))\n\n .small()\n\n .into_widget(ctx),\n\n ]),\n\n Line(format!(\"Off-map: {}\", prettyprint_usize(ppl_off_map)))\n\n .small()\n\n .into_widget(ctx),\n\n ])\n\n .centered(),\n", "file_path": "game/src/layer/population.rs", "rank": 61, "score": 222468.13278242399 }, { "content": "pub fn path_edits(name: &MapName, edits_name: &str) -> String {\n\n path(format!(\n\n \"player/edits/{}/{}/{}/{}.json\",\n\n name.city.country, name.city.city, name.map, edits_name\n\n ))\n\n}\n", "file_path": "abstio/src/abst_paths.rs", "rank": 62, "score": 222456.53110126755 }, { "content": "pub fn path_scenario(name: &MapName, scenario_name: &str) -> String {\n\n // TODO Getting complicated. Sometimes we're trying to load, so we should look for .bin, then\n\n // .json. 
But when we're writing a custom scenario, we actually want to write a .bin.\n\n let bin = path(format!(\n\n \"system/{}/{}/scenarios/{}/{}.bin\",\n\n name.city.country, name.city.city, name.map, scenario_name\n\n ));\n\n let json = path(format!(\n\n \"system/{}/{}/scenarios/{}/{}.json\",\n\n name.city.country, name.city.city, name.map, scenario_name\n\n ));\n\n if file_exists(&bin) {\n\n return bin;\n\n }\n\n if file_exists(&json) {\n\n return json;\n\n }\n\n bin\n\n}\n", "file_path": "abstio/src/abst_paths.rs", "rank": 63, "score": 222456.53110126755 }, { "content": "// TODO Need to filter out extraneous crosswalks. Why weren't they being created before?\n\npub fn _make_walking_turns_v2(map: &Map, i: &Intersection) -> Vec<Turn> {\n\n let driving_side = map.config.driving_side;\n\n let all_roads = map.all_roads();\n\n let all_lanes = map.all_lanes();\n\n\n\n // Consider all roads in counter-clockwise order. Every road has up to two sidewalks. Gather\n\n // those in order, remembering what roads don't have them.\n\n let mut lanes: Vec<Option<&Lane>> = Vec::new();\n\n let mut num_sidewalks = 0;\n\n for r in i.get_roads_sorted_by_incoming_angle(all_roads) {\n\n let r = &all_roads[r.0];\n\n let mut fwd = None;\n\n let mut back = None;\n\n for (l, dir, lt) in r.lanes_ltr() {\n\n if lt == LaneType::Sidewalk || lt == LaneType::Shoulder {\n\n if dir == Direction::Fwd {\n\n fwd = Some(&all_lanes[l.0]);\n\n } else {\n\n back = Some(&all_lanes[l.0]);\n\n }\n", "file_path": "map_model/src/make/walking_turns.rs", "rank": 64, "score": 221735.82011838458 }, { "content": "fn create_zones(map: &Map, input: HashMap<String, Polygon>) -> HashMap<String, Zone> {\n\n let all_borders = MapBorders::new(map);\n\n let mut zones = HashMap::new();\n\n for (name, polygon) in input {\n\n let mut overlapping_area = 0.0;\n\n for p in polygon.intersection(map.get_boundary_polygon()) {\n\n overlapping_area += p.area();\n\n }\n\n // Sometimes this is slightly over 100%, because funky things happen 
with the polygon\n\n // intersection.\n\n let pct_overlap = (overlapping_area / polygon.area()).min(1.0);\n\n\n\n // If the zone doesn't intersect our map at all, totally skip it.\n\n if pct_overlap == 0.0 {\n\n continue;\n\n }\n\n zones.insert(\n\n name,\n\n Zone {\n\n polygon,\n", "file_path": "popdat/src/od.rs", "rank": 65, "score": 220254.339903143 }, { "content": "fn four_way_two_stage(map: &Map, i: IntersectionID) -> Option<ControlTrafficSignal> {\n\n let roads = map.get_i(i).get_sorted_incoming_roads(map);\n\n if roads.len() != 4 {\n\n return None;\n\n }\n\n\n\n // Just to refer to these easily, label with directions. Imagine an axis-aligned four-way.\n\n let (north, west, south, east) = (roads[0], roads[1], roads[2], roads[3]);\n\n\n\n // Two-stage with no protected lefts, right turn on red, turning cars yielding to peds\n\n let mut ts = new(i, map);\n\n make_stages(\n\n &mut ts,\n\n map.config.driving_side,\n\n vec![\n\n vec![\n\n (vec![north, south], TurnType::Straight, PROTECTED),\n\n (vec![north, south], TurnType::Right, YIELD),\n\n (vec![north, south], TurnType::Left, YIELD),\n\n (vec![east, west], TurnType::Right, YIELD),\n", "file_path": "map_model/src/make/traffic_signals/mod.rs", "rank": 66, "score": 220133.37014851085 }, { "content": "fn make_signal(map: &Map, id: IntersectionID) -> Option<ControlTrafficSignal> {\n\n let mut ts = new(id, map);\n\n if let Some(other) = three_way_three_stage(map, id) {\n\n ts.stages = other.stages;\n\n } else if let Some(other) = four_way_four_stage(map, id) {\n\n ts.stages = other.stages;\n\n }\n\n ts.convert_to_ped_scramble_without_promotion();\n\n // We don't always get a valid traffic signal from the default 3-way and 4-way. 
When we don't\n\n // we need to try assembling stages with a more complex algorithm.\n\n if ts.validate().is_err() {\n\n if let Some(other) = multi_way_stages(map, id) {\n\n ts.stages = other.stages;\n\n ts.convert_to_ped_scramble_without_promotion();\n\n }\n\n }\n\n if let Err(err) = ts.validate() {\n\n // when all else fails, use stage per road and all-walk stage at the end\n\n debug!(\"multi-way validation_error={} ts={:#?}\", err, ts);\n\n ts = stage_per_road(map, id);\n\n ts.convert_to_ped_scramble();\n\n }\n\n return Some(ts);\n\n}\n\n\n", "file_path": "map_model/src/make/traffic_signals/lagging_green.rs", "rank": 67, "score": 220133.37014851085 }, { "content": "fn four_way_four_stage(map: &Map, i: IntersectionID) -> Option<ControlTrafficSignal> {\n\n let roads = map.get_i(i).get_sorted_incoming_roads(map);\n\n if roads.len() != 4 {\n\n return None;\n\n }\n\n\n\n // Just to refer to these easily, label with directions. Imagine an axis-aligned four-way.\n\n let (north, west, south, east) = (roads[0], roads[1], roads[2], roads[3]);\n\n\n\n // Four-stage with protected lefts, right turn on red (except for the protected lefts),\n\n // turning cars yield to peds\n\n let mut ts = new(i, map);\n\n make_stages(\n\n &mut ts,\n\n map.config.driving_side,\n\n vec![\n\n vec![\n\n (vec![north, south], TurnType::Straight, PROTECTED),\n\n (vec![north, south], TurnType::Right, YIELD),\n\n (vec![east, west], TurnType::Right, YIELD),\n", "file_path": "map_model/src/make/traffic_signals/mod.rs", "rank": 68, "score": 220133.37014851085 }, { "content": "pub fn path_popdat() -> String {\n\n path(\"input/us/seattle/popdat.bin\")\n\n}\n\n\n", "file_path": "abstio/src/abst_paths.rs", "rank": 69, "score": 219655.62713873136 }, { "content": "pub fn deserialize_nodemap<\n\n 'de,\n\n D: Deserializer<'de>,\n\n T: Deserialize<'de> + Copy + Ord + Debug + Serialize,\n\n>(\n\n d: D,\n\n) -> Result<NodeMap<T>, D::Error> {\n\n let inner = <InnerNodeMap<T>>::deserialize(d)?;\n\n let id_to_node = 
inner.id_to_node;\n\n let mut node_to_id = BTreeMap::new();\n\n for (id, node) in id_to_node.iter().enumerate() {\n\n node_to_id.insert(*node, id);\n\n }\n\n\n\n Ok(NodeMap {\n\n node_to_id,\n\n id_to_node,\n\n })\n\n}\n", "file_path": "map_model/src/pathfind/node_map.rs", "rank": 70, "score": 219018.90272094196 }, { "content": "pub fn path_prebaked_results(name: &MapName, scenario_name: &str) -> String {\n\n path(format!(\n\n \"system/{}/{}/prebaked_results/{}/{}.bin\",\n\n name.city.country, name.city.city, name.map, scenario_name\n\n ))\n\n}\n\n\n", "file_path": "abstio/src/abst_paths.rs", "rank": 71, "score": 218685.64583849508 }, { "content": "/// Converts a RawMap to a Map.\n\npub fn raw_to_map(\n\n name: &MapName,\n\n build_ch: bool,\n\n keep_bldg_tags: bool,\n\n timer: &mut Timer,\n\n) -> map_model::Map {\n\n timer.start(format!(\"Raw->Map for {}\", name.describe()));\n\n let raw: map_model::raw::RawMap = abstio::read_binary(abstio::path_raw_map(name), timer);\n\n let map = map_model::Map::create_from_raw(raw, build_ch, keep_bldg_tags, timer);\n\n timer.start(\"save map\");\n\n map.save();\n\n timer.stop(\"save map\");\n\n timer.stop(format!(\"Raw->Map for {}\", name.describe()));\n\n\n\n // TODO Just sticking this here for now\n\n if name.map == \"huge_seattle\" || name == &MapName::new(\"gb\", \"leeds\", \"huge\") {\n\n timer.start(\"generating city manifest\");\n\n abstio::write_binary(\n\n abstio::path(format!(\n\n \"system/{}/{}/city.bin\",\n", "file_path": "importer/src/utils.rs", "rank": 72, "score": 218093.81259018416 }, { "content": "/// Build stages. First find roads that are straight across, they are either one-way or two-way.\n\n/// For one-way, add any right or left turns, thus completing the stage. For two-way, two\n\n/// stages will be added. The first stage has protected straight, and right and yield left.\n\n/// The second stage has protected left. 
Lastly, sometimes oncomming left turns can't both\n\n/// be protected, if this occurs the 2nd stage will have one direction protected and the\n\n/// other yeild and a 3rd, inverse, stage will be added which has the other direction's left\n\n/// protected and other yield. Finally, any turns which weren't assigned, because there\n\n/// are no straights or there are more than just pairs of straight intersections, are assigned a\n\n/// stage. These, too are handled as pairs until one remains, which is handled as a one-way.\n\nfn multi_way_stages(map: &Map, id: IntersectionID) -> Option<ControlTrafficSignal> {\n\n let mut ts = new(id, map);\n\n let (mut right, mut left, straight, mut roads) = movements(&ts);\n\n let (one_way, two_way) = straight_types(&straight);\n\n for m in &one_way {\n\n let mut stage = Stage::new();\n\n stage.protected_movements.insert(m.clone());\n\n for t in turns(&m.from.id, &right) {\n\n stage.protected_movements.insert(t.clone());\n\n }\n\n for t in turns(&m.from.id, &left) {\n\n stage.protected_movements.insert(t.clone());\n\n }\n\n add_stage(&mut ts, stage);\n\n roads.remove(&m.from.id);\n\n }\n\n for (m1, m2) in &two_way {\n\n let mut stage1 = Stage::new();\n\n let mut stage2 = Stage::new();\n\n // Insert the straight movements, followed by the right and then the left.\n", "file_path": "map_model/src/make/traffic_signals/lagging_green.rs", "rank": 73, "score": 217933.71008068626 }, { "content": "fn four_way_four_stage(map: &Map, i: IntersectionID) -> Option<ControlTrafficSignal> {\n\n let roads = map.get_i(i).get_sorted_incoming_roads(map);\n\n if roads.len() != 4 {\n\n return None;\n\n }\n\n\n\n // Just to refer to these easily, label with directions. 
Imagine an axis-aligned four-way.\n\n let (north, west, south, east) = (roads[0], roads[1], roads[2], roads[3]);\n\n\n\n // Four-stage with protected lefts, right turn on red (except for the protected lefts),\n\n // turning cars yield to peds\n\n let mut ts = new(i, map);\n\n make_stages(\n\n &mut ts,\n\n map.config.driving_side,\n\n vec![\n\n vec![\n\n (vec![north, south], TurnType::Straight, PROTECTED),\n\n (vec![north, south], TurnType::Left, YIELD),\n\n (vec![north, south], TurnType::Right, PROTECTED),\n", "file_path": "map_model/src/make/traffic_signals/lagging_green.rs", "rank": 74, "score": 217933.71008068626 }, { "content": "fn three_way_three_stage(map: &Map, i: IntersectionID) -> Option<ControlTrafficSignal> {\n\n let roads = map.get_i(i).get_sorted_incoming_roads(map);\n\n if roads.len() != 3 {\n\n return None;\n\n }\n\n let mut ts = new(i, map);\n\n\n\n // Picture a T intersection. Use turn angles to figure out the \"main\" two roads.\n\n let straight = ts\n\n .movements\n\n .values()\n\n .find(|g| g.turn_type == TurnType::Straight)?;\n\n let (north, south) = (straight.id.from.id, straight.id.to.id);\n\n let east = roads\n\n .into_iter()\n\n .find(|r| *r != north && *r != south)\n\n .unwrap();\n\n\n\n // Three-stage with protected lefts, right turn on red\n\n make_stages(\n", "file_path": "map_model/src/make/traffic_signals/lagging_green.rs", "rank": 75, "score": 217933.71008068626 }, { "content": "// TODO This needs to update turn restrictions too\n\npub fn clip_map(map: &mut RawMap, timer: &mut Timer) {\n\n timer.start(\"clipping map to boundary\");\n\n\n\n // So we can use retain_btreemap without borrowing issues\n\n let boundary_polygon = map.boundary_polygon.clone();\n\n let boundary_ring = Ring::must_new(boundary_polygon.points().clone());\n\n\n\n // This is kind of indirect and slow, but first pass -- just remove roads that start or end\n\n // outside the boundary polygon.\n\n retain_btreemap(&mut map.roads, |_, r| {\n\n let first_in = 
boundary_polygon.contains_pt(r.center_points[0]);\n\n let last_in = boundary_polygon.contains_pt(*r.center_points.last().unwrap());\n\n let light_rail_ok = if r.is_light_rail() {\n\n // Make sure it's in the boundary somewhere\n\n r.center_points\n\n .iter()\n\n .any(|pt| boundary_polygon.contains_pt(*pt))\n\n } else {\n\n false\n\n };\n", "file_path": "convert_osm/src/clip.rs", "rank": 76, "score": 215460.2756555022 }, { "content": "/// Attempt to snap separately mapped cycleways to main roads. Emit extra KML files to debug later.\n\npub fn snap_cycleways(map: &RawMap, timer: &mut Timer) {\n\n // TODO The output here is nondeterministic and I haven't figured out why. Instead of spurious\n\n // data diffs, just totally disable this experiment for now. Will fix when this becomes active\n\n // work again.\n\n if true {\n\n return;\n\n }\n\n\n\n let mut cycleways = BTreeMap::new();\n\n for shape in\n\n abstio::read_binary::<ExtraShapes>(map.name.city.input_path(\"footways.bin\"), timer).shapes\n\n {\n\n // Just cycleways for now. 
This same general strategy should later work for sidewalks,\n\n // tramways, and blockface parking too.\n\n if shape.attributes.get(\"highway\") == Some(&\"cycleway\".to_string()) {\n\n cycleways.insert(\n\n WayID(shape.attributes[osm::OSM_WAY_ID].parse().unwrap()),\n\n shape,\n\n );\n\n }\n", "file_path": "convert_osm/src/snappy.rs", "rank": 77, "score": 215297.71524937204 }, { "content": "pub fn find_next_file(orig: String) -> Option<String> {\n\n let files = list_dir(parent_path(&orig));\n\n files.into_iter().find(|f| *f > orig)\n\n}\n\n\n", "file_path": "abstio/src/io.rs", "rank": 78, "score": 214256.2635925822 }, { "content": "/// Keeps file extensions\n\npub fn find_prev_file(orig: String) -> Option<String> {\n\n let mut files = list_dir(parent_path(&orig));\n\n files.reverse();\n\n files.into_iter().find(|f| *f < orig)\n\n}\n\n\n", "file_path": "abstio/src/io.rs", "rank": 79, "score": 214256.2635925822 }, { "content": "/// Returns full paths\n\npub fn list_dir(path: String) -> Vec<String> {\n\n let mut files: Vec<String> = Vec::new();\n\n match std::fs::read_dir(&path) {\n\n Ok(iter) => {\n\n for entry in iter {\n\n files.push(entry.unwrap().path().to_str().unwrap().to_string());\n\n }\n\n }\n\n Err(ref e) if e.kind() == std::io::ErrorKind::NotFound => {}\n\n Err(e) => panic!(\"Couldn't read_dir {:?}: {}\", path, e),\n\n };\n\n files.sort();\n\n files\n\n}\n\n\n", "file_path": "abstio/src/io_native.rs", "rank": 80, "score": 214231.17874223823 }, { "content": "fn add_extra_buildings(map: &mut RawMap, path: &str) -> Result<()> {\n\n // TODO Refactor code that just extracts polygons from geojson.\n\n let mut polygons = Vec::new();\n\n\n\n let bytes = abstio::slurp_file(path)?;\n\n let raw_string = std::str::from_utf8(&bytes)?;\n\n let geojson = raw_string.parse::<geojson::GeoJson>()?;\n\n\n\n if let geojson::GeoJson::FeatureCollection(collection) = geojson {\n\n for feature in collection.features {\n\n if let Some(geom) = feature.geometry {\n\n if let 
geojson::Value::Polygon(raw_pts) = geom.value {\n\n // TODO Handle holes, and also, refactor this!\n\n let gps_pts: Vec<LonLat> = raw_pts[0]\n\n .iter()\n\n .map(|pt| LonLat::new(pt[0], pt[1]))\n\n .collect();\n\n if let Some(pts) = map.gps_bounds.try_convert(&gps_pts) {\n\n if let Ok(ring) = Ring::new(pts) {\n\n polygons.push(ring.to_polygon());\n", "file_path": "convert_osm/src/lib.rs", "rank": 81, "score": 213643.00892982172 }, { "content": "pub fn delete_file<I: AsRef<str>>(path: I) {\n\n // TODO\n\n warn!(\"Not deleting {}\", path.as_ref());\n\n}\n", "file_path": "abstio/src/io_web.rs", "rank": 82, "score": 213025.84037511263 }, { "content": "/// Idempotent\n\npub fn delete_file<I: AsRef<str>>(path: I) {\n\n let path = path.as_ref();\n\n if std::fs::remove_file(path).is_ok() {\n\n println!(\"Deleted {}\", path);\n\n } else {\n\n println!(\"{} doesn't exist, so not deleting it\", path);\n\n }\n\n}\n\n\n\n// TODO I'd like to get rid of this and just use Timer.read_file, but external libraries consume\n\n// the reader. 
:\\\n\npub struct FileWithProgress {\n\n inner: BufReader<File>,\n\n\n\n path: String,\n\n processed_bytes: usize,\n\n total_bytes: usize,\n\n started_at: Instant,\n\n last_printed_at: Instant,\n\n}\n", "file_path": "abstio/src/io_native.rs", "rank": 83, "score": 213025.84037511263 }, { "content": "pub fn basename<I: AsRef<str>>(path: I) -> String {\n\n std::path::Path::new(path.as_ref())\n\n .file_stem()\n\n .unwrap()\n\n .to_os_string()\n\n .into_string()\n\n .unwrap()\n\n}\n\n\n", "file_path": "abstutil/src/utils.rs", "rank": 84, "score": 212522.01555464056 }, { "content": "pub fn prettyprint_usize(x: usize) -> String {\n\n let num = format!(\"{}\", x);\n\n let mut result = String::new();\n\n let mut i = num.len();\n\n for c in num.chars() {\n\n result.push(c);\n\n i -= 1;\n\n if i > 0 && i % 3 == 0 {\n\n result.push(',');\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "abstutil/src/utils.rs", "rank": 85, "score": 211445.03094337016 }, { "content": "/// Starting from some number of total people living in a polygonal area, randomly distribute them\n\n/// to residential buildings within that area. 
Returns a list of homes with the number of residents\n\n/// in each.\n\npub fn distribute_population_to_homes(\n\n polygon: geo::Polygon<f64>,\n\n population: usize,\n\n map: &Map,\n\n rng: &mut XorShiftRng,\n\n) -> Vec<(BuildingID, usize)> {\n\n let map_boundary = geo::Polygon::from(map.get_boundary_polygon().clone());\n\n let bldgs: Vec<map_model::BuildingID> = map\n\n .all_buildings()\n\n .into_iter()\n\n .filter(|b| {\n\n polygon.contains(&geo::Point::from(b.label_center)) && b.bldg_type.has_residents()\n\n })\n\n .map(|b| b.id)\n\n .collect();\n\n\n\n // If the area is partly out-of-bounds, then scale down the number of residents linearly\n\n // based on area of the overlapping part of the polygon.\n\n use geo_booleanop::boolean::BooleanOp;\n\n let pct_overlap = polygon.intersection(&map_boundary).unsigned_area() / polygon.unsigned_area();\n", "file_path": "popdat/src/distribute_people.rs", "rank": 86, "score": 211011.60771349963 }, { "content": "// Could be caused by closing intersections\n\npub fn check_sidewalk_connectivity(\n\n ctx: &mut EventCtx,\n\n app: &mut App,\n\n cmd: EditCmd,\n\n) -> Option<Box<dyn State<App>>> {\n\n let orig_edits = app.primary.map.get_edits().clone();\n\n let (_, disconnected_before) =\n\n connectivity::find_scc(&app.primary.map, PathConstraints::Pedestrian);\n\n\n\n let mut edits = orig_edits.clone();\n\n edits.commands.push(cmd);\n\n app.primary.map.try_apply_edits(edits);\n\n\n\n let (_, disconnected_after) =\n\n connectivity::find_scc(&app.primary.map, PathConstraints::Pedestrian);\n\n app.primary.map.must_apply_edits(orig_edits);\n\n\n\n let newly_disconnected = disconnected_after\n\n .difference(&disconnected_before)\n\n .collect::<Vec<_>>();\n", "file_path": "game/src/edit/validate.rs", "rank": 87, "score": 210996.58927791816 }, { "content": "/// Finalize importing of buildings, mostly by matching them to the nearest sidewalk.\n\npub fn make_all_buildings(\n\n input: &BTreeMap<osm::OsmID, RawBuilding>,\n\n map: &Map,\n\n 
keep_bldg_tags: bool,\n\n timer: &mut Timer,\n\n) -> Vec<Building> {\n\n timer.start(\"convert buildings\");\n\n let mut center_per_bldg: BTreeMap<osm::OsmID, HashablePt2D> = BTreeMap::new();\n\n let mut query: HashSet<HashablePt2D> = HashSet::new();\n\n timer.start_iter(\"get building center points\", input.len());\n\n for (id, b) in input {\n\n timer.next();\n\n let center = b.polygon.center().to_hashable();\n\n center_per_bldg.insert(*id, center);\n\n query.insert(center);\n\n }\n\n\n\n // equiv_pos could be a little closer, so use two buffers\n\n let sidewalk_buffer = Distance::meters(7.5);\n\n let sidewalk_pts = match_points_to_lanes(\n", "file_path": "map_model/src/make/buildings.rs", "rank": 88, "score": 209431.89156064924 }, { "content": "/// This returns the pathfinding cost of crossing one road and turn. This is also expressed in\n\n/// units of time. It factors in the ideal time to cross the space, along with penalties for\n\n/// entering an access-restricted zone, taking an unprotected turn, and so on.\n\npub fn vehicle_cost(\n\n dr: DirectedRoadID,\n\n mvmnt: MovementID,\n\n constraints: PathConstraints,\n\n params: &RoutingParams,\n\n map: &Map,\n\n) -> Duration {\n\n // TODO Creating the consolidated polyline sometimes fails. 
It's rare, so just workaround\n\n // temporarily by pretending the turn is 1m long.\n\n let (mvmnt_length, mvmnt_turn_type) = mvmnt\n\n .get(map)\n\n .map(|m| (m.geom.length(), m.turn_type))\n\n .unwrap_or((Distance::meters(1.0), TurnType::Straight));\n\n let max_speed = match constraints {\n\n PathConstraints::Car | PathConstraints::Bus | PathConstraints::Train => None,\n\n PathConstraints::Bike => Some(crate::MAX_BIKE_SPEED),\n\n PathConstraints::Pedestrian => unreachable!(),\n\n };\n\n let t1 = map.get_r(dr.id).center_pts.length()\n\n / Traversable::max_speed_along_road(dr, max_speed, constraints, map);\n", "file_path": "map_model/src/pathfind/vehicles.rs", "rank": 89, "score": 209425.93572295824 }, { "content": "// Returns a legend\n\npub fn make_heatmap(\n\n ctx: &mut EventCtx,\n\n batch: &mut GeomBatch,\n\n bounds: &Bounds,\n\n pts: Vec<Pt2D>,\n\n opts: &HeatmapOptions,\n\n) -> Widget {\n\n // 7 colors, 8 labels\n\n let num_colors = 7;\n\n let gradient = match opts.color_scheme.as_ref() {\n\n \"Turbo\" => colorous::TURBO,\n\n \"Inferno\" => colorous::INFERNO,\n\n \"Warm\" => colorous::WARM,\n\n \"Cool\" => colorous::COOL,\n\n \"Oranges\" => colorous::ORANGES,\n\n \"Spectral\" => colorous::SPECTRAL,\n\n _ => unreachable!(),\n\n };\n\n let colors: Vec<Color> = (0..num_colors)\n\n .map(|i| {\n", "file_path": "map_gui/src/tools/heatmap.rs", "rank": 90, "score": 209425.93572295824 }, { "content": "/// Describe all public transit routes and keep under version control to spot diffs easily.\n\nfn dump_route_goldenfile(map: &map_model::Map) -> Result<()> {\n\n let path = abstio::path(format!(\n\n \"route_goldenfiles/{}.txt\",\n\n map.get_name().as_filename()\n\n ));\n\n let mut f = File::create(path)?;\n\n for br in map.all_bus_routes() {\n\n writeln!(\n\n f,\n\n \"{} from {} to {:?}\",\n\n br.osm_rel_id, br.start, br.end_border\n\n )?;\n\n for bs in &br.stops {\n\n let bs = map.get_bs(*bs);\n\n writeln!(\n\n f,\n\n \" {}: {} driving, {} sidewalk\",\n\n bs.name, 
bs.driving_pos, bs.sidewalk_pos\n\n )?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/src/main.rs", "rank": 91, "score": 209307.28641081753 }, { "content": "fn validate_restrictions(map: &Map, steps: &Vec<PathStep>) {\n\n for triple in steps.windows(5) {\n\n if let (PathStep::Lane(l1), PathStep::Lane(l2), PathStep::Lane(l3)) =\n\n (triple[0], triple[2], triple[4])\n\n {\n\n let from = map.get_parent(l1);\n\n let via = map.get_l(l2).parent;\n\n let to = map.get_l(l3).parent;\n\n\n\n for (dont_via, dont_to) in &from.complicated_turn_restrictions {\n\n if via == *dont_via && to == *dont_to {\n\n panic!(\n\n \"Some path does illegal uber-turn: {} -> {} -> {}\",\n\n l1, l2, l3\n\n );\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "map_model/src/pathfind/v1.rs", "rank": 92, "score": 208646.8602802612 }, { "content": "fn validate_continuity(map: &Map, steps: &Vec<PathStep>) {\n\n if steps.is_empty() {\n\n panic!(\"Empty path\");\n\n }\n\n for pair in steps.windows(2) {\n\n let from = match pair[0] {\n\n PathStep::Lane(id) => map.get_l(id).last_pt(),\n\n PathStep::ContraflowLane(id) => map.get_l(id).first_pt(),\n\n PathStep::Turn(id) => map.get_t(id).geom.last_pt(),\n\n };\n\n let to = match pair[1] {\n\n PathStep::Lane(id) => map.get_l(id).first_pt(),\n\n PathStep::ContraflowLane(id) => map.get_l(id).last_pt(),\n\n PathStep::Turn(id) => map.get_t(id).geom.first_pt(),\n\n };\n\n let len = from.dist_to(to);\n\n if len > EPSILON_DIST {\n\n println!(\"All steps in invalid path:\");\n\n for s in steps {\n\n match s {\n", "file_path": "map_model/src/pathfind/v1.rs", "rank": 93, "score": 208646.8602802612 }, { "content": "fn make_bike_lane_scenario(map: &Map) -> ScenarioGenerator {\n\n let mut s = ScenarioGenerator::empty(\"car vs bike contention\");\n\n s.border_spawn_over_time.push(BorderSpawnOverTime {\n\n num_peds: 0,\n\n num_cars: 10,\n\n num_bikes: 10,\n\n percent_use_transit: 0.0,\n\n start_time: Time::START_OF_DAY,\n\n stop_time: Time::START_OF_DAY + 
Duration::seconds(10.0),\n\n start_from_border: map.find_i_by_osm_id(osm::NodeID(3005680098)).unwrap(),\n\n goal: Some(TripEndpoint::Bldg(\n\n map.find_b_by_osm_id(bldg(217699501)).unwrap(),\n\n )),\n\n });\n\n s\n\n}\n\n\n", "file_path": "game/src/sandbox/gameplay/tutorial.rs", "rank": 94, "score": 208502.51720099265 }, { "content": "/// Some roads might be totally disconnected from the largest clump because of how the map's\n\n/// bounding polygon was drawn, or bad map data, or which roads are filtered from OSM. Remove them.\n\npub fn remove_disconnected_roads(map: &mut RawMap, timer: &mut Timer) {\n\n timer.start(\"removing disconnected roads\");\n\n // This is a simple floodfill, not Tarjan's. Assumes all roads bidirectional.\n\n // All the usizes are indices into the original list of roads\n\n\n\n let mut next_roads: MultiMap<osm::NodeID, OriginalRoad> = MultiMap::new();\n\n for id in map.roads.keys() {\n\n next_roads.insert(id.i1, *id);\n\n next_roads.insert(id.i2, *id);\n\n }\n\n\n\n let mut partitions: Vec<Vec<OriginalRoad>> = Vec::new();\n\n let mut unvisited_roads: BTreeSet<OriginalRoad> = map\n\n .roads\n\n .iter()\n\n .filter_map(|(id, r)| if r.is_light_rail() { None } else { Some(*id) })\n\n .collect();\n\n\n\n while !unvisited_roads.is_empty() {\n\n let mut queue_roads: Vec<OriginalRoad> = vec![*unvisited_roads.iter().next().unwrap()];\n", "file_path": "map_model/src/make/remove_disconnected.rs", "rank": 95, "score": 207724.1699086582 }, { "content": "/// Merge tiny \"roads\" that're actually just part of a complicated intersection. 
Returns all\n\n/// surviving intersections adjacent to one of these merged roads.\n\npub fn merge_short_roads(map: &mut RawMap) -> BTreeSet<NodeID> {\n\n let mut merged = BTreeSet::new();\n\n\n\n let mut queue: VecDeque<OriginalRoad> = VecDeque::new();\n\n for r in map.roads.keys() {\n\n queue.push_back(*r);\n\n\n\n // TODO Remove after improving this heuristic.\n\n if false && connects_dual_carriageway(map, r) {\n\n debug!(\"{} connects dual carriageways\", r);\n\n }\n\n }\n\n\n\n while !queue.is_empty() {\n\n let id = queue.pop_front().unwrap();\n\n\n\n // The road might've been deleted\n\n if !map.roads.contains_key(&id) {\n\n continue;\n\n }\n", "file_path": "map_model/src/make/merge_intersections.rs", "rank": 96, "score": 207310.23754765777 }, { "content": "pub fn path_all_saves(name: &MapName, edits_name: &str, run_name: &str) -> String {\n\n path(format!(\n\n \"player/saves/{}/{}/{}/{}_{}\",\n\n name.city.country, name.city.city, name.map, edits_name, run_name\n\n ))\n\n}\n\n\n\n// Input data (For developers to build maps, not needed at runtime)\n\n\n", "file_path": "abstio/src/abst_paths.rs", "rank": 97, "score": 206406.1063114886 }, { "content": "pub fn pathfind_avoiding_roads(\n\n req: PathRequest,\n\n avoid: BTreeSet<RoadID>,\n\n map: &Map,\n\n) -> Option<(Path, Duration)> {\n\n assert_eq!(req.constraints, PathConstraints::Car);\n\n let mut graph = DiGraphMap::new();\n\n for dr in map.all_directed_roads_for(req.constraints) {\n\n if avoid.contains(&dr.id) {\n\n continue;\n\n }\n\n for mvmnt in map.get_movements_for(dr, req.constraints) {\n\n graph.add_edge(mvmnt.from, mvmnt.to, mvmnt);\n\n }\n\n }\n\n\n\n calc_path(graph, req, map.routing_params(), map)\n\n}\n\n\n", "file_path": "map_model/src/pathfind/dijkstra.rs", "rank": 98, "score": 205473.2276849893 }, { "content": "pub fn build_graph_for_vehicles(\n\n map: &Map,\n\n constraints: PathConstraints,\n\n) -> DiGraphMap<DirectedRoadID, MovementID> {\n\n let mut graph = DiGraphMap::new();\n\n for dr in 
map.all_directed_roads_for(constraints) {\n\n for mvmnt in map.get_movements_for(dr, constraints) {\n\n graph.add_edge(mvmnt.from, mvmnt.to, mvmnt);\n\n }\n\n }\n\n graph\n\n}\n\n\n", "file_path": "map_model/src/pathfind/dijkstra.rs", "rank": 99, "score": 205473.2276849893 } ]
Rust
src/objects/mod.rs
hinton-lang/hinton
796ae395ce45240676875b7ddeddb9b5e54016b2
use crate::built_in::{NativeBoundMethod, NativeFn}; use crate::core::chunk::Chunk; use crate::objects::class_obj::*; use crate::objects::dictionary_obj::*; use crate::objects::iter_obj::IterObject; use std::cell::RefCell; use std::fmt; use std::fmt::Formatter; use std::rc::Rc; pub mod class_obj; pub mod dictionary_obj; pub mod indexing; pub mod iter_obj; mod native_operations; #[derive(Clone)] pub struct RangeObject { pub min: i64, pub max: i64, } #[derive(Clone)] pub struct FuncObject { pub defaults: Vec<Object>, pub min_arity: u8, pub max_arity: u8, pub chunk: Chunk, pub name: String, pub up_val_count: usize, } impl Default for FuncObject { fn default() -> Self { Self { defaults: vec![], min_arity: 0, max_arity: 0, chunk: Chunk::new(), name: String::from(""), up_val_count: 0, } } } impl fmt::Display for FuncObject { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { if self.name == "fn" { write!(f, "<Func '<lambda>' at {:p}>", &*self as *const _) } else { write!(f, "<Func '{}' at {:p}>", &self.name, &*self as *const _) } } } impl FuncObject { pub fn bound_method(f: Rc<RefCell<FuncObject>>, i: Rc<RefCell<InstanceObject>>) -> Object { Object::BoundMethod(BoundMethod { receiver: i, method: ClosureObject { function: f, up_values: vec![], }, }) } } #[derive(Clone)] pub struct NativeFuncObj { pub name: String, pub min_arity: u8, pub max_arity: u8, pub body: NativeFn, } impl fmt::Display for NativeFuncObj { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { write!(f, "<Func '{}' at {:p}>", self.name, &self.body as *const _) } } #[derive(Clone)] pub struct NativeMethodObj { pub class_name: String, pub method_name: String, pub value: Box<Object>, pub min_arity: u8, pub max_arity: u8, pub body: NativeBoundMethod, } impl fmt::Display for NativeMethodObj { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { write!( f, "<Method '{}.{}' at {:p}>", self.class_name, self.method_name, &self.body as *const _ ) } } #[derive(Clone)] pub 
struct ClosureObject { pub function: Rc<RefCell<FuncObject>>, pub up_values: Vec<Rc<RefCell<UpValRef>>>, } impl fmt::Display for ClosureObject { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { write!(f, "{}", self.function.borrow()) } } impl ClosureObject { pub fn into_bound_method(self, c: Rc<RefCell<InstanceObject>>) -> Object { Object::BoundMethod(BoundMethod { receiver: c, method: self, }) } } #[derive(Clone)] pub enum UpValRef { Open(usize), Closed(Object), } impl UpValRef { pub fn is_open_at(&self, index: usize) -> bool { match self { UpValRef::Closed(_) => false, UpValRef::Open(i) => *i == index, } } } #[derive(Clone)] pub enum Object { Array(Rc<RefCell<Vec<Object>>>), Bool(bool), BoundMethod(BoundMethod), BoundNativeMethod(NativeMethodObj), Class(Rc<RefCell<ClassObject>>), Closure(ClosureObject), Dict(DictObject), Float(f64), Function(Rc<RefCell<FuncObject>>), Instance(Rc<RefCell<InstanceObject>>), Int(i64), Iter(Rc<RefCell<IterObject>>), Native(Box<NativeFuncObj>), Null, Range(RangeObject), String(String), Tuple(Rc<Vec<Object>>), } impl From<NativeFuncObj> for Object { fn from(o: NativeFuncObj) -> Self { Object::Native(Box::new(o)) } } impl From<NativeMethodObj> for Object { fn from(o: NativeMethodObj) -> Self { Object::BoundNativeMethod(o) } } impl From<FuncObject> for Object { fn from(o: FuncObject) -> Self { Object::Function(Rc::new(RefCell::new(o))) } } impl From<ClassObject> for Object { fn from(o: ClassObject) -> Self { Object::Class(Rc::new(RefCell::new(o))) } } impl From<InstanceObject> for Object { fn from(o: InstanceObject) -> Self { Object::Instance(Rc::new(RefCell::new(o))) } } impl From<String> for Object { fn from(o: String) -> Self { Object::String(o) } } impl From<&str> for Object { fn from(o: &str) -> Self { Object::String(o.to_string()) } } impl From<usize> for Object { fn from(o: usize) -> Self { Object::Int(o as i64) } } pub fn obj_vectors_equal(v1: &[Object], v2: &[Object]) -> bool { if v1.len() != v2.len() { false } 
else { for (i, o) in v1.iter().enumerate() { if o != &v2[i] { return false; } } true } } impl Object { pub fn type_name(&self) -> String { return match self { Self::Array(_) => String::from("Array"), Self::Bool(_) => String::from("Bool"), Self::Dict(_) => String::from("Dict"), Self::Float(_) => String::from("Float"), Self::Function(_) | Self::Native(_) | Self::Closure(_) | Self::BoundMethod(_) | Self::BoundNativeMethod(_) => String::from("Function"), Self::Int(_) => String::from("Int"), Self::Iter(_) => String::from("Iter"), Self::Null => String::from("Null"), Self::Range(_) => String::from("Range"), Self::String(_) => String::from("String"), Self::Tuple(_) => String::from("Tuple"), Self::Class(c) => c.borrow().name.clone(), Self::Instance(i) => i.borrow().class.borrow().name.clone(), }; } pub fn is_int(&self) -> bool { matches!(self, Object::Int(_)) } pub fn is_float(&self) -> bool { matches!(self, Object::Float(_)) } pub fn is_bool(&self) -> bool { matches!(self, Object::Bool(_)) } pub fn is_falsey(&self) -> bool { match self { Self::Null => true, Self::Bool(val) => !val, Self::Int(x) if *x == 0i64 => true, Self::Float(x) if *x == 0f64 => true, _ => false, } } pub fn as_int(&self) -> Option<i64> { match self { Object::Int(v) => Some(*v), Object::Bool(b) => { if *b { Some(1i64) } else { Some(0i64) } } _ => None, } } pub fn as_float(&self) -> Option<f64> { match self { Object::Float(v) => Some(*v), _ => None, } } pub fn as_bool(&self) -> Option<bool> { match self { Object::Bool(v) => Some(*v), _ => None, } } #[cfg(feature = "show_bytecode")] pub fn as_function(&self) -> Option<&Rc<RefCell<FuncObject>>> { match self { Object::Function(v) => Some(v), _ => None, } } } impl<'a> fmt::Display for Object { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { match *self { Object::Int(ref inner) => write!(f, "\x1b[38;5;81m{}\x1b[0m", inner), Object::Instance(ref inner) => write!(f, "{}", inner.borrow()), Object::Native(ref inner) => write!(f, "{}", inner), 
Object::String(ref inner) => write!(f, "{}", inner), Object::Bool(inner) => write!(f, "\x1b[38;5;3m{}\x1b[0m", if inner { "true" } else { "false" }), Object::Iter(ref inner) => write!(f, "{}", inner.borrow()), Object::Function(ref inner) => write!(f, "{}", inner.borrow()), Object::Closure(ref inner) => write!(f, "{}", inner), Object::BoundMethod(ref inner) => write!(f, "{}", inner), Object::BoundNativeMethod(ref inner) => write!(f, "{}", inner), Object::Null => f.write_str("\x1b[37;1mnull\x1b[0m"), Object::Dict(ref inner) => write!(f, "{}", inner), Object::Float(ref inner) => { let fractional = if inner.fract() == 0.0 { ".0" } else { "" }; write!(f, "\x1b[38;5;81m{}{}\x1b[0m", inner, fractional) } Object::Range(ref inner) => { write!( f, "[\x1b[38;5;81m{}\x1b[0m..\x1b[38;5;81m{}\x1b[0m]", inner.min, inner.max ) } Object::Class(ref inner) => { let prt_str = format!("{:p}", &*inner.borrow() as *const _); fmt::Display::fmt(&format!("<Class '{}' at {}>", inner.borrow().name, prt_str), f) } Object::Array(ref inner) => { let arr = &inner.borrow(); let mut arr_str = String::from("["); for (idx, obj) in arr.iter().enumerate() { if idx == arr.len() - 1 { arr_str += &(format!("{}", obj))[..] } else { arr_str += &(format!("{}, ", obj))[..]; } } arr_str += "]"; write!(f, "{}", arr_str) } Object::Tuple(ref inner) => { let mut arr_str = String::from("("); for (idx, obj) in inner.iter().enumerate() { if idx == inner.len() - 1 { arr_str += &(format!("{}", obj))[..] } else { arr_str += &(format!("{}, ", obj))[..]; } } arr_str += ")"; write!(f, "{}", arr_str) } } } }
use crate::built_in::{NativeBoundMethod, NativeFn}; use crate::core::chunk::Chunk; use crate::objects::class_obj::*; use crate::objects::dictionary_obj::*; use crate::objects::iter_obj::IterObject; use std::cell::RefCell; use std::fmt; use std::fmt::Formatter; use std::rc::Rc; pub mod class_obj; pub mod dictionary_obj; pub mod indexing; pub mod iter_obj; mod native_operations; #[derive(Clone)] pub struct RangeObject { pub min: i64, pub max: i64, } #[derive(Clone)] pub struct FuncObject { pub defaults: Vec<Object>, pub min_arity: u8, pub max_arity: u8, pub chunk: Chunk, pub name: String, pub up_val_count: usize, } impl Default for FuncObject { fn default() -> Self { Self { defaults: vec![], min_arity: 0, max_arity: 0, chunk: Chunk::new(), name: String::from(""), up_val_count: 0, } } } impl fmt::Display for FuncObject { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { if self.name == "fn" { write!(f, "<Func '<lambda>' at {:p}>", &*self as *const _) } else { write!(f, "<Func '{}' at {:p}>", &self.name, &*self as *const _) } } } impl FuncObject { pub fn bound_method(f: Rc<RefCell<FuncObject>>, i: Rc<RefCell<InstanceObject>>) -> Object { Object::BoundMethod(BoundMethod { receiver: i, method: ClosureObject { function: f, up_values: vec![], }, }) } } #[derive(Clone)] pub struct NativeFuncObj { pub name: String, pub min_arity: u8, pub max_arity: u8, pub body: NativeFn, } impl fmt::Display for NativeFuncObj { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { write!(f, "<Func '{}' at {:p}>", self.name, &self.body as *const _) } } #[derive(Clone)] pub struct NativeMethodObj { pub class_name: String, pub method_name: String, pub value: Box<Object>, pub min_arity: u8, pub max_arity: u8, pub body: NativeBoundMethod, } impl fmt::Display for NativeMethodObj { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { write!( f, "<Method '{}.{}' at {:p}>", self.class_name, self.method_name, &self.body as *const _ ) } } #[derive(Clone)] pub 
struct ClosureObject { pub function: Rc<RefCell<FuncObject>>, pub up_values: Vec<Rc<RefCell<UpValRef>>>, } impl fmt::Display for ClosureObject { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), fmt::Error> { write!(f, "{}", self.function.borrow()) } } impl ClosureObject {
} #[derive(Clone)] pub enum UpValRef { Open(usize), Closed(Object), } impl UpValRef { pub fn is_open_at(&self, index: usize) -> bool { match self { UpValRef::Closed(_) => false, UpValRef::Open(i) => *i == index, } } } #[derive(Clone)] pub enum Object { Array(Rc<RefCell<Vec<Object>>>), Bool(bool), BoundMethod(BoundMethod), BoundNativeMethod(NativeMethodObj), Class(Rc<RefCell<ClassObject>>), Closure(ClosureObject), Dict(DictObject), Float(f64), Function(Rc<RefCell<FuncObject>>), Instance(Rc<RefCell<InstanceObject>>), Int(i64), Iter(Rc<RefCell<IterObject>>), Native(Box<NativeFuncObj>), Null, Range(RangeObject), String(String), Tuple(Rc<Vec<Object>>), } impl From<NativeFuncObj> for Object { fn from(o: NativeFuncObj) -> Self { Object::Native(Box::new(o)) } } impl From<NativeMethodObj> for Object { fn from(o: NativeMethodObj) -> Self { Object::BoundNativeMethod(o) } } impl From<FuncObject> for Object { fn from(o: FuncObject) -> Self { Object::Function(Rc::new(RefCell::new(o))) } } impl From<ClassObject> for Object { fn from(o: ClassObject) -> Self { Object::Class(Rc::new(RefCell::new(o))) } } impl From<InstanceObject> for Object { fn from(o: InstanceObject) -> Self { Object::Instance(Rc::new(RefCell::new(o))) } } impl From<String> for Object { fn from(o: String) -> Self { Object::String(o) } } impl From<&str> for Object { fn from(o: &str) -> Self { Object::String(o.to_string()) } } impl From<usize> for Object { fn from(o: usize) -> Self { Object::Int(o as i64) } } pub fn obj_vectors_equal(v1: &[Object], v2: &[Object]) -> bool { if v1.len() != v2.len() { false } else { for (i, o) in v1.iter().enumerate() { if o != &v2[i] { return false; } } true } } impl Object { pub fn type_name(&self) -> String { return match self { Self::Array(_) => String::from("Array"), Self::Bool(_) => String::from("Bool"), Self::Dict(_) => String::from("Dict"), Self::Float(_) => String::from("Float"), Self::Function(_) | Self::Native(_) | Self::Closure(_) | Self::BoundMethod(_) | 
Self::BoundNativeMethod(_) => String::from("Function"), Self::Int(_) => String::from("Int"), Self::Iter(_) => String::from("Iter"), Self::Null => String::from("Null"), Self::Range(_) => String::from("Range"), Self::String(_) => String::from("String"), Self::Tuple(_) => String::from("Tuple"), Self::Class(c) => c.borrow().name.clone(), Self::Instance(i) => i.borrow().class.borrow().name.clone(), }; } pub fn is_int(&self) -> bool { matches!(self, Object::Int(_)) } pub fn is_float(&self) -> bool { matches!(self, Object::Float(_)) } pub fn is_bool(&self) -> bool { matches!(self, Object::Bool(_)) } pub fn is_falsey(&self) -> bool { match self { Self::Null => true, Self::Bool(val) => !val, Self::Int(x) if *x == 0i64 => true, Self::Float(x) if *x == 0f64 => true, _ => false, } } pub fn as_int(&self) -> Option<i64> { match self { Object::Int(v) => Some(*v), Object::Bool(b) => { if *b { Some(1i64) } else { Some(0i64) } } _ => None, } } pub fn as_float(&self) -> Option<f64> { match self { Object::Float(v) => Some(*v), _ => None, } } pub fn as_bool(&self) -> Option<bool> { match self { Object::Bool(v) => Some(*v), _ => None, } } #[cfg(feature = "show_bytecode")] pub fn as_function(&self) -> Option<&Rc<RefCell<FuncObject>>> { match self { Object::Function(v) => Some(v), _ => None, } } } impl<'a> fmt::Display for Object { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { match *self { Object::Int(ref inner) => write!(f, "\x1b[38;5;81m{}\x1b[0m", inner), Object::Instance(ref inner) => write!(f, "{}", inner.borrow()), Object::Native(ref inner) => write!(f, "{}", inner), Object::String(ref inner) => write!(f, "{}", inner), Object::Bool(inner) => write!(f, "\x1b[38;5;3m{}\x1b[0m", if inner { "true" } else { "false" }), Object::Iter(ref inner) => write!(f, "{}", inner.borrow()), Object::Function(ref inner) => write!(f, "{}", inner.borrow()), Object::Closure(ref inner) => write!(f, "{}", inner), Object::BoundMethod(ref inner) => write!(f, "{}", inner), 
Object::BoundNativeMethod(ref inner) => write!(f, "{}", inner), Object::Null => f.write_str("\x1b[37;1mnull\x1b[0m"), Object::Dict(ref inner) => write!(f, "{}", inner), Object::Float(ref inner) => { let fractional = if inner.fract() == 0.0 { ".0" } else { "" }; write!(f, "\x1b[38;5;81m{}{}\x1b[0m", inner, fractional) } Object::Range(ref inner) => { write!( f, "[\x1b[38;5;81m{}\x1b[0m..\x1b[38;5;81m{}\x1b[0m]", inner.min, inner.max ) } Object::Class(ref inner) => { let prt_str = format!("{:p}", &*inner.borrow() as *const _); fmt::Display::fmt(&format!("<Class '{}' at {}>", inner.borrow().name, prt_str), f) } Object::Array(ref inner) => { let arr = &inner.borrow(); let mut arr_str = String::from("["); for (idx, obj) in arr.iter().enumerate() { if idx == arr.len() - 1 { arr_str += &(format!("{}", obj))[..] } else { arr_str += &(format!("{}, ", obj))[..]; } } arr_str += "]"; write!(f, "{}", arr_str) } Object::Tuple(ref inner) => { let mut arr_str = String::from("("); for (idx, obj) in inner.iter().enumerate() { if idx == inner.len() - 1 { arr_str += &(format!("{}", obj))[..] } else { arr_str += &(format!("{}, ", obj))[..]; } } arr_str += ")"; write!(f, "{}", arr_str) } } } }
pub fn into_bound_method(self, c: Rc<RefCell<InstanceObject>>) -> Object { Object::BoundMethod(BoundMethod { receiver: c, method: self, }) }
function_block-full_function
[ { "content": "#[cfg(feature = \"show_bytecode\")]\n\npub fn disassemble_func_scope(chunk: &Chunk, natives: &[String], primitives: &[String], name: &str) {\n\n // prints this chunk's name\n\n println!(\"==== {} ====\", name);\n\n\n\n let mut current_line = 0;\n\n\n\n let mut idx = 0;\n\n while idx < chunk.len() {\n\n let code = chunk.get_byte(idx);\n\n let line_info = chunk.get_line_info(idx);\n\n\n\n // Prints a line number or a vertical bar indicating that the\n\n // current instruction is in the same line as the previous one.\n\n if line_info.0 != current_line {\n\n print!(\"{:>05}\\t\", line_info.0);\n\n current_line = line_info.0;\n\n } else {\n\n print!(\" |\\t\")\n\n }\n\n\n", "file_path": "src/core/bytecode.rs", "rank": 0, "score": 264353.06332231953 }, { "content": "/// Gets the length of a Hinton string.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The array object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn len(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n vm.push_stack(Object::from(verify_string_object!(this, \"len\").len()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/string.rs", "rank": 1, "score": 241079.48090958377 }, { "content": "/// Creates a copy of a string and lowercase it.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The array object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn to_lower(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n vm.push_stack(Object::from(\n\n verify_string_object!(this, \"to_lower\").to_lowercase(),\n\n ))\n\n}\n\n\n", "file_path": "src/built_in/primitives/string.rs", "rank": 2, "score": 241079.48090958374 }, { "content": "/// Converts a Hinton integer into a Hinton 
string.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The integer object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn to_string(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n vm.push_stack(Object::from(format!(\"{}\", verify_int_object!(this, \"to_string\"))))\n\n}\n\n\n", "file_path": "src/built_in/primitives/int.rs", "rank": 3, "score": 241079.3467798797 }, { "content": "/// Takes an i64 integer and converts it into an object index. This allows indexing objects with\n\n/// negative integers.\n\n///\n\n/// # Parameters\n\n/// - `x`: The positive or negative index.\n\n/// - `len`: The length of the object.\n\n///\n\n/// # Returns\n\n/// - `Option<usize>`: Return Some(usize) if the index is within the bounds of the object's length\n\n/// or `None` otherwise.\n\npub fn to_bounded_index(x: i64, len: usize) -> Option<usize> {\n\n if x >= 0 && (x as usize) < len {\n\n Some(x as usize)\n\n } else if x < 0 && (i64::abs(x) as usize <= len) {\n\n Some(len - i64::abs(x) as usize)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/objects/indexing.rs", "rank": 4, "score": 236941.6648821286 }, { "content": "/// Get the ith character in a Hinton string.\n\n///\n\n/// # Parameters\n\n/// - `str`: A reference to the underlying `String` in a Hinton string.\n\n/// - `index`: A Hinton object that will serve as the index of the string. For example, this object\n\n/// could be a Hinton integer, or a Hinton range.\n\n///\n\n/// # Returns\n\n/// - `Result<Object, ObjectOprErrType>`: Returns `Ok(Object)` with a Hinton Object if the index is\n\n/// within bounds. 
Returns `Err(ObjectOprErrType)` if there was an error while indexing the string.\n\nfn subscript_string(str: &str, index: &Object) -> Result<Object, ObjectOprErrType> {\n\n match index {\n\n // Indexing type: String[Int]\n\n Object::Int(idx) => {\n\n let chars: Vec<char> = str.chars().collect();\n\n\n\n if let Some(pos) = to_bounded_index(*idx, chars.len()) {\n\n if let Some(val) = chars.get(pos) {\n\n return Ok(Object::from(val.to_string()));\n\n }\n\n }\n\n }\n\n // Indexing type: String[Bool]\n\n Object::Bool(val) => {\n\n let chars: Vec<char> = str.chars().collect();\n\n let pos = (if *val { 1 } else { 0 }) as usize;\n\n\n\n if let Some(val) = chars.get(pos) {\n\n return Ok(Object::from(val.to_string()));\n\n }\n", "file_path": "src/objects/indexing.rs", "rank": 5, "score": 223169.87398436735 }, { "content": "/// Gets the index of the first occurrence of an object in this Hinton array.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The array object.\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn index_of(vm: &mut VM, this: Object, args: Vec<Object>) -> RuntimeResult {\n\n let obj = args[0].clone();\n\n\n\n match verify_array_object!(this, \"index_of\")\n\n .borrow_mut()\n\n .iter()\n\n .position(|x| x == &obj)\n\n {\n\n Some(i) => vm.push_stack(Object::Int(i as i64)),\n\n None => vm.push_stack(Object::Null),\n\n }\n\n}\n\n\n", "file_path": "src/built_in/primitives/array.rs", "rank": 6, "score": 221002.62947397554 }, { "content": "/// Gets the index of the first occurrence of an object in this Hinton tuple.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The tuple object.\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn index_of(vm: &mut VM, this: Object, args: Vec<Object>) -> 
RuntimeResult {\n\n let obj = args[0].clone();\n\n\n\n match verify_tuple_object!(this, \"index_of\")\n\n .iter()\n\n .position(|x| x == &obj)\n\n {\n\n Some(i) => vm.push_stack(Object::Int(i as i64)),\n\n None => vm.push_stack(Object::Null),\n\n }\n\n}\n\n\n", "file_path": "src/built_in/primitives/tuple.rs", "rank": 7, "score": 221002.62947397554 }, { "content": "/// Checks if a string starts with another string.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The array object.\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn starts_with(vm: &mut VM, this: Object, args: Vec<Object>) -> RuntimeResult {\n\n match args[0].clone() {\n\n Object::String(s) => vm.push_stack(Object::Bool(\n\n verify_string_object!(this, \"ends_with\").starts_with(&s),\n\n )),\n\n other => RuntimeResult::Error {\n\n error: RuntimeErrorType::TypeError,\n\n message: format!(\n\n \"Expected argument of type 'String'. Got '{}' instead.\",\n\n other.type_name()\n\n ),\n\n },\n\n }\n\n}\n", "file_path": "src/built_in/primitives/string.rs", "rank": 8, "score": 220981.46212144502 }, { "content": "/// Checks if a string ends with another string.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The array object.\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn ends_with(vm: &mut VM, this: Object, args: Vec<Object>) -> RuntimeResult {\n\n match args[0].clone() {\n\n Object::String(s) => vm.push_stack(Object::Bool(\n\n verify_string_object!(this, \"ends_with\").ends_with(&s),\n\n )),\n\n other => RuntimeResult::Error {\n\n error: RuntimeErrorType::TypeError,\n\n message: format!(\n\n \"Expected argument of type 'String'. 
Got '{}' instead.\",\n\n other.type_name()\n\n ),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/built_in/primitives/string.rs", "rank": 9, "score": 220981.46212144505 }, { "content": "/// Get the ith object in a Hinton range.\n\n///\n\n/// # Parameters\n\n/// - `range`: A reference to the underlying `RangeObject` in a Hinton range.\n\n/// - `index`: A Hinton object that will serve as the index of the range. For example, this object\n\n/// could be a Hinton integer, or a Hinton range.\n\n///\n\n/// # Returns\n\n/// - `Result<Object, ObjectOprErrType>`: Returns `Ok(Object)` with a Hinton Object if the index is\n\n/// within bounds. Returns `Err(ObjectOprErrType)` if there was an error while indexing the range.\n\nfn subscript_range(range: &RangeObject, index: &Object) -> Result<Object, ObjectOprErrType> {\n\n match index {\n\n // Indexing type: Range[Int]\n\n Object::Int(idx) => {\n\n let min = range.min;\n\n let max = range.max;\n\n\n\n if let Some(pos) = to_bounded_index(*idx, i64::abs(max - min) as usize) {\n\n return if max - min > 0 {\n\n Ok(Object::Int(min + pos as i64))\n\n } else {\n\n Ok(Object::Int(min - pos as i64))\n\n };\n\n }\n\n }\n\n // Indexing type: Range[Bool]\n\n Object::Bool(val) => {\n\n let idx = (if *val { 1 } else { 0 }) as i64;\n\n let min = range.min;\n\n let max = range.max;\n", "file_path": "src/objects/indexing.rs", "rank": 10, "score": 217143.93785658473 }, { "content": "/// Computes the absolute value of this Hinton integer.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The integer object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn abs(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n vm.push_stack(Object::Int(verify_int_object!(this, \"abs\").abs()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/int.rs", "rank": 11, "score": 214536.47019984032 }, { "content": "/// Pops 
an object from this Hinton array.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The array object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn pop(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n match verify_array_object!(this, \"pop\").borrow_mut().pop() {\n\n Some(o) => vm.push_stack(o),\n\n None => RuntimeResult::Error {\n\n error: RuntimeErrorType::IndexError,\n\n message: \"Attempted to pop from an empty array.\".to_string(),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/built_in/primitives/array.rs", "rank": 12, "score": 214533.32927502712 }, { "content": "/// Gets the length of a Hinton tuple.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The tuple object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn len(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n vm.push_stack(Object::from(verify_tuple_object!(this, \"len\").len()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/tuple.rs", "rank": 13, "score": 214532.937212069 }, { "content": "/// Gets the length of a Hinton array.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The array object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn len(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n vm.push_stack(Object::from(verify_array_object!(this, \"len\").borrow().len()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/array.rs", "rank": 14, "score": 214532.93721206902 }, { "content": "/// Converts this Hinton tuple into a Hinton array.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * 
`this`: The tuple object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn to_array(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n let tup = &*verify_tuple_object!(this, \"to_tuple\");\n\n vm.push_stack(Object::Array(Rc::new(RefCell::new(tup.clone()))))\n\n}\n", "file_path": "src/built_in/primitives/tuple.rs", "rank": 15, "score": 214532.8392298751 }, { "content": "/// Converts this Hinton array into a Hinton tuple.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The array object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn to_tuple(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n let arr = verify_array_object!(this, \"to_tuple\");\n\n let tuple = arr.borrow_mut().clone();\n\n vm.push_stack(Object::Tuple(Rc::new(tuple)))\n\n}\n", "file_path": "src/built_in/primitives/array.rs", "rank": 16, "score": 214532.83922987507 }, { "content": "/// Counts the number of ones in the binary representation of this Hinton integer.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The integer object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn count_ones(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n vm.push_stack(Object::Int(\n\n verify_int_object!(this, \"count_ones\").count_ones() as i64\n\n ))\n\n}\n\n\n", "file_path": "src/built_in/primitives/int.rs", "rank": 17, "score": 210701.37634213865 }, { "content": "/// Counts the number of zeros in the binary representation of this Hinton integer.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The integer object.\n\n/// * `_`: A vector of objects that will 
serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn count_zeros(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n vm.push_stack(Object::Int(\n\n verify_int_object!(this, \"count_zeros\").count_zeros() as i64\n\n ))\n\n}\n\n\n", "file_path": "src/built_in/primitives/int.rs", "rank": 18, "score": 210701.37634213862 }, { "content": "/// Counts the number of leading ones in the binary representation of this Hinton integer.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The integer object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn leading_ones(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n vm.push_stack(Object::Int(\n\n verify_int_object!(this, \"leading_ones\").leading_ones() as i64,\n\n ))\n\n}\n\n\n", "file_path": "src/built_in/primitives/int.rs", "rank": 19, "score": 210701.28335910372 }, { "content": "/// Counts the number of trailing ones in the binary representation of this Hinton integer.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The integer object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn trailing_ones(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n vm.push_stack(Object::Int(\n\n verify_int_object!(this, \"trailing_ones\").trailing_ones() as i64,\n\n ))\n\n}\n\n\n", "file_path": "src/built_in/primitives/int.rs", "rank": 20, "score": 210701.28335910375 }, { "content": "/// Counts the number of trailing zeros in the binary representation of this Hinton integer.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The integer object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method 
call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn trailing_zeros(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n vm.push_stack(Object::Int(\n\n verify_int_object!(this, \"trailing_zeros\").trailing_zeros() as i64,\n\n ))\n\n}\n\n\n", "file_path": "src/built_in/primitives/int.rs", "rank": 21, "score": 210701.28335910372 }, { "content": "/// Counts the number of leading zeros in the binary representation of this Hinton integer.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The integer object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn leading_zeros(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n vm.push_stack(Object::Int(\n\n verify_int_object!(this, \"leading_zeros\").leading_zeros() as i64,\n\n ))\n\n}\n\n\n", "file_path": "src/built_in/primitives/int.rs", "rank": 22, "score": 210701.28335910375 }, { "content": "/// Computes the number of bits needed to represent this Hinton integer in binary form.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The integer object.\n\n/// * `_`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn bit_len(vm: &mut VM, this: Object, _: Vec<Object>) -> RuntimeResult {\n\n let i = verify_int_object!(this, \"bit_len\");\n\n vm.push_stack(Object::Int((i.abs() as f64).log2().ceil() as i64))\n\n}\n", "file_path": "src/built_in/primitives/int.rs", "rank": 23, "score": 210701.1919549371 }, { "content": "/// Computes the arcsine of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn asin(vm: &mut VM, _: Object, 
args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.asin()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 24, "score": 210700.8014456089 }, { "content": "/// Computes the sine of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn sin(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.sin()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 25, "score": 210700.8014456089 }, { "content": "/// Computes the cosine of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn cos(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.cos()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 26, "score": 210700.8014456089 }, { "content": "/// Computes the tangent of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn tan(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.tan()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 27, "score": 210700.8014456089 }, { "content": "/// 
Computes the arctangent of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn atan(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.atan()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 28, "score": 210700.8014456089 }, { "content": "/// Computes the arccosine of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn acos(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.acos()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 29, "score": 210700.8014456089 }, { "content": "/// Computes the hyperbolic arctangent of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn atanh(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.atanh()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 30, "score": 210700.69605896543 }, { "content": "/// Computes the cube root of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to 
this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn cbrt(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.cbrt()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 31, "score": 210700.6960589654 }, { "content": "/// Computes the hyperbolic sine of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn sinh(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.sinh()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 32, "score": 210700.6960589654 }, { "content": "/// Computes the square root of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn sqrt(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.sqrt()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 33, "score": 210700.69605896543 }, { "content": "/// Computes the hyperbolic arcsine of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn asinh(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n 
vm.push_stack(Object::Float(arg.asinh()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 34, "score": 210700.69605896543 }, { "content": "/// Computes the hyperbolic tangent of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn tanh(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.tanh()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 35, "score": 210700.6960589654 }, { "content": "/// Computes the hyperbolic cosine of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn cosh(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.cosh()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 36, "score": 210700.6960589654 }, { "content": "/// Computes the hyperbolic arccosine of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn acosh(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.acosh()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 37, "score": 210700.69605896543 }, { "content": "/// Computes the arctangent of the quotient of two 
Hinton numbers.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn atan2(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg1 = check_int_or_float!(args[0].clone());\n\n let arg2 = check_int_or_float!(args[1].clone());\n\n vm.push_stack(Object::Float(arg1.atan2(arg2)))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 38, "score": 210700.59272017077 }, { "content": "/// Computes the exponent (e<sup>x</sup>) of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn exp(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg.exp()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 39, "score": 210700.59272017077 }, { "content": "/// Computes the logarithm of a Hinton number with an arbitrary base.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn log(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg1 = check_int_or_float!(args[0].clone());\n\n let arg2 = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg1.log(arg2)))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 40, "score": 210700.59272017077 }, { "content": "/// Computes the log-base-10 (log<sub>10</sub>(x)) of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable 
reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn log10(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg1 = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg1.log10()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 41, "score": 210700.39195191898 }, { "content": "/// Computes the log-base-2 (log<sub>2</sub>(x)) of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn log2(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg1 = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg1.log2()))\n\n}\n\n\n", "file_path": "src/built_in/primitives/math.rs", "rank": 42, "score": 210700.391951919 }, { "content": "/// Computes the natural logarithm (log<sub>e</sub>(x)) of a Hinton number.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `_`: *Not applicable.*\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn ln(vm: &mut VM, _: Object, args: Vec<Object>) -> RuntimeResult {\n\n let arg1 = check_int_or_float!(args[0].clone());\n\n vm.push_stack(Object::Float(arg1.ln()))\n\n}\n", "file_path": "src/built_in/primitives/math.rs", "rank": 43, "score": 210700.391951919 }, { "content": "/// Implements the `random(...)` native function for Hinton, which computes a random number\n\n/// between [0, 1).\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `args`: A vector of objects that will serve as arguments to this function 
call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn native_random(vm: &mut VM, _: Vec<Object>) -> RuntimeResult {\n\n vm.push_stack(Object::Float(rand::random()))\n\n}\n", "file_path": "src/built_in/natives.rs", "rank": 44, "score": 208721.9092723477 }, { "content": "/// Implements the `clock()` native function for Hinton, which retrieves the current time from\n\n/// the Unix Epoch time.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `args`: A vector of objects that will serve as arguments to this function call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn native_clock(vm: &mut VM, _: Vec<Object>) -> RuntimeResult {\n\n let now = SystemTime::now().duration_since(SystemTime::UNIX_EPOCH);\n\n\n\n match now {\n\n Ok(t) => {\n\n let time = t.as_millis();\n\n vm.push_stack(Object::Int(time as i64))\n\n }\n\n Err(_) => RuntimeResult::Error {\n\n error: RuntimeErrorType::Internal,\n\n message: String::from(\"System's time before UNIX EPOCH.\"),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/built_in/natives.rs", "rank": 45, "score": 208721.63498233713 }, { "content": "#[cfg(feature = \"show_raw_bytecode\")]\n\npub fn print_raw(chunk: &Chunk, name: &str) {\n\n let mut i = 0;\n\n\n\n // prints this chunk's name\n\n println!(\"==== {} ====\", name);\n\n\n\n while i < chunk.len() {\n\n let instr = chunk.get_byte(i);\n\n print!(\"{:#04X} \", instr as u8);\n\n\n\n if (i + 1) % 8 == 0 {\n\n println!();\n\n }\n\n\n\n i += 1;\n\n }\n\n\n\n println!(\"\\n\\nChunk Size: {}\", i);\n\n println!(\"================\\n\");\n\n}\n\n\n\n/// Disassembles the chunk, printing each instruction, and its related information.\n", "file_path": "src/core/bytecode.rs", "rank": 46, "score": 208589.52935361682 }, { "content": "/// Converts a Hinton object into an Iterable object.\n\n///\n\n/// # Arguments\n\n/// * `o`: The object to be converted to an iterable.\n\n///\n\n/// # Returns:\n\n/// Result<Object, RuntimeResult>\n\npub fn make_iter(o: 
Object) -> Result<Object, RuntimeResult> {\n\n match o {\n\n Object::String(_) | Object::Array(_) | Object::Range(_) | Object::Tuple(_) => {\n\n Ok(Object::Iter(Rc::new(RefCell::new(IterObject {\n\n iter: Box::new(o),\n\n index: 0,\n\n }))))\n\n }\n\n // If the object is already an iterable, return that same object.\n\n Object::Iter(_) => Ok(o),\n\n _ => Err(RuntimeResult::Error {\n\n error: RuntimeErrorType::TypeError,\n\n message: format!(\"Cannot create iterable from '{}'.\", o.type_name()),\n\n }),\n\n }\n\n}\n\n\n", "file_path": "src/objects/iter_obj.rs", "rank": 47, "score": 206382.7292682619 }, { "content": "/// Get the ith object in a Hinton array.\n\n///\n\n/// # Parameters\n\n/// - `arr`: A reference to the underlying `Vec<Object>` in a Hinton Array.\n\n/// - `index`: A Hinton object that will serve as the index of the array. For example, this object\n\n/// could be a Hinton integer, or a Hinton range.\n\n///\n\n/// # Returns\n\n/// - `Result<Object, ObjectOprErrType>`: Returns `Ok(Object)` with a Hinton Object if the index is\n\n/// within bounds. 
Returns `Err(ObjectOprErrType)` if there was an error while indexing the array.\n\nfn subscript_array(arr: &[Object], index: &Object) -> Result<Object, ObjectOprErrType> {\n\n match index {\n\n // Indexing type: Array[Int]\n\n Object::Int(idx) => {\n\n if let Some(pos) = to_bounded_index(*idx, arr.len()) {\n\n if let Some(val) = arr.get(pos) {\n\n return Ok(val.clone());\n\n }\n\n }\n\n }\n\n // Indexing type: Array[Bool]\n\n Object::Bool(val) => {\n\n let pos = (if *val { 1 } else { 0 }) as usize;\n\n if let Some(val) = arr.get(pos) {\n\n return Ok(val.clone());\n\n }\n\n }\n\n // Indexing type: Array[Range]\n\n Object::Range(_) => {\n\n unimplemented!(\"Array indexing with ranges.\")\n", "file_path": "src/objects/indexing.rs", "rank": 48, "score": 204225.1551738548 }, { "content": "/// Get the ith object in a Hinton tuple.\n\n///\n\n/// # Parameters\n\n/// - `tup`: A reference to the underlying `Vec<Object>` in a Hinton tuple.\n\n/// - `index`: A Hinton object that will serve as the index of the tuple. For example, this object\n\n/// could be a Hinton integer, or a Hinton range.\n\n///\n\n/// # Returns\n\n/// - `Result<Object, ObjectOprErrType>`: Returns `Ok(Object)` with a Hinton Object if the index is\n\n/// within bounds. 
Returns `Err(ObjectOprErrType)` if there was an error while indexing the tuple.\n\nfn subscript_tuple(tup: &[Object], index: &Object) -> Result<Object, ObjectOprErrType> {\n\n match index {\n\n // Indexing type: Tuple[Int]\n\n Object::Int(idx) => {\n\n if let Some(pos) = to_bounded_index(*idx, tup.len()) {\n\n if let Some(val) = tup.get(pos) {\n\n return Ok(val.clone());\n\n }\n\n }\n\n }\n\n // Indexing type: Tuple[Bool]\n\n Object::Bool(val) => {\n\n let pos = (if *val { 1 } else { 0 }) as usize;\n\n\n\n if let Some(val) = tup.get(pos) {\n\n return Ok(val.clone());\n\n }\n\n }\n\n // Indexing type: Tuple[Range]\n\n Object::Range(_) => {\n", "file_path": "src/objects/indexing.rs", "rank": 49, "score": 204225.1551738548 }, { "content": "/// Gets the value associated with a key in a Hinton dictionary.\n\n///\n\n/// # Parameters\n\n/// - `dict`: A reference to the underlying `HashMap` in a Hinton dictionary.\n\n/// - `index`: A Hinton object that will serve as the index of the dictionary. For example, this\n\n/// object could be a Hinton string, or a Hinton range.\n\n///\n\n/// # Returns\n\n/// - `Result<Object, ObjectOprErrType>`: Returns `Ok(Object)` with a Hinton Object if the key\n\n/// exists in the dictionary. Returns `Err(ObjectOprErrType)` otherwise.\n\nfn subscript_dictionary(dict: &DictObject, index: &Object) -> Result<Object, ObjectOprErrType> {\n\n return match index {\n\n Object::String(key) => dict.get_prop(key),\n\n // Indexing type: Range[Range]\n\n Object::Range(_) => {\n\n unimplemented!(\"Range indexing with ranges.\")\n\n }\n\n _ => Err(ObjectOprErrType::TypeError(format!(\n\n \"Dictionaries can only be indexed by a String or Range. 
Found '{}' instead.\",\n\n index.type_name()\n\n ))),\n\n };\n\n}\n", "file_path": "src/objects/indexing.rs", "rank": 50, "score": 198122.02265236617 }, { "content": "/// Pushes an object into this Hinton array.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `this`: The array object.\n\n/// * `args`: A vector of objects that will serve as arguments to this method call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn push(vm: &mut VM, this: Object, args: Vec<Object>) -> RuntimeResult {\n\n let obj = args[0].clone();\n\n verify_array_object!(this, \"push\").borrow_mut().push(obj);\n\n\n\n vm.push_stack(Object::Null)\n\n}\n\n\n", "file_path": "src/built_in/primitives/array.rs", "rank": 51, "score": 195214.9887025869 }, { "content": "/// Implements the `print(...)` native function for Hinton, which prints a value to the console.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `args`: A vector of objects that will serve as arguments to this function call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn native_print(vm: &mut VM, args: Vec<Object>) -> RuntimeResult {\n\n println!(\"{}\", args[0]);\n\n vm.push_stack(Object::Null)\n\n}\n\n\n", "file_path": "src/built_in/natives.rs", "rank": 52, "score": 187578.26774978923 }, { "content": "/// Implements the `iter(...)` native function for Hinton, which converts the give object to an\n\n/// iterable object.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `args`: A vector of objects that will serve as arguments to this function call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn native_iter(vm: &mut VM, args: Vec<Object>) -> RuntimeResult {\n\n match make_iter(args[0].clone()) {\n\n Ok(o) => vm.push_stack(o),\n\n Err(e) => e,\n\n }\n\n}\n\n\n", "file_path": "src/built_in/natives.rs", "rank": 53, "score": 187575.83798373383 }, { "content": "/// Implements the `next(...)` 
native function for Hinton, which retrieves the next item in an\n\n/// iterable object.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `args`: A vector of objects that will serve as arguments to this function call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn native_next(vm: &mut VM, args: Vec<Object>) -> RuntimeResult {\n\n match &args[0] {\n\n Object::Iter(iter) => match get_next_in_iter(iter) {\n\n Ok(o) => vm.push_stack(o),\n\n Err(e) => e,\n\n },\n\n _ => RuntimeResult::Error {\n\n error: RuntimeErrorType::TypeError,\n\n message: format!(\"Object of type '{}' is not iterable.\", args[0].type_name()),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/built_in/natives.rs", "rank": 54, "score": 187575.41980347378 }, { "content": "/// Implements the `assert(...)` native function for Hinton, which checks that the first argument\n\n/// of the function call is truthy.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `args`: A vector of objects that will serve as arguments to this function call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn native_assert(vm: &mut VM, args: Vec<Object>) -> RuntimeResult {\n\n let value = args[0].clone();\n\n\n\n if !value.is_falsey() {\n\n vm.push_stack(Object::Null)\n\n } else {\n\n let message = if args.len() == 2 {\n\n args[1].clone()\n\n } else {\n\n Object::String(String::from(\"Assertion failed on a falsey value.\"))\n\n };\n\n\n\n RuntimeResult::Error {\n\n error: RuntimeErrorType::AssertionError,\n\n message: format!(\"{}\", message),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/built_in/natives.rs", "rank": 55, "score": 187574.88932002478 }, { "content": "/// Implements the `input(...)` native function for Hinton, which gets user input from the console.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `args`: A vector of objects that will serve as arguments to this function 
call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn native_input(vm: &mut VM, args: Vec<Object>) -> RuntimeResult {\n\n print!(\"{}\", args[0]);\n\n\n\n // Print the programmer-provided message\n\n match io::Write::flush(&mut io::stdout()) {\n\n Ok(_) => {\n\n let mut input = String::new();\n\n // Get the user's input\n\n match io::stdin().read_line(&mut input) {\n\n Ok(_) => {\n\n input.pop(); // remove added newline\n\n vm.push_stack(Object::String(input))\n\n }\n\n Err(e) => RuntimeResult::Error {\n\n error: RuntimeErrorType::Internal,\n\n message: format!(\"Failed to read input. IO failed read line. {}\", e),\n\n },\n\n }\n\n }\n\n Err(e) => RuntimeResult::Error {\n\n error: RuntimeErrorType::Internal,\n\n message: format!(\"Failed to read input. IO failed flush. {}\", e),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/built_in/natives.rs", "rank": 56, "score": 187574.61367592067 }, { "content": "/// Implements the `assert_eq(...)` native function for Hinton, which checks that\n\n/// the first two arguments of the function call ARE equal.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `args`: A vector of objects that will serve as arguments to this function call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn native_assert_eq(vm: &mut VM, args: Vec<Object>) -> RuntimeResult {\n\n let value1 = args[0].clone();\n\n let value2 = args[1].clone();\n\n\n\n if value1 == value2 {\n\n vm.push_stack(Object::Null)\n\n } else {\n\n let message = if args.len() == 3 {\n\n args[2].clone()\n\n } else {\n\n Object::String(String::from(\"Assertion values are not equal.\"))\n\n };\n\n\n\n RuntimeResult::Error {\n\n error: RuntimeErrorType::AssertionError,\n\n message: format!(\"{}\", message),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/built_in/natives.rs", "rank": 57, "score": 183744.2040437244 }, { "content": "/// Implements the `assert_ne(...)` native function for Hinton, which checks that the first two\n\n/// arguments of 
the function call are NOT equal.\n\n///\n\n/// # Arguments\n\n/// * `vm`: A mutable reference to the virtual machine.\n\n/// * `args`: A vector of objects that will serve as arguments to this function call.\n\n///\n\n/// # Returns:\n\n/// RuntimeResult\n\nfn native_assert_ne(vm: &mut VM, args: Vec<Object>) -> RuntimeResult {\n\n let value1 = args[0].clone();\n\n let value2 = args[1].clone();\n\n\n\n if value1 != value2 {\n\n vm.push_stack(Object::Null)\n\n } else {\n\n let message = if args.len() == 3 {\n\n args[2].clone()\n\n } else {\n\n Object::String(String::from(\"Assertion values are equal.\"))\n\n };\n\n\n\n RuntimeResult::Error {\n\n error: RuntimeErrorType::AssertionError,\n\n message: format!(\"{}\", message),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/built_in/natives.rs", "rank": 58, "score": 183744.2040437244 }, { "content": "/// Gets the next item in a Hinton iterator.\n\n///\n\n/// # Arguments\n\n/// * `o`: A mutable, counted reference into the iterator.\n\n///\n\n/// # Returns:\n\n/// Result<Object, RuntimeResult>\n\npub fn get_next_in_iter(o: &Rc<RefCell<IterObject>>) -> Result<Object, RuntimeResult> {\n\n let mut iter = o.borrow_mut();\n\n let current_index = Object::Int(iter.index as i64);\n\n\n\n match iter.iter.subscript(&current_index) {\n\n Ok(o) => {\n\n iter.index += 1;\n\n Ok(o)\n\n }\n\n Err(_) => Err(RuntimeResult::Error {\n\n error: RuntimeErrorType::StopIteration,\n\n message: String::from(\"End of Iterator.\"),\n\n }),\n\n }\n\n}\n", "file_path": "src/objects/iter_obj.rs", "rank": 59, "score": 179261.2802995696 }, { "content": "/// Reports an error list coming from the parser or compiler.\n\n///\n\n/// # Parameters\n\n/// - `filepath`: The file path of where the errors occurred.\n\n/// - `errors`: An `ErrorList` containing the errors.\n\n/// - `source`: A reference to the source contents.\n\npub fn report_errors_list(filepath: &Path, errors: Vec<ErrorReport>, source: &str) {\n\n let source_lines: Vec<&str> = 
source.split('\\n').collect();\n\n\n\n for error in errors.iter() {\n\n eprintln!(\"{}\", error.message);\n\n print_error_source(\n\n filepath,\n\n error.line,\n\n error.column,\n\n error.lexeme_len,\n\n &source_lines,\n\n );\n\n }\n\n\n\n eprintln!(\"\\x1b[31;1mERROR:\\x1b[0m Aborted execution due to previous errors.\");\n\n}\n\n\n", "file_path": "src/errors.rs", "rank": 61, "score": 118551.82994070195 }, { "content": "/// Prints a snippet of the source line associated with an error.\n\n///\n\n/// # Parameters\n\n/// - `line_num`: The source line number of the error.\n\n/// - `col`: The source column number of the error.\n\n/// - `len`: The length of the token that produced the error.\n\n/// - `src`: A reference to the source error line.\n\npub fn print_error_snippet(line_num: usize, col: usize, len: usize, src: &str) {\n\n let front_pad = (f64::log10(line_num as f64).floor() + 1f64) as usize;\n\n // +2 for one extra space at the front and one at the back\n\n let whitespace_pad_size = \" \".repeat(front_pad + 2);\n\n\n\n // Compute the column of the error with trimmed whitespaces from the source line.\n\n let mut removed_whitespace = 0;\n\n for c in src.chars() {\n\n if c == ' ' {\n\n removed_whitespace += 1;\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n let col = col - removed_whitespace;\n\n let trimmed_source = src.trim();\n\n\n\n if !trimmed_source.is_empty() {\n\n eprintln!(\"{}|\", whitespace_pad_size);\n\n eprint!(\" {} | \", line_num);\n\n eprintln!(\"{}\", trimmed_source);\n\n eprint!(\"{}|\", whitespace_pad_size);\n\n eprintln!(\" {}\\x1b[31;1m{}\\x1b[0m\", \" \".repeat(col), \"^\".repeat(len));\n\n }\n\n\n\n eprintln!()\n\n}\n\n\n", "file_path": "src/errors.rs", "rank": 62, "score": 117103.15669244938 }, { "content": "#[test]\n\nfn base_func_has_no_defaults() {\n\n let program = match Parser::parse(\"\") {\n\n Ok(ast) => ast,\n\n Err(_) => panic!(\"Parser Had Errors.\"),\n\n };\n\n\n\n match Compiler::compile_ast(&PathBuf::new(), &program, 
&BuiltIn::default()) {\n\n Ok(res) => {\n\n if !res.defaults.is_empty() {\n\n panic!(\"Base function in script should have 0 default parameters.\")\n\n }\n\n }\n\n Err(_) => panic!(\"Compiler Had Errors.\"),\n\n }\n\n}\n\n\n", "file_path": "src/tests/compiler.rs", "rank": 63, "score": 115445.71462945748 }, { "content": "/// Throws a runtime error to the console.\n\n///\n\n/// # Parameters\n\n/// - `vm`: A reference to the virtual machine.\n\n/// - `error`: The generated error.\n\n/// - `message`: The error message to be displayed.\n\n/// - `source`: The program's source text.\n\npub fn report_runtime_error(vm: &VM, error: RuntimeErrorType, message: String, source: &str) {\n\n let source_lines: Vec<&str> = source.split('\\n').collect();\n\n\n\n let frame = vm.current_frame();\n\n let f = match &frame.callee {\n\n CallFrameType::Closure(c) => c.function.borrow(),\n\n CallFrameType::Function(f) => f.borrow(),\n\n CallFrameType::Method(m) => m.method.function.borrow(),\n\n };\n\n let line = f.chunk.get_line_info(frame.ip - 1);\n\n\n\n let error_name = match error {\n\n RuntimeErrorType::ArgumentError => \"ArgumentError\",\n\n RuntimeErrorType::AssertionError => \"AssertionError\",\n\n RuntimeErrorType::IndexError => \"IndexError\",\n\n RuntimeErrorType::InstanceError => \"InstanceError\",\n\n RuntimeErrorType::Internal => \"InternalError\",\n\n RuntimeErrorType::KeyError => \"KeyError\",\n\n RuntimeErrorType::RecursionError => \"RecursionError\",\n\n RuntimeErrorType::ReferenceError => \"ReferenceError\",\n", "file_path": "src/errors.rs", "rank": 64, "score": 115194.69852191093 }, { "content": "#[derive(Clone, Copy)]\n\nstruct LoopScope {\n\n /// The position of the loop's start in the chunk.\n\n position: usize,\n\n /// The scope depth of the body of the loop.\n\n scope_depth: usize,\n\n /// The type of loop this represents.\n\n loop_type: LoopType,\n\n}\n\n\n\n/// The types of loops available in Hinton. 
This is useful when \\\n\n/// compiling break statements to emit extra `POP` operations \\\n\n/// based on the loop type.\n", "file_path": "src/compiler/mod.rs", "rank": 65, "score": 98716.80552878325 }, { "content": "/// Represents a break statement, which is associated with a loop.\n\nstruct BreakScope {\n\n /// The loop scope associated with this break statement.\n\n parent_loop: LoopScope,\n\n /// The position of the break's instruction in the chunk.\n\n chunk_pos: usize,\n\n}\n\n\n\n/// Represents a loop statement at compile time. Used primarily by\n\n/// break statements to know which loops to break.\n", "file_path": "src/compiler/mod.rs", "rank": 66, "score": 98716.80552878325 }, { "content": "pub trait HTPrimitive {\n\n /// Gets the name class name of this Hinton primitive.\n\n fn name(&self) -> String;\n\n\n\n /// Gets the non-static members of this Hinton primitive.\n\n fn members(&mut self) -> &mut HashMap<String, ClassField>;\n\n\n\n /// Gets the static members of this Hinton primitive.\n\n fn statics(&mut self) -> &mut HashMap<String, ClassField>;\n\n\n\n /// Gets the default class object stored in this Hinton primitive.\n\n fn default() -> ClassObject;\n\n\n\n /// Binds a non-static method to this Hinton primitive.\n\n ///\n\n /// # Arguments\n\n /// * `name`: The name of the method.\n\n /// * `arity`: The method's arity.\n\n /// * `body`: The method's body.\n\n ///\n", "file_path": "src/built_in/primitives/mod.rs", "rank": 67, "score": 90808.75357281038 }, { "content": "/// Maps a keyword string to a token type.\n\n///\n\n/// # Parameters\n\n/// - `id`: The identifier's string name.\n\n///\n\n/// # Returns\n\n/// `TokenType`: The type of token matched for given identifier name.\n\npub fn make_identifier_type(id: &str) -> TokenType {\n\n match id {\n\n \"and\" => TokenType::LOGIC_AND,\n\n \"as\" => TokenType::AS_OPERATOR,\n\n \"break\" => TokenType::BREAK_KW,\n\n \"class\" => TokenType::CLASS_KW,\n\n \"const\" => TokenType::CONST_KW,\n\n \"continue\" 
=> TokenType::CONTINUE_KW,\n\n \"else\" => TokenType::ELSE_KW,\n\n \"enum\" => TokenType::ENUM_KW,\n\n \"equals\" => TokenType::LOGIC_EQ,\n\n \"false\" => TokenType::FALSE,\n\n \"fn\" => TokenType::FN_LAMBDA_KW,\n\n \"for\" => TokenType::FOR_KW,\n\n \"func\" => TokenType::FUNC_KW,\n\n \"if\" => TokenType::IF_KW,\n\n \"in\" => TokenType::IN_KW,\n\n \"is\" => TokenType::LOGIC_IS,\n\n \"mod\" => TokenType::MODULUS,\n\n \"new\" => TokenType::NEW_KW,\n", "file_path": "src/core/tokens.rs", "rank": 68, "score": 89161.08195682471 }, { "content": "#[test]\n\nfn base_func_has_no_arity() {\n\n let program = match Parser::parse(\"\") {\n\n Ok(ast) => ast,\n\n Err(_) => panic!(\"Parser Had Errors.\"),\n\n };\n\n\n\n match Compiler::compile_ast(&PathBuf::new(), &program, &BuiltIn::default()) {\n\n Ok(res) => {\n\n if res.min_arity != 0u8 && res.max_arity != 0u8 {\n\n panic!(\"Base function in script should have 0 parameters.\")\n\n }\n\n }\n\n Err(_) => panic!(\"Compiler Had Errors.\"),\n\n }\n\n}\n\n\n", "file_path": "src/tests/compiler.rs", "rank": 69, "score": 81606.12097553974 }, { "content": "#[test]\n\nfn panic_on_unterminated_string() {\n\n if Parser::parse(\"\\\"hello world\").is_ok() {\n\n panic!(\"Should emit error on unterminated strings.\")\n\n }\n\n\n\n if Parser::parse(\"\\'hello world\").is_ok() {\n\n panic!(\"Should emit error on unterminated strings.\")\n\n }\n\n\n\n if Parser::parse(\"\\\"I\\\"m over here!\\\";\").is_ok() {\n\n panic!(\"Should emit error on unterminated strings.\")\n\n }\n\n\n\n if Parser::parse(\"'I'm over here!';\").is_ok() {\n\n panic!(\"Should emit error on unterminated strings.\")\n\n }\n\n}\n\n\n", "file_path": "src/tests/parser.rs", "rank": 70, "score": 81501.6949473448 }, { "content": "#[test]\n\nfn error_if_return_outside_func() {\n\n let program = match Parser::parse(\"while true { return false; }\") {\n\n Ok(ast) => ast,\n\n Err(_) => panic!(\"Parser Had Errors.\"),\n\n };\n\n\n\n if Compiler::compile_ast(&PathBuf::new(), &program, 
&BuiltIn::default()).is_ok() {\n\n panic!(\"Compiler should emit error when returning from outside of function.\")\n\n }\n\n}\n\n\n", "file_path": "src/tests/compiler.rs", "rank": 71, "score": 79135.58592624856 }, { "content": "#[test]\n\nfn functions_have_access_to_global_vars() {\n\n let src = \"\n\n var global = \\\"some value\\\";\n\n\n\n func my_function() {\n\n print(global);\n\n }\n\n \";\n\n\n\n let program = match Parser::parse(src) {\n\n Ok(ast) => ast,\n\n Err(_) => panic!(\"Parser Had Errors.\"),\n\n };\n\n\n\n if Compiler::compile_ast(&PathBuf::new(), &program, &BuiltIn::default()).is_err() {\n\n panic!(\"Functions should have access to global declarations.\")\n\n }\n\n}\n\n\n", "file_path": "src/tests/compiler.rs", "rank": 72, "score": 79053.41017306925 }, { "content": "#[test]\n\nfn allow_double_quoted_strings() {\n\n if Parser::parse(\"\\\"I am going to the moon tomorrow.\\\";\").is_err() {\n\n panic!(\"Should allow double-quoted strings.\")\n\n }\n\n}\n\n\n", "file_path": "src/tests/parser.rs", "rank": 73, "score": 79035.02605967366 }, { "content": "#[test]\n\nfn allow_single_quoted_strings() {\n\n if Parser::parse(\"'The sky is green!';\").is_err() {\n\n panic!(\"Should allow single-quoted strings.\")\n\n }\n\n}\n\n\n", "file_path": "src/tests/parser.rs", "rank": 74, "score": 79035.02605967366 }, { "content": "#[test]\n\nfn test_const_pool_no_duplicate_items() {\n\n let src = \"8.9;\".repeat(500);\n\n\n\n let program = match Parser::parse(src.as_str()) {\n\n Ok(ast) => ast,\n\n Err(_) => panic!(\"Parser Had Errors.\"),\n\n };\n\n\n\n match Compiler::compile_ast(&PathBuf::new(), &program, &BuiltIn::default()) {\n\n Ok(res) => {\n\n if res.chunk.get_pool_size() != 1 {\n\n panic!(\"Items in the constant pool should not be duplicated.\")\n\n }\n\n }\n\n Err(_) => panic!(\"Compiler Had Errors.\"),\n\n }\n\n}\n\n\n", "file_path": "src/tests/compiler.rs", "rank": 75, "score": 76886.92876450847 }, { "content": "#[test]\n\nfn 
allow_escaped_double_quoted_strings() {\n\n if Parser::parse(\"\\\"He said told me to \\\\\\\"stay quiet\\\\\\\", yesterday.\\\";\").is_err() {\n\n panic!(\"Should allow escaped double-quotes.\")\n\n }\n\n}\n\n\n", "file_path": "src/tests/parser.rs", "rank": 76, "score": 76744.48327304826 }, { "content": "#[test]\n\nfn allow_escaped_single_quoted_strings() {\n\n if Parser::parse(\"'I\\\\\\'m over here!';\").is_err() {\n\n panic!(\"Should allow escaped single-quoted strings.\")\n\n }\n\n}\n", "file_path": "src/tests/parser.rs", "rank": 77, "score": 76744.48327304826 }, { "content": "\n\n if let Some(pos) = to_bounded_index(idx, i64::abs(max - min) as usize) {\n\n return if max - min > 0 {\n\n Ok(Object::Int(min + pos as i64))\n\n } else {\n\n Ok(Object::Int(min - pos as i64))\n\n };\n\n }\n\n }\n\n // Indexing type: Range[Range]\n\n Object::Range(_) => {\n\n unimplemented!(\"Range indexing with ranges.\")\n\n }\n\n _ => {\n\n return Err(ObjectOprErrType::TypeError(format!(\n\n \"Range index must be an Int or a Range. 
Found '{}' instead.\",\n\n index.type_name()\n\n )))\n\n }\n\n }\n\n\n\n Err(ObjectOprErrType::IndexError(String::from(\n\n \"Range index out of bounds.\",\n\n )))\n\n}\n\n\n", "file_path": "src/objects/indexing.rs", "rank": 78, "score": 74761.21846809864 }, { "content": "use crate::errors::ObjectOprErrType;\n\nuse crate::objects::dictionary_obj::DictObject;\n\nuse crate::objects::{Object, RangeObject};\n\n\n\nimpl Object {\n\n /// Defines the indexing operation of Hinton objects.\n\n pub fn subscript(&self, index: &Object) -> Result<Object, ObjectOprErrType> {\n\n match self {\n\n Object::Array(arr) => subscript_array(&arr.borrow(), index),\n\n Object::Tuple(tup) => subscript_tuple(&tup, index),\n\n Object::String(str) => subscript_string(&str, index),\n\n Object::Range(range) => subscript_range(range, index),\n\n Object::Dict(dict) => subscript_dictionary(dict, index),\n\n _ => Err(ObjectOprErrType::TypeError(format!(\n\n \"Cannot index object of type '{}'.\",\n\n self.type_name()\n\n ))),\n\n }\n\n }\n\n}\n", "file_path": "src/objects/indexing.rs", "rank": 79, "score": 74757.31849314779 }, { "content": " }\n\n // Indexing type: String[Range]\n\n Object::Range(_) => {\n\n unimplemented!(\"String indexing with ranges.\")\n\n }\n\n _ => {\n\n return Err(ObjectOprErrType::TypeError(format!(\n\n \"String index must be an Int or a Range. Found '{}' instead.\",\n\n index.type_name()\n\n )))\n\n }\n\n }\n\n\n\n Err(ObjectOprErrType::IndexError(String::from(\n\n \"String index out of bounds.\",\n\n )))\n\n}\n\n\n", "file_path": "src/objects/indexing.rs", "rank": 80, "score": 74748.54184782394 }, { "content": " }\n\n _ => {\n\n return Err(ObjectOprErrType::TypeError(format!(\n\n \"Array index must be an Int or a Range. 
Found '{}' instead.\",\n\n index.type_name()\n\n )))\n\n }\n\n }\n\n Err(ObjectOprErrType::IndexError(String::from(\n\n \"Array index out of bounds.\",\n\n )))\n\n}\n\n\n", "file_path": "src/objects/indexing.rs", "rank": 81, "score": 74746.67735827983 }, { "content": " unimplemented!(\"Tuple indexing with ranges.\")\n\n }\n\n _ => {\n\n return Err(ObjectOprErrType::TypeError(format!(\n\n \"Tuple index must be an Int or a Range. Found '{}' instead.\",\n\n index.type_name()\n\n )))\n\n }\n\n }\n\n\n\n Err(ObjectOprErrType::IndexError(String::from(\n\n \"Tuple index out of bounds.\",\n\n )))\n\n}\n\n\n", "file_path": "src/objects/indexing.rs", "rank": 82, "score": 74746.44339056035 }, { "content": "\n\n/// Takes an i64 integer and converts it into an object index. This allows indexing objects with\n\n/// negative integers.\n\n///\n\n/// # Parameters\n\n/// - `x`: The positive or negative index.\n\n/// - `len`: The length of the object.\n\n///\n\n/// # Returns\n\n/// - `Option<usize>`: Return Some(usize) if the index is within the bounds of the object's length\n\n/// or `None` otherwise.\n", "file_path": "src/objects/indexing.rs", "rank": 83, "score": 74743.8282413155 }, { "content": "#[test]\n\nfn allow_return_inside_loop_inside_func() {\n\n let program = match Parser::parse(\"func my_func(x) { while x { return false; } }\") {\n\n Ok(ast) => ast,\n\n Err(_) => panic!(\"Parser Had Errors.\"),\n\n };\n\n\n\n if Compiler::compile_ast(&PathBuf::new(), &program, &BuiltIn::default()).is_err() {\n\n panic!(\"Compiler should allow returning from loop inside function.\")\n\n }\n\n}\n\n\n", "file_path": "src/tests/compiler.rs", "rank": 84, "score": 74705.48024536873 }, { "content": "#[test]\n\nfn error_if_break_inside_func_inside_loop() {\n\n let program = match Parser::parse(\"while true { func my_func() { break; } }\") {\n\n Ok(ast) => ast,\n\n Err(_) => panic!(\"Parser Had Errors.\"),\n\n };\n\n\n\n if Compiler::compile_ast(&PathBuf::new(), &program, 
&BuiltIn::default()).is_ok() {\n\n panic!(\"Compiler should emit error when breaking outside inside a function inside a loop.\")\n\n }\n\n}\n\n\n", "file_path": "src/tests/compiler.rs", "rank": 85, "score": 74705.48024536873 } ]
Rust
crates/eww/src/script_var_handler.rs
RianFuro/eww
e1558965ff45f72c35b7aeed15774381f32e0165
use std::collections::HashMap; use crate::{ app, config::{create_script_var_failed_warn, script_var}, }; use anyhow::*; use app::DaemonCommand; use eww_shared_util::VarName; use nix::{ sys::signal, unistd::{setpgid, Pid}, }; use simplexpr::dynval::DynVal; use tokio::{ io::{AsyncBufReadExt, BufReader}, sync::mpsc::UnboundedSender, }; use tokio_util::sync::CancellationToken; use yuck::config::script_var_definition::{ListenScriptVar, PollScriptVar, ScriptVarDefinition, VarSource}; pub fn init(evt_send: UnboundedSender<DaemonCommand>) -> ScriptVarHandlerHandle { let (msg_send, mut msg_recv) = tokio::sync::mpsc::unbounded_channel(); let handle = ScriptVarHandlerHandle { msg_send }; std::thread::spawn(move || { let rt = tokio::runtime::Runtime::new().expect("Failed to initialize tokio runtime for script var handlers"); rt.block_on(async { let _: Result<_> = try { let mut handler = ScriptVarHandler { listen_handler: ListenVarHandler::new(evt_send.clone())?, poll_handler: PollVarHandler::new(evt_send)?, }; crate::loop_select_exiting! 
{ Some(msg) = msg_recv.recv() => match msg { ScriptVarHandlerMsg::AddVar(var) => { handler.add(var).await; } ScriptVarHandlerMsg::Stop(name) => { handler.stop_for_variable(&name)?; } ScriptVarHandlerMsg::StopAll => { handler.stop_all(); } }, else => break, }; }; }) }); handle } pub struct ScriptVarHandlerHandle { msg_send: UnboundedSender<ScriptVarHandlerMsg>, } impl ScriptVarHandlerHandle { pub fn add(&self, script_var: ScriptVarDefinition) { crate::print_result_err!( "while forwarding instruction to script-var handler", self.msg_send.send(ScriptVarHandlerMsg::AddVar(script_var)) ); } pub fn stop_for_variable(&self, name: VarName) { crate::print_result_err!( "while forwarding instruction to script-var handler", self.msg_send.send(ScriptVarHandlerMsg::Stop(name)), ); } pub fn stop_all(&self) { crate::print_result_err!( "while forwarding instruction to script-var handler", self.msg_send.send(ScriptVarHandlerMsg::StopAll) ); } } #[derive(Debug, Eq, PartialEq)] enum ScriptVarHandlerMsg { AddVar(ScriptVarDefinition), Stop(VarName), StopAll, } struct ScriptVarHandler { listen_handler: ListenVarHandler, poll_handler: PollVarHandler, } impl ScriptVarHandler { async fn add(&mut self, script_var: ScriptVarDefinition) { match script_var { ScriptVarDefinition::Poll(var) => self.poll_handler.start(var).await, ScriptVarDefinition::Listen(var) => self.listen_handler.start(var).await, }; } fn stop_for_variable(&mut self, name: &VarName) -> Result<()> { log::debug!("Stopping script var process for variable {}", name); self.listen_handler.stop_for_variable(name); self.poll_handler.stop_for_variable(name); Ok(()) } fn stop_all(&mut self) { log::debug!("Stopping script-var-handlers"); self.listen_handler.stop_all(); self.poll_handler.stop_all(); } } struct PollVarHandler { evt_send: UnboundedSender<DaemonCommand>, poll_handles: HashMap<VarName, CancellationToken>, } impl PollVarHandler { fn new(evt_send: UnboundedSender<DaemonCommand>) -> Result<Self> { let handler = PollVarHandler { 
evt_send, poll_handles: HashMap::new() }; Ok(handler) } async fn start(&mut self, var: PollScriptVar) { if self.poll_handles.contains_key(&var.name) { return; } log::debug!("starting poll var {}", &var.name); let cancellation_token = CancellationToken::new(); self.poll_handles.insert(var.name.clone(), cancellation_token.clone()); let evt_send = self.evt_send.clone(); tokio::spawn(async move { let result: Result<_> = try { evt_send.send(app::DaemonCommand::UpdateVars(vec![(var.name.clone(), run_poll_once(&var)?)]))?; }; if let Err(err) = result { crate::error_handling_ctx::print_error(err); } crate::loop_select_exiting! { _ = cancellation_token.cancelled() => break, _ = tokio::time::sleep(var.interval) => { let result: Result<_> = try { evt_send.send(app::DaemonCommand::UpdateVars(vec![(var.name.clone(), run_poll_once(&var)?)]))?; }; if let Err(err) = result { crate::error_handling_ctx::print_error(err); } } } }); } fn stop_for_variable(&mut self, name: &VarName) { if let Some(token) = self.poll_handles.remove(name) { log::debug!("stopped poll var {}", name); token.cancel() } } fn stop_all(&mut self) { self.poll_handles.drain().for_each(|(_, token)| token.cancel()); } } fn run_poll_once(var: &PollScriptVar) -> Result<DynVal> { match &var.command { VarSource::Shell(span, command) => { script_var::run_command(command).map_err(|e| anyhow!(create_script_var_failed_warn(*span, &var.name, &e.to_string()))) } VarSource::Function(x) => x().map_err(|e| anyhow!(e)), } } impl Drop for PollVarHandler { fn drop(&mut self) { self.stop_all(); } } struct ListenVarHandler { evt_send: UnboundedSender<DaemonCommand>, listen_process_handles: HashMap<VarName, CancellationToken>, } impl ListenVarHandler { fn new(evt_send: UnboundedSender<DaemonCommand>) -> Result<Self> { let handler = ListenVarHandler { evt_send, listen_process_handles: HashMap::new() }; Ok(handler) } async fn start(&mut self, var: ListenScriptVar) { log::debug!("starting listen-var {}", &var.name); let 
cancellation_token = CancellationToken::new(); self.listen_process_handles.insert(var.name.clone(), cancellation_token.clone()); let evt_send = self.evt_send.clone(); tokio::spawn(async move { crate::try_logging_errors!(format!("Executing listen var-command {}", &var.command) => { let mut handle = unsafe { tokio::process::Command::new("sh") .args(&["-c", &var.command]) .stdout(std::process::Stdio::piped()) .stderr(std::process::Stdio::piped()) .stdin(std::process::Stdio::null()) .pre_exec(|| { let _ = setpgid(Pid::from_raw(0), Pid::from_raw(0)); Ok(()) }).spawn()? }; let mut stdout_lines = BufReader::new(handle.stdout.take().unwrap()).lines(); let mut stderr_lines = BufReader::new(handle.stderr.take().unwrap()).lines(); crate::loop_select_exiting! { _ = handle.wait() => break, _ = cancellation_token.cancelled() => break, Ok(Some(line)) = stdout_lines.next_line() => { let new_value = DynVal::from_string(line.to_owned()); evt_send.send(DaemonCommand::UpdateVars(vec![(var.name.to_owned(), new_value)]))?; } Ok(Some(line)) = stderr_lines.next_line() => { log::warn!("stderr of `{}`: {}", var.name, line); } else => break, } terminate_handle(handle).await; }); }); } fn stop_for_variable(&mut self, name: &VarName) { if let Some(token) = self.listen_process_handles.remove(name) { log::debug!("stopped listen-var {}", name); token.cancel(); } } fn stop_all(&mut self) { self.listen_process_handles.drain().for_each(|(_, token)| token.cancel()); } } impl Drop for ListenVarHandler { fn drop(&mut self) { self.stop_all(); } } async fn terminate_handle(mut child: tokio::process::Child) { if let Some(id) = child.id() { let _ = signal::killpg(Pid::from_raw(id as i32), signal::SIGTERM); tokio::select! { _ = child.wait() => {}, _ = tokio::time::sleep(std::time::Duration::from_secs(10)) => { let _ = child.kill().await; } }; } else { let _ = child.kill().await; } }
use std::collections::HashMap; use crate::{ app, config::{create_script_var_failed_warn, script_var}, }; use anyhow::*; use app::DaemonCommand; use eww_shared_util::VarName; use nix::{ sys::signal, unistd::{setpgid, Pid}, }; use simplexpr::dynval::DynVal; use tokio::{ io::{AsyncBufReadExt, BufReader}, sync::mpsc::UnboundedSender, }; use tokio_util::sync::CancellationToken; use yuck::config::script_var_definition::{ListenScriptVar, PollScriptVar, ScriptVarDefinition, VarSource}; pub fn init(evt_send: UnboundedSender<DaemonCommand>) -> ScriptVarHandlerHandle { let (msg_send, mut msg_recv) = tokio::sync::mpsc::unbounded_channel(); let handle = ScriptVarHandlerHandle { msg_send }; std::thread::spawn(move || { let rt = tokio::runtime::Runtime::new().expect("Failed to initialize tokio runtime for script var handlers"); rt.block_on(async { let _: Result<_> = try { let mut handler = ScriptVarHandler { listen_handler: ListenVarHandler::new(evt_send.clone())?, poll_handler: PollVarHandler::new(evt_send)?, }; crate::loop_select_exiting! { Some(msg) = msg_recv.recv() => match msg { ScriptVarHandlerMsg::AddVar(var) => { handler.add(var).await; } ScriptVarHandlerMsg::Stop(name) => { handler.stop_for_variable(&name)?; } ScriptVarHandlerMsg::StopAll => { handler.stop_all(); } }, else => break, }; }; }) }); handle } pub struct ScriptVarHandlerHandle { msg_send: UnboundedSender<ScriptVarHandlerMsg>, } impl ScriptVarHandlerHandle {
pub fn stop_for_variable(&self, name: VarName) { crate::print_result_err!( "while forwarding instruction to script-var handler", self.msg_send.send(ScriptVarHandlerMsg::Stop(name)), ); } pub fn stop_all(&self) { crate::print_result_err!( "while forwarding instruction to script-var handler", self.msg_send.send(ScriptVarHandlerMsg::StopAll) ); } } #[derive(Debug, Eq, PartialEq)] enum ScriptVarHandlerMsg { AddVar(ScriptVarDefinition), Stop(VarName), StopAll, } struct ScriptVarHandler { listen_handler: ListenVarHandler, poll_handler: PollVarHandler, } impl ScriptVarHandler { async fn add(&mut self, script_var: ScriptVarDefinition) { match script_var { ScriptVarDefinition::Poll(var) => self.poll_handler.start(var).await, ScriptVarDefinition::Listen(var) => self.listen_handler.start(var).await, }; } fn stop_for_variable(&mut self, name: &VarName) -> Result<()> { log::debug!("Stopping script var process for variable {}", name); self.listen_handler.stop_for_variable(name); self.poll_handler.stop_for_variable(name); Ok(()) } fn stop_all(&mut self) { log::debug!("Stopping script-var-handlers"); self.listen_handler.stop_all(); self.poll_handler.stop_all(); } } struct PollVarHandler { evt_send: UnboundedSender<DaemonCommand>, poll_handles: HashMap<VarName, CancellationToken>, } impl PollVarHandler { fn new(evt_send: UnboundedSender<DaemonCommand>) -> Result<Self> { let handler = PollVarHandler { evt_send, poll_handles: HashMap::new() }; Ok(handler) } async fn start(&mut self, var: PollScriptVar) { if self.poll_handles.contains_key(&var.name) { return; } log::debug!("starting poll var {}", &var.name); let cancellation_token = CancellationToken::new(); self.poll_handles.insert(var.name.clone(), cancellation_token.clone()); let evt_send = self.evt_send.clone(); tokio::spawn(async move { let result: Result<_> = try { evt_send.send(app::DaemonCommand::UpdateVars(vec![(var.name.clone(), run_poll_once(&var)?)]))?; }; if let Err(err) = result { 
crate::error_handling_ctx::print_error(err); } crate::loop_select_exiting! { _ = cancellation_token.cancelled() => break, _ = tokio::time::sleep(var.interval) => { let result: Result<_> = try { evt_send.send(app::DaemonCommand::UpdateVars(vec![(var.name.clone(), run_poll_once(&var)?)]))?; }; if let Err(err) = result { crate::error_handling_ctx::print_error(err); } } } }); } fn stop_for_variable(&mut self, name: &VarName) { if let Some(token) = self.poll_handles.remove(name) { log::debug!("stopped poll var {}", name); token.cancel() } } fn stop_all(&mut self) { self.poll_handles.drain().for_each(|(_, token)| token.cancel()); } } fn run_poll_once(var: &PollScriptVar) -> Result<DynVal> { match &var.command { VarSource::Shell(span, command) => { script_var::run_command(command).map_err(|e| anyhow!(create_script_var_failed_warn(*span, &var.name, &e.to_string()))) } VarSource::Function(x) => x().map_err(|e| anyhow!(e)), } } impl Drop for PollVarHandler { fn drop(&mut self) { self.stop_all(); } } struct ListenVarHandler { evt_send: UnboundedSender<DaemonCommand>, listen_process_handles: HashMap<VarName, CancellationToken>, } impl ListenVarHandler { fn new(evt_send: UnboundedSender<DaemonCommand>) -> Result<Self> { let handler = ListenVarHandler { evt_send, listen_process_handles: HashMap::new() }; Ok(handler) } async fn start(&mut self, var: ListenScriptVar) { log::debug!("starting listen-var {}", &var.name); let cancellation_token = CancellationToken::new(); self.listen_process_handles.insert(var.name.clone(), cancellation_token.clone()); let evt_send = self.evt_send.clone(); tokio::spawn(async move { crate::try_logging_errors!(format!("Executing listen var-command {}", &var.command) => { let mut handle = unsafe { tokio::process::Command::new("sh") .args(&["-c", &var.command]) .stdout(std::process::Stdio::piped()) .stderr(std::process::Stdio::piped()) .stdin(std::process::Stdio::null()) .pre_exec(|| { let _ = setpgid(Pid::from_raw(0), Pid::from_raw(0)); Ok(()) 
}).spawn()? }; let mut stdout_lines = BufReader::new(handle.stdout.take().unwrap()).lines(); let mut stderr_lines = BufReader::new(handle.stderr.take().unwrap()).lines(); crate::loop_select_exiting! { _ = handle.wait() => break, _ = cancellation_token.cancelled() => break, Ok(Some(line)) = stdout_lines.next_line() => { let new_value = DynVal::from_string(line.to_owned()); evt_send.send(DaemonCommand::UpdateVars(vec![(var.name.to_owned(), new_value)]))?; } Ok(Some(line)) = stderr_lines.next_line() => { log::warn!("stderr of `{}`: {}", var.name, line); } else => break, } terminate_handle(handle).await; }); }); } fn stop_for_variable(&mut self, name: &VarName) { if let Some(token) = self.listen_process_handles.remove(name) { log::debug!("stopped listen-var {}", name); token.cancel(); } } fn stop_all(&mut self) { self.listen_process_handles.drain().for_each(|(_, token)| token.cancel()); } } impl Drop for ListenVarHandler { fn drop(&mut self) { self.stop_all(); } } async fn terminate_handle(mut child: tokio::process::Child) { if let Some(id) = child.id() { let _ = signal::killpg(Pid::from_raw(id as i32), signal::SIGTERM); tokio::select! { _ = child.wait() => {}, _ = tokio::time::sleep(std::time::Duration::from_secs(10)) => { let _ = child.kill().await; } }; } else { let _ = child.kill().await; } }
pub fn add(&self, script_var: ScriptVarDefinition) { crate::print_result_err!( "while forwarding instruction to script-var handler", self.msg_send.send(ScriptVarHandlerMsg::AddVar(script_var)) ); }
function_block-function_prefix_line
[ { "content": "pub fn initial_value(var: &ScriptVarDefinition) -> Result<DynVal> {\n\n match var {\n\n ScriptVarDefinition::Poll(x) => match &x.initial_value {\n\n Some(value) => Ok(value.clone()),\n\n None => match &x.command {\n\n VarSource::Function(f) => f()\n\n .map_err(|err| anyhow!(err))\n\n .with_context(|| format!(\"Failed to compute initial value for {}\", &var.name())),\n\n VarSource::Shell(span, command) => {\n\n run_command(command).map_err(|e| anyhow!(create_script_var_failed_warn(*span, var.name(), &e.to_string())))\n\n }\n\n },\n\n },\n\n\n\n ScriptVarDefinition::Listen(var) => Ok(var.initial_value.clone()),\n\n }\n\n}\n\n\n", "file_path": "crates/eww/src/config/script_var.rs", "rank": 1, "score": 260190.33509219586 }, { "content": "pub fn stringify_diagnostic(mut diagnostic: codespan_reporting::diagnostic::Diagnostic<usize>) -> anyhow::Result<String> {\n\n diagnostic.labels.drain_filter(|label| Span(label.range.start, label.range.end, label.file_id).is_dummy());\n\n\n\n let mut config = term::Config::default();\n\n let mut chars = Chars::box_drawing();\n\n chars.single_primary_caret = '─';\n\n config.chars = chars;\n\n config.chars.note_bullet = '→';\n\n let mut buf = Vec::new();\n\n let mut writer = term::termcolor::Ansi::new(&mut buf);\n\n let files = YUCK_FILES.read().unwrap();\n\n term::emit(&mut writer, &config, &*files, &diagnostic)?;\n\n Ok(String::from_utf8(buf)?)\n\n}\n", "file_path": "crates/eww/src/error_handling_ctx.rs", "rank": 3, "score": 230103.73131626975 }, { "content": "pub fn print_error(err: anyhow::Error) {\n\n match anyhow_err_to_diagnostic(&err) {\n\n Some(diag) => match stringify_diagnostic(diag) {\n\n Ok(diag) => {\n\n eprintln!(\"{}\", diag);\n\n }\n\n Err(_) => {\n\n log::error!(\"{:?}\", err);\n\n }\n\n },\n\n None => {\n\n log::error!(\"{:?}\", err);\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/eww/src/error_handling_ctx.rs", "rank": 5, "score": 221111.47607127758 }, { "content": "pub fn format_error(err: 
&anyhow::Error) -> String {\n\n for err in err.chain() {\n\n format!(\"chain: {}\", err);\n\n }\n\n anyhow_err_to_diagnostic(err).and_then(|diag| stringify_diagnostic(diag).ok()).unwrap_or_else(|| format!(\"{:?}\", err))\n\n}\n\n\n", "file_path": "crates/eww/src/error_handling_ctx.rs", "rank": 6, "score": 214142.5178773678 }, { "content": "pub fn anyhow_err_to_diagnostic(err: &anyhow::Error) -> Option<Diagnostic<usize>> {\n\n if let Some(err) = err.downcast_ref::<DiagError>() {\n\n Some(err.diag.clone())\n\n } else if let Some(err) = err.downcast_ref::<AstError>() {\n\n Some(err.to_diagnostic())\n\n } else if let Some(err) = err.downcast_ref::<ConversionError>() {\n\n Some(err.to_diagnostic())\n\n } else if let Some(err) = err.downcast_ref::<ValidationError>() {\n\n Some(err.to_diagnostic())\n\n } else if let Some(err) = err.downcast_ref::<EvalError>() {\n\n Some(err.to_diagnostic())\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n// pub fn print_diagnostic(diagnostic: codespan_reporting::diagnostic::Diagnostic<usize>) {\n\n// match stringify_diagnostic(diagnostic.clone()) {\n\n// Ok(diag) => {\n\n// eprintln!(\"{}\", diag);\n\n//}\n\n// Err(_) => {\n\n// log::error!(\"{:?}\", diagnostic);\n\n//}\n\n\n", "file_path": "crates/eww/src/error_handling_ctx.rs", "rank": 9, "score": 209107.3727584941 }, { "content": "pub fn get_inbuilt_vars() -> HashMap<VarName, ScriptVarDefinition> {\n\n builtin_vars! 
{Duration::new(2, 0),\n\n // @desc EWW_TEMPS - Heat of the components in Celcius\n\n \"EWW_TEMPS\" => || Ok(DynVal::from(get_temperatures())),\n\n\n\n // @desc EWW_RAM - Information on ram and swap usage in kB.\n\n \"EWW_RAM\" => || Ok(DynVal::from(get_ram())),\n\n\n\n // @desc EWW_DISK - Information on on all mounted partitions (Might report inaccurately on some filesystems, like btrfs)\\nExample: `{EWW_DISK[\"/\"]}`\n\n \"EWW_DISK\" => || Ok(DynVal::from(get_disks())),\n\n\n\n // @desc EWW_BATTERY - Battery capacity in procent of the main battery\n\n \"EWW_BATTERY\" => || Ok(DynVal::from(\n\n match get_battery_capacity() {\n\n Err(e) => {\n\n log::error!(\"Couldn't get the battery capacity: {:?}\", e);\n\n \"Error: Check `eww log` for more details\".to_string()\n\n }\n\n Ok(o) => o,\n\n }\n\n )),\n\n\n\n // @desc EWW_CPU - Information on the CPU cores: frequency and usage (No MacOS support)\n\n \"EWW_CPU\" => || Ok(DynVal::from(get_cpus())),\n\n\n\n // @desc EWW_NET - Bytes up/down on all interfaces\n\n \"EWW_NET\" => || Ok(DynVal::from(net())),\n\n }\n\n}\n", "file_path": "crates/eww/src/config/inbuilt.rs", "rank": 10, "score": 197879.18612002907 }, { "content": "pub fn create_script_var_failed_warn(span: Span, var_name: &VarName, error_output: &str) -> DiagError {\n\n DiagError::new(gen_diagnostic! 
{\n\n kind = Severity::Warning,\n\n msg = format!(\"The script for the `{}`-variable exited unsuccessfully\", var_name),\n\n label = span => \"Defined here\",\n\n note = error_output,\n\n })\n\n}\n\n\n", "file_path": "crates/eww/src/config/script_var.rs", "rank": 12, "score": 187168.7684974826 }, { "content": "/// Run a command and get the output\n\npub fn run_command(cmd: &str) -> Result<DynVal> {\n\n log::debug!(\"Running command: {}\", cmd);\n\n let command = Command::new(\"/bin/sh\").arg(\"-c\").arg(cmd).output()?;\n\n if !command.status.success() {\n\n bail!(\"Failed with output:\\n{}\", String::from_utf8(command.stderr)?);\n\n }\n\n let output = String::from_utf8(command.stdout)?;\n\n let output = output.trim_matches('\\n');\n\n Ok(DynVal::from(output))\n\n}\n", "file_path": "crates/eww/src/config/script_var.rs", "rank": 13, "score": 185986.79019191934 }, { "content": "pub fn clear_files() {\n\n *YUCK_FILES.write().unwrap() = YuckFiles::new();\n\n}\n\n\n", "file_path": "crates/eww/src/error_handling_ctx.rs", "rank": 14, "score": 180115.9936716832 }, { "content": "pub fn validate_variables_in_widget_use(\n\n defs: &HashMap<String, WidgetDefinition>,\n\n variables: &HashSet<VarName>,\n\n widget: &WidgetUse,\n\n is_in_definition: bool,\n\n) -> Result<(), ValidationError> {\n\n let matching_definition = defs.get(&widget.name);\n\n if let Some(matching_def) = matching_definition {\n\n let missing_arg = matching_def\n\n .expected_args\n\n .iter()\n\n .find(|expected| !expected.optional && !widget.attrs.attrs.contains_key(&expected.name));\n\n if let Some(missing_arg) = missing_arg {\n\n return Err(ValidationError::MissingAttr {\n\n widget_name: widget.name.clone(),\n\n arg_name: missing_arg.name.clone(),\n\n arg_list_span: Some(matching_def.args_span),\n\n use_span: widget.attrs.span,\n\n });\n\n }\n", "file_path": "crates/yuck/src/config/validate.rs", "rank": 15, "score": 177229.38007662434 }, { "content": "/// Load an [EwwConfig] from a given file, resetting and 
applying the global YuckFiles object in [error_handling_ctx].\n\npub fn read_from_file(path: impl AsRef<Path>) -> Result<EwwConfig> {\n\n error_handling_ctx::clear_files();\n\n EwwConfig::read_from_file(&mut error_handling_ctx::YUCK_FILES.write().unwrap(), path)\n\n}\n\n\n\n/// Eww configuration structure.\n\n#[derive(Debug, Clone)]\n\npub struct EwwConfig {\n\n widgets: HashMap<String, WidgetDefinition>,\n\n windows: HashMap<String, EwwWindowDefinition>,\n\n initial_variables: HashMap<VarName, DynVal>,\n\n script_vars: HashMap<VarName, ScriptVarDefinition>,\n\n\n\n // Links variable which affect state (active/inactive) of poll var to those poll variables\n\n poll_var_links: HashMap<VarName, Vec<VarName>>,\n\n}\n\n\n\nimpl Default for EwwConfig {\n\n fn default() -> Self {\n\n Self {\n", "file_path": "crates/eww/src/config/eww_config.rs", "rank": 16, "score": 168976.55152283236 }, { "content": "pub fn create_pair() -> (DaemonResponseSender, tokio::sync::mpsc::UnboundedReceiver<DaemonResponse>) {\n\n let (sender, recv) = tokio::sync::mpsc::unbounded_channel();\n\n (DaemonResponseSender(sender), recv)\n\n}\n\n\n\nimpl DaemonResponseSender {\n\n pub fn send_success(&self, s: String) -> Result<()> {\n\n self.0.send(DaemonResponse::Success(s)).context(\"Failed to send success response from application thread\")\n\n }\n\n\n\n pub fn send_failure(&self, s: String) -> Result<()> {\n\n self.0.send(DaemonResponse::Failure(s)).context(\"Failed to send failure response from application thread\")\n\n }\n\n\n\n /// Given a list of errors, respond with an error value if there are any errors, and respond with success otherwise.\n\n pub fn respond_with_error_list(&self, errors: impl IntoIterator<Item = anyhow::Error>) -> Result<()> {\n\n let errors = errors.into_iter().map(|e| error_handling_ctx::format_error(&e)).join(\"\\n\");\n\n if errors.is_empty() {\n\n self.send_success(String::new())\n\n } else {\n", "file_path": "crates/eww/src/daemon_response.rs", "rank": 17, "score": 
164842.30893556576 }, { "content": "fn initialize_window(\n\n monitor_geometry: gdk::Rectangle,\n\n root_widget: gtk::Widget,\n\n window_def: config::EwwWindowDefinition,\n\n) -> Result<EwwWindow> {\n\n let window = display_backend::initialize_window(&window_def, monitor_geometry)\n\n .with_context(|| format!(\"monitor {} is unavailable\", window_def.monitor_number.unwrap()))?;\n\n\n\n window.set_title(&format!(\"Eww - {}\", window_def.name));\n\n window.set_position(gtk::WindowPosition::None);\n\n window.set_gravity(gdk::Gravity::Center);\n\n\n\n if let Some(geometry) = window_def.geometry {\n\n let actual_window_rect = get_window_rectangle(geometry, monitor_geometry);\n\n window.set_size_request(actual_window_rect.width, actual_window_rect.height);\n\n window.set_default_size(actual_window_rect.width, actual_window_rect.height);\n\n }\n\n window.set_decorated(false);\n\n window.set_skip_taskbar_hint(true);\n\n window.set_skip_pager_hint(true);\n", "file_path": "crates/eww/src/app.rs", "rank": 18, "score": 163614.34615750366 }, { "content": "/// Replace all env-var references of the format `\"something ${foo}\"` in a string\n\n/// by the actual env-variables. 
If the env-var isn't found, will replace the\n\n/// reference with an empty string.\n\npub fn replace_env_var_references(input: String) -> String {\n\n regex!(r\"\\$\\{([^\\s]*)\\}\")\n\n .replace_all(&input, |var_name: &regex::Captures| std::env::var(var_name.get(1).unwrap().as_str()).unwrap_or_default())\n\n .into_owned()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::replace_env_var_references;\n\n use std;\n\n\n\n #[test]\n\n fn test_replace_env_var_references() {\n\n let scss = \"$test: ${USER};\";\n\n\n\n assert_eq!(\n\n replace_env_var_references(String::from(scss)),\n\n format!(\"$test: {};\", std::env::var(\"USER\").unwrap_or_default())\n\n )\n\n }\n\n}\n", "file_path": "crates/eww/src/util.rs", "rank": 19, "score": 162501.49149464382 }, { "content": "/// get a single ast node from a list of asts, returning an Err if the length is not exactly 1.\n\npub fn require_single_toplevel(span: Span, mut asts: Vec<Ast>) -> AstResult<Ast> {\n\n match asts.len() {\n\n 0 => Err(AstError::MissingNode(span)),\n\n 1 => Ok(asts.remove(0)),\n\n _ => Err(AstError::TooManyNodes(asts.get(1).unwrap().span().to(asts.last().unwrap().span()), 1)),\n\n }\n\n}\n\n\n\nmacro_rules! 
test_parser {\n\n ($($text:literal),*) => {{\n\n let p = parser::AstParser::new();\n\n use lexer::Lexer;\n\n\n\n ::insta::with_settings!({sort_maps => true}, {\n\n $(\n\n ::insta::assert_debug_snapshot!(p.parse(0, Lexer::new(0, $text.to_string())));\n\n )*\n\n });\n\n }}\n\n}\n\n\n", "file_path": "crates/yuck/src/parser/mod.rs", "rank": 20, "score": 158801.57130021427 }, { "content": "pub fn do_server_call(stream: &mut UnixStream, action: &opts::ActionWithServer) -> Result<Option<DaemonResponse>> {\n\n log::debug!(\"Forwarding options to server\");\n\n stream.set_nonblocking(false).context(\"Failed to set stream to non-blocking\")?;\n\n\n\n let message_bytes = bincode::serialize(&action)?;\n\n\n\n stream.write(&(message_bytes.len() as u32).to_be_bytes()).context(\"Failed to send command size header to IPC stream\")?;\n\n\n\n stream.write_all(&message_bytes).context(\"Failed to write command to IPC stream\")?;\n\n\n\n let mut buf = Vec::new();\n\n stream.set_read_timeout(Some(std::time::Duration::from_millis(100))).context(\"Failed to set read timeout\")?;\n\n stream.read_to_end(&mut buf).context(\"Error reading response from server\")?;\n\n\n\n Ok(if buf.is_empty() {\n\n None\n\n } else {\n\n let buf = bincode::deserialize(&buf)?;\n\n Some(buf)\n\n })\n\n}\n", "file_path": "crates/eww/src/client.rs", "rank": 21, "score": 154522.90095218428 }, { "content": "pub fn handle_client_only_action(paths: &EwwPaths, action: ActionClientOnly) -> Result<()> {\n\n match action {\n\n ActionClientOnly::Logs => {\n\n std::process::Command::new(\"tail\")\n\n .args([\"-f\", paths.get_log_file().to_string_lossy().as_ref()].iter())\n\n .stdin(Stdio::null())\n\n .spawn()?\n\n .wait()?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/eww/src/client.rs", "rank": 22, "score": 150931.0322763631 }, { "content": "pub fn parse_stringlit(\n\n span: Span,\n\n mut segs: Vec<Sp<StrLitSegment>>,\n\n) -> Result<SimplExpr, lalrpop_util::ParseError<usize, Token, LexicalError>> {\n\n let 
file_id = span.2;\n\n let parser = crate::simplexpr_parser::ExprParser::new();\n\n\n\n if segs.len() == 1 {\n\n let (lo, seg, hi) = segs.remove(0);\n\n let span = Span(lo, hi, file_id);\n\n match seg {\n\n StrLitSegment::Literal(lit) => Ok(SimplExpr::Literal(DynVal(lit, span))),\n\n StrLitSegment::Interp(toks) => {\n\n let token_stream = toks.into_iter().map(Ok);\n\n parser.parse(file_id, token_stream)\n\n }\n\n }\n\n } else {\n\n let elems = segs\n\n .into_iter()\n", "file_path": "crates/simplexpr/src/parser/lalrpop_helpers.rs", "rank": 23, "score": 145366.28117537813 }, { "content": "pub fn validate_widget_definition(\n\n other_defs: &HashMap<String, WidgetDefinition>,\n\n globals: &HashSet<VarName>,\n\n def: &WidgetDefinition,\n\n) -> Result<(), ValidationError> {\n\n let mut variables_in_scope = globals.clone();\n\n for arg in def.expected_args.iter() {\n\n variables_in_scope.insert(VarName(arg.name.to_string()));\n\n }\n\n\n\n validate_variables_in_widget_use(other_defs, &variables_in_scope, &def.widget, true)\n\n}\n\n\n", "file_path": "crates/yuck/src/config/validate.rs", "rank": 24, "score": 145366.2811753781 }, { "content": "/// Parse consecutive `:keyword value` pairs from an expression iterator into an [Attributes].\n\nfn parse_key_values(iter: &mut AstIterator<impl Iterator<Item = Ast>>, fail_on_dangling_kw: bool) -> AstResult<Attributes> {\n\n let mut data = HashMap::new();\n\n let mut attrs_span = iter.remaining_span.point_span();\n\n loop {\n\n match iter.next() {\n\n Some(Ast::Keyword(key_span, kw)) => match iter.next() {\n\n Some(value) => {\n\n attrs_span.1 = iter.remaining_span.0;\n\n let attr_value = AttrEntry { key_span, value };\n\n data.insert(AttrName(kw), attr_value);\n\n }\n\n None => {\n\n if fail_on_dangling_kw {\n\n return Err(AstError::DanglingKeyword(key_span, kw));\n\n } else {\n\n iter.iter.put_back(Ast::Keyword(key_span, kw));\n\n attrs_span.1 = iter.remaining_span.0;\n\n return Ok(Attributes::new(attrs_span, data));\n\n }\n\n }\n", 
"file_path": "crates/yuck/src/parser/ast_iterator.rs", "rank": 25, "score": 145355.17971251253 }, { "content": "pub fn initialize_server(paths: EwwPaths, action: Option<DaemonCommand>) -> Result<ForkResult> {\n\n let (ui_send, mut ui_recv) = tokio::sync::mpsc::unbounded_channel();\n\n\n\n std::env::set_current_dir(&paths.get_config_dir())\n\n .with_context(|| format!(\"Failed to change working directory to {}\", paths.get_config_dir().display()))?;\n\n\n\n log::info!(\"Loading paths: {}\", &paths);\n\n\n\n let read_config = config::read_from_file(&paths.get_yuck_path());\n\n\n\n let eww_config = match read_config {\n\n Ok(config) => config,\n\n Err(err) => {\n\n error_handling_ctx::print_error(err);\n\n config::EwwConfig::default()\n\n }\n\n };\n\n\n\n let fork_result = do_detach(&paths.get_log_file())?;\n\n\n", "file_path": "crates/eww/src/server.rs", "rank": 26, "score": 142902.54115299674 }, { "content": "pub fn get_window_rectangle(geometry: WindowGeometry, screen_rect: gdk::Rectangle) -> gdk::Rectangle {\n\n let (offset_x, offset_y) = geometry.offset.relative_to(screen_rect.width, screen_rect.height);\n\n let (width, height) = geometry.size.relative_to(screen_rect.width, screen_rect.height);\n\n let x = screen_rect.x + offset_x + geometry.anchor_point.x.alignment_to_coordinate(width, screen_rect.width);\n\n let y = screen_rect.y + offset_y + geometry.anchor_point.y.alignment_to_coordinate(height, screen_rect.height);\n\n gdk::Rectangle { x, y, width, height }\n\n}\n", "file_path": "crates/eww/src/app.rs", "rank": 27, "score": 142682.54899618923 }, { "content": "pub fn validate(config: &Config, additional_globals: Vec<VarName>) -> Result<(), ValidationError> {\n\n let var_names = std::iter::empty()\n\n .chain(additional_globals.iter().cloned())\n\n .chain(config.script_vars.keys().cloned())\n\n .chain(config.var_definitions.keys().cloned())\n\n .collect();\n\n for window in config.window_definitions.values() {\n\n 
validate_variables_in_widget_use(&config.widget_definitions, &var_names, &window.widget, false)?;\n\n }\n\n for def in config.widget_definitions.values() {\n\n validate_widget_definition(&config.widget_definitions, &var_names, def)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/yuck/src/config/validate.rs", "rank": 28, "score": 142585.56996029866 }, { "content": "/// Notify all listening tasks of the termination of the eww application process.\n\npub fn send_exit() -> Result<()> {\n\n (APPLICATION_EXIT_SENDER).send(()).context(\"Failed to send exit lifecycle event\")?;\n\n Ok(())\n\n}\n\n\n\n/// Yields Ok(()) on application termination. Await on this in all long-running tasks\n\n/// and perform any cleanup if necessary.\n\npub async fn recv_exit() -> Result<()> {\n\n (APPLICATION_EXIT_SENDER).subscribe().recv().await.context(\"Failed to receive lifecycle event\")\n\n}\n\n\n\n/// Select in a loop, breaking once a application termination event (see `crate::application_lifecycle`) is received.\n\n#[macro_export]\n\nmacro_rules! loop_select_exiting {\n\n ($($content:tt)*) => {\n\n loop {\n\n tokio::select! 
{\n\n Ok(()) = crate::application_lifecycle::recv_exit() => {\n\n break;\n\n }\n\n $($content)*\n\n }\n\n }\n\n };\n\n}\n", "file_path": "crates/eww/src/application_lifecycle.rs", "rank": 29, "score": 142038.6491454294 }, { "content": "pub fn net() -> String {\n\n let mut c = SYSTEM.lock().unwrap();\n\n c.refresh_networks_list();\n\n let interfaces = format!(\n\n \"{{ {} }}\",\n\n &c.get_networks()\n\n .iter()\n\n .map(|a| format!(r#\"\"{}\": {{ \"NET_UP\": {}, \"NET_DOWN\": {} }}\"#, a.0, a.1.get_transmitted(), a.1.get_received()))\n\n .join(\",\"),\n\n );\n\n interfaces\n\n}\n", "file_path": "crates/eww/src/config/system_stats.rs", "rank": 30, "score": 142038.6491454294 }, { "content": "pub fn generate_generic_widget_node(\n\n defs: &HashMap<String, WidgetDefinition>,\n\n local_env: &HashMap<VarName, SimplExpr>,\n\n w: WidgetUse,\n\n) -> AstResult<Box<dyn WidgetNode>> {\n\n if let Some(def) = defs.get(&w.name) {\n\n if !w.children.is_empty() {\n\n return Err(AstError::TooManyNodes(w.children_span(), 0).note(\"User-defined widgets cannot be given children.\"));\n\n }\n\n\n\n let mut new_local_env = w\n\n .attrs\n\n .attrs\n\n .into_iter()\n\n .map(|(name, value)| Ok((VarName(name.0), value.value.as_simplexpr()?.resolve_one_level(local_env))))\n\n .collect::<AstResult<HashMap<VarName, _>>>()?;\n\n\n\n // handle default value for optional arguments\n\n for expected in def.expected_args.iter().filter(|x| x.optional) {\n\n let var_name = VarName(expected.name.clone().0);\n", "file_path": "crates/eww/src/widgets/widget_node.rs", "rank": 31, "score": 141195.61597122886 }, { "content": "pub fn get_cpus() -> String {\n\n let mut c = SYSTEM.lock().unwrap();\n\n c.refresh_cpu();\n\n let processors = c.get_processors();\n\n format!(\n\n r#\"{{ \"cores\": [{}], \"avg\": {} }}\"#,\n\n processors\n\n .iter()\n\n .map(|a| format!(\n\n r#\"{{\"core\": \"{}\", \"freq\": {}, \"usage\": {:.0}}}\"#,\n\n a.get_name(),\n\n a.get_frequency(),\n\n a.get_cpu_usage()\n\n ))\n\n 
.join(\",\"),\n\n processors.iter().map(|a| a.get_cpu_usage()).avg()\n\n )\n\n}\n\n\n", "file_path": "crates/eww/src/config/system_stats.rs", "rank": 32, "score": 139904.53581962135 }, { "content": "pub fn get_temperatures() -> String {\n\n let mut c = SYSTEM.lock().unwrap();\n\n c.refresh_components_list();\n\n c.refresh_components();\n\n format!(\n\n \"{{ {} }}\",\n\n c.get_components()\n\n .iter()\n\n .map(|c| format!(r#\"\"{}\": {}\"#, c.get_label().to_uppercase().replace(\" \", \"_\"), c.get_temperature()))\n\n .join(\",\")\n\n )\n\n}\n\n\n", "file_path": "crates/eww/src/config/system_stats.rs", "rank": 33, "score": 139904.53581962135 }, { "content": "pub fn get_ram() -> String {\n\n let mut c = SYSTEM.lock().unwrap();\n\n c.refresh_memory();\n\n\n\n let total_memory = c.get_total_memory();\n\n let available_memory = c.get_available_memory();\n\n let used_memory = total_memory as f32 - available_memory as f32;\n\n format!(\n\n r#\"{{\"total_mem\": {}, \"free_mem\": {}, \"total_swap\": {}, \"free_swap\": {}, \"available_mem\": {}, \"used_mem\": {}, \"used_mem_perc\": {}}}\"#,\n\n total_memory,\n\n c.get_free_memory(),\n\n c.get_total_swap(),\n\n c.get_free_swap(),\n\n available_memory,\n\n used_memory,\n\n (used_memory / total_memory as f32) * 100f32,\n\n )\n\n}\n\n\n", "file_path": "crates/eww/src/config/system_stats.rs", "rank": 34, "score": 139904.53581962135 }, { "content": "pub fn get_disks() -> String {\n\n let mut c = SYSTEM.lock().unwrap();\n\n c.refresh_disks_list();\n\n\n\n format!(\n\n \"{{ {} }}\",\n\n c.get_disks()\n\n .iter()\n\n .map(|c| {\n\n let total_space = c.get_total_space();\n\n let available_space = c.get_available_space();\n\n let used_space = total_space - available_space;\n\n format!(\n\n r#\"\"{}\": {{\"name\": {:?}, \"total\": {}, \"free\": {}, \"used\": {}, \"used_perc\": {}}}\"#,\n\n c.get_mount_point().display(),\n\n c.get_name(),\n\n total_space,\n\n available_space,\n\n used_space,\n\n (used_space as f32 / total_space as f32) * 
100f32,\n\n )\n\n })\n\n .join(\",\")\n\n )\n\n}\n\n\n", "file_path": "crates/eww/src/config/system_stats.rs", "rank": 35, "score": 139904.53581962135 }, { "content": "fn listen_for_daemon_response(mut recv: DaemonResponseReceiver) {\n\n let rt = tokio::runtime::Builder::new_current_thread().enable_time().build().expect(\"Failed to initialize tokio runtime\");\n\n rt.block_on(async {\n\n if let Ok(Some(response)) = tokio::time::timeout(Duration::from_millis(100), recv.recv()).await {\n\n println!(\"{}\", response);\n\n }\n\n })\n\n}\n\n\n", "file_path": "crates/eww/src/main.rs", "rank": 44, "score": 134768.46487807002 }, { "content": "#[cfg(not(target_os = \"macos\"))]\n\n#[cfg(not(target_os = \"linux\"))]\n\npub fn get_battery_capacity() -> Result<u8> {\n\n anyhow!(\"Eww doesn't support your OS for getting the battery capacity\")\n\n}\n\n\n", "file_path": "crates/eww/src/config/system_stats.rs", "rank": 45, "score": 132980.22502598038 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn get_battery_capacity() -> Result<String> {\n\n let mut current = 0_f64;\n\n let mut total = 0_f64;\n\n let mut json = String::from('{');\n\n for i in\n\n std::path::Path::new(\"/sys/class/power_supply\").read_dir().context(\"Couldn't read /sys/class/power_supply directory\")?\n\n {\n\n let i = i?.path();\n\n if i.is_dir() {\n\n // some ugly hack because if let Some(a) = a && Some(b) = b doesn't work yet\n\n if let (Ok(o), Ok(s)) = (read_to_string(i.join(\"capacity\")), read_to_string(i.join(\"status\"))) {\n\n json.push_str(&format!(\n\n r#\"{:?}: {{ \"status\": \"{}\", \"capacity\": {} }},\"#,\n\n i.file_name().context(\"couldn't convert file name to rust string\")?,\n\n s.replace(\"\\n\", \"\"),\n\n o.replace(\"\\n\", \"\")\n\n ));\n\n if let (Ok(t), Ok(c), Ok(v)) = (\n\n read_to_string(i.join(\"charge_full\")),\n\n read_to_string(i.join(\"charge_now\")),\n", "file_path": "crates/eww/src/config/system_stats.rs", "rank": 46, "score": 132980.22502598038 }, { "content": "/// 
Check if a eww server is currently running by trying to send a ping message to it.\n\nfn check_server_running(socket_path: impl AsRef<Path>) -> bool {\n\n let response = net::UnixStream::connect(socket_path)\n\n .ok()\n\n .and_then(|mut stream| client::do_server_call(&mut stream, &opts::ActionWithServer::Ping).ok());\n\n response.is_some()\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct EwwPaths {\n\n log_file: PathBuf,\n\n ipc_socket_file: PathBuf,\n\n config_dir: PathBuf,\n\n}\n\n\n\nimpl EwwPaths {\n\n pub fn from_config_dir<P: AsRef<Path>>(config_dir: P) -> Result<Self> {\n\n let config_dir = config_dir.as_ref();\n\n if config_dir.is_file() {\n\n bail!(\"Please provide the path to the config directory, not a file within it\")\n\n }\n", "file_path": "crates/eww/src/main.rs", "rank": 47, "score": 128581.34822669487 }, { "content": "/// read an scss file, replace all environment variable references within it and\n\n/// then parse it into css.\n\npub fn parse_scss_from_file(path: &Path) -> Result<String> {\n\n let config_dir = path.parent().context(\"Given SCSS file has no parent directory?!\")?;\n\n let scss_file_content =\n\n std::fs::read_to_string(path).with_context(|| format!(\"Given SCSS File Doesnt Exist! 
{}\", path.display()))?;\n\n let file_content = replace_env_var_references(scss_file_content);\n\n let grass_config = grass::Options::default().load_path(config_dir);\n\n grass::from_string(file_content, &grass_config).map_err(|err| anyhow!(\"Encountered SCSS parsing error: {:?}\", err))\n\n}\n\n\n\n#[ext(pub, name = StringExt)]\n\nimpl<T: AsRef<str>> T {\n\n /// check if the string is empty after removing all linebreaks and trimming\n\n /// whitespace\n\n fn is_blank(self) -> bool {\n\n self.as_ref().replace('\\n', \"\").trim().is_empty()\n\n }\n\n\n\n /// trim all lines in a string\n\n fn trim_lines(self) -> String {\n\n self.as_ref().lines().map(|line| line.trim()).join(\"\\n\")\n\n }\n\n}\n\n\n", "file_path": "crates/eww/src/util.rs", "rank": 48, "score": 128011.9574580323 }, { "content": "pub fn b<T>(x: T) -> Box<T> {\n\n Box::new(x)\n\n}\n\n\n", "file_path": "crates/simplexpr/src/parser/lalrpop_helpers.rs", "rank": 49, "score": 127841.04266244889 }, { "content": "/// detach the process from the terminal, also redirecting stdout and stderr to LOG_FILE\n\nfn do_detach(log_file_path: impl AsRef<Path>) -> Result<ForkResult> {\n\n // detach from terminal\n\n match unsafe { nix::unistd::fork()? } {\n\n nix::unistd::ForkResult::Child => {\n\n nix::unistd::setsid()?;\n\n match unsafe { nix::unistd::fork()? } {\n\n nix::unistd::ForkResult::Parent { .. } => std::process::exit(0),\n\n nix::unistd::ForkResult::Child => {}\n\n }\n\n }\n\n nix::unistd::ForkResult::Parent { .. 
} => {\n\n return Ok(ForkResult::Parent);\n\n }\n\n }\n\n\n\n let file = std::fs::OpenOptions::new()\n\n .create(true)\n\n .append(true)\n\n .open(&log_file_path)\n\n .unwrap_or_else(|_| panic!(\"Error opening log file ({}), for writing\", log_file_path.as_ref().to_string_lossy()));\n", "file_path": "crates/eww/src/server.rs", "rank": 50, "score": 124575.63412460637 }, { "content": "#[cfg(feature = \"x11\")]\n\nfn apply_window_position(\n\n mut window_geometry: WindowGeometry,\n\n monitor_geometry: gdk::Rectangle,\n\n window: &gtk::Window\n\n) -> Result<()> {\n\n\n\n let gdk_window = window.window().context(\"Failed to get gdk window from gtk window\")?;\n\n window_geometry.size = Coords::from_pixels(window.size());\n\n let actual_window_rect = get_window_rectangle(window_geometry, monitor_geometry);\n\n\n\n let gdk_origin = gdk_window.origin();\n\n\n\n if actual_window_rect.x != gdk_origin.1 || actual_window_rect.y != gdk_origin.2 {\n\n gdk_window.move_(actual_window_rect.x, actual_window_rect.y);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/eww/src/app.rs", "rank": 51, "score": 123611.32887371731 }, { "content": "fn build_if_else(bargs: &mut BuilderArgs) -> Result<gtk::Box> {\n\n if bargs.widget.children.len() != 2 {\n\n bail!(\"if-widget needs to have exactly two children, but had {}\", bargs.widget.children.len());\n\n }\n\n let gtk_widget = gtk::Box::new(gtk::Orientation::Vertical, 0);\n\n let (yes_widget, no_widget) = (bargs.widget.children[0].clone(), bargs.widget.children[1].clone());\n\n\n\n let yes_widget = yes_widget.render(bargs.eww_state, bargs.window_name, bargs.widget_definitions)?;\n\n let no_widget = no_widget.render(bargs.eww_state, bargs.window_name, bargs.widget_definitions)?;\n\n\n\n resolve_block!(bargs, gtk_widget, {\n\n prop(cond: as_bool) {\n\n gtk_widget.children().iter().for_each(|w| gtk_widget.remove(w));\n\n if cond {\n\n gtk_widget.add(&yes_widget)\n\n } else {\n\n gtk_widget.add(&no_widget)\n\n }\n\n }\n\n });\n\n 
Ok(gtk_widget)\n\n}\n\n\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 52, "score": 122102.63250589554 }, { "content": "/// @widget input\n\n/// @desc An input field. For this to be useful, set `focusable=\"true\"` on the window.\n\nfn build_gtk_input(bargs: &mut BuilderArgs) -> Result<gtk::Entry> {\n\n let gtk_widget = gtk::Entry::new();\n\n let on_change_handler_id: EventHandlerId = Rc::new(RefCell::new(None));\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop value - the content of the text field\n\n prop(value: as_string) {\n\n gtk_widget.set_text(&value);\n\n },\n\n\n\n // @prop onchange - Command to run when the text changes. The placeholder `{}` will be replaced by the value\n\n // @prop timeout - timeout of the command\n\n prop(timeout: as_duration = Duration::from_millis(200), onchange: as_string) {\n\n let old_id = on_change_handler_id.replace(Some(\n\n gtk_widget.connect_changed(move |gtk_widget| {\n\n run_command(timeout, &onchange, gtk_widget.text().to_string());\n\n })\n\n ));\n\n old_id.map(|id| gtk_widget.disconnect(id));\n\n }\n\n });\n\n Ok(gtk_widget)\n\n}\n\n\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 53, "score": 120463.10477146071 }, { "content": "/// @widget button extends container\n\n/// @desc A button\n\nfn build_gtk_button(bargs: &mut BuilderArgs) -> Result<gtk::Button> {\n\n let gtk_widget = gtk::Button::new();\n\n let on_click_handler_id: EventHandlerId = Rc::new(RefCell::new(None));\n\n\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop onclick - a command that get's run when the button is clicked\n\n // @prop onmiddleclick - a command that get's run when the button is middleclicked\n\n // @prop onrightclick - a command that get's run when the button is rightclicked\n\n // @prop timeout - timeout of the command\n\n prop(\n\n timeout: as_duration = Duration::from_millis(200),\n\n onclick: as_string = \"\",\n\n onmiddleclick: as_string = \"\",\n\n onrightclick: as_string = 
\"\"\n\n ) {\n\n gtk_widget.add_events(gdk::EventMask::ENTER_NOTIFY_MASK);\n\n let old_id = on_click_handler_id.replace(Some(\n\n gtk_widget.connect_button_press_event(move |_, evt| {\n\n match evt.button() {\n\n 1 => run_command(timeout, &onclick, \"\"),\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 54, "score": 120459.29843450004 }, { "content": "/// @widget literal\n\n/// @desc A widget that allows you to render arbitrary yuck.\n\nfn build_gtk_literal(bargs: &mut BuilderArgs) -> Result<gtk::Box> {\n\n let gtk_widget = gtk::Box::new(gtk::Orientation::Vertical, 0);\n\n gtk_widget.set_widget_name(\"literal\");\n\n\n\n // TODO these clones here are dumdum\n\n let window_name = bargs.window_name.to_string();\n\n let widget_definitions = bargs.widget_definitions.clone();\n\n let literal_use_span = bargs.widget.span;\n\n\n\n // the file id the literal-content has been stored under, for error reporting.\n\n let literal_file_id: Rc<RefCell<Option<usize>>> = Rc::new(RefCell::new(None));\n\n\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop content - inline yuck that will be rendered as a widget.\n\n prop(content: as_string) {\n\n gtk_widget.children().iter().for_each(|w| gtk_widget.remove(w));\n\n if !content.is_empty() {\n\n let widget_node_result: AstResult<_> = try {\n\n let ast = {\n\n let mut yuck_files = error_handling_ctx::YUCK_FILES.write().unwrap();\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 55, "score": 120459.29843450004 }, { "content": "/// @widget calendar\n\n/// @desc A widget that displays a calendar\n\nfn build_gtk_calendar(bargs: &mut BuilderArgs) -> Result<gtk::Calendar> {\n\n let gtk_widget = gtk::Calendar::new();\n\n let on_click_handler_id: EventHandlerId = Rc::new(RefCell::new(None));\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop day - the selected day\n\n prop(day: as_f64) { gtk_widget.set_day(day as i32) },\n\n // @prop month - the selected month\n\n prop(month: as_f64) { 
gtk_widget.set_month(month as i32) },\n\n // @prop year - the selected year\n\n prop(year: as_f64) { gtk_widget.set_year(year as i32) },\n\n // @prop show-details - show details\n\n prop(show_details: as_bool) { gtk_widget.set_show_details(show_details) },\n\n // @prop show-heading - show heading line\n\n prop(show_heading: as_bool) { gtk_widget.set_show_heading(show_heading) },\n\n // @prop show-day-names - show names of days\n\n prop(show_day_names: as_bool) { gtk_widget.set_show_day_names(show_day_names) },\n\n // @prop show-week-numbers - show week numbers\n\n prop(show_week_numbers: as_bool) { gtk_widget.set_show_week_numbers(show_week_numbers) },\n\n // @prop onclick - command to run when the user selects a date. The `{}` placeholder will be replaced by the selected date.\n\n // @prop timeout - timeout of the command\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 56, "score": 120459.29843450004 }, { "content": "/// @widget image\n\n/// @desc A widget displaying an image\n\nfn build_gtk_image(bargs: &mut BuilderArgs) -> Result<gtk::Image> {\n\n let gtk_widget = gtk::Image::new();\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop path - path to the image file\n\n // @prop width - width of the image\n\n // @prop height - height of the image\n\n prop(path: as_string, width: as_i32 = 10000, height: as_i32 = 10000) {\n\n if path.ends_with(\".gif\") {\n\n let pixbuf_animation = gtk::gdk_pixbuf::PixbufAnimation::from_file(std::path::PathBuf::from(path))?;\n\n gtk_widget.set_from_animation(&pixbuf_animation);\n\n } else {\n\n let pixbuf = gtk::gdk_pixbuf::Pixbuf::from_file_at_size(std::path::PathBuf::from(path), width, height)?;\n\n gtk_widget.set_from_pixbuf(Some(&pixbuf));\n\n }\n\n }\n\n });\n\n Ok(gtk_widget)\n\n}\n\n\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 57, "score": 120459.29843450004 }, { "content": "/// @widget box extends container\n\n/// @desc the main layout container\n\nfn build_gtk_box(bargs: 
&mut BuilderArgs) -> Result<gtk::Box> {\n\n let gtk_widget = gtk::Box::new(gtk::Orientation::Horizontal, 0);\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop spacing - spacing between elements\n\n prop(spacing: as_i32 = 0) { gtk_widget.set_spacing(spacing) },\n\n // @prop orientation - orientation of the box. possible values: $orientation\n\n prop(orientation: as_string) { gtk_widget.set_orientation(parse_orientation(&orientation)?) },\n\n // @prop space-evenly - space the widgets evenly.\n\n prop(space_evenly: as_bool = true) { gtk_widget.set_homogeneous(space_evenly) },\n\n });\n\n Ok(gtk_widget)\n\n}\n\n\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 58, "score": 120459.29843450004 }, { "content": "/// @widget expander extends container\n\n/// @desc A widget that can expand and collapse, showing/hiding it's children.\n\nfn build_gtk_expander(bargs: &mut BuilderArgs) -> Result<gtk::Expander> {\n\n let gtk_widget = gtk::Expander::new(None);\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop name - name of the expander\n\n prop(name: as_string) {gtk_widget.set_label(Some(&name));},\n\n // @prop expanded - sets if the tree is expanded\n\n prop(expanded: as_bool) { gtk_widget.set_expanded(expanded); }\n\n });\n\n Ok(gtk_widget)\n\n}\n\n\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 59, "score": 120459.29843450004 }, { "content": "/// @widget revealer extends container\n\n/// @desc A widget that can reveal a child with an animation.\n\nfn build_gtk_revealer(bargs: &mut BuilderArgs) -> Result<gtk::Revealer> {\n\n let gtk_widget = gtk::Revealer::new();\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop transition - the name of the transition. 
Possible values: $transition\n\n prop(transition: as_string = \"crossfade\") { gtk_widget.set_transition_type(parse_transition(&transition)?); },\n\n // @prop reveal - sets if the child is revealed or not\n\n prop(reveal: as_bool) { gtk_widget.set_reveal_child(reveal); },\n\n // @prop duration - the duration of the reveal transition\n\n prop(duration: as_duration = Duration::from_millis(500)) { gtk_widget.set_transition_duration(duration.as_millis() as u32); },\n\n });\n\n Ok(gtk_widget)\n\n}\n\n\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 60, "score": 120459.29843450004 }, { "content": "/// @widget scale extends range\n\n/// @desc A slider.\n\nfn build_gtk_scale(bargs: &mut BuilderArgs) -> Result<gtk::Scale> {\n\n let gtk_widget = gtk::Scale::new(gtk::Orientation::Horizontal, Some(&gtk::Adjustment::new(0.0, 0.0, 100.0, 1.0, 1.0, 1.0)));\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop flipped - flip the direction\n\n prop(flipped: as_bool) { gtk_widget.set_inverted(flipped) },\n\n\n\n // @prop draw-value - draw the value of the property\n\n prop(draw_value: as_bool = false) { gtk_widget.set_draw_value(draw_value) },\n\n });\n\n Ok(gtk_widget)\n\n}\n\n\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 61, "score": 120459.29843450004 }, { "content": "/// @widget label\n\n/// @desc A text widget giving you more control over how the text is displayed\n\nfn build_gtk_label(bargs: &mut BuilderArgs) -> Result<gtk::Label> {\n\n let gtk_widget = gtk::Label::new(None);\n\n\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop text - the text to display\n\n // @prop limit-width - maximum count of characters to display\n\n // @prop show_truncated - show whether the text was truncated\n\n prop(text: as_string, limit_width: as_i32 = i32::MAX, show_truncated: as_bool = true) {\n\n let truncated = text.chars().count() > limit_width as usize;\n\n let mut text = text.chars().take(limit_width as 
usize).collect::<String>();\n\n\n\n if show_truncated && truncated {\n\n text.push_str(\"...\");\n\n }\n\n\n\n let text = unescape::unescape(&text).context(format!(\"Failed to unescape label text {}\", &text))?;\n\n let text = unindent::unindent(&text);\n\n gtk_widget.set_text(&text);\n\n },\n\n // @prop markup - Pango markup to display\n\n prop(markup: as_string) { gtk_widget.set_markup(&markup); },\n\n // @prop wrap - Wrap the text. This mainly makes sense if you set the width of this widget.\n\n prop(wrap: as_bool) { gtk_widget.set_line_wrap(wrap) },\n\n // @prop angle - the angle of rotation for the label (between 0 - 360)\n\n prop(angle: as_f64 = 0) { gtk_widget.set_angle(angle) }\n\n });\n\n Ok(gtk_widget)\n\n}\n\n\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 62, "score": 120459.29843450004 }, { "content": "/// @widget centerbox extends container\n\n/// @desc a box that must contain exactly three children, which will be layed out at the start, center and end of the container.\n\nfn build_center_box(bargs: &mut BuilderArgs) -> Result<gtk::Box> {\n\n let gtk_widget = gtk::Box::new(gtk::Orientation::Horizontal, 0);\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop orientation - orientation of the centerbox. possible values: $orientation\n\n prop(orientation: as_string) { gtk_widget.set_orientation(parse_orientation(&orientation)?) 
},\n\n });\n\n\n\n match bargs.widget.children.len().cmp(&3) {\n\n Ordering::Less => {\n\n Err(DiagError::new(gen_diagnostic!(\"centerbox must contain exactly 3 elements\", bargs.widget.span)).into())\n\n }\n\n Ordering::Greater => {\n\n let (_, additional_children) = bargs.widget.children.split_at(3);\n\n // we know that there is more than three children, so unwrapping on first and left here is fine.\n\n let first_span = additional_children.first().unwrap().span();\n\n let last_span = additional_children.last().unwrap().span();\n\n Err(DiagError::new(gen_diagnostic!(\n\n \"centerbox must contain exactly 3 elements, but got more\",\n\n first_span.to(last_span)\n\n ))\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 63, "score": 120459.29843450004 }, { "content": "fn init_async_part(paths: EwwPaths, ui_send: UnboundedSender<app::DaemonCommand>) {\n\n std::thread::spawn(move || {\n\n let rt = tokio::runtime::Builder::new_multi_thread().enable_all().build().expect(\"Failed to initialize tokio runtime\");\n\n rt.block_on(async {\n\n let filewatch_join_handle = {\n\n let ui_send = ui_send.clone();\n\n let paths = paths.clone();\n\n tokio::spawn(async move { run_filewatch(paths.config_dir, ui_send).await })\n\n };\n\n\n\n let ipc_server_join_handle = {\n\n let ui_send = ui_send.clone();\n\n tokio::spawn(async move { ipc_server::run_server(ui_send, paths.get_ipc_socket_file()).await })\n\n };\n\n\n\n let forward_exit_to_app_handle = {\n\n let ui_send = ui_send.clone();\n\n tokio::spawn(async move {\n\n // Wait for application exit event\n\n let _ = crate::application_lifecycle::recv_exit().await;\n", "file_path": "crates/eww/src/server.rs", "rank": 64, "score": 119562.43336951268 }, { "content": "/// @widget progress\n\n/// @desc A progress bar\n\nfn build_gtk_progress(bargs: &mut BuilderArgs) -> Result<gtk::ProgressBar> {\n\n let gtk_widget = gtk::ProgressBar::new();\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop flipped - flip the 
direction\n\n prop(flipped: as_bool) { gtk_widget.set_inverted(flipped) },\n\n\n\n // @prop value - value of the progress bar (between 0-100)\n\n prop(value: as_f64) { gtk_widget.set_fraction(value / 100f64) },\n\n\n\n // @prop orientation - orientation of the progress bar. possible values: $orientation\n\n prop(orientation: as_string) { gtk_widget.set_orientation(parse_orientation(&orientation)?) },\n\n });\n\n Ok(gtk_widget)\n\n}\n\n\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 65, "score": 118882.02922412581 }, { "content": "/// @widget a checkbox\n\n/// @desc A checkbox that can trigger events on checked / unchecked.\n\nfn build_gtk_checkbox(bargs: &mut BuilderArgs) -> Result<gtk::CheckButton> {\n\n let gtk_widget = gtk::CheckButton::new();\n\n let on_change_handler_id: EventHandlerId = Rc::new(RefCell::new(None));\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop timeout - timeout of the command\n\n // @prop onchecked - action (command) to be executed when checked by the user\n\n // @prop onunchecked - similar to onchecked but when the widget is unchecked\n\n prop(timeout: as_duration = Duration::from_millis(200), onchecked: as_string = \"\", onunchecked: as_string = \"\") {\n\n let old_id = on_change_handler_id.replace(Some(\n\n gtk_widget.connect_toggled(move |gtk_widget| {\n\n run_command(timeout, if gtk_widget.is_active() { &onchecked } else { &onunchecked }, \"\");\n\n })\n\n ));\n\n old_id.map(|id| gtk_widget.disconnect(id));\n\n }\n\n });\n\n\n\n Ok(gtk_widget)\n\n}\n\n\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 66, "score": 118882.02922412581 }, { "content": "pub fn parse_string(file_id: usize, s: &str) -> AstResult<Ast> {\n\n let lexer = lexer::Lexer::new(file_id, s.to_string());\n\n let parser = parser::AstParser::new();\n\n parser.parse(file_id, lexer).map_err(|e| AstError::from_parse_error(file_id, e))\n\n}\n\n\n", "file_path": "crates/yuck/src/parser/mod.rs", "rank": 67, "score": 
118630.43499252842 }, { "content": "/// @widget color-button\n\n/// @desc A button opening a color chooser window\n\nfn build_gtk_color_button(bargs: &mut BuilderArgs) -> Result<gtk::ColorButton> {\n\n let gtk_widget = gtk::ColorButtonBuilder::new().build();\n\n let on_change_handler_id: EventHandlerId = Rc::new(RefCell::new(None));\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop use-alpha - bool to whether or not use alpha\n\n prop(use_alpha: as_bool) {gtk_widget.set_use_alpha(use_alpha);},\n\n\n\n // @prop onchange - runs the code when the color was selected\n\n // @prop timeout - timeout of the command\n\n prop(timeout: as_duration = Duration::from_millis(200), onchange: as_string) {\n\n let old_id = on_change_handler_id.replace(Some(\n\n gtk_widget.connect_color_set(move |gtk_widget| {\n\n run_command(timeout, &onchange, gtk_widget.rgba());\n\n })\n\n ));\n\n old_id.map(|id| gtk_widget.disconnect(id));\n\n }\n\n });\n\n\n\n Ok(gtk_widget)\n\n}\n\n\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 68, "score": 117366.91949442484 }, { "content": "/// @widget color-chooser\n\n/// @desc A color chooser widget\n\nfn build_gtk_color_chooser(bargs: &mut BuilderArgs) -> Result<gtk::ColorChooserWidget> {\n\n let gtk_widget = gtk::ColorChooserWidget::new();\n\n let on_change_handler_id: EventHandlerId = Rc::new(RefCell::new(None));\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop use-alpha - bool to wether or not use alpha\n\n prop(use_alpha: as_bool) {gtk_widget.set_use_alpha(use_alpha);},\n\n\n\n // @prop onchange - runs the code when the color was selected\n\n // @prop timeout - timeout of the command\n\n prop(timeout: as_duration = Duration::from_millis(200), onchange: as_string) {\n\n let old_id = on_change_handler_id.replace(Some(\n\n gtk_widget.connect_color_activated(move |_a, color| {\n\n run_command(timeout, &onchange, *color);\n\n })\n\n ));\n\n old_id.map(|id| gtk_widget.disconnect(id));\n\n }\n\n });\n\n\n\n 
Ok(gtk_widget)\n\n}\n\n\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 69, "score": 115910.36573537912 }, { "content": "/// @widget combo-box-text\n\n/// @desc A combo box allowing the user to choose between several items.\n\nfn build_gtk_combo_box_text(bargs: &mut BuilderArgs) -> Result<gtk::ComboBoxText> {\n\n let gtk_widget = gtk::ComboBoxText::new();\n\n let on_change_handler_id: EventHandlerId = Rc::new(RefCell::new(None));\n\n resolve_block!(bargs, gtk_widget, {\n\n // @prop items - Items that should be displayed in the combo box\n\n prop(items: as_vec) {\n\n gtk_widget.remove_all();\n\n for i in items {\n\n gtk_widget.append_text(&i);\n\n }\n\n },\n\n // @prop timeout - timeout of the command\n\n // @prop onchange - runs the code when a item was selected, replacing {} with the item as a string\n\n prop(timeout: as_duration = Duration::from_millis(200), onchange: as_string) {\n\n let old_id = on_change_handler_id.replace(Some(\n\n gtk_widget.connect_changed(move |gtk_widget| {\n\n run_command(timeout, &onchange, gtk_widget.active_text().unwrap_or_else(|| \"\".into()));\n\n })\n\n ));\n\n old_id.map(|id| gtk_widget.disconnect(id));\n\n },\n\n });\n\n Ok(gtk_widget)\n\n}\n", "file_path": "crates/eww/src/widgets/widget_definitions.rs", "rank": 70, "score": 114509.0376935173 }, { "content": "fn attempt_connect(socket_path: impl AsRef<Path>, attempts: usize) -> Option<net::UnixStream> {\n\n for _ in 0..attempts {\n\n if let Ok(mut con) = net::UnixStream::connect(&socket_path) {\n\n if client::do_server_call(&mut con, &opts::ActionWithServer::Ping).is_ok() {\n\n return net::UnixStream::connect(&socket_path).ok();\n\n }\n\n }\n\n std::thread::sleep(Duration::from_millis(200));\n\n }\n\n None\n\n}\n\n\n", "file_path": "crates/eww/src/main.rs", "rank": 71, "score": 114142.46013823857 }, { "content": "/// Parse multiple toplevel nodes into a list of [Ast]\n\npub fn parse_toplevel(file_id: usize, s: String) -> AstResult<(Span, Vec<Ast>)> {\n\n 
let lexer = lexer::Lexer::new(file_id, s);\n\n let parser = parser::ToplevelParser::new();\n\n parser.parse(file_id, lexer).map_err(|e| AstError::from_parse_error(file_id, e))\n\n}\n\n\n", "file_path": "crates/yuck/src/parser/mod.rs", "rank": 72, "score": 112083.22598821358 }, { "content": "fn parse_var_update_arg(s: &str) -> Result<(VarName, DynVal)> {\n\n let (name, value) = s\n\n .split_once('=')\n\n .with_context(|| format!(\"arguments must be in the shape `variable_name=\\\"new_value\\\"`, but got: {}\", s))?;\n\n Ok((name.into(), DynVal::from_string(value.to_owned())))\n\n}\n\n\n\nimpl ActionWithServer {\n\n pub fn can_start_daemon(&self) -> bool {\n\n matches!(self, ActionWithServer::OpenWindow { .. } | ActionWithServer::OpenMany { .. })\n\n }\n\n\n\n pub fn into_daemon_command(self) -> (app::DaemonCommand, Option<daemon_response::DaemonResponseReceiver>) {\n\n let command = match self {\n\n ActionWithServer::Update { mappings } => app::DaemonCommand::UpdateVars(mappings),\n\n\n\n ActionWithServer::KillServer => app::DaemonCommand::KillServer,\n\n ActionWithServer::CloseAll => app::DaemonCommand::CloseAll,\n\n ActionWithServer::Ping => {\n\n let (send, recv) = tokio::sync::mpsc::unbounded_channel();\n", "file_path": "crates/eww/src/opts.rs", "rank": 73, "score": 112083.09881764624 }, { "content": "fn label_from_simplexpr(value: SimplExpr, span: Span) -> WidgetUse {\n\n WidgetUse {\n\n name: \"label\".to_string(),\n\n name_span: span.point_span(),\n\n attrs: Attributes::new(\n\n span,\n\n maplit::hashmap! 
{\n\n AttrName(\"text\".to_string()) => AttrEntry::new(\n\n span,\n\n Ast::SimplExpr(span, value)\n\n )\n\n },\n\n ),\n\n children: Vec::new(),\n\n span,\n\n }\n\n}\n", "file_path": "crates/yuck/src/config/widget_use.rs", "rank": 74, "score": 110871.22326642784 }, { "content": "pub fn parse_string(byte_offset: usize, file_id: usize, s: &str) -> Result<SimplExpr> {\n\n let lexer = lexer::Lexer::new(file_id, byte_offset, s);\n\n let parser = crate::simplexpr_parser::ExprParser::new();\n\n parser.parse(file_id, lexer).map_err(|e| Error::from_parse_error(file_id, e))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n macro_rules! test_parser {\n\n ($($text:literal),* $(,)?) => {{\n\n let p = crate::simplexpr_parser::ExprParser::new();\n\n use crate::parser::lexer::Lexer;\n\n ::insta::with_settings!({sort_maps => true}, {\n\n $(\n\n ::insta::assert_debug_snapshot!(p.parse(0, Lexer::new(0, 0, $text)));\n\n )*\n\n });\n\n }}\n\n }\n\n\n", "file_path": "crates/simplexpr/src/parser/mod.rs", "rank": 75, "score": 110574.61232748312 }, { "content": "fn with_response_channel<O, F>(f: F) -> (O, Option<tokio::sync::mpsc::UnboundedReceiver<DaemonResponse>>)\n\nwhere\n\n F: FnOnce(DaemonResponseSender) -> O,\n\n{\n\n let (sender, recv) = daemon_response::create_pair();\n\n (f(sender), Some(recv))\n\n}\n", "file_path": "crates/eww/src/opts.rs", "rank": 76, "score": 109798.05096724555 }, { "content": "fn variable_deprecation_note(var_name: String) -> Option<String> {\n\n (var_name == \"EWW_CPU_USAGE\")\n\n .then(|| \"Note: EWW_CPU_USAGE has recently been removed, and has now been renamed to EWW_CPU\".to_string())\n\n}\n\n\n", "file_path": "crates/yuck/src/format_diagnostic.rs", "rank": 77, "score": 104887.96232616673 }, { "content": "/// Get the monitor geometry of a given monitor number, or the default if none is given\n\nfn get_monitor_geometry(n: Option<i32>) -> Result<gdk::Rectangle> {\n\n #[allow(deprecated)]\n\n let display = gdk::Display::default().expect(\"could not get default 
display\");\n\n let monitor = match n {\n\n Some(n) => display.monitor(n).with_context(|| format!(\"Failed to get monitor with index {}\", n))?,\n\n None => display.primary_monitor().context(\"Failed to get primary monitor from GTK\")?,\n\n };\n\n Ok(monitor.geometry())\n\n}\n\n\n", "file_path": "crates/eww/src/app.rs", "rank": 78, "score": 104749.39243755973 }, { "content": "/// Compute the difference of two lists, returning a tuple of\n\n/// (\n\n/// elements that where in a but not in b,\n\n/// elements that where in b but not in a\n\n/// ).\n\npub fn list_difference<'a, 'b, T: PartialEq>(a: &'a [T], b: &'b [T]) -> (Vec<&'a T>, Vec<&'b T>) {\n\n let mut missing = Vec::new();\n\n for elem in a {\n\n if !b.contains(elem) {\n\n missing.push(elem);\n\n }\n\n }\n\n\n\n let mut new = Vec::new();\n\n for elem in b {\n\n if !a.contains(elem) {\n\n new.push(elem);\n\n }\n\n }\n\n (missing, new)\n\n}\n\n\n", "file_path": "crates/eww/src/util.rs", "rank": 79, "score": 104565.3072048305 }, { "content": "fn on_screen_changed(window: &gtk::Window, _old_screen: Option<&gdk::Screen>) {\n\n let visual = window\n\n .screen()\n\n .and_then(|screen| screen.rgba_visual().filter(|_| screen.is_composited()).or_else(|| screen.system_visual()));\n\n window.set_visual(visual.as_ref());\n\n}\n\n\n", "file_path": "crates/eww/src/app.rs", "rank": 80, "score": 100186.31593582404 }, { "content": "use std::process::Command;\n\n\n\nuse anyhow::*;\n\nuse codespan_reporting::diagnostic::Severity;\n\nuse eww_shared_util::{Span, VarName};\n\nuse simplexpr::dynval::DynVal;\n\nuse yuck::{\n\n config::script_var_definition::{ScriptVarDefinition, VarSource},\n\n gen_diagnostic,\n\n};\n\n\n\nuse crate::error::DiagError;\n\n\n", "file_path": "crates/eww/src/config/script_var.rs", "rank": 81, "score": 99549.78423091338 }, { "content": "/// extends a hashmap, returning a list of keys that already where present in the hashmap.\n\npub fn extend_safe<K: std::cmp::Eq + std::hash::Hash + Clone, V, T: 
IntoIterator<Item = (K, V)>>(\n\n a: &mut std::collections::HashMap<K, V>,\n\n b: T,\n\n) -> Vec<K> {\n\n b.into_iter().filter_map(|(k, v)| a.insert(k.clone(), v).map(|_| k.clone())).collect()\n\n}\n\n\n", "file_path": "crates/eww/src/util.rs", "rank": 82, "score": 99103.48253597961 }, { "content": "pub fn get_parse_error_span<T, E: Spanned>(file_id: usize, err: &lalrpop_util::ParseError<usize, T, E>) -> Span {\n\n use lalrpop_util::ParseError::*;\n\n match err {\n\n InvalidToken { location } => Span(*location, *location, file_id),\n\n UnrecognizedEOF { location, expected } => Span(*location, *location, file_id),\n\n UnrecognizedToken { token, expected } => Span(token.0, token.2, file_id),\n\n ExtraToken { token } => Span(token.0, token.2, file_id),\n\n User { error } => error.span(),\n\n }\n\n}\n\n\n", "file_path": "crates/yuck/src/error.rs", "rank": 83, "score": 97428.64880600736 }, { "content": "impl ScriptVarDefinition {\n\n pub fn name_span(&self) -> Span {\n\n match self {\n\n ScriptVarDefinition::Poll(x) => x.name_span,\n\n ScriptVarDefinition::Listen(x) => x.name_span,\n\n }\n\n }\n\n\n\n pub fn name(&self) -> &VarName {\n\n match self {\n\n ScriptVarDefinition::Poll(x) => &x.name,\n\n ScriptVarDefinition::Listen(x) => &x.name,\n\n }\n\n }\n\n\n\n pub fn command_span(&self) -> Option<Span> {\n\n match self {\n\n ScriptVarDefinition::Poll(x) => match x.command {\n\n VarSource::Shell(span, ..) 
=> Some(span),\n\n VarSource::Function(_) => None,\n", "file_path": "crates/yuck/src/config/script_var_definition.rs", "rank": 84, "score": 97047.08580768581 }, { "content": " pub command_span: Span,\n\n pub name_span: Span,\n\n}\n\nimpl FromAstElementContent for ListenScriptVar {\n\n const ELEMENT_NAME: &'static str = \"deflisten\";\n\n\n\n fn from_tail<I: Iterator<Item = Ast>>(span: Span, mut iter: AstIterator<I>) -> AstResult<Self> {\n\n let result: AstResult<_> = try {\n\n let (name_span, name) = iter.expect_symbol()?;\n\n let mut attrs = iter.expect_key_values()?;\n\n let initial_value = attrs.primitive_optional(\"initial\")?.unwrap_or_else(|| DynVal::from_string(String::new()));\n\n let (command_span, script) = iter.expect_literal()?;\n\n iter.expect_done()?;\n\n Self { name_span, name: VarName(name), command: script.to_string(), initial_value, command_span }\n\n };\n\n result.note(r#\"Expected format: `(deflisten name :initial \"0\" \"tail -f /tmp/example\")`\"#)\n\n }\n\n}\n", "file_path": "crates/yuck/src/config/script_var_definition.rs", "rank": 85, "score": 97045.3551343154 }, { "content": " pub initial_value: Option<DynVal>,\n\n pub interval: std::time::Duration,\n\n pub name_span: Span,\n\n}\n\n\n\nimpl FromAstElementContent for PollScriptVar {\n\n const ELEMENT_NAME: &'static str = \"defpoll\";\n\n\n\n fn from_tail<I: Iterator<Item = Ast>>(span: Span, mut iter: AstIterator<I>) -> AstResult<Self> {\n\n let result: AstResult<_> = try {\n\n let (name_span, name) = iter.expect_symbol()?;\n\n let mut attrs = iter.expect_key_values()?;\n\n let initial_value = Some(attrs.primitive_optional(\"initial\")?.unwrap_or_else(|| DynVal::from_string(String::new())));\n\n let interval = attrs.primitive_required::<DynVal, _>(\"interval\")?.as_duration()?;\n\n let (script_span, script) = iter.expect_literal()?;\n\n\n\n let run_while_expr =\n\n attrs.ast_optional::<SimplExpr>(\"run-while\")?.unwrap_or_else(|| SimplExpr::Literal(DynVal::from(true)));\n\n let 
run_while_var_refs = run_while_expr.collect_var_refs();\n\n\n", "file_path": "crates/yuck/src/config/script_var_definition.rs", "rank": 86, "score": 97045.2687508134 }, { "content": "use std::collections::HashMap;\n\n\n\nuse simplexpr::{dynval::DynVal, SimplExpr};\n\n\n\nuse crate::{\n\n error::{AstError, AstResult, AstResultExt},\n\n parser::{\n\n ast::Ast,\n\n ast_iterator::AstIterator,\n\n from_ast::{FromAst, FromAstElementContent},\n\n },\n\n};\n\nuse eww_shared_util::{AttrName, Span, Spanned, VarName};\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize)]\n\npub enum ScriptVarDefinition {\n\n Poll(PollScriptVar),\n\n Listen(ListenScriptVar),\n\n}\n\n\n", "file_path": "crates/yuck/src/config/script_var_definition.rs", "rank": 87, "score": 97043.87198687906 }, { "content": " iter.expect_done()?;\n\n Self {\n\n name_span,\n\n name: VarName(name),\n\n run_while_expr,\n\n run_while_var_refs,\n\n command: VarSource::Shell(script_span, script.to_string()),\n\n initial_value,\n\n interval,\n\n }\n\n };\n\n result.note(r#\"Expected format: `(defpoll name :interval \"10s\" \"echo 'a shell script'\")`\"#)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize)]\n\npub struct ListenScriptVar {\n\n pub name: VarName,\n\n pub command: String,\n\n pub initial_value: DynVal,\n", "file_path": "crates/yuck/src/config/script_var_definition.rs", "rank": 88, "score": 97043.11308497276 }, { "content": " },\n\n ScriptVarDefinition::Listen(x) => Some(x.command_span),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize)]\n\npub enum VarSource {\n\n // TODO allow for other executors? 
(python, etc)\n\n Shell(Span, String),\n\n #[serde(skip)]\n\n Function(fn() -> Result<DynVal, Box<dyn std::error::Error + Sync + Send + 'static>>),\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, serde::Serialize)]\n\npub struct PollScriptVar {\n\n pub name: VarName,\n\n pub run_while_expr: SimplExpr,\n\n pub run_while_var_refs: Vec<VarName>,\n\n pub command: VarSource,\n", "file_path": "crates/yuck/src/config/script_var_definition.rs", "rank": 89, "score": 97038.81732789808 }, { "content": "#[derive(StructOpt, Debug, Serialize, Deserialize, PartialEq)]\n\nstruct RawOpt {\n\n /// Write out debug logs. (To read the logs, run `eww logs`).\n\n #[structopt(long = \"debug\", global = true)]\n\n log_debug: bool,\n\n\n\n /// override path to configuration directory (directory that contains eww.yuck and eww.scss)\n\n #[structopt(short, long, global = true)]\n\n config: Option<std::path::PathBuf>,\n\n\n\n /// Watch the log output after executing the command\n\n #[structopt(long = \"logs\", global = true)]\n\n show_logs: bool,\n\n\n\n /// Restart the daemon completely before running the command\n\n #[structopt(long = \"restart\", global = true)]\n\n restart: bool,\n\n\n\n #[structopt(subcommand)]\n\n action: Action,\n\n}\n", "file_path": "crates/eww/src/opts.rs", "rank": 90, "score": 95746.34031813747 }, { "content": "fn handle_server_command(paths: &EwwPaths, action: &ActionWithServer, connect_attempts: usize) -> Result<()> {\n\n log::debug!(\"Trying to find server process at socket {}\", paths.get_ipc_socket_file().display());\n\n let mut stream = attempt_connect(&paths.get_ipc_socket_file(), connect_attempts).context(\"Failed to connect to daemon\")?;\n\n log::debug!(\"Connected to Eww server ({}).\", &paths.get_ipc_socket_file().display());\n\n let response = client::do_server_call(&mut stream, action).context(\"Error while forwarding command to server\")?;\n\n if let Some(response) = response {\n\n println!(\"{}\", response);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": 
"crates/eww/src/main.rs", "rank": 91, "score": 95411.03990981233 }, { "content": "/// Joins two paths while keeping it somewhat pretty.\n\n/// If the second path is absolute, this will just return the second path.\n\n/// If it is relative, it will return the second path joined onto the first path, removing any `./` if present.\n\n/// TODO this is not yet perfect, as it will still leave ../ and multiple ./ etc,... check for a Path::simplify or something.\n\npub fn join_path_pretty<P: AsRef<std::path::Path>, P2: AsRef<std::path::Path>>(a: P, b: P2) -> std::path::PathBuf {\n\n let a = a.as_ref();\n\n let b = b.as_ref();\n\n if b.is_absolute() {\n\n b.to_path_buf()\n\n } else {\n\n a.parent().unwrap().join(b.strip_prefix(\"./\").unwrap_or(b))\n\n }\n\n}\n\n\n", "file_path": "crates/eww/src/util.rs", "rank": 92, "score": 92731.83482137744 }, { "content": "fn main() {\n\n lalrpop::Configuration::new().log_verbose().process_current_dir().unwrap();\n\n}\n", "file_path": "crates/simplexpr/build.rs", "rank": 93, "score": 92200.88585098267 }, { "content": "fn main() {\n\n lalrpop::process_root().unwrap();\n\n}\n", "file_path": "crates/yuck/build.rs", "rank": 94, "score": 92200.88585098267 }, { "content": "pub trait Rectangular {\n\n fn get_rect(&self) -> Rect;\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Eq, PartialEq, Display)]\n\n#[display(fmt = \".x*.y:.width*.height\")]\n\npub struct Rect {\n\n pub x: i32,\n\n pub y: i32,\n\n pub width: i32,\n\n pub height: i32,\n\n}\n\n\n\nimpl Rect {\n\n pub fn of(x: i32, y: i32, width: i32, height: i32) -> Self {\n\n Rect { x, y, width, height }\n\n }\n\n}\n\n\n\nimpl Rectangular for Rect {\n", "file_path": "crates/eww/src/geometry.rs", "rank": 95, "score": 90560.79557560122 }, { "content": "fn main() {\n\n let eww_binary_name = std::env::args().next().unwrap();\n\n let opts: opts::Opt = opts::Opt::from_env();\n\n\n\n let log_level_filter = if opts.log_debug { log::LevelFilter::Debug } else { log::LevelFilter::Info };\n\n if 
std::env::var(\"RUST_LOG\").is_ok() {\n\n pretty_env_logger::init_timed();\n\n } else {\n\n pretty_env_logger::formatted_timed_builder().filter(Some(\"eww\"), log_level_filter).init();\n\n }\n\n\n\n let result: Result<()> = try {\n\n let paths = opts\n\n .config_path\n\n .map(EwwPaths::from_config_dir)\n\n .unwrap_or_else(EwwPaths::default)\n\n .context(\"Failed to initialize eww paths\")?;\n\n\n\n let would_show_logs = match opts.action {\n\n opts::Action::ClientOnly(action) => {\n", "file_path": "crates/eww/src/main.rs", "rank": 96, "score": 90484.35990137282 }, { "content": "pub trait IterAverage {\n\n fn avg(self) -> f32;\n\n}\n\n\n\nimpl<I: Iterator<Item = f32>> IterAverage for I {\n\n fn avg(self) -> f32 {\n\n let mut total = 0f32;\n\n let mut cnt = 0f32;\n\n for value in self {\n\n total += value;\n\n cnt += 1f32;\n\n }\n\n total / cnt\n\n }\n\n}\n\n\n", "file_path": "crates/eww/src/util.rs", "rank": 97, "score": 89000.13891776351 }, { "content": "#[test]\n\nfn test() {\n\n test_parser!(\n\n \"1\",\n\n \"(12)\",\n\n \"1.2\",\n\n \"-1.2\",\n\n \"(1 2)\",\n\n \"(1 :foo 1)\",\n\n \"(:foo 1)\",\n\n \"(:foo->: 1)\",\n\n \"(foo 1)\",\n\n \"(lol😄 1)\",\n\n r#\"(test \"hi\")\"#,\n\n r#\"(test \"h\\\"i\")\"#,\n\n r#\"(test \" hi \")\"#,\n\n \"(+ (1 2 (* 2 5)))\",\n\n r#\"foo ; test\"#,\n\n r#\"(f arg ; test\n\n arg2)\"#,\n\n \"\\\"h\\\\\\\"i\\\"\"\n\n );\n\n}\n", "file_path": "crates/yuck/src/parser/mod.rs", "rank": 98, "score": 88856.4308707322 }, { "content": " #[debug_stub = \"ScriptVarHandler(...)\"]\n\n pub script_var_handler: ScriptVarHandlerHandle,\n\n\n\n pub paths: EwwPaths,\n\n}\n\n\n\nimpl App {\n\n /// Handle a DaemonCommand event.\n\n pub fn handle_command(&mut self, event: DaemonCommand) {\n\n log::debug!(\"Handling event: {:?}\", &event);\n\n let result: Result<_> = try {\n\n match event {\n\n DaemonCommand::NoOp => {}\n\n DaemonCommand::UpdateVars(mappings) => {\n\n for (var_name, new_value) in mappings {\n\n self.update_state(var_name, 
new_value);\n\n }\n\n }\n\n DaemonCommand::ReloadConfigAndCss(sender) => {\n\n let mut errors = Vec::new();\n", "file_path": "crates/eww/src/app.rs", "rank": 99, "score": 31.881700978447196 } ]
Rust
src/engine/mod.rs
mersinvald/xsecurelock-saver-rs
e4c064918271a165657e52c4a22d20eb383e9e6a
use std::sync::Arc; use rayon::ThreadPoolBuilder; use sfml::graphics::{ Color, RenderTarget, RenderWindow, View as SfView, }; use sfml::system::{Clock, SfBox, Time, Vector2f}; use specs::{Component, System}; use shred::Resource; use physics::{ self, resources::{PhysicsDeltaTime, PhysicsElapsed}, systems::{ClearForceAccumulators, SetupNextPhysicsPosition}, }; use scene_management::{ self, resources::{SceneChange, SceneLoader}, SceneChangeHandler, SceneChangeHandlerBuilder, systems::DeleteSystem, }; use self::{ resources::{ draw::{ CurrentDrawLayer, DrawLayers, View, }, time::{ DeltaTime, Elapsed, }, }, systems::{ draw::{ DrawLayersUpdater, SyncDrawShapesSystem, DrawDrawShapesSystem, SfShape, }, specialized::{ SpecializedSystem, SpecializedSystemObject, }, }, }; pub mod components; pub mod resources; pub mod systems; pub struct EngineBuilder<'a, 'b> { world: ::specs::World, update_dispatcher: ::specs::DispatcherBuilder<'a, 'b>, scene_change_handler: SceneChangeHandlerBuilder<'a, 'b>, physics_update_dispatcher: ::specs::DispatcherBuilder<'a, 'b>, max_physics_updates: usize } impl<'a, 'b> EngineBuilder<'a, 'b> { pub fn new() -> Self { let thread_pool = Arc::new(ThreadPoolBuilder::new().build().unwrap()); Self { world: { let mut world = ::specs::World::new(); physics::register(&mut world); scene_management::register(&mut world); components::register_all(&mut world); resources::add_default_resources(&mut world); world }, update_dispatcher: ::specs::DispatcherBuilder::new() .with_pool(Arc::clone(&thread_pool)), scene_change_handler: SceneChangeHandlerBuilder::new() .with_threadpool(Arc::clone(&thread_pool)), physics_update_dispatcher: ::specs::DispatcherBuilder::new() .with_pool(thread_pool) .with(SetupNextPhysicsPosition, "", &[]) .with(ClearForceAccumulators, "", &[]) .with_barrier(), max_physics_updates: 5, } } pub fn with_update_sys<S>(mut self, sys: S, name: &str, dep: &[&str]) -> Self where S: for<'c> System<'c> + Send + 'a { self.add_update_sys(sys, name, dep); 
self } pub fn add_update_sys<S>(&mut self, sys: S, name: &str, dep: &[&str]) where S: for<'c> System<'c> + Send + 'a { self.update_dispatcher.add(sys, name, dep); } pub fn with_update_barrier(mut self) -> Self { self.add_update_barrier(); self } pub fn add_update_barrier(&mut self) { self.update_dispatcher.add_barrier(); } pub fn with_scene_change_sys<S>(mut self, sys: S, name: &str, dep: &[&str]) -> Self where S: for<'c> System<'c> + Send + 'a { self.add_scene_change_sys(sys, name, dep); self } pub fn add_scene_change_sys<S>(&mut self, sys: S, name: &str, dep: &[&str]) where S: for<'c> System<'c> + Send + 'a { self.scene_change_handler.add_pre_load_sys(sys, name, dep); } pub fn with_scene_change_barrier(mut self) -> Self { self.add_scene_change_barrier(); self } pub fn add_scene_change_barrier(&mut self) { self.scene_change_handler.add_pre_load_barrier(); } pub fn with_physics_update_sys<S>(mut self, sys: S, name: &str, dep: &[&str]) -> Self where S: for<'c> System<'c> + Send + 'a { self.add_physics_update_sys(sys, name, dep); self } pub fn add_physics_update_sys<S>(&mut self, sys: S, name: &str, dep: &[&str]) where S: for<'c> System<'c> + Send + 'a { self.physics_update_dispatcher.add(sys, name, dep); } pub fn with_physics_update_barrier(mut self) -> Self { self.add_physics_update_barrier(); self } pub fn add_physics_update_barrier(&mut self) { self.physics_update_dispatcher.add_barrier(); } pub fn with_component<C: Component>(mut self) -> Self where <C as Component>::Storage: Default { self.add_component::<C>(); self } pub fn add_component<C: Component>(&mut self) where <C as Component>::Storage: Default { self.world.register::<C>(); } pub fn with_resource<T: Resource>(mut self, res: T) -> Self { self.add_resource(res); self } pub fn add_resource<T: Resource>(&mut self, res: T) { self.world.add_resource(res); } pub fn with_initial_sceneloader<T>(mut self, loader: T) -> Self where T: for<'l> SceneLoader<'l> + Send + Sync + 'static { 
self.set_initial_sceneloader(loader); self } pub fn set_initial_sceneloader<T>(&mut self, loader: T) where T: for<'l> SceneLoader<'l> + Send + Sync + 'static { self.world.write_resource::<SceneChange>().change_scene(loader); } pub fn build<'tex>(self) -> Engine<'a, 'b, 'tex> { let mut engine = Engine { world: self.world, update_dispatcher: self.update_dispatcher .with_barrier() .with(DrawLayersUpdater::default(), "", &[]) .with(DeleteSystem, "", &[]) .build(), scene_change_handler: self.scene_change_handler.build(), physics_update_dispatcher: self.physics_update_dispatcher .with_barrier() .with(DeleteSystem, "", &[]) .build(), window: super::open_window(), view: SfView::new(Vector2f::new(0., 0.), Vector2f::new(1., 1.)), clock: Clock::start(), max_physics_updates: self.max_physics_updates, draw_shapes: Vec::new(), sync_draw_shapes: Default::default(), draw_draw_shapes: Default::default(), }; { let mut view = engine.world.write_resource::<View>(); let win_sz = engine.window.size(); let ratio = win_sz.x as f32 / win_sz.y as f32; view.size.y = 2000.; view.size.x = ratio * view.size.y; view.copy_to(&mut engine.view); } engine.update_dispatcher.setup(&mut engine.world.res); engine.scene_change_handler.setup(&mut engine.world); engine.physics_update_dispatcher.setup(&mut engine.world.res); engine.sync_draw_shapes.setup_special(&mut engine.draw_shapes, &mut engine.world.res); engine.draw_draw_shapes.setup_special( (&mut engine.window, &mut engine.draw_shapes), &mut engine.world.res); engine } } pub struct Engine<'a, 'b, 'tex> { world: ::specs::World, update_dispatcher: ::specs::Dispatcher<'a, 'b>, scene_change_handler: SceneChangeHandler<'a, 'b>, physics_update_dispatcher: ::specs::Dispatcher<'a, 'b>, window: RenderWindow, view: SfBox<SfView>, clock: Clock, max_physics_updates: usize, draw_shapes: Vec<Option<SfShape<'tex>>>, sync_draw_shapes: SyncDrawShapesSystem, draw_draw_shapes: DrawDrawShapesSystem, } impl<'a, 'b, 'tex> Engine<'a, 'b, 'tex> { pub fn create_entity(&mut 
self) -> ::specs::world::EntityBuilder { self.world.create_entity() } pub fn run(mut self) { sigint::init(); self.clock.restart(); { let start = self.clock.elapsed_time(); let mut physt = self.world.write_resource::<PhysicsElapsed>(); physt.current = start; physt.previous = start - self.world.read_resource::<PhysicsDeltaTime>().0; let mut dt = self.world.write_resource::<DeltaTime>(); dt.0 = Time::milliseconds(5); let mut t = self.world.write_resource::<Elapsed>(); t.current = start; t.previous = start - dt.0; } while !sigint::received_sigint() { let now = self.clock.elapsed_time(); self.maybe_physics_update(now); self.update(now); self.draw(); } } fn maybe_physics_update(&mut self, now: Time) { for _ in 0..self.max_physics_updates { { let mut physt = self.world.write_resource::<PhysicsElapsed>(); if physt.current >= now { return; } let physdt = self.world.read_resource::<PhysicsDeltaTime>(); physt.previous = physt.current; physt.current += physdt.0; } self.physics_update_dispatcher.dispatch(&self.world.res); self.world.maintain(); self.scene_change_handler.handle_scene_change(&mut self.world); } let mut phys = self.world.write_resource::<PhysicsElapsed>(); let dphys = self.world.read_resource::<PhysicsDeltaTime>(); while phys.current < now { phys.previous = phys.current; phys.current += dphys.0; } } fn update(&mut self, now: Time) { { let mut elapsed = self.world.write_resource::<Elapsed>(); elapsed.previous = elapsed.current; elapsed.current = now; let mut delta = self.world.write_resource::<DeltaTime>(); delta.0 = elapsed.current - elapsed.previous; } self.update_dispatcher.dispatch(&self.world.res); self.world.maintain(); self.scene_change_handler.handle_scene_change(&mut self.world); } fn draw(&mut self) { self.sync_draw_shapes.run(&mut self.draw_shapes, &self.world.res); self.world.write_resource::<View>().copy_to(&mut self.view); self.window.clear(Color::BLACK); self.window.set_view(&self.view); for layer in 0..=DrawLayers::NUM_LAYERS { 
self.world.write_resource::<CurrentDrawLayer>().set_layer(layer); self.draw_draw_shapes.run((&mut self.window, &mut self.draw_shapes), &self.world.res); } self.window.display(); } }
use std::sync::Arc; use rayon::ThreadPoolBuilder; use sfml::graphics::{ Color, RenderTarget, RenderWindow, View as SfView, }; use sfml::system::{Clock, SfBox, Time, Vector2f}; use specs::{Component, System}; use shred::Resource; use physics::{ self, resources::{PhysicsDeltaTime, PhysicsElapsed}, systems::{ClearForceAccumulators, SetupNextPhysicsPosition}, }; use scene_management::{ self, resources::{SceneChange, SceneLoader}, SceneChangeHandler, SceneChangeHandlerBuilder, systems::DeleteSystem, }; use self::{ resources::{ draw::{ CurrentDrawLayer, DrawLayers, View, }, time::{ DeltaTime, Elapsed, }, }, systems::{ draw::{ DrawLayersUpdater, SyncDrawShapesSystem, DrawDrawShapesSystem, SfShape, }, specialized::{ SpecializedSystem, SpecializedSystemObject, }, }, }; pub mod components; pub mod resources; pub mod systems; pub struct EngineBuilder<'a, 'b> { world: ::specs::World, update_dispatcher: ::specs::DispatcherBuilder<'a, 'b>, scene_change_handler: SceneChangeHandlerBuilder<'a, 'b>, physics_update_dispatcher: ::specs::DispatcherBuilder<'a, 'b>, max_physics_updates: usize } impl<'a, 'b> EngineBuilder<'a, 'b> {
pub fn with_update_sys<S>(mut self, sys: S, name: &str, dep: &[&str]) -> Self where S: for<'c> System<'c> + Send + 'a { self.add_update_sys(sys, name, dep); self } pub fn add_update_sys<S>(&mut self, sys: S, name: &str, dep: &[&str]) where S: for<'c> System<'c> + Send + 'a { self.update_dispatcher.add(sys, name, dep); } pub fn with_update_barrier(mut self) -> Self { self.add_update_barrier(); self } pub fn add_update_barrier(&mut self) { self.update_dispatcher.add_barrier(); } pub fn with_scene_change_sys<S>(mut self, sys: S, name: &str, dep: &[&str]) -> Self where S: for<'c> System<'c> + Send + 'a { self.add_scene_change_sys(sys, name, dep); self } pub fn add_scene_change_sys<S>(&mut self, sys: S, name: &str, dep: &[&str]) where S: for<'c> System<'c> + Send + 'a { self.scene_change_handler.add_pre_load_sys(sys, name, dep); } pub fn with_scene_change_barrier(mut self) -> Self { self.add_scene_change_barrier(); self } pub fn add_scene_change_barrier(&mut self) { self.scene_change_handler.add_pre_load_barrier(); } pub fn with_physics_update_sys<S>(mut self, sys: S, name: &str, dep: &[&str]) -> Self where S: for<'c> System<'c> + Send + 'a { self.add_physics_update_sys(sys, name, dep); self } pub fn add_physics_update_sys<S>(&mut self, sys: S, name: &str, dep: &[&str]) where S: for<'c> System<'c> + Send + 'a { self.physics_update_dispatcher.add(sys, name, dep); } pub fn with_physics_update_barrier(mut self) -> Self { self.add_physics_update_barrier(); self } pub fn add_physics_update_barrier(&mut self) { self.physics_update_dispatcher.add_barrier(); } pub fn with_component<C: Component>(mut self) -> Self where <C as Component>::Storage: Default { self.add_component::<C>(); self } pub fn add_component<C: Component>(&mut self) where <C as Component>::Storage: Default { self.world.register::<C>(); } pub fn with_resource<T: Resource>(mut self, res: T) -> Self { self.add_resource(res); self } pub fn add_resource<T: Resource>(&mut self, res: T) { 
self.world.add_resource(res); } pub fn with_initial_sceneloader<T>(mut self, loader: T) -> Self where T: for<'l> SceneLoader<'l> + Send + Sync + 'static { self.set_initial_sceneloader(loader); self } pub fn set_initial_sceneloader<T>(&mut self, loader: T) where T: for<'l> SceneLoader<'l> + Send + Sync + 'static { self.world.write_resource::<SceneChange>().change_scene(loader); } pub fn build<'tex>(self) -> Engine<'a, 'b, 'tex> { let mut engine = Engine { world: self.world, update_dispatcher: self.update_dispatcher .with_barrier() .with(DrawLayersUpdater::default(), "", &[]) .with(DeleteSystem, "", &[]) .build(), scene_change_handler: self.scene_change_handler.build(), physics_update_dispatcher: self.physics_update_dispatcher .with_barrier() .with(DeleteSystem, "", &[]) .build(), window: super::open_window(), view: SfView::new(Vector2f::new(0., 0.), Vector2f::new(1., 1.)), clock: Clock::start(), max_physics_updates: self.max_physics_updates, draw_shapes: Vec::new(), sync_draw_shapes: Default::default(), draw_draw_shapes: Default::default(), }; { let mut view = engine.world.write_resource::<View>(); let win_sz = engine.window.size(); let ratio = win_sz.x as f32 / win_sz.y as f32; view.size.y = 2000.; view.size.x = ratio * view.size.y; view.copy_to(&mut engine.view); } engine.update_dispatcher.setup(&mut engine.world.res); engine.scene_change_handler.setup(&mut engine.world); engine.physics_update_dispatcher.setup(&mut engine.world.res); engine.sync_draw_shapes.setup_special(&mut engine.draw_shapes, &mut engine.world.res); engine.draw_draw_shapes.setup_special( (&mut engine.window, &mut engine.draw_shapes), &mut engine.world.res); engine } } pub struct Engine<'a, 'b, 'tex> { world: ::specs::World, update_dispatcher: ::specs::Dispatcher<'a, 'b>, scene_change_handler: SceneChangeHandler<'a, 'b>, physics_update_dispatcher: ::specs::Dispatcher<'a, 'b>, window: RenderWindow, view: SfBox<SfView>, clock: Clock, max_physics_updates: usize, draw_shapes: 
Vec<Option<SfShape<'tex>>>, sync_draw_shapes: SyncDrawShapesSystem, draw_draw_shapes: DrawDrawShapesSystem, } impl<'a, 'b, 'tex> Engine<'a, 'b, 'tex> { pub fn create_entity(&mut self) -> ::specs::world::EntityBuilder { self.world.create_entity() } pub fn run(mut self) { sigint::init(); self.clock.restart(); { let start = self.clock.elapsed_time(); let mut physt = self.world.write_resource::<PhysicsElapsed>(); physt.current = start; physt.previous = start - self.world.read_resource::<PhysicsDeltaTime>().0; let mut dt = self.world.write_resource::<DeltaTime>(); dt.0 = Time::milliseconds(5); let mut t = self.world.write_resource::<Elapsed>(); t.current = start; t.previous = start - dt.0; } while !sigint::received_sigint() { let now = self.clock.elapsed_time(); self.maybe_physics_update(now); self.update(now); self.draw(); } } fn maybe_physics_update(&mut self, now: Time) { for _ in 0..self.max_physics_updates { { let mut physt = self.world.write_resource::<PhysicsElapsed>(); if physt.current >= now { return; } let physdt = self.world.read_resource::<PhysicsDeltaTime>(); physt.previous = physt.current; physt.current += physdt.0; } self.physics_update_dispatcher.dispatch(&self.world.res); self.world.maintain(); self.scene_change_handler.handle_scene_change(&mut self.world); } let mut phys = self.world.write_resource::<PhysicsElapsed>(); let dphys = self.world.read_resource::<PhysicsDeltaTime>(); while phys.current < now { phys.previous = phys.current; phys.current += dphys.0; } } fn update(&mut self, now: Time) { { let mut elapsed = self.world.write_resource::<Elapsed>(); elapsed.previous = elapsed.current; elapsed.current = now; let mut delta = self.world.write_resource::<DeltaTime>(); delta.0 = elapsed.current - elapsed.previous; } self.update_dispatcher.dispatch(&self.world.res); self.world.maintain(); self.scene_change_handler.handle_scene_change(&mut self.world); } fn draw(&mut self) { self.sync_draw_shapes.run(&mut self.draw_shapes, &self.world.res); 
self.world.write_resource::<View>().copy_to(&mut self.view); self.window.clear(Color::BLACK); self.window.set_view(&self.view); for layer in 0..=DrawLayers::NUM_LAYERS { self.world.write_resource::<CurrentDrawLayer>().set_layer(layer); self.draw_draw_shapes.run((&mut self.window, &mut self.draw_shapes), &self.world.res); } self.window.display(); } }
pub fn new() -> Self { let thread_pool = Arc::new(ThreadPoolBuilder::new().build().unwrap()); Self { world: { let mut world = ::specs::World::new(); physics::register(&mut world); scene_management::register(&mut world); components::register_all(&mut world); resources::add_default_resources(&mut world); world }, update_dispatcher: ::specs::DispatcherBuilder::new() .with_pool(Arc::clone(&thread_pool)), scene_change_handler: SceneChangeHandlerBuilder::new() .with_threadpool(Arc::clone(&thread_pool)), physics_update_dispatcher: ::specs::DispatcherBuilder::new() .with_pool(thread_pool) .with(SetupNextPhysicsPosition, "", &[]) .with(ClearForceAccumulators, "", &[]) .with_barrier(), max_physics_updates: 5, } }
function_block-full_function
[ { "content": "pub trait SpecializedSystem<'a, T> {\n\n type SystemData: SystemData<'a>;\n\n\n\n fn run_special(&mut self, specialized: T, data: Self::SystemData);\n\n\n\n fn setup_special(&mut self, _specialized: T, res: &mut Resources) {\n\n Self::SystemData::setup(res);\n\n }\n\n}\n\n\n\npub(crate) trait SpecializedSystemObject<'a, T> {\n\n fn run(&mut self, special_data: T, res: &'a Resources);\n\n\n\n fn setup(&mut self, special_data: T, res: &mut Resources);\n\n}\n\n\n\nimpl<'a, T, S> SpecializedSystemObject<'a, T> for S\n\n where S: SpecializedSystem<'a, T>,\n\n{\n\n fn run(&mut self, special_data: T, res: &'a Resources) {\n", "file_path": "src/engine/systems/specialized.rs", "rank": 0, "score": 142337.94887796583 }, { "content": "/// Wraps a SpecializedSystem as a System, allowing us to pass through extra\n\n/// data. Should only be used once, and only for run, not setup. Becomes \n\n/// invalidated after use. Should only be used by the generic implementation of\n\n/// SpecializedSystemObject.\n\nstruct SpecializedSystemWrapper<'b, T, S> \n\n where T: 'b,\n\n S: 'b\n\n{\n\n system: &'b mut S,\n\n special_data: Option<T>,\n\n}\n\n\n\nimpl<'a, 'b, T, S> System<'a> for SpecializedSystemWrapper<'b, T, S>\n\n where S: SpecializedSystem<'a, T>,\n\n{\n\n type SystemData = S::SystemData;\n\n\n\n fn run(&mut self, data: Self::SystemData) {\n\n self.system.run_special(\n\n self.special_data.take().expect(\"Cannot re-use the wrapper\"),\n\n data,\n\n );\n\n }\n\n\n\n fn setup(&mut self, _res: &mut Resources) {\n\n panic!(\"Don't use the wrapper to set up SpecializedSystems.\");\n\n }\n\n}\n", "file_path": "src/engine/systems/specialized.rs", "rank": 1, "score": 141242.08417604736 }, { "content": "/// Register all components and default resources.\n\npub fn register(world: &mut ::specs::World) {\n\n components::register_all(world);\n\n resources::add_default_resources(world);\n\n}\n", "file_path": "physics/src/lib.rs", "rank": 2, "score": 132201.10759405102 }, { 
"content": "pub fn register(world: &mut World) {\n\n components::register_all(world);\n\n resources::add_default_resources(world);\n\n}\n\n\n\n/// Builder for scene change handler.\n\npub struct SceneChangeHandlerBuilder<'a, 'b> {\n\n scene_change_dispatcher: DispatcherBuilder<'a, 'b>,\n\n}\n\n\n\nimpl<'a, 'b> SceneChangeHandlerBuilder<'a, 'b> {\n\n pub fn new() -> Self {\n\n SceneChangeHandlerBuilder {\n\n scene_change_dispatcher: DispatcherBuilder::new(),\n\n }\n\n }\n\n\n\n /// Set the threadpool to use when dispatching.\n\n pub fn with_threadpool(mut self, pool: Arc<ThreadPool>) -> Self {\n\n self.set_threadpool(pool);\n", "file_path": "scene_management/src/lib.rs", "rank": 3, "score": 104661.70205383058 }, { "content": "pub trait SceneLoader<'a> {\n\n type SystemData: SystemData<'a>;\n\n\n\n fn load(&mut self, data: Self::SystemData);\n\n}\n\n\n\nimpl<L> SceneLoaderDispatcher for L where L: for<'a> SceneLoader<'a> {\n\n fn dispatch(&mut self, world: &mut World) {\n\n world.exec(|data: L::SystemData| self.load(data));\n\n }\n\n}\n\n\n\n/// Resource used to set a scene change.\n\n#[derive(Default)]\n\npub struct SceneChange(Option<Box<dyn SceneLoaderDispatcher + Send + Sync>>);\n\n\n\nimpl SceneChange {\n\n /// Will cause the scene to change after the current frame. If another scene change is already\n\n /// configured, this will override it.\n\n pub fn change_scene<T>(&mut self, scene_loader: T) \n", "file_path": "scene_management/src/resources.rs", "rank": 4, "score": 79402.8224619119 }, { "content": "/// Storage for models.\n\n// TODO(zstewart): fix sqlite storage with some thread local magic so that non-mutating methods can\n\n// use &self instead of &mut self.\n\npub trait Storage {\n\n /// Add a new root scenario. 
This scenario is the new root of a family of scenarios.\n\n fn add_root_scenario(&mut self, world: World, score: f64) -> Result<Scenario, Box<dyn Error>>;\n\n\n\n /// Add a new scenario that is the child of the specified scenario\n\n fn add_child_scenario(\n\n &mut self,\n\n world: World,\n\n score: f64,\n\n parent: &Scenario,\n\n ) -> Result<Scenario, Box<dyn Error>>;\n\n\n\n /// Returns the number of scenarios available.\n\n fn num_scenarios(&mut self) -> Result<u64, Box<dyn Error>>;\n\n\n\n /// Gets the nth scenario, in order of score (descending, so lower indexes are higher scoring\n\n /// scenarios). May return None if the index is outside the number of scenarios.\n\n fn get_nth_scenario_by_score(&mut self, index: u64) -> Result<Option<Scenario>, Box<dyn Error>>;\n\n\n\n /// Removes the bottom scoring scenarios, keeping up to number_to_keep top scoring scenarios.\n\n /// Returns the number of scenarios pruned.\n\n fn keep_top_scenarios_by_score(&mut self, number_to_keep: u64) -> Result<u64, Box<dyn Error>>;\n\n}\n", "file_path": "saver_genetic_orbits/src/storage/mod.rs", "rank": 5, "score": 77328.69204357456 }, { "content": " impl Sub for Time {\n\n type Output = Self;\n\n \n\n fn sub(mut self, rhs: Self) -> Self { self -= rhs; self }\n\n }\n\n \n\n impl SubAssign for Time {\n\n fn sub_assign(&mut self, Time(rhs): Self) { self.0 -= rhs; }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"graphical\")] \n\nmod time_impl {\n\n extern crate sfml;\n\n\n\n pub type Time = sfml::system::Time;\n\n}\n", "file_path": "physics/src/time.rs", "rank": 6, "score": 72323.0984655893 }, { "content": " /// A partial copy fo the SFML time api that doesn't depend on SFML.\n\n #[derive(Copy, Clone, Default, Eq, PartialEq, Ord, PartialOrd)]\n\n pub struct Time(/* microseconds */ i64);\n\n\n\n \n\n impl Time {\n\n const MICROSECONDS_PER_SECOND: f32 = 1000000.;\n\n const MICROSECONDS_PER_MILLISECOND: i64 = 1000;\n\n\n\n pub const ZERO: Time = Time(0);\n\n\n\n pub fn seconds(seconds: f32) -> Self 
{\n\n Time((seconds * Self::MICROSECONDS_PER_SECOND) as i64)\n\n }\n\n \n\n pub fn milliseconds(ms: i32) -> Self {\n\n Time(ms as i64 * Self::MICROSECONDS_PER_MILLISECOND)\n\n }\n\n \n\n pub fn as_seconds(self) -> f32 {\n", "file_path": "physics/src/time.rs", "rank": 7, "score": 72318.02095481478 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\npub use self::time_impl::Time;\n\n\n\n#[cfg(not(feature = \"graphical\"))] \n\nmod time_impl {\n\n use std::ops::{Add, AddAssign, Sub, SubAssign, Neg};\n\n \n", "file_path": "physics/src/time.rs", "rank": 8, "score": 72316.84382445154 }, { "content": " self.0 as f32 / Self::MICROSECONDS_PER_SECOND\n\n }\n\n }\n\n \n\n impl Add for Time {\n\n type Output = Self;\n\n \n\n fn add(mut self, rhs: Self) -> Self { self += rhs; self }\n\n }\n\n \n\n impl AddAssign for Time {\n\n fn add_assign(&mut self, Time(rhs): Self) { self.0 += rhs }\n\n }\n\n \n\n impl Neg for Time {\n\n type Output = Self;\n\n \n\n fn neg(self) -> Self { Time(-self.0) }\n\n }\n\n \n", "file_path": "physics/src/time.rs", "rank": 9, "score": 72314.71691604701 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless 
required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::time::Time;\n\n\n\npub(crate) fn add_default_resources(world: &mut ::specs::World) {\n\n world.add_resource(PhysicsDeltaTime::default());\n\n world.add_resource(PhysicsElapsed::default());\n\n}\n", "file_path": "physics/src/resources.rs", "rank": 10, "score": 72208.0609887217 }, { "content": "\n\n/// The amount of time elapsed in the physics simulation.\n\npub struct PhysicsDeltaTime(pub Time);\n\n\n\nimpl Default for PhysicsDeltaTime {\n\n fn default() -> Self { PhysicsDeltaTime(Time::milliseconds(16)) }\n\n}\n\n\n\n/// The total elapsed time in the physics simulation.\n\npub struct PhysicsElapsed {\n\n pub previous: Time,\n\n pub current: Time,\n\n}\n\n\n\nimpl Default for PhysicsElapsed {\n\n fn default() -> Self { \n\n PhysicsElapsed {\n\n current: Time::ZERO,\n\n previous: Time::ZERO,\n\n }\n\n }\n\n}\n", "file_path": "physics/src/resources.rs", "rank": 11, "score": 72206.34476560763 }, { "content": "use crate::{\n\n resources::PhysicsElapsed,\n\n time::Time,\n\n};\n\n\n\npub type Vector = ::nalgebra::Vector2<f32>;\n\npub type Isometry = ::nalgebra::Isometry2<f32>;\n\npub type Translation = ::nalgebra::Translation2<f32>;\n\npub type Rotation = ::nalgebra::UnitComplex<f32>;\n\n\n\npub(crate) fn register_all(world: &mut World) {\n\n world.register::<Position>();\n\n world.register::<Velocity>();\n\n world.register::<Mass>();\n\n world.register::<ForceAccumulator>();\n\n}\n\n\n\n/// How position interpolation is handled.\n\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\n\npub enum PositionInterpolationMode {\n", "file_path": "physics/src/components.rs", "rank": 12, "score": 72069.77138093178 }, { "content": " pub fn 
physics_interpolation_factor(elapsed: Time, physics: &PhysicsElapsed) -> f32 {\n\n let since_physics = elapsed - physics.previous;\n\n let physics_step = physics.current - physics.previous;\n\n since_physics.as_seconds() / physics_step.as_seconds()\n\n }\n\n\n\n /// Copy current to previous to setup for the next iteration.\n\n pub(crate) fn step_previous(&mut self) {\n\n self.previous = self.current;\n\n }\n\n}\n\n\n\n/// Velocity of an object, used for Sympletic Euler integration.\n\npub struct Velocity {\n\n pub linear: Vector,\n\n pub angular: Rotation,\n\n}\n\n\n\nimpl Component for Velocity {\n\n type Storage = VecStorage<Self>;\n", "file_path": "physics/src/components.rs", "rank": 13, "score": 72069.17127184257 }, { "content": " /// Position is interpolated.\n\n Interpolated,\n\n /// Positions are not interpolated.\n\n Static,\n\n}\n\n\n\n/// The physics system position. Should usually only be updated during physics update.\n\n#[derive(Debug)]\n\npub struct Position {\n\n mode: PositionInterpolationMode,\n\n current: (Vector, Rotation),\n\n previous: (Vector, Rotation),\n\n}\n\n\n\nimpl Component for Position {\n\n type Storage = VecStorage<Self>;\n\n}\n\n\n\nimpl Position {\n\n /// Creates an interpolated position with the specified starting position and rotation.\n", "file_path": "physics/src/components.rs", "rank": 14, "score": 72064.96130206906 }, { "content": "\n\nimpl Component for Mass {\n\n type Storage = VecStorage<Self>;\n\n}\n\n\n\nimpl Default for Mass {\n\n fn default() -> Self {\n\n Self {\n\n linear: 1.,\n\n angular: 1.,\n\n }\n\n }\n\n}\n\n\n\n/// Accumulates forces applied to an object over the course of a frame.\n\npub struct ForceAccumulator {\n\n /// The linear force applied to an object.\n\n pub linear: Vector,\n\n /// The torque applied to an object.\n\n pub angular: Rotation,\n", "file_path": "physics/src/components.rs", "rank": 15, "score": 72061.7686862466 }, { "content": "}\n\n\n\nimpl Component for ForceAccumulator {\n\n type 
Storage = VecStorage<Self>;\n\n}\n\n\n\nimpl ForceAccumulator {\n\n pub fn clear(&mut self) {\n\n *self = Default::default();\n\n }\n\n}\n\n\n\nimpl Default for ForceAccumulator {\n\n /// Create an empty force accumulator.\n\n fn default() -> Self {\n\n Self {\n\n linear: Vector::new(0., 0.),\n\n angular: Rotation::from_angle(0.),\n\n }\n\n }\n\n}\n", "file_path": "physics/src/components.rs", "rank": 16, "score": 72060.56683022666 }, { "content": "}\n\n\n\nimpl Velocity {\n\n /// Create a new velocity component.\n\n pub fn new(linear: Vector, angular: f32) -> Self {\n\n Velocity {\n\n linear,\n\n angular: Rotation::from_angle(angular),\n\n }\n\n }\n\n}\n\n\n\n/// The mass of an object.\n\n#[derive(Copy, Clone)]\n\npub struct Mass {\n\n /// Linear inertia of this object.\n\n pub linear: f32,\n\n /// Angular (moment of inertia) of this object.\n\n pub angular: f32,\n\n}\n", "file_path": "physics/src/components.rs", "rank": 17, "score": 72060.56542429123 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse specs::{\n\n Component,\n\n VecStorage,\n\n World,\n\n};\n\n\n", "file_path": "physics/src/components.rs", "rank": 18, "score": 72058.67313510264 }, { "content": " pub fn new(pos: Vector, rot: Rotation) -> Self {\n\n Self {\n\n mode: PositionInterpolationMode::Interpolated,\n\n current: (pos, rot),\n\n previous: (pos, rot),\n\n }\n\n }\n\n\n\n /// Creates a position with the 
specified starting position and rotation, and the given\n\n /// interpolation mode.\n\n pub fn new_with_mode(pos: Vector, rot: Rotation, mode: PositionInterpolationMode) -> Self {\n\n Self {\n\n mode,\n\n current: (pos, rot),\n\n previous: (pos, rot),\n\n }\n\n }\n\n\n\n /// The position of the object.\n\n #[inline]\n", "file_path": "physics/src/components.rs", "rank": 19, "score": 72058.0581138923 }, { "content": " #[inline]\n\n pub fn mode(&self) -> PositionInterpolationMode { self.mode }\n\n\n\n /// Sets the interpolation mode.\n\n #[inline]\n\n pub fn set_mode(&mut self, mode: PositionInterpolationMode) { self.mode = mode; }\n\n\n\n /// Interpolate from the previous position to the current position.\n\n pub fn interpolate(&self, factor: f32) -> (Vector, Rotation) {\n\n match self.mode {\n\n PositionInterpolationMode::Interpolated => {\n\n let pos = self.current.0 * factor + self.previous.0 * (1. - factor);\n\n let rotation = self.previous.1.rotation_to(&self.current.1).powf(factor);\n\n (pos, self.previous.1 * rotation)\n\n },\n\n PositionInterpolationMode::Static => self.current,\n\n }\n\n }\n\n\n\n /// Calculate an interpolation factor from the previous physics timestep to now.\n", "file_path": "physics/src/components.rs", "rank": 20, "score": 72056.42025460597 }, { "content": " pub fn pos(&self) -> Vector { self.current.0 }\n\n\n\n /// Mutable access to the position of the object.\n\n #[inline]\n\n pub fn pos_mut(&mut self) -> &mut Vector { &mut self.current.0 }\n\n\n\n /// Sets the postion of the object.\n\n #[inline]\n\n pub fn set_pos(&mut self, p: Vector) { self.current.0 = p }\n\n\n\n /// Sets the position, clearing the previous position. 
This allows moving an interpolated\n\n /// position without interpolation for a single frame.\n\n #[inline]\n\n pub fn teleport_pos(&mut self, p: Vector) {\n\n self.current.0 = p;\n\n self.previous.0 = p;\n\n }\n\n\n\n /// The rotation of the object.\n\n #[inline]\n", "file_path": "physics/src/components.rs", "rank": 21, "score": 72054.57277173927 }, { "content": " pub fn rot(&self) -> Rotation { self.current.1 }\n\n\n\n /// Mutable access to the rotation of the object.\n\n #[inline]\n\n pub fn rot_mut(&mut self) -> &mut Rotation { &mut self.current.1 }\n\n\n\n /// Sets the rotation of the object.\n\n #[inline]\n\n pub fn set_rot(&mut self, r: Rotation) { self.current.1 = r; }\n\n\n\n\n\n /// Sets the rotation, clearing the previous rotation. This allows moving an interpolated\n\n /// rotation without interpolation for a single frame.\n\n #[inline]\n\n pub fn teleport_rot(&mut self, r: Rotation) {\n\n self.current.1 = r;\n\n self.previous.1 = r;\n\n }\n\n\n\n /// Gets the interpolation mode.\n", "file_path": "physics/src/components.rs", "rank": 22, "score": 72054.55921737239 }, { "content": " WriteStorage,\n\n};\n\n\n\nuse crate::{\n\n components::{\n\n ForceAccumulator,\n\n Position,\n\n Mass,\n\n Velocity,\n\n },\n\n resources::PhysicsDeltaTime,\n\n};\n\n\n\n/// Copies current positions to previous positions to allow interpolation. 
Should run before each\n\n/// physics tick.\n\npub struct SetupNextPhysicsPosition;\n\n\n\nimpl<'a> System<'a> for SetupNextPhysicsPosition {\n\n type SystemData = WriteStorage<'a, Position>;\n\n\n", "file_path": "physics/src/systems.rs", "rank": 23, "score": 71849.4751597587 }, { "content": "/// Integrates velocities, adding to the position, multiplied by PhysicsDeltaTime.\n\npub struct SympleticEulerVelocityStep;\n\nimpl<'a> System<'a> for SympleticEulerVelocityStep {\n\n type SystemData = (\n\n Read<'a, PhysicsDeltaTime>,\n\n WriteStorage<'a, Position>,\n\n ReadStorage<'a, Velocity>,\n\n );\n\n\n\n fn run(&mut self, (time, mut positions, velocities): Self::SystemData) {\n\n let dt = time.0.as_seconds();\n\n for (pos, vel) in (&mut positions, &velocities).join() {\n\n *pos.pos_mut() += vel.linear * dt;\n\n *pos.rot_mut() *= vel.angular.powf(dt);\n\n }\n\n }\n\n}\n", "file_path": "physics/src/systems.rs", "rank": 24, "score": 71843.28384805149 }, { "content": " fn run(&mut self, mut positions: Self::SystemData) {\n\n for pos in (&mut positions).join() {\n\n pos.step_previous();\n\n }\n\n }\n\n}\n\n\n\npub struct ClearForceAccumulators;\n\n\n\nimpl<'a> System<'a> for ClearForceAccumulators {\n\n type SystemData = WriteStorage<'a, ForceAccumulator>;\n\n\n\n fn run(&mut self, mut accumulators: Self::SystemData) {\n\n for acc in (&mut accumulators).join() {\n\n acc.clear();\n\n }\n\n }\n\n}\n\n\n\n/// Integrates forces, adding to the velocity, multiplied by PhysicsDeltaTime.\n", "file_path": "physics/src/systems.rs", "rank": 25, "score": 71842.79669281571 }, { "content": "pub struct SympleticEulerForceStep;\n\nimpl<'a> System<'a> for SympleticEulerForceStep {\n\n type SystemData = (\n\n Read<'a, PhysicsDeltaTime>,\n\n Entities<'a>,\n\n WriteStorage<'a, Velocity>,\n\n ReadStorage<'a, Mass>,\n\n ReadStorage<'a, ForceAccumulator>,\n\n );\n\n\n\n fn run(&mut self, (time, entities, mut velocities, masses, forces): Self::SystemData) {\n\n let dt = time.0.as_seconds();\n\n 
for (vel, force, ent) in (&mut velocities, &forces, &*entities).join() {\n\n let mass = masses.get(ent).cloned().unwrap_or_default();\n\n vel.linear += (force.linear / mass.linear) * dt;\n\n vel.angular *= force.angular.powf(1./mass.angular).powf(dt);\n\n }\n\n }\n\n}\n\n\n", "file_path": "physics/src/systems.rs", "rank": 26, "score": 71841.17323877284 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse specs::{\n\n Entities,\n\n Join,\n\n Read,\n\n ReadStorage,\n\n System,\n", "file_path": "physics/src/systems.rs", "rank": 27, "score": 71832.98257059058 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse sfml::system::Time;\n\n\n\npub(crate) fn add_default_resources(world: &mut ::specs::World) {\n\n world.add_resource(DeltaTime::default());\n\n 
world.add_resource(Elapsed::default());\n\n}\n", "file_path": "src/engine/resources/time.rs", "rank": 28, "score": 69999.3656921995 }, { "content": "\n\n/// The time since the last update/fixed update.\n\npub struct DeltaTime(pub Time);\n\n\n\nimpl Default for DeltaTime {\n\n fn default() -> Self { DeltaTime(Time::ZERO) }\n\n}\n\n\n\n/// The total elapsed time since the simulation started.\n\npub struct Elapsed {\n\n pub previous: Time,\n\n pub current: Time,\n\n}\n\n\n\nimpl Default for Elapsed {\n\n fn default() -> Self { \n\n Elapsed {\n\n current: Time::ZERO,\n\n previous: Time::ZERO,\n\n }\n\n }\n\n}\n", "file_path": "src/engine/resources/time.rs", "rank": 29, "score": 69994.28485425544 }, { "content": " use specs::RunNow;\n\n SpecializedSystemWrapper{\n\n system: self,\n\n special_data: Some(special_data),\n\n }.run_now(res);\n\n }\n\n\n\n fn setup(&mut self, special_data: T, res: &mut Resources) {\n\n self.setup_special(special_data, res);\n\n }\n\n}\n\n\n", "file_path": "src/engine/systems/specialized.rs", "rank": 30, "score": 69685.47565335683 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse specs::{Resources, System, SystemData};\n\n\n", "file_path": "src/engine/systems/specialized.rs", "rank": 31, "score": 69679.43634097007 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not 
use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse sfml::graphics::View as SfView;\n\nuse sfml::system::Vector2f;\n\nuse specs::BitSet;\n\n\n\npub(crate) fn add_default_resources(world: &mut ::specs::World) {\n\n world.add_resource(DrawLayers::default());\n", "file_path": "src/engine/resources/draw.rs", "rank": 32, "score": 69621.65705814528 }, { "content": " world.add_resource(CurrentDrawLayer::default());\n\n world.add_resource(View::default());\n\n}\n\n\n\n/// Internal tracking of bitsets for which entities are on which layers.\n\n#[derive(Default)]\n\npub(crate) struct DrawLayers {\n\n /// Masks for each layer.\n\n pub(crate) masks: [BitSet; DrawLayers::NUM_LAYERS],\n\n /// Mask of values set in any layer.\n\n pub(crate) any_mask: BitSet,\n\n}\n\n\n\nimpl DrawLayers {\n\n /// Number of total layer masks.\n\n pub(crate) const NUM_LAYERS: usize = 10;\n\n}\n\n\n\n/// Which draw layer to read from for the current draw.\n\n#[derive(Default)]\n", "file_path": "src/engine/resources/draw.rs", "rank": 33, "score": 69619.68623023256 }, { "content": "pub(crate) struct CurrentDrawLayer(usize);\n\n\n\nimpl CurrentDrawLayer {\n\n /// Get the current draw layer. Guaranteed to be <= DrawLayers::NUM_LAYERS.\n\n #[inline]\n\n pub(crate) fn layer(&self) -> usize { self.0 }\n\n\n\n /// Set the current draw layer. 
Must be <= DrawLayers::NUM_LAYERS.\n\n #[inline]\n\n pub(crate) fn set_layer(&mut self, layer: usize) {\n\n assert!(layer <= DrawLayers::NUM_LAYERS, \"layer out of range\");\n\n self.0 = layer;\n\n }\n\n}\n\n\n\npub struct View {\n\n pub center: Vector2f,\n\n pub size: Vector2f,\n\n pub rotation: f32,\n\n}\n", "file_path": "src/engine/resources/draw.rs", "rank": 34, "score": 69618.6651807378 }, { "content": "\n\nimpl Default for View {\n\n fn default() -> Self { \n\n Self {\n\n center: Vector2f::new(0., 0.),\n\n size: Vector2f::new(1000., 1000.),\n\n rotation: 0.,\n\n }\n\n }\n\n}\n\n\n\nimpl View {\n\n pub(crate) fn copy_to(&self, view: &mut SfView) {\n\n view.set_center(self.center);\n\n view.set_size(self.size);\n\n view.set_rotation(self.rotation);\n\n }\n\n}\n", "file_path": "src/engine/resources/draw.rs", "rank": 35, "score": 69615.32246938914 }, { "content": " Component,\n\n FlaggedStorage,\n\n VecStorage,\n\n World,\n\n};\n\n\n\nuse crate::engine::resources::draw::DrawLayers;\n\n\n\npub(crate) fn register_all(world: &mut World) {\n\n world.register::<DrawLayer>();\n\n world.register::<DrawColor>();\n\n world.register::<DrawShape>();\n\n}\n\n\n\n/// What layer to draw an object on.\n\n#[derive(Debug)]\n\npub struct DrawLayer(u8);\n\n\n\nimpl Component for DrawLayer {\n\n type Storage = FlaggedStorage<Self, VecStorage<Self>>;\n", "file_path": "src/engine/components/draw.rs", "rank": 36, "score": 69494.1185458234 }, { "content": " pub outline_thickness: f32,\n\n}\n\n\n\nimpl Component for DrawColor {\n\n type Storage = FlaggedStorage<Self, VecStorage<Self>>;\n\n}\n\n\n\nimpl DrawColor {\n\n pub(crate) fn copy_to<'a, T>(&self, shape: &mut T) where T: Shape<'a> {\n\n shape.set_fill_color(self.fill_color);\n\n shape.set_outline_color(self.outline_color);\n\n shape.set_outline_thickness(self.outline_thickness);\n\n }\n\n}\n\n\n\n/// A drawable shape.\n\n#[derive(Clone)]\n\npub struct DrawShape {\n\n pub shape_type: ShapeType,\n\n pub origin: Vector2f,\n", 
"file_path": "src/engine/components/draw.rs", "rank": 37, "score": 69487.40276777181 }, { "content": "}\n\n\n\n/// Properties specific to particular types of shapes.\n\n#[derive(Clone)]\n\npub enum ShapeType {\n\n Rect {\n\n size: Vector2f\n\n },\n\n Circle {\n\n radius: f32,\n\n point_count: u32,\n\n },\n\n Convex(Vec<Vector2f>),\n\n}\n\n\n\nimpl Component for DrawShape {\n\n type Storage = FlaggedStorage<Self, VecStorage<Self>>;\n\n}\n", "file_path": "src/engine/components/draw.rs", "rank": 38, "score": 69483.27396148353 }, { "content": "}\n\n\n\nimpl DrawLayer {\n\n /// Set the layer. Must be less than `NUM_LAYERS`.\n\n #[inline]\n\n pub fn set_layer(&mut self, layer: u8) {\n\n assert!((layer as usize) < DrawLayers::NUM_LAYERS, \"layer out of range\");\n\n self.0 = layer;\n\n }\n\n\n\n /// Get the current draw layer.\n\n #[inline]\n\n pub fn layer(&self) -> u8 { self.0 }\n\n}\n\n\n\n/// Properties for basic shape renderers.\n\n#[derive(Debug)]\n\npub struct DrawColor {\n\n pub fill_color: Color,\n\n pub outline_color: Color,\n", "file_path": "src/engine/components/draw.rs", "rank": 39, "score": 69482.32061574666 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse sfml::graphics::{\n\n Color,\n\n Shape,\n\n};\n\nuse sfml::system::Vector2f;\n\nuse specs::{\n", "file_path": "src/engine/components/draw.rs", "rank": 40, "score": 69477.69041436646 }, { "content": " 
RemovedFlag,\n\n};\n\n\n\nuse physics::{\n\n components::Position,\n\n resources::PhysicsElapsed,\n\n};\n\n\n\nuse crate::engine::{\n\n components::draw::{\n\n DrawColor,\n\n DrawLayer,\n\n DrawShape,\n\n ShapeType,\n\n },\n\n resources::{\n\n draw::{\n\n CurrentDrawLayer,\n\n DrawLayers,\n\n },\n", "file_path": "src/engine/systems/draw.rs", "rank": 41, "score": 69275.3526027455 }, { "content": " fn setup_special(&mut self, _: &'b mut Vec<Option<SfShape<'tex>>>, res: &mut Resources) {\n\n Self::SystemData::setup(res);\n\n let mut shape_storage: WriteStorage<DrawShape> = SystemData::fetch(&res);\n\n self.removed_shape_reader_id = Some(shape_storage.track_removed());\n\n self.modified_shape_reader_id = Some(shape_storage.track_modified());\n\n self.inserted_shape_reader_id = Some(shape_storage.track_inserted());\n\n\n\n let mut color_storage: WriteStorage<DrawColor> = SystemData::fetch(&res);\n\n self.modified_color_reader_id = Some(color_storage.track_modified());\n\n self.inserted_color_reader_id = Some(color_storage.track_inserted());\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub(crate) struct DrawDrawShapesSystem;\n\n\n\nimpl<'a, 'b, 'tex> \n\n SpecializedSystem<'a, (&'b mut RenderWindow, &'b mut Vec<Option<SfShape<'tex>>>)> \n\n for DrawDrawShapesSystem\n\n{\n", "file_path": "src/engine/systems/draw.rs", "rank": 42, "score": 69271.95546332406 }, { "content": "impl<'a, 'b, 'tex> SpecializedSystem<'a, &'b mut Vec<Option<SfShape<'tex>>>> \n\n for SyncDrawShapesSystem \n\n{\n\n type SystemData = (\n\n Entities<'a>,\n\n Read<'a, Elapsed>,\n\n Read<'a, PhysicsElapsed>,\n\n ReadStorage<'a, Position>,\n\n ReadStorage<'a, DrawShape>,\n\n ReadStorage<'a, DrawColor>,\n\n );\n\n\n\n fn run_special(\n\n &mut self, \n\n sf_shapes: &'b mut Vec<Option<SfShape<'tex>>>,\n\n (entities, elapsed, physics_elapsed, positions, shapes, colors): Self::SystemData,\n\n ) {\n\n self.dirty.clear();\n\n shapes.populate_removed(\n\n &mut self.removed_shape_reader_id.as_mut().unwrap(), \n", 
"file_path": "src/engine/systems/draw.rs", "rank": 43, "score": 69271.35715225709 }, { "content": " time::Elapsed,\n\n },\n\n systems::specialized::SpecializedSystem,\n\n};\n\n\n\n/// Updates draw layers.\n\n#[derive(Default)]\n\npub(crate) struct DrawLayersUpdater {\n\n dirty: BitSet,\n\n removed_reader_id: Option<ReaderId<RemovedFlag>>,\n\n modified_reader_id: Option<ReaderId<ModifiedFlag>>,\n\n inserted_reader_id: Option<ReaderId<InsertedFlag>>,\n\n}\n\n\n\nimpl<'a> System<'a> for DrawLayersUpdater {\n\n type SystemData = (\n\n Write<'a, DrawLayers>,\n\n ReadStorage<'a, DrawLayer>,\n\n );\n\n\n", "file_path": "src/engine/systems/draw.rs", "rank": 44, "score": 69268.24822028863 }, { "content": " &mut self.inserted_color_reader_id.as_mut().unwrap(),\n\n &mut self.dirty,\n\n );\n\n colors.populate_modified(\n\n &mut self.modified_color_reader_id.as_mut().unwrap(),\n\n &mut self.dirty,\n\n );\n\n for (_, color, id) in (&shapes, &colors, &self.dirty).join() {\n\n sf_shapes[id as usize].as_mut().unwrap().apply_color(color);\n\n }\n\n\n\n let factor = Position::physics_interpolation_factor(elapsed.current, &physics_elapsed);\n\n for (entity, _, position) in (&*entities, &shapes, &positions).join() {\n\n let (pos, rot) = position.interpolate(factor);\n\n let draw_pos = Vector2f::new(pos.x, -pos.y);\n\n let draw_rot = -rot.angle().to_degrees();\n\n sf_shapes[entity.id() as usize].as_mut().unwrap().apply_position(draw_pos, draw_rot);\n\n }\n\n }\n\n\n", "file_path": "src/engine/systems/draw.rs", "rank": 45, "score": 69268.16163721235 }, { "content": " type SystemData = (\n\n Read<'a, DrawLayers>,\n\n Read<'a, CurrentDrawLayer>,\n\n ReadStorage<'a, DrawShape>,\n\n );\n\n\n\n fn run_special(\n\n &mut self, \n\n (window, sf_shapes): (&'b mut RenderWindow, &'b mut Vec<Option<SfShape>>),\n\n (layer_masks, current_draw_layer, shapes): Self::SystemData,\n\n ) {\n\n if current_draw_layer.layer() < DrawLayers::NUM_LAYERS {\n\n for (_, id) in (&shapes, 
&layer_masks.masks[current_draw_layer.layer()]).join() {\n\n sf_shapes[id as usize].as_mut().unwrap().draw(window);\n\n }\n\n } else {\n\n for (_, id) in (&shapes, !&layer_masks.any_mask).join() {\n\n sf_shapes[id as usize].as_mut().unwrap().draw(window);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/engine/systems/draw.rs", "rank": 46, "score": 69266.59936163099 }, { "content": " RenderTarget,\n\n Transformable,\n\n};\n\nuse sfml::system::Vector2f;\n\nuse specs::{\n\n BitSet,\n\n Entities,\n\n Join,\n\n Read,\n\n ReaderId,\n\n ReadStorage,\n\n Resources,\n\n System,\n\n SystemData,\n\n Write,\n\n WriteStorage,\n\n};\n\nuse specs::storage::{\n\n InsertedFlag,\n\n ModifiedFlag,\n", "file_path": "src/engine/systems/draw.rs", "rank": 47, "score": 69265.83943647926 }, { "content": " }\n\n layer_masks.masks[layer.layer() as usize].add(id);\n\n layer_masks.any_mask.add(id);\n\n }\n\n }\n\n\n\n fn setup(&mut self, res: &mut Resources) {\n\n Self::SystemData::setup(res);\n\n let mut storage: WriteStorage<DrawLayer> = SystemData::fetch(&res);\n\n self.removed_reader_id = Some(storage.track_removed());\n\n self.modified_reader_id = Some(storage.track_modified());\n\n self.inserted_reader_id = Some(storage.track_inserted());\n\n }\n\n}\n\n\n\npub(crate) enum SfShape<'tex> {\n\n Rect(RectangleShape<'tex>),\n\n Circle(CircleShape<'tex>),\n\n Convex(ConvexShape<'tex>),\n\n}\n", "file_path": "src/engine/systems/draw.rs", "rank": 48, "score": 69265.74421318514 }, { "content": " }\n\n }\n\n\n\n fn draw_to<D: Drawable>(drawable: &D, target: &mut RenderWindow) {\n\n target.draw(drawable);\n\n }\n\n}\n\n\n\n/// System to sync all SFML shapes from their corresponding components.\n\n#[derive(Default)]\n\npub(crate) struct SyncDrawShapesSystem {\n\n dirty: BitSet,\n\n removed_shape_reader_id: Option<ReaderId<RemovedFlag>>,\n\n modified_shape_reader_id: Option<ReaderId<ModifiedFlag>>,\n\n inserted_shape_reader_id: Option<ReaderId<InsertedFlag>>,\n\n\n\n inserted_color_reader_id: 
Option<ReaderId<InsertedFlag>>,\n\n modified_color_reader_id: Option<ReaderId<ModifiedFlag>>,\n\n}\n\n\n", "file_path": "src/engine/systems/draw.rs", "rank": 49, "score": 69265.51835583082 }, { "content": " sf_shapes[id as usize] = Some(SfShape::create_for(shape));\n\n } else {\n\n sf_shapes[id as usize].as_mut().unwrap().update_for(shape);\n\n }\n\n if let Some(color) = colors.get(entity) {\n\n sf_shapes[id as usize].as_mut().unwrap().apply_color(color);\n\n }\n\n }\n\n\n\n self.dirty.clear();\n\n shapes.populate_modified(\n\n &mut self.modified_shape_reader_id.as_mut().unwrap(), \n\n &mut self.dirty,\n\n );\n\n for (shape, id) in (&shapes, &self.dirty).join() {\n\n sf_shapes[id as usize].as_mut().unwrap().update_for(shape);\n\n }\n\n\n\n self.dirty.clear();\n\n colors.populate_inserted(\n", "file_path": "src/engine/systems/draw.rs", "rank": 50, "score": 69259.95611098339 }, { "content": " },\n\n (SfShape::Convex(convex), &ShapeType::Convex(ref points)) => {\n\n convex.set_point_count(points.len() as u32);\n\n for (i, point) in points.iter().cloned().enumerate() {\n\n convex.set_point(i as u32, point);\n\n }\n\n convex.set_origin(shape.origin);\n\n },\n\n (this, _) => *this = SfShape::create_for(shape),\n\n }\n\n }\n\n\n\n /// Copy the given color properties to this shape.\n\n fn apply_color(&mut self, color: &DrawColor) {\n\n match self {\n\n SfShape::Rect(shape) => color.copy_to(shape),\n\n SfShape::Circle(shape) => color.copy_to(shape),\n\n SfShape::Convex(shape) => color.copy_to(shape),\n\n }\n\n }\n", "file_path": "src/engine/systems/draw.rs", "rank": 51, "score": 69258.20152098247 }, { "content": "\n\n /// Sets the position of this drawable.\n\n fn apply_position(&mut self, pos: Vector2f, rot: f32) {\n\n match self {\n\n SfShape::Rect(shape) => SfShape::apply_position_to(shape, pos, rot),\n\n SfShape::Circle(shape) => SfShape::apply_position_to(shape, pos, rot),\n\n SfShape::Convex(shape) => SfShape::apply_position_to(shape, pos, rot),\n\n }\n\n }\n\n\n\n fn 
apply_position_to<T: Transformable>(trans: &mut T, pos: Vector2f, rot: f32) {\n\n trans.set_position(pos);\n\n trans.set_rotation(rot);\n\n }\n\n\n\n fn draw(&self, target: &mut RenderWindow) {\n\n match self {\n\n SfShape::Rect(shape) => SfShape::draw_to(shape, target),\n\n SfShape::Circle(shape) => SfShape::draw_to(shape, target),\n\n SfShape::Convex(shape) => SfShape::draw_to(shape, target),\n", "file_path": "src/engine/systems/draw.rs", "rank": 52, "score": 69257.91641158312 }, { "content": "\n\nimpl<'tex> SfShape<'tex> {\n\n /// Create an SfShape for a given DrawShape.\n\n fn create_for(shape: &DrawShape) -> Self {\n\n match shape.shape_type {\n\n ShapeType::Rect{size} => SfShape::Rect({\n\n let mut rect = RectangleShape::with_size(size);\n\n rect.set_origin(shape.origin);\n\n rect\n\n }),\n\n ShapeType::Circle{\n\n radius,\n\n point_count,\n\n } => SfShape::Circle({\n\n let mut circ = CircleShape::new(radius, point_count);\n\n circ.set_origin(shape.origin);\n\n circ\n\n }),\n\n ShapeType::Convex(ref points) => SfShape::Convex({\n\n let mut convex = ConvexShape::new(points.len() as u32);\n", "file_path": "src/engine/systems/draw.rs", "rank": 53, "score": 69256.39800810591 }, { "content": " &mut self.dirty,\n\n );\n\n for id in (&self.dirty).join() {\n\n if (id as usize) < sf_shapes.len() {\n\n sf_shapes[id as usize] = None;\n\n }\n\n }\n\n\n\n self.dirty.clear();\n\n shapes.populate_inserted(\n\n &mut self.inserted_shape_reader_id.as_mut().unwrap(), \n\n &mut self.dirty,\n\n );\n\n for (entity, shape, id) in (&*entities, &shapes, &self.dirty).join() {\n\n if (id as usize) >= sf_shapes.len() {\n\n let needed = ((id as usize) - sf_shapes.len()) + 1;\n\n sf_shapes.reserve(needed);\n\n sf_shapes.extend((0..needed).map(|_| None));\n\n }\n\n if sf_shapes[id as usize].is_none() {\n", "file_path": "src/engine/systems/draw.rs", "rank": 54, "score": 69255.66771794697 }, { "content": " fn run(&mut self, (mut layer_masks, layers): Self::SystemData) {\n\n // If a remove 
flag is set, remove the id from all numbered masks and add it to the \"none\n\n // set\" mask.\n\n self.dirty.clear();\n\n layers.populate_removed(&mut self.removed_reader_id.as_mut().unwrap(), &mut self.dirty);\n\n for id in (&self.dirty).join() {\n\n for mask in &mut layer_masks.masks[..] {\n\n mask.remove(id);\n\n }\n\n layer_masks.any_mask.remove(id);\n\n }\n\n\n\n // If inserted or modified, remove from all masks and add to the current mask.\n\n self.dirty.clear();\n\n layers.populate_inserted(&mut self.inserted_reader_id.as_mut().unwrap(), &mut self.dirty);\n\n layers.populate_modified(&mut self.modified_reader_id.as_mut().unwrap(), &mut self.dirty);\n\n\n\n for (layer, id) in (&layers, &self.dirty).join() {\n\n for mask in &mut layer_masks.masks[..] {\n\n mask.remove(id);\n", "file_path": "src/engine/systems/draw.rs", "rank": 55, "score": 69255.01064036551 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse sfml::graphics::{\n\n CircleShape,\n\n ConvexShape,\n\n Drawable,\n\n RectangleShape,\n\n RenderWindow,\n", "file_path": "src/engine/systems/draw.rs", "rank": 56, "score": 69254.3863796563 }, { "content": " for (i, point) in points.iter().cloned().enumerate() {\n\n convex.set_point(i as u32, point);\n\n }\n\n convex.set_origin(shape.origin);\n\n convex\n\n }),\n\n }\n\n }\n\n\n\n /// Update this shape if it's of the same type, or replace it with the correct type.\n\n 
fn update_for(&mut self, shape: &DrawShape) {\n\n match (self, &shape.shape_type) {\n\n (SfShape::Rect(rect), &ShapeType::Rect{size}) => {\n\n rect.set_size(size);\n\n rect.set_origin(shape.origin);\n\n },\n\n (SfShape::Circle(circ), &ShapeType::Circle{radius, point_count}) => {\n\n circ.set_radius(radius);\n\n circ.set_point_count(point_count);\n\n circ.set_origin(shape.origin);\n", "file_path": "src/engine/systems/draw.rs", "rank": 57, "score": 69253.43567759548 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\npub mod draw;\n\npub mod time;\n\n\n\npub(crate) fn add_default_resources(world: &mut ::specs::World) {\n\n time::add_default_resources(world);\n\n draw::add_default_resources(world);\n\n}\n", "file_path": "src/engine/resources/mod.rs", "rank": 58, "score": 68112.97121382189 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions 
and\n\n// limitations under the License.\n\n\n\nuse specs::World;\n\n\n\npub mod draw;\n\n\n\npub(crate) fn register_all(world: &mut World) {\n\n draw::register_all(world);\n\n}\n", "file_path": "src/engine/components/mod.rs", "rank": 59, "score": 67973.03773740162 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\npub mod draw;\n\npub mod specialized;\n", "file_path": "src/engine/systems/mod.rs", "rank": 60, "score": 67756.0663076776 }, { "content": "struct StaticScreensaver {\n\n img: Image,\n\n}\n\n\n\nimpl Screensaver for StaticScreensaver {\n\n fn update(&mut self) {\n\n for Vector2u{x, y} in row_major_iterator(self.img.size()) {\n\n self.img.set_pixel(x, y, Color::rgb(rand::random(), rand::random(), rand::random()));\n\n }\n\n }\n\n\n\n fn draw<T: RenderTarget>(&self, target: &mut T) {\n\n let tex = Texture::from_image(&self.img).unwrap();\n\n let sprite = Sprite::with_texture(&tex);\n\n\n\n target.draw(&sprite);\n\n }\n\n}\n\n\n", "file_path": "saver_colorstatic/src/main.rs", "rank": 61, "score": 54053.164445961345 }, { "content": "/// A screensaver which can be run on an SFML RenderTarget.\n\npub trait Screensaver {\n\n /// Runs one \"tick\" in the screensaver, with the update happening at the specified time.\n\n fn update(&mut self);\n\n\n\n /// Draw the screensaver on the specified target.\n\n fn draw<T>(&self, target: &mut T) where T: RenderTarget;\n\n}\n\n\n", 
"file_path": "src/lib.rs", "rank": 62, "score": 52651.21698585396 }, { "content": "pub fn init() {\n\n if !INITIALIZED.swap(true, Ordering::AcqRel) {\n\n unsafe { signal(libc::SIGINT, sigint_handler) };\n\n }\n\n}\n", "file_path": "sigint/src/lib.rs", "rank": 63, "score": 51450.55959115332 }, { "content": "/// Struct for serializing u64 in Sql, clamping at bounds.\n\nstruct SqlBoundedU64(u64);\n\n\n\nimpl ToSql for SqlBoundedU64 {\n\n fn to_sql(&self) -> Result<ToSqlOutput, SqlError> {\n\n if self.0 <= i64::max_value() as u64 {\n\n Ok(ToSqlOutput::Owned(SqlValue::Integer(self.0 as i64)))\n\n } else {\n\n Err(SqlError::ToSqlConversionFailure(\n\n format!(\n\n \"Value {} is too large for SQLite, max is {}\", self.0, i64::max_value(),\n\n ).into(),\n\n ))\n\n }\n\n }\n\n}\n\n\n\nimpl FromSql for SqlBoundedU64 {\n\n fn column_result(value: SqlValueRef) -> Result<Self, FromSqlError> {\n\n match value {\n\n SqlValueRef::Integer(value) if value >= 0 => Ok(SqlBoundedU64(value as u64)),\n", "file_path": "saver_genetic_orbits/src/storage/sqlite.rs", "rank": 64, "score": 48327.25901622286 }, { "content": "/// Struct for serializing u64 in Sql, wrapping out of range i64 values.\n\nstruct SqlWrappingU64(u64);\n\n\n\nimpl ToSql for SqlWrappingU64 {\n\n fn to_sql(&self) -> Result<ToSqlOutput, SqlError> {\n\n Ok(ToSqlOutput::Owned(SqlValue::Integer(self.0 as i64)))\n\n }\n\n}\n\n\n\nimpl FromSql for SqlWrappingU64 {\n\n fn column_result(value: SqlValueRef) -> Result<Self, FromSqlError> {\n\n match value {\n\n SqlValueRef::Integer(value) => Ok(SqlWrappingU64(value as u64)),\n\n _ => Err(FromSqlError::InvalidType),\n\n }\n\n }\n\n}\n\n\n", "file_path": "saver_genetic_orbits/src/storage/sqlite.rs", "rank": 65, "score": 48327.163239933376 }, { "content": "pub fn received_sigint() -> bool {\n\n RECEIVED_SIGINT.load(Ordering::Relaxed)\n\n}\n\n\n", "file_path": "sigint/src/lib.rs", "rank": 66, "score": 47838.68454814669 }, { "content": "/// A visitor that receives a node from an 
expression tree.\n\npub trait Visitor {\n\n /// Visit the given expression subtree, optionally replacing it.\n\n fn visit(&mut self, node: &Expression) -> Option<Expression>;\n\n}\n\n\n\nimpl<F> Visitor for F where F: FnMut(&Expression) -> Option<Expression> {\n\n fn visit(&mut self, node: &Expression) -> Option<Expression> {\n\n self(node)\n\n }\n\n}\n\n\n\nimpl Expression {\n\n /// Perform a postorder traversal of the expression tree, running the specified visitor on\n\n /// every node to transform it.\n\n fn transform_postorder<V: Visitor>(&mut self, visitor: &mut V) {\n\n // Traverse all children first.\n\n match self {\n\n Expression::BinaryOp(lhs, _, rhs) => {\n\n lhs.transform_postorder(visitor);\n\n rhs.transform_postorder(visitor);\n", "file_path": "saver_genetic_orbits/src/statustracker/scoring_function/transforms.rs", "rank": 67, "score": 46503.69038874919 }, { "content": "pub fn run_saver<F, S>(create_saver: F) \n\nwhere F: FnOnce(Vector2u) -> S,\n\n S: Screensaver {\n\n sigint::init();\n\n\n\n let mut window = open_window();\n\n let mut saver = create_saver(window.size());\n\n\n\n while !sigint::received_sigint() {\n\n while let Some(_) = window.poll_event() {}\n\n\n\n saver.update();\n\n\n\n window.clear(Color::GREEN);\n\n saver.draw(&mut window);\n\n window.display();\n\n }\n\n info!(\"Shutting Down\");\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 68, "score": 41867.10038690735 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language 
governing permissions and\n\n// limitations under the License.\n\n\n\npub mod components;\n\npub mod systems;\n\npub mod resources;\n\npub mod time;\n\n\n\n/// Register all components and default resources.\n", "file_path": "physics/src/lib.rs", "rank": 69, "score": 35986.426094679555 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse specs::{SystemData, World};\n\n\n\npub(crate) fn add_default_resources(world: &mut World) {\n\n world.add_resource(SceneChange(None));\n\n}\n\n\n\n/// Builds a new scene after a previous one has been cleaned up.\n\npub(crate) trait SceneLoaderDispatcher {\n\n fn dispatch(&mut self, world: &mut World);\n\n}\n\n\n", "file_path": "scene_management/src/resources.rs", "rank": 70, "score": 34949.37629784672 }, { "content": " where T: for<'a> SceneLoader<'a> + Send + Sync + 'static\n\n {\n\n self.0 = Some(Box::new(scene_loader));\n\n }\n\n\n\n /// True if a scene change is scheduled for after the end of the current frame.\n\n pub fn is_scene_change_scheduled(&self) -> bool {\n\n self.0.is_some()\n\n }\n\n\n\n /// Cancels a scene change scheduled for after the end of the current frame.\n\n pub fn cancel_scene_change(&mut self) {\n\n self.0 = None;\n\n }\n\n\n\n /// Retrieves the pending scene changer, clearing it in the process. 
This is needed because the\n\n /// scene change resource cannot be borrowed when the scene loader is executed.\n\n pub(crate) fn take_scene_changer(&mut self) \n\n -> Option<Box<dyn SceneLoaderDispatcher + Send + Sync>>\n\n {\n\n self.0.take()\n\n }\n\n}\n", "file_path": "scene_management/src/resources.rs", "rank": 71, "score": 34940.51124646188 }, { "content": "\n\n/// A component which marks an object as being part of the current \"Scene\", it will be removed when\n\n/// the scene changes.\n\n#[derive(Default)]\n\npub struct InScene;\n\nimpl Component for InScene { type Storage = NullStorage<Self>; }\n\n\n\n/// A component which marks an object as deleted. Will be cleaned up at the end of the current\n\n/// dispatcher run before the next call to maintain.\n\n/// This is useful because Entities.delete(ent) does not apply until the next call to maintain, but\n\n/// there's no way to query for deleted entities. If systems want to delete entities early and let\n\n/// other systems see that the entities are meant to be deleted before the next call to maintain,\n\n/// they can use this.\n\n#[derive(Default)]\n\npub struct Deleted;\n\nimpl Component for Deleted { type Storage = NullStorage<Self>; }\n", "file_path": "scene_management/src/components.rs", "rank": 72, "score": 34817.78577376029 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse specs::{Component, NullStorage, 
World};\n\n\n\npub(crate) fn register_all(world: &mut World) {\n\n world.register::<InScene>();\n\n world.register::<Deleted>();\n\n}\n", "file_path": "scene_management/src/components.rs", "rank": 73, "score": 34813.041379570015 }, { "content": "\n\nuse crate::components::{InScene, Deleted};\n\n\n\n/// Removes all entities in the current scene.\n\npub(crate) struct ClearCurrentScene;\n\nimpl<'a> System<'a> for ClearCurrentScene {\n\n type SystemData = (\n\n Entities<'a>,\n\n ReadStorage<'a, InScene>,\n\n );\n\n\n\n fn run(&mut self, (entities, in_scene): Self::SystemData) {\n\n for (entity, _) in (&*entities, &in_scene).join() {\n\n entities.delete(entity).unwrap();\n\n }\n\n }\n\n}\n\n\n\n/// Removes all entities that have the Deleted component. Can be added to a world to support\n\n/// deleting by adding the delete component (allowing other systems in the same dispatch run to\n", "file_path": "scene_management/src/systems.rs", "rank": 74, "score": 34603.835650432 }, { "content": "/// determine that those entities are deleted).\n\npub struct DeleteSystem;\n\nimpl<'a> System<'a> for DeleteSystem {\n\n type SystemData = (\n\n Entities<'a>,\n\n ReadStorage<'a, Deleted>,\n\n );\n\n\n\n fn run(&mut self, (entities, deleted): Self::SystemData) {\n\n for (ent, _) in (&*entities, &deleted).join() {\n\n entities.delete(ent).unwrap();\n\n }\n\n }\n\n}\n", "file_path": "scene_management/src/systems.rs", "rank": 75, "score": 34596.665956496276 }, { "content": "// Copyright 2018 Google LLC\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the 
License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse specs::{\n\n Entities,\n\n Join,\n\n ReadStorage,\n\n System,\n\n};\n", "file_path": "scene_management/src/systems.rs", "rank": 76, "score": 34591.761374241374 }, { "content": "/// Starts automatic background pruning of scenarios. Returns a handle that can be used to shutdown\n\n/// background pruning.\n\npub fn prune_scenarios<S>(interval: Duration, number_to_keep: u64, storage: S) -> ShutdownPrune \n\nwhere S: Storage + Send + 'static\n\n{\n\n let (send, recv) = mpsc::channel();\n\n let handle = thread::spawn(move || {\n\n let mut storage = storage;\n\n loop {\n\n match recv.recv() {\n\n Ok(Action::Tick) => {\n\n info!(\"Pruning scenarios\");\n\n match storage.keep_top_scenarios_by_score(number_to_keep) {\n\n Ok(num_pruned) => info!(\"Pruned {} scenarios\", num_pruned),\n\n Err(err) => error!(\"Falied to prune scenarios: {}\", err),\n\n }\n\n },\n\n Ok(Action::Shutdown) => {\n\n info!(\"Sending final prune and shutting down.\");\n\n match storage.keep_top_scenarios_by_score(number_to_keep) {\n\n Ok(num_pruned) => info!(\"Pruned {} scenarios\", num_pruned),\n\n Err(err) => error!(\"Falied to prune scenarios: {}\", err),\n", "file_path": "saver_genetic_orbits/src/pruner.rs", "rank": 96, "score": 33686.847505799386 }, { "content": "/// Every `interval` send `value` into the given channel. 
Stops sending only when the channel is\n\n/// closed.\n\npub fn interval<T: Clone + Send + 'static>(interval: Duration, chan: Sender<T>, value: T) {\n\n thread::spawn(move || {\n\n loop {\n\n thread::sleep(interval);\n\n match chan.send(value.clone()) {\n\n Ok(()) => {},\n\n Err(_) => break,\n\n };\n\n }\n\n });\n\n}\n", "file_path": "saver_genetic_orbits/src/timer.rs", "rank": 97, "score": 32287.61764569558 }, { "content": "}\n\n\n\n#[cfg(feature = \"graphical\")]\n\npub(crate) mod graphical_components {\n\n use rand::{Rng, distributions::{Distribution, Uniform}};\n\n use sfml::{graphics::Color, system::Vector2f};\n\n use specs::world::LazyBuilder;\n\n\n\n use xsecurelock_saver::engine::components::draw::{DrawColor, DrawShape, ShapeType};\n\n\n\n pub(super) fn add_graphical_components<'a, R: Rng>(\n\n radius: f32, rng: &mut R, builder: LazyBuilder<'a>,\n\n ) -> LazyBuilder<'a> {\n\n let color = generate_random_color(rng);\n\n builder.with(DrawColor {\n\n fill_color: color,\n\n outline_color: color,\n\n outline_thickness: 0.,\n\n })\n\n .with(DrawShape {\n", "file_path": "saver_genetic_orbits/src/worldgenerator/mod.rs", "rank": 98, "score": 30981.231145370915 }, { "content": "\n\nmod scoring_function;\n\n\n\n#[cfg(feature = \"graphical\")]\n\nmod area_scaling {\n\n use specs::Read;\n\n use xsecurelock_saver::engine::resources::draw::View;\n\n\n\n pub(super) type AreaScalingData<'a> = Read<'a, View>;\n\n\n\n pub(super) fn get_aspect<'a>(view: &AreaScalingData<'a>) -> f32 {\n\n // x / y = w / w_0; w_0 * x / y = w\n\n view.size.x / view.size.y\n\n }\n\n}\n\n\n\n#[cfg(not(feature = \"graphical\"))]\n\nmod area_scaling {\n\n pub(super) type AreaScalingData<'a> = ();\n\n\n", "file_path": "saver_genetic_orbits/src/statustracker/mod.rs", "rank": 99, "score": 30973.765835316488 } ]
Rust
src/clock_control/config.rs
reitermarkus/esp32-hal
95c7596c78a8e380b858d34f4f0ad1170a5b259a
use super::Error; use crate::prelude::*; use core::fmt; use super::{ dfs, CPUSource, ClockControlConfig, FastRTCSource, SlowRTCSource, CLOCK_CONTROL, CLOCK_CONTROL_MUTEX, }; impl<'a> super::ClockControlConfig { pub fn cpu_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().cpu_frequency } } pub fn apb_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().apb_frequency } } pub fn cpu_frequency_default(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().cpu_frequency_default } } pub fn cpu_frequency_locked(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().cpu_frequency_locked } } pub fn cpu_frequency_apb_locked(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().cpu_frequency_apb_locked } } pub fn apb_frequency_apb_locked(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().apb_frequency_apb_locked } } pub fn is_ref_clock_stable(&self) -> bool { unsafe { CLOCK_CONTROL.as_ref().unwrap().ref_clock_stable } } pub fn ref_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().ref_frequency } } pub fn slow_rtc_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().slow_rtc_frequency } } pub fn fast_rtc_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().fast_rtc_frequency } } pub fn apll_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().apll_frequency } } pub fn pll_d2_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().pll_d2_frequency } } pub fn xtal_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().xtal_frequency } } pub fn xtal32k_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().xtal32k_frequency } } pub fn pll_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().pll_frequency } } pub fn rtc8m_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().rtc8m_frequency } } pub fn rtc8md256_frequency(&self) -> Hertz { unsafe { 
CLOCK_CONTROL.as_ref().unwrap().rtc8md256_frequency } } pub fn rtc_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().rtc_frequency } } pub fn cpu_source(&self) -> CPUSource { unsafe { CLOCK_CONTROL.as_ref().unwrap().cpu_source } } pub fn slow_rtc_source(&self) -> SlowRTCSource { unsafe { CLOCK_CONTROL.as_ref().unwrap().slow_rtc_source } } pub fn fast_rtc_source(&self) -> FastRTCSource { unsafe { CLOCK_CONTROL.as_ref().unwrap().fast_rtc_source } } pub fn lock_cpu_frequency(&self) -> dfs::LockCPU { unsafe { CLOCK_CONTROL.as_mut().unwrap().lock_cpu_frequency() } } pub fn lock_apb_frequency(&self) -> dfs::LockAPB { unsafe { CLOCK_CONTROL.as_mut().unwrap().lock_apb_frequency() } } pub fn lock_awake(&self) -> dfs::LockAwake { unsafe { CLOCK_CONTROL.as_mut().unwrap().lock_awake() } } pub fn lock_plld2(&self) -> dfs::LockPllD2 { unsafe { CLOCK_CONTROL.as_mut().unwrap().lock_plld2() } } pub fn add_callback<F>(&self, f: &'static F) -> Result<(), Error> where F: Fn(), { unsafe { CLOCK_CONTROL.as_mut().unwrap().add_callback(f) } } pub fn get_lock_count(&self) -> dfs::Locks { unsafe { CLOCK_CONTROL.as_mut().unwrap().get_lock_count() } } pub unsafe fn park_core(&mut self, core: crate::Core) { (&CLOCK_CONTROL_MUTEX).lock(|_| { CLOCK_CONTROL.as_mut().unwrap().park_core(core); }) } pub fn unpark_core(&mut self, core: crate::Core) { (&CLOCK_CONTROL_MUTEX) .lock(|_| unsafe { CLOCK_CONTROL.as_mut().unwrap().unpark_core(core) }) } pub fn start_app_core(&mut self, entry: fn() -> !) -> Result<(), Error> { (&CLOCK_CONTROL_MUTEX) .lock(|_| unsafe { CLOCK_CONTROL.as_mut().unwrap().start_app_core(entry) }) } pub fn rtc_tick_count(&self) -> TicksU64 { unsafe { CLOCK_CONTROL.as_mut().unwrap().rtc_tick_count() } } pub fn rtc_nanoseconds(&self) -> NanoSecondsU64 { unsafe { CLOCK_CONTROL.as_mut().unwrap().rtc_nanoseconds() } } } impl fmt::Debug for ClockControlConfig { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { unsafe { CLOCK_CONTROL.as_ref().unwrap().fmt(f) } } }
use super::Error; use crate::prelude::*; use core::fmt; use super::{ dfs, CPUSource, ClockControlConfig, FastRTCSource, SlowRTCSource, CLOCK_CONTROL, CLOCK_CONTROL_MUTEX, }; impl<'a> super::ClockControlConfig { pub fn cpu_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().cpu_frequency } } pub fn apb_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().apb_frequency } } pub fn cpu_frequency_default(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().cpu_frequency_default } } pub fn cpu_frequency_locked(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().cpu_frequency_locked } } pub fn cpu_frequency_apb_locked(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().cpu_frequency_apb_locked } } pub fn apb_frequency_apb_locked(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().apb_frequency_apb_locked } } pub fn is_ref_clock_stable(&self) -> bool { unsafe { CLOCK_CONTROL.as_ref().unwrap().ref_clock_stable } } pub fn ref_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().ref_frequency } } pub fn slow_rtc_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().slow_rtc_frequency } } pub fn fast_rtc_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().fast_rtc_frequency } } pub fn apll_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().apll_frequency } } pub fn pll_d2_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().pll_d2_frequency } } pub fn xtal_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().xtal_frequency } } pub fn xtal32k_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().xtal32k_frequency } } pub fn pll_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().pll_frequency } } pub fn rtc8m_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().rtc8m_frequency } } pub fn rtc8md256_frequency(&self) -> Hertz { unsafe { 
CLOCK_CONTROL.as_ref().unwrap().rtc8md256_frequency } } pub fn rtc_frequency(&self) -> Hertz { unsafe { CLOCK_CONTROL.as_ref().unwrap().rtc_frequency } } pub fn cpu_source(&self) -> CPUSource { unsafe { CLOCK_CONTROL.as_ref().unwrap().cpu_source } } pub fn slow_rtc_source(&self) -> SlowRTCSource { unsafe { CLOCK_CONTROL.as_ref().unwrap().slow_rtc_source } } pub fn fast_rtc_source(&self) -> FastRTCSource { unsafe { CLOCK_CONTROL.as_ref().unwrap().fast_rtc_source } } pub fn lock_cpu_frequency(&self) -> dfs::LockCPU { unsafe { CLOCK_CONTROL.as_mut().unwrap().lock_cpu_frequency() } } pub fn lock_apb_frequency(&self) -> dfs::LockAPB { unsafe { CLOCK_CONTROL.as_mut().unwrap().lock_apb_frequency() } } pub fn lock_awake(&self) -> dfs::LockAwake { unsafe { CLOCK_CONTROL.as_mut().unwrap().lock_awake() } } pub fn lock_plld2(&self) -> dfs::LockPllD2 { unsafe { CLOCK_CONTROL.as_mut().unwrap().lock_plld2() } } pub fn add_callback<F>(&self, f: &'static F) -> Result<(), Error>
{ CLOCK_CONTROL.as_mut().unwrap().rtc_nanoseconds() } } } impl fmt::Debug for ClockControlConfig { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { unsafe { CLOCK_CONTROL.as_ref().unwrap().fmt(f) } } }
where F: Fn(), { unsafe { CLOCK_CONTROL.as_mut().unwrap().add_callback(f) } } pub fn get_lock_count(&self) -> dfs::Locks { unsafe { CLOCK_CONTROL.as_mut().unwrap().get_lock_count() } } pub unsafe fn park_core(&mut self, core: crate::Core) { (&CLOCK_CONTROL_MUTEX).lock(|_| { CLOCK_CONTROL.as_mut().unwrap().park_core(core); }) } pub fn unpark_core(&mut self, core: crate::Core) { (&CLOCK_CONTROL_MUTEX) .lock(|_| unsafe { CLOCK_CONTROL.as_mut().unwrap().unpark_core(core) }) } pub fn start_app_core(&mut self, entry: fn() -> !) -> Result<(), Error> { (&CLOCK_CONTROL_MUTEX) .lock(|_| unsafe { CLOCK_CONTROL.as_mut().unwrap().start_app_core(entry) }) } pub fn rtc_tick_count(&self) -> TicksU64 { unsafe { CLOCK_CONTROL.as_mut().unwrap().rtc_tick_count() } } pub fn rtc_nanoseconds(&self) -> NanoSecondsU64 { unsafe
random
[ { "content": "#[ram]\n\npub fn enable(interrupt: Interrupt) -> Result<(), Error> {\n\n match interrupt_to_cpu_interrupt(interrupt) {\n\n Ok(cpu_interrupt) => {\n\n unsafe { interrupt::enable_mask(1 << cpu_interrupt.0) };\n\n return Ok(());\n\n }\n\n Err(_) => enable_with_priority(crate::get_core(), interrupt, InterruptLevel(1)),\n\n }\n\n}\n\n\n\n/// Disable interrupt\n", "file_path": "src/interrupt.rs", "rank": 0, "score": 192054.05905088485 }, { "content": "#[ram]\n\npub fn disable(interrupt: Interrupt) -> Result<(), Error> {\n\n match interrupt_to_cpu_interrupt(interrupt) {\n\n Ok(cpu_interrupt) => {\n\n unsafe { interrupt::enable_mask(1 << cpu_interrupt.0) };\n\n return Ok(());\n\n }\n\n Err(_) => enable_with_priority(crate::get_core(), interrupt, InterruptLevel(0)),\n\n }\n\n}\n\n\n\n/// Trigger a (cross-)core interrupt\n\n///\n\n/// Valid interrupts are FROM_CPU_INTR[0-3],\n\n/// INTERNAL_SOFTWARE_LEVEL_1_INTR and INTERNAL_SOFTWARE_LEVEL_3_INTR.\n", "file_path": "src/interrupt.rs", "rank": 1, "score": 192054.05905088485 }, { "content": "#[ram]\n\npub fn clear_software_interrupt(interrupt: Interrupt) -> Result<(), Error> {\n\n unsafe {\n\n match interrupt {\n\n FROM_CPU_INTR0 => (*DPORT::ptr())\n\n .cpu_intr_from_cpu_0\n\n .write(|w| w.cpu_intr_from_cpu_0().clear_bit()),\n\n FROM_CPU_INTR1 => (*DPORT::ptr())\n\n .cpu_intr_from_cpu_1\n\n .write(|w| w.cpu_intr_from_cpu_1().clear_bit()),\n\n FROM_CPU_INTR2 => (*DPORT::ptr())\n\n .cpu_intr_from_cpu_2\n\n .write(|w| w.cpu_intr_from_cpu_2().clear_bit()),\n\n FROM_CPU_INTR3 => (*DPORT::ptr())\n\n .cpu_intr_from_cpu_3\n\n .write(|w| w.cpu_intr_from_cpu_3().clear_bit()),\n\n INTERNAL_SOFTWARE_LEVEL_1_INTR | INTERNAL_SOFTWARE_LEVEL_3_INTR => {\n\n interrupt::clear(1 << interrupt_to_cpu_interrupt(interrupt)?.0)\n\n }\n\n\n\n _ => return Err(Error::InvalidInterrupt),\n\n }\n\n };\n\n Ok(())\n\n}\n", "file_path": "src/interrupt.rs", "rank": 2, "score": 183729.64733641598 }, { "content": "#[ram]\n\npub fn 
set_software_interrupt(interrupt: Interrupt) -> Result<(), Error> {\n\n unsafe {\n\n match interrupt {\n\n FROM_CPU_INTR0 => (*DPORT::ptr())\n\n .cpu_intr_from_cpu_0\n\n .write(|w| w.cpu_intr_from_cpu_0().set_bit()),\n\n FROM_CPU_INTR1 => (*DPORT::ptr())\n\n .cpu_intr_from_cpu_1\n\n .write(|w| w.cpu_intr_from_cpu_1().set_bit()),\n\n FROM_CPU_INTR2 => (*DPORT::ptr())\n\n .cpu_intr_from_cpu_2\n\n .write(|w| w.cpu_intr_from_cpu_2().set_bit()),\n\n FROM_CPU_INTR3 => (*DPORT::ptr())\n\n .cpu_intr_from_cpu_3\n\n .write(|w| w.cpu_intr_from_cpu_3().set_bit()),\n\n INTERNAL_SOFTWARE_LEVEL_1_INTR | INTERNAL_SOFTWARE_LEVEL_3_INTR => {\n\n interrupt::set(1 << interrupt_to_cpu_interrupt(interrupt)?.0)\n\n }\n\n\n\n _ => return Err(Error::InvalidInterrupt),\n\n }\n\n };\n\n Ok(())\n\n}\n\n\n\n/// Clear a (cross-)core interrupt\n\n///\n\n/// Valid interrupts are FROM_CPU_INTR[0-3],\n\n/// INTERNAL_SOFTWARE_LEVEL_1_INTR and INTERNAL_SOFTWARE_LEVEL_3_INTR.\n", "file_path": "src/interrupt.rs", "rank": 3, "score": 183729.64733641595 }, { "content": "#[ram]\n\nfn interrupt_to_cpu_interrupt(interrupt: target::Interrupt) -> Result<CPUInterrupt, Error> {\n\n match interrupt {\n\n target::Interrupt::INTERNAL_TIMER0_INTR => Ok(CPUInterrupt(6)),\n\n target::Interrupt::INTERNAL_SOFTWARE_LEVEL_1_INTR => Ok(CPUInterrupt(7)),\n\n target::Interrupt::INTERNAL_PROFILING_INTR => Ok(CPUInterrupt(11)),\n\n target::Interrupt::INTERNAL_TIMER1_INTR => Ok(CPUInterrupt(15)),\n\n target::Interrupt::INTERNAL_TIMER2_INTR => Ok(CPUInterrupt(16)),\n\n target::Interrupt::INTERNAL_SOFTWARE_LEVEL_3_INTR => Ok(CPUInterrupt(29)),\n\n _ => Err(Error::InvalidCPUInterrupt),\n\n }\n\n}\n\n\n", "file_path": "src/interrupt.rs", "rank": 4, "score": 128384.43938090441 }, { "content": "fn cpu_interrupt_to_interrupt(cpu_interrupt: CPUInterrupt) -> Result<target::Interrupt, Error> {\n\n #[ram]\n\n const CPU_INTERRUPT_TO_INTERRUPT: [Option<target::Interrupt>; 32] = [\n\n None,\n\n None,\n\n None,\n\n None,\n\n None,\n\n 
None,\n\n Some(target::Interrupt::INTERNAL_TIMER0_INTR),\n\n Some(target::Interrupt::INTERNAL_SOFTWARE_LEVEL_1_INTR),\n\n None,\n\n None,\n\n None,\n\n Some(target::Interrupt::INTERNAL_PROFILING_INTR),\n\n None,\n\n None,\n\n None,\n\n Some(target::Interrupt::INTERNAL_TIMER1_INTR),\n\n Some(target::Interrupt::INTERNAL_TIMER2_INTR),\n", "file_path": "src/interrupt.rs", "rank": 5, "score": 125934.51292517666 }, { "content": "#[ram]\n\npub fn enable_with_priority(\n\n core: crate::Core,\n\n interrupt: Interrupt,\n\n level: InterruptLevel,\n\n) -> Result<(), Error> {\n\n match interrupt_to_cpu_interrupt(interrupt) {\n\n Ok(cpu_interrupt) => {\n\n if core != crate::get_core() {\n\n return Err(Error::InvalidCore);\n\n }\n\n if level == InterruptLevel(0) {\n\n interrupt::disable_mask(1 << cpu_interrupt.0);\n\n return Ok(());\n\n } else if level == cpu_interrupt_to_level(cpu_interrupt) {\n\n unsafe { interrupt::enable_mask(1 << cpu_interrupt.0) };\n\n return Ok(());\n\n } else {\n\n return Err(Error::InvalidInterruptLevel);\n\n }\n\n }\n", "file_path": "src/interrupt.rs", "rank": 6, "score": 112446.87560929784 }, { "content": "pub trait Frequency: Quantity + Into<Hertz> {}\n", "file_path": "src/units.rs", "rank": 7, "score": 106204.26039013712 }, { "content": "pub fn get_other_core() -> Core {\n\n match get_core() {\n\n Core::PRO => Core::APP,\n\n Core::APP => Core::PRO,\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 8, "score": 105894.53500167752 }, { "content": "pub fn get_core() -> Core {\n\n match ((xtensa_lx6::get_processor_id() >> 13) & 1) != 0 {\n\n false => Core::PRO,\n\n true => Core::APP,\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 9, "score": 105894.53500167752 }, { "content": "/// Get the size of the external RAM (also called PSRAM).\n\npub fn get_size() -> usize {\n\n unsafe { EXTERNAL_RAM_SIZE.assume_init().unwrap() }\n\n}\n\n\n\n/// Initialize external RAM\n\npub(super) unsafe fn init() {\n\n EXTERNAL_RAM_SIZE = 
core::mem::MaybeUninit::new(Some(calculate_external_ram_size()));\n\n\n\n if &_external_heap_start as *const u32 > (&_external_ram_start as *const u32).add(get_size()) {\n\n panic!(\"External RAM too small for data\");\n\n }\n\n xtensa_lx6_rt::zero_bss(&mut _external_bss_start, &mut _external_bss_end);\n\n}\n\n\n\n/// Calculate the size of external RAM by reading and writing at defined intervals while\n\n/// thrashing the cache in between.\n\n///\n\n/// TODO: should be replaced by reading the size via SPI\n\nunsafe fn calculate_external_ram_size() -> usize {\n\n let ram_start_addr: usize = &_external_ram_start as *const u32 as usize;\n", "file_path": "src/external_ram.rs", "rank": 10, "score": 103399.74703902833 }, { "content": "/// cycle accurate delay using the cycle counter register\n\npub fn delay(clocks: u32) {\n\n let start = get_cycle_count();\n\n loop {\n\n if get_cycle_count().wrapping_sub(start) >= clocks {\n\n break;\n\n }\n\n }\n\n}\n", "file_path": "examples/blinky.rs", "rank": 11, "score": 102642.29101886 }, { "content": "#[ram]\n\nfn interrupt_is_edge(interrupt: Interrupt) -> bool {\n\n [\n\n TG0_T0_EDGE_INTR,\n\n TG0_T1_EDGE_INTR,\n\n TG0_WDT_EDGE_INTR,\n\n TG0_LACT_EDGE_INTR,\n\n TG1_T0_EDGE_INTR,\n\n TG1_T1_EDGE_INTR,\n\n TG1_WDT_EDGE_INTR,\n\n TG1_LACT_EDGE_INTR,\n\n ]\n\n .contains(&interrupt)\n\n}\n\n\n", "file_path": "src/interrupt.rs", "rank": 12, "score": 101637.19818045264 }, { "content": "#[ram]\n\npub fn get_interrupt_status(core: Core) -> u128 {\n\n unsafe {\n\n match core {\n\n PRO => {\n\n ((*DPORT::ptr()).pro_intr_status_0.read().bits() as u128)\n\n | ((*DPORT::ptr()).pro_intr_status_1.read().bits() as u128) << 32\n\n | ((*DPORT::ptr()).pro_intr_status_2.read().bits() as u128) << 64\n\n }\n\n APP => {\n\n ((*DPORT::ptr()).app_intr_status_0.read().bits() as u128)\n\n | ((*DPORT::ptr()).app_intr_status_1.read().bits() as u128) << 32\n\n | ((*DPORT::ptr()).app_intr_status_2.read().bits() as u128) << 64\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// 
Map an interrupt to a CPU interrupt\n", "file_path": "src/interrupt.rs", "rank": 13, "score": 92906.78748064135 }, { "content": "/// Returns `true` if `attr.path` matches `name`\n\nfn eq(attr: &Attribute, name: &str) -> bool {\n\n attr.style == AttrStyle::Outer && attr.path.is_ident(name)\n\n}\n", "file_path": "procmacros/src/lib.rs", "rank": 14, "score": 91796.2438204885 }, { "content": "/// Extracts `static mut` vars from the beginning of the given statements\n\nfn extract_static_muts(\n\n stmts: impl IntoIterator<Item = Stmt>,\n\n) -> Result<(Vec<ItemStatic>, Vec<Stmt>), parse::Error> {\n\n let mut istmts = stmts.into_iter();\n\n\n\n let mut seen = HashSet::new();\n\n let mut statics = vec![];\n\n let mut stmts = vec![];\n\n while let Some(stmt) = istmts.next() {\n\n match stmt {\n\n Stmt::Item(Item::Static(var)) => {\n\n if var.mutability.is_some() {\n\n if seen.contains(&var.ident) {\n\n return Err(parse::Error::new(\n\n var.ident.span(),\n\n format!(\"the name `{}` is defined multiple times\", var.ident),\n\n ));\n\n }\n\n\n\n seen.insert(var.ident.clone());\n", "file_path": "procmacros/src/lib.rs", "rank": 15, "score": 86200.59131606453 }, { "content": "/// Function only available once clock if frozen\n\npub fn sleep<T: Into<NanoSeconds>>(time: T) {\n\n unsafe { CLOCK_CONTROL.as_ref().unwrap().delay(time) };\n\n}\n\n\n\nimpl ClockControl {\n\n /// Create new ClockControl structure\n\n pub fn new<T: Into<Hertz> + Copy>(\n\n rtc_control: RTCCNTL,\n\n apb_control: APB_CTRL,\n\n dport_control: crate::dport::ClockControl,\n\n xtal_frequency: T,\n\n ) -> Result<Self, Error> {\n\n let mut cc = ClockControl {\n\n rtc_control,\n\n apb_control,\n\n dport_control,\n\n\n\n cpu_frequency_default: CPU_FREQ_MIN_DEFAULT,\n\n cpu_source_default: CPU_SOURCE_DEFAULT_DEFAULT,\n\n cpu_frequency_locked: CPU_FREQ_MAX_DEFAULT,\n", "file_path": "src/clock_control/mod.rs", "rank": 16, "score": 84523.71583488732 }, { "content": "#[proc_macro_attribute]\n\npub fn interrupt(args: 
TokenStream, input: TokenStream) -> TokenStream {\n\n let mut f: ItemFn = syn::parse(input).expect(\"`#[interrupt]` must be applied to a function\");\n\n\n\n let attr_args = parse_macro_input!(args as AttributeArgs);\n\n\n\n if attr_args.len() > 1 {\n\n Span::call_site()\n\n .error(\"This attribute accepts zero or 1 arguments\")\n\n .emit();\n\n }\n\n\n\n let ident = f.sig.ident.clone();\n\n let mut ident_s = &ident.clone();\n\n\n\n if attr_args.len() == 1 {\n\n match &attr_args[0] {\n\n syn::NestedMeta::Meta(Path(x)) => {\n\n ident_s = x.get_ident().unwrap();\n\n }\n\n _ => {\n", "file_path": "procmacros/src/lib.rs", "rank": 17, "score": 81175.42246386231 }, { "content": "#[proc_macro_attribute]\n\npub fn ram(args: TokenStream, input: TokenStream) -> TokenStream {\n\n let attr_args = parse_macro_input!(args as AttributeArgs);\n\n\n\n let RamArgs {\n\n rtc_fast,\n\n rtc_slow,\n\n external,\n\n uninitialized,\n\n zeroed,\n\n } = match FromMeta::from_list(&attr_args) {\n\n Ok(v) => v,\n\n Err(e) => {\n\n return e.write_errors().into();\n\n }\n\n };\n\n\n\n if rtc_slow && rtc_fast {\n\n Span::call_site()\n\n .error(\"Only one of rtc_slow and rtc_fast is allowed\")\n\n .emit();\n", "file_path": "procmacros/src/lib.rs", "rank": 18, "score": 81175.42246386231 }, { "content": "pub trait FrequencyU64: Quantity + Into<HertzU64> {}\n", "file_path": "src/units.rs", "rank": 19, "score": 79793.48291413956 }, { "content": "fn check_attr_whitelist(attrs: &[Attribute], caller: WhiteListCaller) -> Result<(), TokenStream> {\n\n let whitelist = &[\n\n \"doc\",\n\n \"link_section\",\n\n \"cfg\",\n\n \"allow\",\n\n \"warn\",\n\n \"deny\",\n\n \"forbid\",\n\n \"cold\",\n\n \"ram\",\n\n ];\n\n\n\n 'o: for attr in attrs {\n\n for val in whitelist {\n\n if eq(&attr, &val) {\n\n continue 'o;\n\n }\n\n }\n\n\n", "file_path": "procmacros/src/lib.rs", "rank": 20, "score": 79235.27378657463 }, { "content": "#[alloc_error_handler]\n\nfn alloc_error_handler(layout: core::alloc::Layout) -> ! 
{\n\n panic!(\n\n \"Error allocating {} bytes of memory with alignment {}\",\n\n layout.size(),\n\n layout.align()\n\n );\n\n}\n", "file_path": "examples/alloc.rs", "rank": 21, "score": 72747.21606258978 }, { "content": "#[alloc_error_handler]\n\nfn alloc_error_handler(layout: core::alloc::Layout) -> ! {\n\n panic!(\n\n \"Error allocating {} bytes of memory with alignment {}\",\n\n layout.size(),\n\n layout.align()\n\n );\n\n}\n", "file_path": "examples/mem.rs", "rank": 22, "score": 72747.21606258978 }, { "content": "fn time(output: &mut dyn core::fmt::Write, text: &str, bytes: usize, f: &dyn Fn() -> ()) {\n\n let start = get_cycle_count();\n\n for _ in 0..REPEAT {\n\n f();\n\n }\n\n let end = get_cycle_count();\n\n\n\n let time = (end - start) as f32 / ClockControlConfig {}.cpu_frequency().0 as f32;\n\n writeln!(\n\n output,\n\n \"{:>40}: {:.3}s, {:.3}KB/s\",\n\n text,\n\n time,\n\n (bytes * REPEAT) as f32 / time / 1024.0\n\n )\n\n .unwrap();\n\n}\n\n\n\nunsafe fn time_memcpy(\n\n output: &mut dyn core::fmt::Write,\n", "file_path": "examples/mem.rs", "rank": 23, "score": 61874.18749697469 }, { "content": "/// Trait to split the DPORT peripheral into subsets\n\npub trait Split {\n\n fn split(self) -> (DPORT, ClockControl);\n\n}\n\n\n\nimpl Split for DPORT {\n\n /// function to split the DPORT peripheral into subsets\n\n fn split(self) -> (DPORT, ClockControl) {\n\n (self, ClockControl {})\n\n }\n\n}\n", "file_path": "src/dport.rs", "rank": 24, "score": 60749.46821301036 }, { "content": "fn main() {\n\n // Put the linker script somewhere the linker can find it\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n\n\n File::create(out.join(\"alias.x\"))\n\n .unwrap()\n\n .write_all(if cfg!(feature = \"all_in_ram\") {\n\n include_bytes!(\"ram.x\")\n\n } else {\n\n include_bytes!(\"rom.x\")\n\n })\n\n .unwrap();\n\n\n\n 
println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // Only re-run the build script when memory.x is changed,\n\n // instead of when any part of the source code changes.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n}\n", "file_path": "build.rs", "rank": 25, "score": 60033.84794080211 }, { "content": "/// Extension trait to split a GPIO peripheral in independent pins and registers\n\npub trait GpioExt {\n\n /// The to split the GPIO into\n\n type Parts;\n\n\n\n /// Splits the GPIO block into independent pins and registers\n\n fn split(self) -> Self::Parts;\n\n}\n\n\n\n/// Input mode (type state)\n\npub struct Input<MODE> {\n\n _mode: PhantomData<MODE>,\n\n}\n\n\n\n/// Floating input (type state)\n\npub struct Floating;\n\n\n\n/// Pulled down input (type state)\n\npub struct PullDown;\n\n\n\n/// Pulled up input (type state)\n", "file_path": "src/gpio.rs", "rank": 26, "score": 59257.927779115686 }, { "content": "/// Get heap sizes\n\npub trait AllocatorSize {\n\n /// Get total heap size\n\n fn size(&self) -> usize;\n\n /// Get used heap size\n\n fn used(&self) -> usize;\n\n /// Get free heap size\n\n fn free(&self) -> usize;\n\n}\n\n\n\nunsafe trait GlobalAllocSize: GlobalAlloc + AllocatorSize {}\n\n\n\n#[derive(Copy, Clone)]\n\n#[doc(hidden)]\n\npub struct Allocator {\n\n allocator: &'static (dyn GlobalAllocSize + 'static),\n\n}\n\n\n\nunsafe impl Sync for Allocator {}\n\n\n\nimpl Allocator {\n", "file_path": "src/alloc.rs", "rank": 27, "score": 59257.927779115686 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let dp = target::Peripherals::take().expect(\"Failed to obtain Peripherals\");\n\n\n\n let mut timg0 = dp.TIMG0;\n\n let mut timg1 = dp.TIMG1;\n\n\n\n // (https://github.com/espressif/openocd-esp32/blob/97ba3a6bb9eaa898d91df923bbedddfeaaaf28c9/src/target/esp32.c#L431)\n\n // openocd disables the watchdog timers on halt\n\n // we will do it manually on startup\n\n disable_timg_wdts(&mut timg0, &mut timg1);\n\n\n\n let (mut dport, dport_clock_control) = dp.DPORT.split();\n\n\n\n // setup clocks & watchdog\n\n let mut clock_control = ClockControl::new(\n\n dp.RTCCNTL,\n\n dp.APB_CTRL,\n\n dport_clock_control,\n\n esp32_hal::clock_control::XTAL_FREQUENCY_AUTO,\n\n )\n", "file_path": "examples/mem.rs", "rank": 28, "score": 58281.79524794819 }, { "content": "#[no_mangle]\n\nfn main() -> ! {\n\n let dp = target::Peripherals::take().expect(\"Failed to obtain Peripherals\");\n\n\n\n let mut timg0 = dp.TIMG0;\n\n let mut timg1 = dp.TIMG1;\n\n\n\n let (mut dport, dport_clock_control) = dp.DPORT.split();\n\n\n\n // (https://github.com/espressif/openocd-esp32/blob/97ba3a6bb9eaa898d91df923bbedddfeaaaf28c9/src/target/esp32.c#L431)\n\n // openocd disables the watchdog timer on halt\n\n // we will do it manually on startup\n\n disable_timg_wdts(&mut timg0, &mut timg1);\n\n\n\n let clkcntrl = esp32_hal::clock_control::ClockControl::new(\n\n dp.RTCCNTL,\n\n dp.APB_CTRL,\n\n dport_clock_control,\n\n esp32_hal::clock_control::XTAL_FREQUENCY_AUTO,\n\n )\n\n .unwrap();\n", "file_path": "examples/adc.rs", "rank": 29, "score": 58281.79524794819 }, { "content": "#[no_mangle]\n\nfn main() -> ! 
{\n\n let dp = target::Peripherals::take().expect(\"Failed to obtain Peripherals\");\n\n\n\n let mut timg0 = dp.TIMG0;\n\n let mut timg1 = dp.TIMG1;\n\n\n\n let (mut dport, dport_clock_control) = dp.DPORT.split();\n\n\n\n // (https://github.com/espressif/openocd-esp32/blob/97ba3a6bb9eaa898d91df923bbedddfeaaaf28c9/src/target/esp32.c#L431)\n\n // openocd disables the watchdog timer on halt\n\n // we will do it manually on startup\n\n disable_timg_wdts(&mut timg0, &mut timg1);\n\n\n\n let clkcntrl = esp32_hal::clock_control::ClockControl::new(\n\n dp.RTCCNTL,\n\n dp.APB_CTRL,\n\n dport_clock_control,\n\n esp32_hal::clock_control::XTAL_FREQUENCY_AUTO,\n\n )\n\n .unwrap();\n", "file_path": "examples/serial.rs", "rank": 30, "score": 58281.79524794819 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = target::Peripherals::take().unwrap();\n\n\n\n let (mut dport, dport_clock_control) = dp.DPORT.split();\n\n\n\n let clkcntrl = esp32_hal::clock_control::ClockControl::new(\n\n dp.RTCCNTL,\n\n dp.APB_CTRL,\n\n dport_clock_control,\n\n esp32_hal::clock_control::XTAL_FREQUENCY_AUTO,\n\n )\n\n .unwrap();\n\n\n\n let (clkcntrl_config, mut watchdog_rtc) = clkcntrl.freeze().unwrap();\n\n let (mut timer0, mut timer1, mut timer2, mut watchdog0) = Timer::new(dp.TIMG0, clkcntrl_config);\n\n let (mut timer3, mut timer4, mut timer5, mut watchdog1) = Timer::new(dp.TIMG1, clkcntrl_config);\n\n\n\n watchdog_rtc.disable();\n\n watchdog0.disable();\n\n\n", "file_path": "examples/timer.rs", "rank": 31, "score": 58281.79524794819 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let dp = target::Peripherals::take().unwrap();\n\n\n\n let mut timg0 = dp.TIMG0;\n\n let mut timg1 = dp.TIMG1;\n\n\n\n // (https://github.com/espressif/openocd-esp32/blob/97ba3a6bb9eaa898d91df923bbedddfeaaaf28c9/src/target/esp32.c#L431)\n\n // openocd disables the watchdog timers on halt\n\n // we will do it manually on startup\n\n disable_timg_wdts(&mut timg0, &mut timg1);\n\n\n\n let (mut dport, dport_clock_control) = dp.DPORT.split();\n\n\n\n // setup clocks & watchdog\n\n let mut clock_control = ClockControl::new(\n\n dp.RTCCNTL,\n\n dp.APB_CTRL,\n\n dport_clock_control,\n\n esp32_hal::clock_control::XTAL_FREQUENCY_AUTO,\n\n )\n", "file_path": "examples/exception.rs", "rank": 32, "score": 58281.79524794819 }, { "content": "#[exception]\n\n#[ram]\n\nfn other_exception(\n\n cause: xtensa_lx6_rt::exception::ExceptionCause,\n\n frame: xtensa_lx6_rt::exception::Context,\n\n) {\n\n (&TX).lock(|tx| {\n\n let tx = tx.as_mut().unwrap();\n\n writeln!(tx, \"Exception {:?}, {:08x?}\", cause, frame).unwrap();\n\n });\n\n loop {}\n\n}\n\n\n", "file_path": "examples/exception.rs", "rank": 33, "score": 58281.79524794819 }, { "content": "#[no_mangle]\n\nfn main() -> ! {\n\n let dp = target::Peripherals::take().expect(\"Failed to obtain Peripherals\");\n\n\n\n let mut timg0 = dp.TIMG0;\n\n let mut timg1 = dp.TIMG1;\n\n\n\n let (mut dport, dport_clock_control) = dp.DPORT.split();\n\n\n\n // (https://github.com/espressif/openocd-esp32/blob/97ba3a6bb9eaa898d91df923bbedddfeaaaf28c9/src/target/esp32.c#L431)\n\n // openocd disables the watchdog timer on halt\n\n // we will do it manually on startup\n\n disable_timg_wdts(&mut timg0, &mut timg1);\n\n\n\n let clkcntrl = esp32_hal::clock_control::ClockControl::new(\n\n dp.RTCCNTL,\n\n dp.APB_CTRL,\n\n dport_clock_control,\n\n esp32_hal::clock_control::XTAL_FREQUENCY_AUTO,\n\n )\n\n .unwrap();\n", "file_path": "examples/hall.rs", "rank": 34, "score": 58281.79524794819 }, { "content": "#[no_mangle]\n\nfn main() -> ! 
{\n\n let dp = target::Peripherals::take().expect(\"Failed to obtain Peripherals\");\n\n\n\n let mut timg0 = dp.TIMG0;\n\n let mut timg1 = dp.TIMG1;\n\n\n\n let (_dport, dport_clock_control) = dp.DPORT.split();\n\n\n\n // (https://github.com/espressif/openocd-esp32/blob/97ba3a6bb9eaa898d91df923bbedddfeaaaf28c9/src/target/esp32.c#L431)\n\n // openocd disables the watchdog timer on halt\n\n // we will do it manually on startup\n\n disable_timg_wdts(&mut timg0, &mut timg1);\n\n\n\n let clkcntrl = esp32_hal::clock_control::ClockControl::new(\n\n dp.RTCCNTL,\n\n dp.APB_CTRL,\n\n dport_clock_control,\n\n esp32_hal::clock_control::XTAL_FREQUENCY_AUTO,\n\n )\n\n .unwrap();\n", "file_path": "examples/dac.rs", "rank": 35, "score": 58281.79524794819 }, { "content": "#[no_mangle]\n\nfn main() -> ! {\n\n let dp = target::Peripherals::take().expect(\"Failed to obtain Peripherals\");\n\n\n\n let mut rtccntl = dp.RTCCNTL;\n\n let mut timg0 = dp.TIMG0;\n\n let mut timg1 = dp.TIMG1;\n\n\n\n // (https://github.com/espressif/openocd-esp32/blob/97ba3a6bb9eaa898d91df923bbedddfeaaaf28c9/src/target/esp32.c#L431)\n\n // openocd disables the wdt's on halt\n\n // we will do it manually on startup\n\n disable_timg_wdts(&mut timg0, &mut timg1);\n\n disable_rtc_wdt(&mut rtccntl);\n\n\n\n let pins = dp.GPIO.split();\n\n let mut led = pins.gpio2.into_open_drain_output();\n\n\n\n loop {\n\n led.set_high().unwrap();\n\n delay(CORE_HZ);\n\n led.set_low().unwrap();\n\n delay(CORE_HZ);\n\n }\n\n}\n\n\n", "file_path": "examples/blinky.rs", "rank": 36, "score": 58281.79524794819 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let dp = target::Peripherals::take().expect(\"Failed to obtain Peripherals\");\n\n\n\n let mut timg0 = dp.TIMG0;\n\n let mut timg1 = dp.TIMG1;\n\n\n\n // (https://github.com/espressif/openocd-esp32/blob/97ba3a6bb9eaa898d91df923bbedddfeaaaf28c9/src/target/esp32.c#L431)\n\n // openocd disables the watchdog timers on halt\n\n // we will do it manually on startup\n\n disable_timg_wdts(&mut timg0, &mut timg1);\n\n\n\n let (mut dport, dport_clock_control) = dp.DPORT.split();\n\n\n\n // setup clocks & watchdog\n\n let clock_control = ClockControl::new(\n\n dp.RTCCNTL,\n\n dp.APB_CTRL,\n\n dport_clock_control,\n\n esp32_hal::clock_control::XTAL_FREQUENCY_AUTO,\n\n )\n", "file_path": "examples/ram.rs", "rank": 37, "score": 58281.79524794819 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n let dp = target::Peripherals::take().expect(\"Failed to obtain Peripherals\");\n\n\n\n let mut timg0 = dp.TIMG0;\n\n let mut timg1 = dp.TIMG1;\n\n\n\n // (https://github.com/espressif/openocd-esp32/blob/97ba3a6bb9eaa898d91df923bbedddfeaaaf28c9/src/target/esp32.c#L431)\n\n // openocd disables the watchdog timers on halt\n\n // we will do it manually on startup\n\n disable_timg_wdts(&mut timg0, &mut timg1);\n\n\n\n let (mut dport, dport_clock_control) = dp.DPORT.split();\n\n\n\n // setup clocks & watchdog\n\n let clock_control = ClockControl::new(\n\n dp.RTCCNTL,\n\n dp.APB_CTRL,\n\n dport_clock_control,\n\n esp32_hal::clock_control::XTAL_FREQUENCY_AUTO,\n\n )\n", "file_path": "examples/alloc.rs", "rank": 38, "score": 58281.79524794819 }, { "content": "#[no_mangle]\n\nfn main() -> ! 
{\n\n let dp = target::Peripherals::take().expect(\"Failed to obtain Peripherals\");\n\n\n\n let mut timg0 = dp.TIMG0;\n\n let mut timg1 = dp.TIMG1;\n\n\n\n // (https://github.com/espressif/openocd-esp32/blob/97ba3a6bb9eaa898d91df923bbedddfeaaaf28c9/src/target/esp32.c#L431)\n\n // openocd disables the watchdog timers on halt\n\n // we will do it manually on startup\n\n disable_timg_wdts(&mut timg0, &mut timg1);\n\n\n\n let (mut dport, dport_clock_control) = dp.DPORT.split();\n\n\n\n // setup clocks & watchdog\n\n let mut clock_control = ClockControl::new(\n\n dp.RTCCNTL,\n\n dp.APB_CTRL,\n\n dport_clock_control,\n\n esp32_hal::clock_control::XTAL_FREQUENCY_AUTO,\n\n )\n", "file_path": "examples/rtccntl.rs", "rank": 39, "score": 58281.79524794819 }, { "content": "#[no_mangle]\n\nfn main() -> ! {\n\n let dp = target::Peripherals::take().expect(\"Failed to obtain Peripherals\");\n\n\n\n let mut timg0 = dp.TIMG0;\n\n let mut timg1 = dp.TIMG1;\n\n\n\n // (https://github.com/espressif/openocd-esp32/blob/97ba3a6bb9eaa898d91df923bbedddfeaaaf28c9/src/target/esp32.c#L431)\n\n // openocd disables the watchdog timers on halt\n\n // we will do it manually on startup\n\n disable_timg_wdts(&mut timg0, &mut timg1);\n\n\n\n let (mut dport, dport_clock_control) = dp.DPORT.split();\n\n\n\n // setup clocks & watchdog\n\n let mut clock_control = ClockControl::new(\n\n dp.RTCCNTL,\n\n dp.APB_CTRL,\n\n dport_clock_control,\n\n esp32_hal::clock_control::XTAL_FREQUENCY_AUTO,\n\n )\n", "file_path": "examples/multicore.rs", "rank": 40, "score": 58281.79524794819 }, { "content": "#[doc(hidden)]\n\npub trait TimerInst {}\n\n\n", "file_path": "src/timer/mod.rs", "rank": 41, "score": 57888.79339906647 }, { "content": "pub trait SensExt {\n\n fn split(self) -> AvailableAnalog;\n\n}\n\n\n\nimpl SensExt for SENS {\n\n fn split(self) -> AvailableAnalog {\n\n AvailableAnalog {\n\n adc1: ADC1 {\n\n _private: PhantomData,\n\n },\n\n adc2: ADC2 {\n\n _private: PhantomData,\n\n },\n\n dac1: DAC1 {\n\n 
_private: PhantomData,\n\n },\n\n dac2: DAC2 {\n\n _private: PhantomData,\n\n },\n\n }\n\n }\n\n}\n", "file_path": "src/analog/mod.rs", "rank": 42, "score": 57888.79339906647 }, { "content": "pub trait Pins<UART> {}\n", "file_path": "src/serial.rs", "rank": 43, "score": 57114.985337013204 }, { "content": "pub trait Quantity: Sized {}\n", "file_path": "src/units.rs", "rank": 44, "score": 57114.985337013204 }, { "content": "fn cpu1_start() -> ! {\n\n let mut x: u32 = 0;\n\n let mut prev_ccount = 0;\n\n\n\n (&TX).lock(|tx| {\n\n writeln!(\n\n tx.as_mut().unwrap(),\n\n \"Stack Pointer Core 1: {:08x?}\",\n\n get_stack_pointer()\n\n )\n\n .unwrap()\n\n });\n\n\n\n loop {\n\n let cycles = ClockControlConfig {}.cpu_frequency() / BLINK_HZ;\n\n let start = get_cycle_count();\n\n let mut loop_count = 0;\n\n while get_cycle_count().wrapping_sub(start) < cycles {\n\n loop_count += 1;\n\n }\n\n\n\n print_info(x, loop_count, &mut prev_ccount);\n\n x = x.wrapping_add(1);\n\n }\n\n}\n\n\n", "file_path": "examples/multicore.rs", "rank": 45, "score": 56686.38302763419 }, { "content": "#[interrupt]\n\nfn FROM_CPU_INTR3() {\n\n locked_print(\"FROM_CPU_INTR3\");\n\n clear_software_interrupt(Interrupt::FROM_CPU_INTR3).unwrap();\n\n}\n\n\n", "file_path": "examples/exception.rs", "rank": 46, "score": 56686.38302763419 }, { "content": "#[interrupt(INTERNAL_SOFTWARE_LEVEL_3_INTR)]\n\nfn software_level_3() {\n\n locked_print(\"INTERNAL_SOFTWARE_LEVEL_3_INTR\");\n\n clear_software_interrupt(Interrupt::FROM_CPU_INTR3).unwrap();\n\n}\n\n\n", "file_path": "examples/exception.rs", "rank": 47, "score": 56686.38302763419 }, { "content": "#[ram]\n\nfn map_interrupt(\n\n core: crate::Core,\n\n interrupt: Interrupt,\n\n cpu_interrupt: CPUInterrupt,\n\n) -> Result<(), Error> {\n\n if cpu_interrupt.0 >= 32 {\n\n return Err(Error::InvalidCPUInterrupt);\n\n }\n\n if interrupt.nr() >= Interrupt::INTERNAL_TIMER0_INTR.nr() {\n\n return Err(Error::InternalInterruptsCannotBeMapped);\n\n }\n\n unsafe {\n\n let 
base_reg = match core {\n\n crate::Core::PRO => (*DPORT::ptr()).pro_mac_intr_map.as_ptr(),\n\n crate::Core::APP => (*DPORT::ptr()).app_mac_intr_map.as_ptr(),\n\n };\n\n\n\n let reg = base_reg.add(interrupt.nr() as usize);\n\n *reg = cpu_interrupt.0 as u32;\n\n };\n", "file_path": "src/interrupt.rs", "rank": 48, "score": 56686.38302763419 }, { "content": "#[interrupt]\n\nfn FROM_CPU_INTR0() {\n\n locked_print(\"FROM_CPU_INTR0\");\n\n clear_software_interrupt(Interrupt::FROM_CPU_INTR0).unwrap();\n\n}\n\n\n", "file_path": "examples/exception.rs", "rank": 49, "score": 56686.38302763419 }, { "content": "#[interrupt]\n\nfn FROM_CPU_INTR2() {\n\n locked_print(\"FROM_CPU_INTR2\");\n\n clear_software_interrupt(Interrupt::FROM_CPU_INTR2).unwrap();\n\n}\n\n\n", "file_path": "examples/exception.rs", "rank": 50, "score": 56686.38302763419 }, { "content": "#[interrupt(INTERNAL_SOFTWARE_LEVEL_1_INTR)]\n\nfn random_name() {\n\n locked_print(\"INTERNAL_SOFTWARE_LEVEL_1_INTR\");\n\n clear_software_interrupt(Interrupt::FROM_CPU_INTR3).unwrap();\n\n}\n\n\n", "file_path": "examples/exception.rs", "rank": 51, "score": 56686.38302763419 }, { "content": "#[interrupt]\n\nfn FROM_CPU_INTR1() {\n\n locked_print(\"Start FROM_CPU_INTR1\");\n\n interrupt::set_software_interrupt(Interrupt::FROM_CPU_INTR0).unwrap();\n\n interrupt::set_software_interrupt(Interrupt::FROM_CPU_INTR2).unwrap();\n\n locked_print(\"End FROM_CPU_INTR1\");\n\n clear_software_interrupt(Interrupt::FROM_CPU_INTR1).unwrap();\n\n}\n\n\n", "file_path": "examples/exception.rs", "rank": 52, "score": 56686.38302763419 }, { "content": "pub trait PinRx<UART> {}\n\n\n\nimpl<UART, TX, RX> Pins<UART> for (TX, RX)\n\nwhere\n\n TX: PinTx<UART>,\n\n RX: PinRx<UART>,\n\n{\n\n}\n\n\n\n/// A filler type for when the Tx pin is unnecessary\n\npub struct NoTx;\n\n/// A filler type for when the Rx pin is unnecessary\n\npub struct NoRx;\n\n\n\nimpl PinTx<UART0> for NoTx {}\n\nimpl PinRx<UART0> for NoRx {}\n\nimpl PinTx<UART1> for NoTx {}\n\nimpl 
PinRx<UART1> for NoRx {}\n\nimpl PinTx<UART2> for NoTx {}\n\nimpl PinRx<UART2> for NoRx {}\n", "file_path": "src/serial.rs", "rank": 53, "score": 55745.85095696399 }, { "content": "pub trait PinTx<UART> {}\n", "file_path": "src/serial.rs", "rank": 54, "score": 55745.85095696399 }, { "content": "#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Hash, Default)]\n\nstruct CPUInterrupt(pub usize);\n\n\n", "file_path": "src/interrupt.rs", "rank": 55, "score": 55745.85095696399 }, { "content": "// TODO:\n\n// - check if all called function are in ram\n\n// - check if all used data is in ram\n\n// - check that no constants are use in the function (these cannot be forced to ram)\n\nfn check_ram_function(_func: &syn::ItemFn) {\n\n // eprintln!(\"{:?}\", func);\n\n}\n\n\n", "file_path": "procmacros/src/lib.rs", "rank": 56, "score": 54654.596820497194 }, { "content": "#[interrupt]\n\nfn TG1_WDT_EDGE_INTR() {\n\n locked_print(\" TG1_WDT_EDGE_INTR\");\n\n}\n\n\n", "file_path": "examples/timer.rs", "rank": 57, "score": 53888.35004550296 }, { "content": "#[interrupt]\n\nfn TG1_T0_LEVEL_INTR() {\n\n locked_print(\" TG1_T0_LEVEL_INTR\");\n\n locked_clear(&TIMER3);\n\n}\n\n\n", "file_path": "examples/timer.rs", "rank": 58, "score": 53888.35004550296 }, { "content": "#[interrupt]\n\nfn TG0_T0_LEVEL_INTR() {\n\n locked_print(\" TG0_T0_LEVEL_INTR\");\n\n locked_clear(&TIMER0);\n\n}\n\n\n", "file_path": "examples/timer.rs", "rank": 59, "score": 53888.35004550296 }, { "content": "#[interrupt]\n\nfn TG1_WDT_LEVEL_INTR() {\n\n locked_print(\" TG1_WDT_LEVEL_INTR\");\n\n\n\n (&WATCHDOG1).lock(|watchdog1| {\n\n let watchdog1 = watchdog1.as_mut().unwrap();\n\n watchdog1.clear_interrupt();\n\n });\n\n}\n\n\n", "file_path": "examples/timer.rs", "rank": 60, "score": 53888.35004550296 }, { "content": "#[interrupt]\n\nfn TG0_T0_EDGE_INTR() {\n\n locked_print(\" TG0_T0_EDGE_INTR\");\n\n}\n\n\n", "file_path": "examples/timer.rs", "rank": 61, "score": 53888.35004550296 }, { "content": 
"#[interrupt]\n\nfn TG1_LACT_LEVEL_INTR() {\n\n locked_print(\" TG1_LACT_LEVEL_INTR\");\n\n locked_clear(&TIMER5);\n\n}\n\n\n", "file_path": "examples/timer.rs", "rank": 62, "score": 53888.35004550296 }, { "content": "#[interrupt]\n\nfn TG1_T1_LEVEL_INTR() {\n\n locked_print(\" TG1_T1_LEVEL_INTR\");\n\n locked_clear(&TIMER4);\n\n}\n\n\n", "file_path": "examples/timer.rs", "rank": 63, "score": 53888.35004550296 }, { "content": "#[ram]\n\nfn interrupt_level_to_cpu_interrupt(\n\n interrupt_level: InterruptLevel,\n\n edge: bool,\n\n) -> Result<CPUInterrupt, Error> {\n\n #[ram]\n\n const INTERRUPT_LEVEL_TO_CPU_INTERRUPT_EDGE: [Option<CPUInterrupt>; 8] = [\n\n Some(CPUInterrupt(6)), // Disable (assign to internal interrupt)\n\n Some(CPUInterrupt(10)), // Level 1 edge triggered\n\n None, // Level 2 edge triggered not supported\n\n Some(CPUInterrupt(22)), // Level 3 edge triggered\n\n Some(CPUInterrupt(28)), // Level 4 edge triggered\n\n None, // Level 5 edge triggered not supported\n\n None, // Level 6 = Debug not supported for peripherals\n\n Some(CPUInterrupt(14)), // Level 7 = NMI edge triggered\n\n ];\n\n #[ram]\n\n const INTERRUPT_LEVEL_TO_CPU_INTERRUPT_LEVEL: [Option<CPUInterrupt>; 8] = [\n\n Some(CPUInterrupt(6)), // Disable (assign to internal interrupt)\n\n Some(CPUInterrupt(0)), // Level 1 level triggered\n\n Some(CPUInterrupt(19)), // Level 2 level triggered\n", "file_path": "src/interrupt.rs", "rank": 64, "score": 53888.35004550296 }, { "content": "#[interrupt]\n\nfn TG0_LACT_EDGE_INTR() {\n\n locked_print(\" TG0_LACT_EDGE_INTR\");\n\n}\n\n\n", "file_path": "examples/timer.rs", "rank": 65, "score": 53888.35004550296 }, { "content": "#[interrupt]\n\nfn TG0_LACT_LEVEL_INTR() {\n\n locked_print(\" TG0_LACT_LEVEL_INTR\");\n\n locked_clear(&TIMER2);\n\n}\n\n\n", "file_path": "examples/timer.rs", "rank": 66, "score": 53888.35004550296 }, { "content": "pub trait Count: Quantity + Into<Ticks> {}\n\n\n", "file_path": "src/units.rs", "rank": 67, "score": 
52373.0043691773 }, { "content": "pub trait Time: Quantity + Into<NanoSeconds> {}\n", "file_path": "src/units.rs", "rank": 68, "score": 51111.80175047503 }, { "content": "#[panic_handler]\n\nfn panic(_info: &PanicInfo) -> ! {\n\n loop {}\n\n}\n", "file_path": "examples/dac.rs", "rank": 69, "score": 50148.68404471354 }, { "content": "fn locked_print(str: &str) {\n\n (&TX).lock(|tx| {\n\n let tx = tx.as_mut().unwrap();\n\n\n\n writeln!(tx, \"{}\", str).unwrap();\n\n });\n\n}\n\n\n", "file_path": "examples/timer.rs", "rank": 70, "score": 50148.68404471354 }, { "content": "#[panic_handler]\n\nfn panic(_info: &PanicInfo) -> ! {\n\n loop {}\n\n}\n", "file_path": "examples/adc.rs", "rank": 71, "score": 50148.68404471354 }, { "content": "#[panic_handler]\n\nfn panic(info: &PanicInfo) -> ! {\n\n dprintln!(\"\\n\\n*** {:?}\", info);\n\n loop {}\n\n}\n\n\n", "file_path": "examples/alloc.rs", "rank": 72, "score": 50148.68404471354 }, { "content": "fn locked_print(str: &str) {\n\n (&TX).lock(|tx| {\n\n let tx = tx.as_mut().unwrap();\n\n\n\n writeln!(\n\n tx,\n\n \" {}, Level: {}\",\n\n str,\n\n xtensa_lx6::interrupt::get_level()\n\n )\n\n .unwrap();\n\n });\n\n}\n\n\n", "file_path": "examples/exception.rs", "rank": 73, "score": 50148.68404471354 }, { "content": "#[panic_handler]\n\nfn panic(info: &PanicInfo) -> ! {\n\n dprintln!(\"\\n\\n*** {:?}\", info);\n\n loop {}\n\n}\n", "file_path": "examples/ram.rs", "rank": 74, "score": 50148.68404471354 }, { "content": "#[panic_handler]\n\nfn panic(info: &PanicInfo) -> ! {\n\n dprintln!(\"\\n\\n*** {:?}\", info);\n\n loop {}\n\n}\n", "file_path": "examples/timer.rs", "rank": 75, "score": 50148.68404471354 }, { "content": "#[panic_handler]\n\nfn panic(info: &PanicInfo) -> ! 
{\n\n // park the other core\n\n unsafe { ClockControlConfig {}.park_core(esp32_hal::get_other_core()) };\n\n\n\n // print panic message\n\n dprintln!(\"\\n\\n*** {:?}\", info);\n\n\n\n // park this core\n\n unsafe { ClockControlConfig {}.park_core(esp32_hal::get_core()) };\n\n\n\n dprintln!(\"Not reached because core is parked.\");\n\n\n\n // this statement will not be reached, but is needed to make this a diverging function\n\n loop {}\n\n}\n", "file_path": "examples/multicore.rs", "rank": 76, "score": 50148.68404471354 }, { "content": "#[panic_handler]\n\nfn panic(_info: &PanicInfo) -> ! {\n\n loop {}\n\n}\n", "file_path": "examples/serial.rs", "rank": 77, "score": 50148.68404471354 }, { "content": "#[ram]\n\n#[panic_handler]\n\nfn panic(info: &PanicInfo) -> ! {\n\n dprintln!(\"\\n\\n*** {:?}\", info);\n\n loop {}\n\n}\n", "file_path": "examples/exception.rs", "rank": 78, "score": 50148.68404471354 }, { "content": "#[panic_handler]\n\nfn panic(info: &PanicInfo) -> ! {\n\n dprintln!(\"\\n\\n*** {:?}\", info);\n\n loop {}\n\n}\n", "file_path": "examples/rtccntl.rs", "rank": 79, "score": 50148.68404471354 }, { "content": "#[panic_handler]\n\nfn panic(info: &PanicInfo) -> ! {\n\n dprintln!(\"\\n\\n*** {:?}\", info);\n\n loop {}\n\n}\n\n\n", "file_path": "examples/mem.rs", "rank": 80, "score": 50148.68404471354 }, { "content": "#[panic_handler]\n\nfn panic(_info: &PanicInfo) -> ! {\n\n loop {}\n\n}\n", "file_path": "examples/hall.rs", "rank": 81, "score": 50148.68404471354 }, { "content": "pub trait CountU64: Quantity + Into<TicksU64> {}\n\n\n\n/// defines and implements extension traits for quantities with units\n\nmacro_rules! 
define {\n\n ($primitive:ident, $trait:ident, $( ($type: ident, $quantity: ident, $unit: ident,\n\n $print_unit: literal), )+) => {\n\n $(\n\n #[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Hash, Default)]\n\n pub struct $quantity(pub $primitive);\n\n\n\n impl Quantity for $quantity {}\n\n impl $type for $quantity {}\n\n )*\n\n\n\n pub trait $trait {\n\n $(\n\n #[allow(non_snake_case)]\n\n fn $unit(self) -> $quantity;\n\n )*\n\n }\n", "file_path": "src/units.rs", "rank": 82, "score": 49946.25213937096 }, { "content": "pub trait TimeU64: Quantity + Into<NanoSecondsU64> {}\n", "file_path": "src/units.rs", "rank": 83, "score": 48865.87124139223 }, { "content": "#[doc(hidden)]\n\npub trait TimerGroup: core::ops::Deref {}\n\nimpl TimerGroup for target::TIMG0 {}\n\nimpl TimerGroup for target::TIMG1 {}\n\n\n", "file_path": "src/timer/mod.rs", "rank": 84, "score": 48865.87124139223 }, { "content": "/// Timer trait\n\npub trait TimerWithInterrupt: CountDown + Periodic + Cancel {\n\n /// Starts listening for an [Event]\n\n fn listen(&mut self, event: Event);\n\n\n\n /// Stops listening for an [Event]\n\n fn unlisten(&mut self, event: Event);\n\n\n\n /// Clear interrupt once fired\n\n fn clear_interrupt(&mut self) -> &mut Self;\n\n}\n\n\n\nimpl<TIMG: TimerGroup> Timer<TIMG, Timer0> {\n\n /// Create new timer resources\n\n ///\n\n /// This function will create 2 timers and 1 watchdog for a timer group.\n\n /// It uses the clock_control_config for obtaining the clock configuration.\n\n ///\n\n /// *Note: time to clock tick conversions are done with the clock frequency when the\n\n /// [start](embedded_hal::timer::CountDown::start) function is called. 
The clock frequency is not locked.*\n\n pub fn new(\n", "file_path": "src/timer/mod.rs", "rank": 85, "score": 48865.87124139223 }, { "content": "fn attr_none_fn(uart: &mut esp32_hal::serial::Serial<target::UART0, (NoTx, NoRx)>) {\n\n writeln!(\n\n uart,\n\n \"{:<40}: {:08x?}\",\n\n \"attr_none_fn\",\n\n get_program_counter()\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "examples/ram.rs", "rank": 86, "score": 45124.50364501008 }, { "content": "#[ram]\n\nfn attr_ram_fn(uart: &mut esp32_hal::serial::Serial<target::UART0, (NoTx, NoRx)>) {\n\n writeln!(\n\n uart,\n\n \"{:<40}: {:08x?}\",\n\n \"attr_ram_fn\",\n\n get_program_counter()\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "examples/ram.rs", "rank": 87, "score": 45124.50364501008 }, { "content": "fn disable_rtc_wdt(rtccntl: &mut target::RTCCNTL) {\n\n /* Disables the RTCWDT */\n\n rtccntl\n\n .wdtwprotect\n\n .write(|w| unsafe { w.bits(WDT_WKEY_VALUE) });\n\n rtccntl.wdtconfig0.modify(|_, w| unsafe {\n\n w.wdt_stg0()\n\n .bits(0x0)\n\n .wdt_stg1()\n\n .bits(0x0)\n\n .wdt_stg2()\n\n .bits(0x0)\n\n .wdt_stg3()\n\n .bits(0x0)\n\n .wdt_flashboot_mod_en()\n\n .clear_bit()\n\n .wdt_en()\n\n .clear_bit()\n\n });\n\n rtccntl.wdtwprotect.write(|w| unsafe { w.bits(0x0) });\n\n}\n\n\n", "file_path": "examples/blinky.rs", "rank": 88, "score": 44040.91623924911 }, { "content": "#[ram(rtc_slow)]\n\nfn attr_ram_fn_rtc_slow(uart: &mut esp32_hal::serial::Serial<target::UART0, (NoTx, NoRx)>) {\n\n writeln!(\n\n uart,\n\n \"{:<40}: {:08x?}\",\n\n \"attr_ram_fn_rtc_slow\",\n\n get_program_counter()\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "examples/ram.rs", "rank": 89, "score": 43740.60556287508 }, { "content": "#[ram(rtc_fast)]\n\nfn attr_ram_fn_rtc_fast(uart: &mut esp32_hal::serial::Serial<target::UART0, (NoTx, NoRx)>) {\n\n writeln!(\n\n uart,\n\n \"{:<40}: {:08x?}\",\n\n \"attr_ram_fn_rtc_fast\",\n\n get_program_counter()\n\n )\n\n .unwrap();\n\n}\n\n\n\nstatic ATTR_NONE_STATIC: [u8; 16] = 
*b\"ATTR_NONE_STATIC\";\n\n\n\nstatic mut ATTR_NONE_STATIC_MUT: [u8; 20] = *b\"ATTR_NONE_STATIC_MUT\";\n\n\n\nstatic ATTR_NONE_STATIC_BSS: [u8; 32] = [0; 32];\n\n\n\nstatic mut ATTR_NONE_STATIC_MUT_BSS: [u8; 32] = [0; 32];\n\n\n\n#[ram]\n\nstatic ATTR_RAM_STATIC: [u8; 15] = *b\"ATTR_RAM_STATIC\";\n", "file_path": "examples/ram.rs", "rank": 90, "score": 43740.60556287508 }, { "content": "#[ram]\n\nfn cpu_interrupt_to_level(cpu_interrupt: CPUInterrupt) -> InterruptLevel {\n\n #[ram]\n\n const CPU_INTERRUPT_TO_LEVEL: [usize; 32] = [\n\n 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 7, 3, 5, 1, 1, 2, 2, 2, 3, 3, 4, 4, 5, 3, 4, 3,\n\n 4, 5,\n\n ];\n\n InterruptLevel(CPU_INTERRUPT_TO_LEVEL[cpu_interrupt.0 as usize])\n\n}\n\n\n\n#[ram]\n\nstatic mut INTERRUPT_LEVELS: [u128; 8] = [0u128; 8];\n\n\n\n#[ram]\n\nstatic INTERRUPT_LEVELS_MUTEX: CriticalSectionSpinLockMutex<bool> =\n\n CriticalSectionSpinLockMutex::new(false);\n\n\n\n#[xtensa_lx6_rt::interrupt(1)]\n\n#[ram]\n\nunsafe fn level_1_handler(level: u32) {\n\n handle_interrupts(level)\n", "file_path": "src/interrupt.rs", "rank": 91, "score": 43379.730872361324 }, { "content": "fn print_heap_info(output: &mut dyn core::fmt::Write) {\n\n writeln!(output).unwrap();\n\n print_single_heap_info(output, &GLOBAL_ALLOCATOR, \"Global\");\n\n print_single_heap_info(output, &DRAM_ALLOCATOR, \"DRAM\");\n\n print_single_heap_info(output, &IRAM_ALLOCATOR, \"IRAM\");\n\n #[cfg(feature = \"external_ram\")]\n\n print_single_heap_info(output, &EXTERNAL_ALLOCATOR, \"External RAM\");\n\n writeln!(output).unwrap();\n\n}\n\n\n\nconst WDT_WKEY_VALUE: u32 = 0x50D83AA1;\n\n\n", "file_path": "examples/alloc.rs", "rank": 92, "score": 40131.84667230105 }, { "content": "fn extract_cfgs(attrs: Vec<Attribute>) -> (Vec<Attribute>, Vec<Attribute>) {\n\n let mut cfgs = vec![];\n\n let mut not_cfgs = vec![];\n\n\n\n for attr in attrs {\n\n if eq(&attr, \"cfg\") {\n\n cfgs.push(attr);\n\n } else {\n\n not_cfgs.push(attr);\n\n }\n\n }\n\n\n\n (cfgs, 
not_cfgs)\n\n}\n\n\n", "file_path": "procmacros/src/lib.rs", "rank": 93, "score": 38447.66373143817 }, { "content": "fn disable_timg_wdts(timg0: &mut target::TIMG0, timg1: &mut target::TIMG1) {\n\n timg0\n\n .wdtwprotect\n\n .write(|w| unsafe { w.bits(WDT_WKEY_VALUE) });\n\n timg1\n\n .wdtwprotect\n\n .write(|w| unsafe { w.bits(WDT_WKEY_VALUE) });\n\n\n\n timg0.wdtconfig0.write(|w| unsafe { w.bits(0x0) });\n\n timg1.wdtconfig0.write(|w| unsafe { w.bits(0x0) });\n\n}\n\n\n", "file_path": "examples/alloc.rs", "rank": 94, "score": 36909.70007322758 }, { "content": "fn disable_timg_wdts(timg0: &mut target::TIMG0, timg1: &mut target::TIMG1) {\n\n timg0\n\n .wdtwprotect\n\n .write(|w| unsafe { w.bits(WDT_WKEY_VALUE) });\n\n timg1\n\n .wdtwprotect\n\n .write(|w| unsafe { w.bits(WDT_WKEY_VALUE) });\n\n\n\n timg0.wdtconfig0.write(|w| unsafe { w.bits(0x0) });\n\n timg1.wdtconfig0.write(|w| unsafe { w.bits(0x0) });\n\n}\n\n\n\n/// Basic panic handler - just loops\n", "file_path": "examples/hall.rs", "rank": 95, "score": 36909.70007322758 }, { "content": "fn disable_timg_wdts(timg0: &mut target::TIMG0, timg1: &mut target::TIMG1) {\n\n timg0\n\n .wdtwprotect\n\n .write(|w| unsafe { w.bits(WDT_WKEY_VALUE) });\n\n timg1\n\n .wdtwprotect\n\n .write(|w| unsafe { w.bits(WDT_WKEY_VALUE) });\n\n\n\n timg0.wdtconfig0.write(|w| unsafe { w.bits(0x0) });\n\n timg1.wdtconfig0.write(|w| unsafe { w.bits(0x0) });\n\n}\n\n\n\n/// Basic panic handler - just loops\n", "file_path": "examples/serial.rs", "rank": 96, "score": 36909.70007322758 }, { "content": "fn disable_timg_wdts(timg0: &mut target::TIMG0, timg1: &mut target::TIMG1) {\n\n timg0\n\n .wdtwprotect\n\n .write(|w| unsafe { w.bits(WDT_WKEY_VALUE) });\n\n timg1\n\n .wdtwprotect\n\n .write(|w| unsafe { w.bits(WDT_WKEY_VALUE) });\n\n\n\n timg0.wdtconfig0.write(|w| unsafe { w.bits(0x0) });\n\n timg1.wdtconfig0.write(|w| unsafe { w.bits(0x0) });\n\n}\n\n\n", "file_path": "examples/blinky.rs", "rank": 97, "score": 36909.70007322758 }, { 
"content": "fn disable_timg_wdts(timg0: &mut target::TIMG0, timg1: &mut target::TIMG1) {\n\n timg0\n\n .wdtwprotect\n\n .write(|w| unsafe { w.bits(WDT_WKEY_VALUE) });\n\n timg1\n\n .wdtwprotect\n\n .write(|w| unsafe { w.bits(WDT_WKEY_VALUE) });\n\n\n\n timg0.wdtconfig0.write(|w| unsafe { w.bits(0x0) });\n\n timg1.wdtconfig0.write(|w| unsafe { w.bits(0x0) });\n\n}\n\n\n\n/// Basic panic handler - just loops\n", "file_path": "examples/adc.rs", "rank": 98, "score": 36909.70007322758 }, { "content": "fn disable_timg_wdts(timg0: &mut target::TIMG0, timg1: &mut target::TIMG1) {\n\n timg0\n\n .wdtwprotect\n\n .write(|w| unsafe { w.bits(WDT_WKEY_VALUE) });\n\n timg1\n\n .wdtwprotect\n\n .write(|w| unsafe { w.bits(WDT_WKEY_VALUE) });\n\n\n\n timg0.wdtconfig0.write(|w| unsafe { w.bits(0x0) });\n\n timg1.wdtconfig0.write(|w| unsafe { w.bits(0x0) });\n\n}\n\n\n\n/// Basic panic handler - just loops\n", "file_path": "examples/dac.rs", "rank": 99, "score": 36909.70007322758 } ]
Rust
src/lib.rs
gimli-rs/cpp_demangle
a8afba1db064469278c4530ad9bad28ec4d6c161
#![deny(missing_docs)] #![deny(missing_debug_implementations)] #![deny(unsafe_code)] #![allow(unknown_lints)] #![allow(clippy::inline_always)] #![allow(clippy::redundant_field_names)] #![cfg_attr(all(not(feature = "std"), feature = "alloc"), no_std)] #![cfg_attr(all(not(feature = "std"), feature = "alloc"), feature(alloc))] #[macro_use] extern crate cfg_if; cfg_if! { if #[cfg(all(not(feature = "std"), feature = "alloc"))] { extern crate core as std; #[macro_use] extern crate alloc; mod imports { pub use alloc::boxed; pub use alloc::vec; pub use alloc::string; pub use alloc::borrow; pub use alloc::collections::btree_map; } } else { mod imports { pub use std::boxed; pub use std::vec; pub use std::string; pub use std::borrow; pub use std::collections::btree_map; } } } use imports::*; use string::String; use vec::Vec; #[macro_use] mod logging; pub mod ast; pub mod error; mod index_str; mod subs; use ast::{Demangle, Parse, ParseContext}; use error::{Error, Result}; use index_str::IndexStr; use std::fmt; use std::num::NonZeroU32; #[derive(Clone, Copy, Debug, Default)] #[repr(C)] pub struct ParseOptions { recursion_limit: Option<NonZeroU32>, } impl ParseOptions { pub fn recursion_limit(mut self, limit: u32) -> Self { self.recursion_limit = Some(NonZeroU32::new(limit).expect("Recursion limit must be > 0")); self } } #[derive(Clone, Copy, Debug, Default)] #[repr(C)] pub struct DemangleOptions { no_params: bool, no_return_type: bool, recursion_limit: Option<NonZeroU32>, } impl DemangleOptions { pub fn new() -> Self { Default::default() } pub fn no_params(mut self) -> Self { self.no_params = true; self } pub fn no_return_type(mut self) -> Self { self.no_return_type = true; self } pub fn recursion_limit(mut self, limit: u32) -> Self { self.recursion_limit = Some(NonZeroU32::new(limit).expect("Recursion limit must be > 0")); self } } pub type OwnedSymbol = Symbol<Vec<u8>>; pub type BorrowedSymbol<'a> = Symbol<&'a [u8]>; #[derive(Clone, Debug, PartialEq)] pub struct Symbol<T> { 
raw: T, substitutions: subs::SubstitutionTable, parsed: ast::MangledName, } impl<T> Symbol<T> where T: AsRef<[u8]>, { #[inline] pub fn new(raw: T) -> Result<Symbol<T>> { Self::new_with_options(raw, &Default::default()) } pub fn new_with_options(raw: T, options: &ParseOptions) -> Result<Symbol<T>> { let mut substitutions = subs::SubstitutionTable::new(); let parsed = { let ctx = ParseContext::new(*options); let input = IndexStr::new(raw.as_ref()); let (parsed, tail) = ast::MangledName::parse(&ctx, &mut substitutions, input)?; debug_assert!(ctx.recursion_level() == 0); if tail.is_empty() { parsed } else { return Err(Error::UnexpectedText); } }; let symbol = Symbol { raw: raw, substitutions: substitutions, parsed: parsed, }; log!( "Successfully parsed '{}' as AST = {:#?} substitutions = {:#?}", String::from_utf8_lossy(symbol.raw.as_ref()), symbol.parsed, symbol.substitutions ); Ok(symbol) } #[allow(clippy::trivially_copy_pass_by_ref)] pub fn demangle(&self, options: &DemangleOptions) -> ::std::result::Result<String, fmt::Error> { let mut out = String::new(); { let mut ctx = ast::DemangleContext::new( &self.substitutions, self.raw.as_ref(), *options, &mut out, ); self.parsed.demangle(&mut ctx, None)?; } Ok(out) } #[allow(clippy::trivially_copy_pass_by_ref)] pub fn structured_demangle<W: DemangleWrite>( &self, out: &mut W, options: &DemangleOptions, ) -> fmt::Result { let mut ctx = ast::DemangleContext::new(&self.substitutions, self.raw.as_ref(), *options, out); self.parsed.demangle(&mut ctx, None) } } #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum DemangleNodeType { Prefix, TemplatePrefix, TemplateArgs, UnqualifiedName, TemplateParam, Decltype, DataMemberPrefix, NestedName, VirtualTable, __NonExhaustive, } pub trait DemangleWrite { fn push_demangle_node(&mut self, _: DemangleNodeType) {} fn write_string(&mut self, s: &str) -> fmt::Result; fn pop_demangle_node(&mut self) {} } impl<W: fmt::Write> DemangleWrite for W { fn write_string(&mut 
self, s: &str) -> fmt::Result { fmt::Write::write_str(self, s) } } impl<'a, T> Symbol<&'a T> where T: AsRef<[u8]> + ?Sized, { #[inline] pub fn with_tail(input: &'a T) -> Result<(BorrowedSymbol<'a>, &'a [u8])> { Self::with_tail_and_options(input, &Default::default()) } pub fn with_tail_and_options( input: &'a T, options: &ParseOptions, ) -> Result<(BorrowedSymbol<'a>, &'a [u8])> { let mut substitutions = subs::SubstitutionTable::new(); let ctx = ParseContext::new(*options); let idx_str = IndexStr::new(input.as_ref()); let (parsed, tail) = ast::MangledName::parse(&ctx, &mut substitutions, idx_str)?; debug_assert!(ctx.recursion_level() == 0); let symbol = Symbol { raw: input.as_ref(), substitutions: substitutions, parsed: parsed, }; log!( "Successfully parsed '{}' as AST = {:#?} substitutions = {:#?}", String::from_utf8_lossy(symbol.raw), symbol.parsed, symbol.substitutions ); Ok((symbol, tail.into())) } } impl<T> fmt::Display for Symbol<T> where T: AsRef<[u8]>, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut out = String::new(); { let options = DemangleOptions::default(); let mut ctx = ast::DemangleContext::new( &self.substitutions, self.raw.as_ref(), options, &mut out, ); self.parsed.demangle(&mut ctx, None).map_err(|err| { log!("Demangling error: {:#?}", err); fmt::Error })?; } write!(f, "{}", &out) } }
#![deny(missing_docs)] #![deny(missing_debug_implementations)] #![deny(unsafe_code)] #![allow(unknown_lints)] #![allow(clippy::inline_always)] #![allow(clippy::redundant_field_names)] #![cfg_attr(all(not(feature = "std"), feature = "alloc"), no_std)] #![cfg_attr(all(not(feature = "std"), feature = "alloc"), feature(alloc))] #[macro_use] extern crate cfg_if; cfg_if! { if #[cfg(all(not(feature = "std"), feature = "alloc"))] { extern crate core as std; #[macro_use] extern crate alloc; mod imports { pub use alloc::boxed; pub use alloc::vec; pub use alloc::string; pub use alloc::borrow; pub use alloc::collections::btree_map; } } else { mod imports { pub use std::boxed; pub use std::vec; pub use std::string; pub use std::borrow; pub use std::collections::btree_map; } } } use imports::*; use string::String; use vec::Vec; #[macro_use] mod logging; pub mod ast; pub mod error; mod index_str; mod subs; use ast::{Demangle, Parse, ParseContext}; use error::{Error, Result}; use index_str::IndexStr; use std::fmt; use std::num::NonZeroU32; #[derive(Clone, Copy, Debug, Default)] #[repr(C)] pub struct ParseOptions { recursion_limit: Option<NonZeroU32>, } impl ParseOptions { pub fn recursion_limit(mut self, limit: u32) -> Self { self.recursion_limit = Some(NonZeroU32::new(limit).expect("Recursion limit must be > 0")); self } } #[derive(Clone, Copy, Debug, Default)] #[repr(C)] pub struct DemangleOptions { no_params: bool, no_return_type: bool, recursion_limit: Option<NonZeroU32>, } impl DemangleOptions { pub fn new() -> Self { Default::default() } pub fn no_params(mut self) -> Self { self.no_params = true; self } pub fn no_return_type(mut self) -> Self { self.no_return_type = true; self } pub fn recursion_limit(mut self, limit: u32) -> Self { self.recursion_limit = Some(NonZeroU32::new(limit).expect("Recursion limit must be > 0")); self } } pub type OwnedSymbol = Symbol<Vec<u8>>; pub type BorrowedSymbol<'a> = Symbol<&'a [u8]>; #[derive(Clone, Debug, PartialEq)] pub struct Symbol<T> { 
raw: T, substitutions: subs::SubstitutionTable, parsed: ast::MangledName, } impl<T> Symbol<T> where T: AsRef<[u8]>, { #[inline] pub fn new(raw: T) -> Result<Symbol<T>> { Self::new_with_options(raw, &Default::default()) } pub fn new_with_options(raw: T, options: &ParseOptions) -> Result<Symbol<T>> { let mut substitutions = subs::SubstitutionTable::new(); let parsed = { let ctx = ParseContext::new(*options); let input = IndexStr::new(raw.as_ref()); let (parsed, tail) = ast::MangledName::parse(&ctx, &mut substitutions, input)?; debug_assert!(ctx.recursion_level() == 0);
}; let symbol = Symbol { raw: raw, substitutions: substitutions, parsed: parsed, }; log!( "Successfully parsed '{}' as AST = {:#?} substitutions = {:#?}", String::from_utf8_lossy(symbol.raw.as_ref()), symbol.parsed, symbol.substitutions ); Ok(symbol) } #[allow(clippy::trivially_copy_pass_by_ref)] pub fn demangle(&self, options: &DemangleOptions) -> ::std::result::Result<String, fmt::Error> { let mut out = String::new(); { let mut ctx = ast::DemangleContext::new( &self.substitutions, self.raw.as_ref(), *options, &mut out, ); self.parsed.demangle(&mut ctx, None)?; } Ok(out) } #[allow(clippy::trivially_copy_pass_by_ref)] pub fn structured_demangle<W: DemangleWrite>( &self, out: &mut W, options: &DemangleOptions, ) -> fmt::Result { let mut ctx = ast::DemangleContext::new(&self.substitutions, self.raw.as_ref(), *options, out); self.parsed.demangle(&mut ctx, None) } } #[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)] pub enum DemangleNodeType { Prefix, TemplatePrefix, TemplateArgs, UnqualifiedName, TemplateParam, Decltype, DataMemberPrefix, NestedName, VirtualTable, __NonExhaustive, } pub trait DemangleWrite { fn push_demangle_node(&mut self, _: DemangleNodeType) {} fn write_string(&mut self, s: &str) -> fmt::Result; fn pop_demangle_node(&mut self) {} } impl<W: fmt::Write> DemangleWrite for W { fn write_string(&mut self, s: &str) -> fmt::Result { fmt::Write::write_str(self, s) } } impl<'a, T> Symbol<&'a T> where T: AsRef<[u8]> + ?Sized, { #[inline] pub fn with_tail(input: &'a T) -> Result<(BorrowedSymbol<'a>, &'a [u8])> { Self::with_tail_and_options(input, &Default::default()) } pub fn with_tail_and_options( input: &'a T, options: &ParseOptions, ) -> Result<(BorrowedSymbol<'a>, &'a [u8])> { let mut substitutions = subs::SubstitutionTable::new(); let ctx = ParseContext::new(*options); let idx_str = IndexStr::new(input.as_ref()); let (parsed, tail) = ast::MangledName::parse(&ctx, &mut substitutions, idx_str)?; debug_assert!(ctx.recursion_level() == 0); 
let symbol = Symbol { raw: input.as_ref(), substitutions: substitutions, parsed: parsed, }; log!( "Successfully parsed '{}' as AST = {:#?} substitutions = {:#?}", String::from_utf8_lossy(symbol.raw), symbol.parsed, symbol.substitutions ); Ok((symbol, tail.into())) } } impl<T> fmt::Display for Symbol<T> where T: AsRef<[u8]>, { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mut out = String::new(); { let options = DemangleOptions::default(); let mut ctx = ast::DemangleContext::new( &self.substitutions, self.raw.as_ref(), options, &mut out, ); self.parsed.demangle(&mut ctx, None).map_err(|err| { log!("Demangling error: {:#?}", err); fmt::Error })?; } write!(f, "{}", &out) } }
if tail.is_empty() { parsed } else { return Err(Error::UnexpectedText); }
if_condition
[ { "content": "#[allow(unsafe_code)]\n\nfn parse_number(base: u32, allow_signed: bool, mut input: IndexStr) -> Result<(isize, IndexStr)> {\n\n if input.is_empty() {\n\n return Err(error::Error::UnexpectedEnd);\n\n }\n\n\n\n let num_is_negative = if allow_signed && input.as_ref()[0] == b'n' {\n\n input = input.range_from(1..);\n\n\n\n if input.is_empty() {\n\n return Err(error::Error::UnexpectedEnd);\n\n }\n\n\n\n true\n\n } else {\n\n false\n\n };\n\n\n\n let num_numeric = input\n\n .as_ref()\n\n .iter()\n", "file_path": "src/ast.rs", "rank": 0, "score": 106610.51204140321 }, { "content": "/// Print all the lines from the given `input` to `out`, with all mangled C++\n\n/// symbols replaced with their demangled form.\n\nfn demangle_all<R, W>(input: &mut R, out: &mut W, options: DemangleOptions) -> io::Result<()>\n\nwhere\n\n R: BufRead,\n\n W: Write,\n\n{\n\n let mut buf = vec![];\n\n\n\n while input.read_until(b'\\n', &mut buf)? > 0 {\n\n let nl = buf.ends_with(&[b'\\n']);\n\n if nl {\n\n buf.pop();\n\n }\n\n demangle_line(out, &buf[..], options)?;\n\n if nl {\n\n write!(out, \"\\n\")?;\n\n }\n\n buf.clear();\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/cppfilt.rs", "rank": 1, "score": 103260.6048222924 }, { "content": "/// Print the given `line` to `out`, with all mangled C++ symbols replaced with\n\n/// their demangled form.\n\nfn demangle_line<W>(out: &mut W, line: &[u8], options: DemangleOptions) -> io::Result<()>\n\nwhere\n\n W: Write,\n\n{\n\n let mut line = line;\n\n\n\n while let Some(idx) = find_mangled(line) {\n\n write!(out, \"{}\", String::from_utf8_lossy(&line[..idx]))?;\n\n\n\n if let Ok((sym, tail)) = BorrowedSymbol::with_tail(&line[idx..]) {\n\n let demangled = sym\n\n .demangle(&options)\n\n .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;\n\n write!(out, \"{}\", demangled)?;\n\n line = tail;\n\n } else {\n\n write!(out, \"_Z\")?;\n\n line = &line[2..];\n\n }\n\n }\n\n\n\n write!(out, \"{}\", 
String::from_utf8_lossy(line))\n\n}\n\n\n", "file_path": "examples/cppfilt.rs", "rank": 2, "score": 96711.84139914955 }, { "content": "#[inline]\n\nfn consume<'a>(expected: &[u8], input: IndexStr<'a>) -> Result<IndexStr<'a>> {\n\n match input.try_split_at(expected.len()) {\n\n Some((head, tail)) if head == expected => Ok(tail),\n\n Some(_) => Err(error::Error::UnexpectedText),\n\n None => Err(error::Error::UnexpectedEnd),\n\n }\n\n}\n\n\n", "file_path": "src/ast.rs", "rank": 3, "score": 94987.05861130016 }, { "content": "#[doc(hidden)]\n\npub trait Demangle<'subs, W>: fmt::Debug\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n\n /// Write the demangled form of this AST node to the given context.\n\n fn demangle<'prev, 'ctx>(\n\n &'subs self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n scope: Option<ArgScopeStack<'prev, 'subs>>,\n\n ) -> fmt::Result;\n\n}\n\n\n\n/// Any AST node that can be printed as an inner type.\n\n///\n\n/// See the comments surrounding `DemangleContext::inner` for details.\n", "file_path": "src/ast.rs", "rank": 4, "score": 86732.5689477882 }, { "content": "struct AutoLogParse;\n\n\n\n#[cfg(feature = \"logging\")]\n\nthread_local! 
{\n\n static LOG_DEPTH: RefCell<usize> = RefCell::new(0);\n\n}\n\n\n\nimpl AutoLogParse {\n\n #[cfg(feature = \"logging\")]\n\n fn new(production: &'static str, input: IndexStr<'_>) -> AutoLogParse {\n\n LOG_DEPTH.with(|depth| {\n\n if *depth.borrow() == 0 {\n\n println!();\n\n }\n\n\n\n let indent: String = (0..*depth.borrow() * 4).map(|_| ' ').collect();\n\n log!(\n\n \"{}({} \\\"{}\\\" {}\",\n\n indent,\n\n production,\n", "file_path": "src/ast.rs", "rank": 5, "score": 80911.73240346611 }, { "content": "struct AutoLogDemangle;\n\n\n\nimpl AutoLogDemangle {\n\n #[cfg(feature = \"logging\")]\n\n fn new<P, W>(\n\n production: &P,\n\n ctx: &DemangleContext<W>,\n\n scope: Option<ArgScopeStack>,\n\n is_inner: bool,\n\n ) -> AutoLogDemangle\n\n where\n\n P: ?Sized + fmt::Debug,\n\n W: DemangleWrite,\n\n {\n\n LOG_DEPTH.with(|depth| {\n\n if *depth.borrow() == 0 {\n\n println!();\n\n }\n\n\n\n let indent: String = (0..*depth.borrow() * 4).map(|_| ' ').collect();\n", "file_path": "src/ast.rs", "rank": 6, "score": 80911.73240346611 }, { "content": "#[test]\n\nfn size_of_error() {\n\n use std::mem;\n\n assert_eq!(\n\n mem::size_of::<Error>(),\n\n 1,\n\n \"We should keep the size of our Error type in check\"\n\n );\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n Error::UnexpectedEnd => write!(f, \"mangled symbol ends abruptly\"),\n\n Error::UnexpectedText => write!(f, \"mangled symbol is not well-formed\"),\n\n Error::BadBackReference => write!(\n\n f,\n\n \"back reference that is out-of-bounds of the substitution table\"\n\n ),\n\n Error::BadTemplateArgReference => write!(\n\n f,\n", "file_path": "src/error.rs", "rank": 7, "score": 76237.16769493591 }, { "content": "/// Find the index of the first (potential) occurrence of a mangled C++ symbol\n\n/// in the given `haystack`.\n\nfn find_mangled(haystack: &[u8]) -> Option<usize> {\n\n if haystack.is_empty() {\n\n return None;\n\n }\n\n\n\n for i in 
0..haystack.len() - 1 {\n\n if haystack[i] == b'_' {\n\n match (\n\n haystack[i + 1],\n\n haystack.get(i + 2),\n\n haystack.get(i + 3),\n\n haystack.get(i + 4),\n\n ) {\n\n (b'Z', _, _, _) | (b'_', Some(b'Z'), _, _) | (b'_', Some(b'_'), Some(b'Z'), _) => {\n\n return Some(i)\n\n }\n\n (b'_', Some(b'_'), Some(b'_'), Some(b'Z')) => return Some(i),\n\n _ => (),\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "examples/cppfilt.rs", "rank": 8, "score": 75095.9965151663 }, { "content": "/// When formatting a mangled symbol's parsed AST as a demangled symbol, we need\n\n/// to resolve indirect references to template and function arguments with\n\n/// direct `TemplateArg` and `Type` references respectively.\n\n///\n\n/// Note that which set of arguments are implicitly referenced change as we\n\n/// enter and leave different functions' scope. One might usually use de Brujin\n\n/// indices to keep arguments within scopes separated from each other, but the\n\n/// Itanium C++ ABI does not allow us the luxury. AFAIK, when the ABI was first\n\n/// drafted, C++ did not have lambdas, and the issue did not come up at all\n\n/// since a function simply couldn't refer to the types of closed over\n\n/// variables.\n\n///\n\n/// This trait is implemented by anything that can potentially resolve arguments\n\n/// for us.\n\ntrait ArgScope<'me, 'ctx>: fmt::Debug {\n\n /// Get the current scope's leaf name.\n\n fn leaf_name(&'me self) -> Result<LeafName<'ctx>>;\n\n\n\n /// Get the current scope's `index`th template argument.\n\n fn get_template_arg(&'me self, index: usize)\n\n -> Result<(&'ctx TemplateArg, &'ctx TemplateArgs)>;\n\n\n\n /// Get the current scope's `index`th function argument's type.\n\n fn get_function_arg(&'me self, index: usize) -> Result<&'ctx Type>;\n\n}\n\n\n\n/// An `ArgScopeStack` represents the current function and template demangling\n\n/// scope we are within. 
As we enter new demangling scopes, we construct new\n\n/// `ArgScopeStack`s whose `prev` references point back to the old ones. These\n\n/// `ArgScopeStack`s are kept on the native stack, and as functions return, they\n\n/// go out of scope and we use the previous `ArgScopeStack`s again.\n\n#[derive(Copy, Clone, Debug)]\n\npub struct ArgScopeStack<'prev, 'subs>\n\nwhere\n\n 'subs: 'prev,\n\n{\n\n item: &'subs dyn ArgScope<'subs, 'subs>,\n\n in_arg: Option<(usize, &'subs TemplateArgs)>,\n\n prev: Option<&'prev ArgScopeStack<'prev, 'subs>>,\n\n}\n\n\n", "file_path": "src/ast.rs", "rank": 9, "score": 69999.09025573592 }, { "content": "fn get_crate_dir() -> io::Result<path::PathBuf> {\n\n Ok(path::PathBuf::from(\n\n env::var(\"CARGO_MANIFEST_DIR\")\n\n .map_err(|_| io::Error::new(io::ErrorKind::Other, \"no CARGO_MANIFEST_DIR\"))?,\n\n ))\n\n}\n\n\n", "file_path": "build.rs", "rank": 10, "score": 67900.22418698331 }, { "content": "#[doc(hidden)]\n\npub trait DemangleAsInner<'subs, W>: Demangle<'subs, W>\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n\n /// Write the inner demangling form of this AST node to the given context.\n\n fn demangle_as_inner<'prev, 'ctx>(\n\n &'subs self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n scope: Option<ArgScopeStack<'prev, 'subs>>,\n\n ) -> fmt::Result {\n\n self.demangle(ctx, scope)\n\n }\n\n\n\n /// Cast this `DemangleAsInner` to a `Type`.\n\n fn downcast_to_type(&self) -> Option<&Type> {\n\n None\n\n }\n\n\n\n /// Cast this `DemangleAsInner` to a `FunctionType`.\n\n fn downcast_to_function_type(&self) -> Option<&FunctionType> {\n", "file_path": "src/ast.rs", "rank": 11, "score": 66157.2358381855 }, { "content": "#[derive(Debug, Copy, Clone)]\n\nstruct DemangleState {\n\n /// How deep in the demangling are we?\n\n pub recursion_level: u32,\n\n}\n\n\n", "file_path": "src/ast.rs", "rank": 12, "score": 58461.873349741974 }, { "content": "fn assert_demangles_as(mangled: &str, expected: &str, options: Option<DemangleOptions>) {\n\n 
let sym = cpp_demangle::BorrowedSymbol::new(mangled.as_bytes())\n\n .expect(\"should parse mangled symbol ok\");\n\n\n\n let actual = if let Some(o) = options {\n\n sym.demangle(&o).expect(\"should demangle ok\")\n\n } else {\n\n let mut actual = vec![];\n\n write!(&mut actual, \"{}\", sym).expect(\"should demangle symbol ok\");\n\n String::from_utf8(actual).expect(\"should demangle to valid utf-8\")\n\n };\n\n\n\n if expected != actual {\n\n println!();\n\n println!(\"Diff:\");\n\n println!(\"--- expected\");\n\n print!(\"+++ actual\");\n\n\n\n let mut last = None;\n\n for cmp in diff::chars(expected, &actual) {\n", "file_path": "tests/tests.rs", "rank": 13, "score": 56396.482091487655 }, { "content": "/// Read `tests/libiberty-demangle-expected`, parse its input mangled symbols,\n\n/// and expected output demangled symbols, and generate test cases for them.\n\n///\n\n/// We do not support all of the options that the libiberty demangler does,\n\n/// therefore we skip tests that use options we do not intend to\n\n/// support. 
Basically, we only support `--format=gnu-v3` (which is the System V\n\n/// C++ ABI), and none of the legacy C/C++ compiler formats, nor Java/D/etc\n\n/// language symbol mangling.\n\nfn generate_compatibility_tests_from_libiberty() -> io::Result<()> {\n\n let mut tests_dir = get_crate_dir()?;\n\n tests_dir.push(\"tests\");\n\n if !tests_dir.is_dir() {\n\n // We are in `cargo publish` and the `tests/` directory isn't included\n\n // in the distributed package.\n\n return Ok(());\n\n }\n\n\n\n println!(\"cargo:rerun-if-changed=tests/libiberty-demangle-expected\");\n\n\n\n let test_path = get_test_path(\"libiberty.rs\")?;\n\n let _ = fs::remove_file(&test_path);\n\n let mut test_file = fs::File::create(test_path)?;\n\n\n\n writeln!(\n\n &mut test_file,\n\n \"\n\nextern crate cpp_demangle;\n\nextern crate diff;\n", "file_path": "build.rs", "rank": 14, "score": 55879.74716560523 }, { "content": "/// When we first begin demangling, we haven't entered any function or template\n\n/// demangling scope and we don't have any useful `ArgScopeStack`. Therefore, we\n\n/// are never actually dealing with `ArgScopeStack` directly in practice, but\n\n/// always an `Option<ArgScopeStack>` instead. 
Nevertheless, we want to define\n\n/// useful methods on `Option<ArgScopeStack>`.\n\n///\n\n/// A custom \"extension\" trait with exactly one implementor: Rust's principled\n\n/// monkey patching!\n\ntrait ArgScopeStackExt<'prev, 'subs>: Copy {\n\n /// Push a new `ArgScope` onto this `ArgScopeStack` and return the new\n\n /// `ArgScopeStack` with the pushed resolver on top.\n\n fn push(\n\n &'prev self,\n\n item: &'subs dyn ArgScope<'subs, 'subs>,\n\n ) -> Option<ArgScopeStack<'prev, 'subs>>;\n\n}\n\n\n\nimpl<'prev, 'subs> ArgScopeStackExt<'prev, 'subs> for Option<ArgScopeStack<'prev, 'subs>> {\n\n fn push(\n\n &'prev self,\n\n item: &'subs dyn ArgScope<'subs, 'subs>,\n\n ) -> Option<ArgScopeStack<'prev, 'subs>> {\n\n log!(\"ArgScopeStack::push: {:?}\", item);\n\n Some(ArgScopeStack {\n\n prev: self.as_ref(),\n\n in_arg: None,\n\n item: item,\n\n })\n", "file_path": "src/ast.rs", "rank": 15, "score": 55767.239439887766 }, { "content": "/// The `<number>` production.\n\n///\n\n/// ```text\n\n/// <number> ::= [n] <non-negative decimal integer>\n\n/// ```\n\ntype Number = isize;\n\n\n\nimpl Parse for Number {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n _subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(isize, IndexStr<'b>)> {\n\n try_begin_parse!(\"Number\", ctx, input);\n\n parse_number(10, true, input)\n\n }\n\n}\n\n\n\n/// A <seq-id> production encoding a base-36 positive number.\n\n///\n\n/// ```text\n\n/// <seq-id> ::= <0-9A-Z>+\n\n/// ```\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct SeqId(usize);\n", "file_path": "src/ast.rs", "rank": 16, "score": 55728.14544576074 }, { "content": "fn get_crate_test_path(file_name: &str) -> io::Result<path::PathBuf> {\n\n let mut test_path = get_crate_dir()?;\n\n test_path.push(\"tests\");\n\n assert!(test_path.is_dir());\n\n test_path.push(file_name);\n\n Ok(test_path)\n\n}\n\n\n", "file_path": "build.rs", "rank": 17, "score": 55339.85983939213 }, { "content": "#[derive(Debug, 
Default, Clone, Copy)]\n\nstruct ParseContextState {\n\n // The current recursion level. Should always be less than or equal to the\n\n // maximum.\n\n recursion_level: u32,\n\n // Whether or not we are currently parsing a conversion operator.\n\n in_conversion: bool,\n\n}\n\n\n\n/// Common context needed when parsing.\n\n#[derive(Debug, Clone)]\n\npub struct ParseContext {\n\n // Maximum amount of recursive parsing calls we will allow. If this is too\n\n // large, we can blow the stack.\n\n max_recursion: u32,\n\n // Mutable state within the `ParseContext`.\n\n state: Cell<ParseContextState>,\n\n}\n\n\n\nimpl ParseContext {\n\n /// Construct a new `ParseContext`.\n", "file_path": "src/ast.rs", "rank": 18, "score": 55066.78033581306 }, { "content": "/// Generate tests that ensure that we don't panic when parsing and demangling\n\n/// the seed test cases that we pass to AFL.rs assert (including the failing\n\n/// test cases historically found by AFL.rs).\n\nfn generate_sanity_tests_from_afl_seeds() -> io::Result<()> {\n\n let mut in_dir = get_crate_dir()?;\n\n in_dir.push(\"in\");\n\n if !in_dir.is_dir() {\n\n // We are in `cargo publish` and the `in/` directory isn't included in\n\n // the distributed package.\n\n return Ok(());\n\n }\n\n\n\n let test_path = get_test_path(\"afl_seeds.rs\")?;\n\n let mut test_file = fs::File::create(test_path)?;\n\n\n\n writeln!(\n\n &mut test_file,\n\n \"\n\nextern crate cpp_demangle;\n\nuse std::fs;\n\nuse std::io::Read;\n\n\"\n\n )?;\n", "file_path": "build.rs", "rank": 19, "score": 53263.9748797856 }, { "content": "fn zero_or_more<'a, 'b, P>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n) -> Result<(Vec<P>, IndexStr<'b>)>\n\nwhere\n\n P: Parse,\n\n{\n\n let mut tail = input;\n\n let mut results = vec![];\n\n loop {\n\n if let Ok((parsed, tail_tail)) = P::parse(ctx, subs, tail) {\n\n results.push(parsed);\n\n tail = tail_tail;\n\n } else {\n\n return Ok((results, tail));\n\n }\n\n 
}\n\n}\n\n\n\n/// Parse a number with the given `base`. Do not allow negative numbers\n\n/// (prefixed with an 'n' instead of a '-') if `allow_signed` is false.\n", "file_path": "src/ast.rs", "rank": 20, "score": 53064.92418791747 }, { "content": "fn one_or_more<'a, 'b, P>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n) -> Result<(Vec<P>, IndexStr<'b>)>\n\nwhere\n\n P: Parse,\n\n{\n\n let (first, mut tail) = P::parse(ctx, subs, input)?;\n\n let mut results = vec![first];\n\n loop {\n\n if let Ok((parsed, tail_tail)) = P::parse(ctx, subs, tail) {\n\n results.push(parsed);\n\n tail = tail_tail;\n\n } else {\n\n return Ok((results, tail));\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ast.rs", "rank": 21, "score": 53064.92418791747 }, { "content": "/// An RAII type to automatically check the recursion level against the\n\n/// maximum. If the maximum has been crossed, return an error. Otherwise,\n\n/// increment the level upon construction, and decrement it upon destruction.\n\nstruct AutoParseDemangle<'a, 'b, W: 'a + DemangleWrite>(&'b mut DemangleContext<'a, W>);\n\n\n\nimpl<'a, 'b, W: 'a + DemangleWrite> AutoParseDemangle<'a, 'b, W> {\n\n #[inline]\n\n fn new(ctx: &'b mut DemangleContext<'a, W>) -> std::result::Result<Self, fmt::Error> {\n\n ctx.enter_recursion()?;\n\n Ok(AutoParseDemangle(ctx))\n\n }\n\n}\n\n\n\nimpl<'a, 'b, W: 'a + DemangleWrite> std::ops::Deref for AutoParseDemangle<'a, 'b, W> {\n\n type Target = DemangleContext<'a, W>;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n self.0\n\n }\n\n}\n\n\n\nimpl<'a, 'b, W: 'a + DemangleWrite> std::ops::DerefMut for AutoParseDemangle<'a, 'b, W> {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n", "file_path": "src/ast.rs", "rank": 22, "score": 51771.99747803782 }, { "content": "#[doc(hidden)]\n\npub trait Parse: Sized {\n\n /// Parse the `Self` value from `input` and return it, updating the\n\n /// substitution table as needed.\n\n fn parse<'a, 'b>(\n\n ctx: &'a 
ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(Self, IndexStr<'b>)>;\n\n}\n\n\n", "file_path": "src/ast.rs", "rank": 23, "score": 50780.943196454566 }, { "content": "fn get_out_dir() -> io::Result<path::PathBuf> {\n\n Ok(path::PathBuf::from(env::var(\"OUT_DIR\").map_err(|_| {\n\n io::Error::new(io::ErrorKind::Other, \"no OUT_DIR\")\n\n })?))\n\n}\n\n\n", "file_path": "build.rs", "rank": 24, "score": 49990.148681640654 }, { "content": "/// An RAII type to automatically check the recursion level against the\n\n/// maximum. If the maximum has been crossed, return an error. Otherwise,\n\n/// increment the level upon construction, and decrement it upon destruction.\n\nstruct AutoParseRecursion<'a>(&'a ParseContext);\n\n\n\nimpl<'a> AutoParseRecursion<'a> {\n\n #[inline]\n\n fn new(ctx: &'a ParseContext) -> error::Result<AutoParseRecursion<'a>> {\n\n ctx.enter_recursion()?;\n\n Ok(AutoParseRecursion(ctx))\n\n }\n\n}\n\n\n\nimpl<'a> Drop for AutoParseRecursion<'a> {\n\n #[inline]\n\n fn drop(&mut self) {\n\n self.0.exit_recursion();\n\n }\n\n}\n\n\n\n/// A trait for anything that can be parsed from an `IndexStr` and return a\n\n/// `Result` of the parsed `Self` value and the rest of the `IndexStr` input\n\n/// that has not been consumed in parsing the `Self` value.\n\n///\n\n/// For AST types representing productions which have `<substitution>` as a\n\n/// possible right hand side, do not implement this trait directly. 
Instead,\n\n/// make a newtype over `usize`, parse either the `<substitution>` back\n\n/// reference or \"real\" value, insert the \"real\" value into the substitution\n\n/// table if needed, and *always* return the newtype index into the substitution\n\n/// table.\n", "file_path": "src/ast.rs", "rank": 25, "score": 48607.23393673239 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n\n\n generate_sanity_tests_from_afl_seeds()\n\n .expect(\"should generate sanity tests from AFL.rs seed test cases\");\n\n\n\n generate_compatibility_tests_from_libiberty()\n\n .expect(\"should generate compatibility tests from tests/libiberty-demangle-expected\");\n\n}\n", "file_path": "build.rs", "rank": 26, "score": 42093.75971219857 }, { "content": "fn get_test_path(file_name: &str) -> io::Result<path::PathBuf> {\n\n let mut test_path = get_out_dir()?;\n\n assert!(test_path.is_dir());\n\n test_path.push(file_name);\n\n Ok(test_path)\n\n}\n\n\n", "file_path": "build.rs", "rank": 27, "score": 40275.672495102 }, { "content": "fn main() {\n\n let matches = App::new(\"cppfilt\")\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(\"A c++filt clone as an example of how to use the cpp_demangle crate!\")\n\n .arg(\n\n Arg::with_name(\"noparams\")\n\n .short(\"p\")\n\n .long(\"no-params\")\n\n .help(\"Do not display function arguments\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"mangled_names\")\n\n .multiple(true)\n\n .value_delimiter(\" \"),\n\n )\n\n .get_matches();\n\n\n\n let stdin = io::stdin();\n\n let mut stdin = stdin.lock();\n", "file_path": "examples/cppfilt.rs", "rank": 28, "score": 39463.4211910264 }, { "content": "#[test]\n\nfn test_afl_seed_{}() {{\n\n let mut file = fs::File::open(\"{}\").unwrap();\n\n let mut contents = Vec::new();\n\n file.read_to_end(&mut contents).unwrap();\n\n let _ = cpp_demangle::Symbol::new(contents);\n\n assert!(true, \"did not panic when parsing\");\n\n}}\n\n\"#,\n\n file_name.to_string_lossy(),\n\n 
path.to_string_lossy()\n\n )?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "build.rs", "rank": 29, "score": 37226.61937000838 }, { "content": " uint32_t recursion_limit;\n", "file_path": "c_api/include/cpp_demangle.h", "rank": 30, "score": 35303.4581306785 }, { "content": "#[test]\n\nfn libxul_symbols_demangle() {\n\n let mut total = 0;\n\n let mut num_parsed = 0;\n\n let mut num_demangled = 0;\n\n let mut num_match_libiberty = 0;\n\n\n\n let libxul_txt_file = fs::File::open(concat!(env!(\"CARGO_MANIFEST_DIR\"), \"/tests/libxul.txt\"))\n\n .expect(\"should open libxul.txt\");\n\n let mut libxul_txt_file = BufReader::new(libxul_txt_file);\n\n\n\n let log_file = fs::File::create(concat!(env!(\"CARGO_MANIFEST_DIR\"), \"/tests/libxul.log\"))\n\n .expect(\"should create log file\");\n\n let mut log_file = BufWriter::new(log_file);\n\n\n\n let mut line = Vec::new();\n\n let mut demangled = Vec::new();\n\n let mut libiberty_sym = Vec::new();\n\n\n\n let which_cppfilt = get_cppfilt();\n\n let mut cppfilt = process::Command::new(which_cppfilt)\n", "file_path": "tests/libxul.rs", "rank": 31, "score": 35301.18307128251 }, { "content": "#[test]\n\nfn test_libiberty_demangle_{}_() {{\n\n let mangled = br#\"{}\"#;\n\n let mangled_str = String::from_utf8_lossy(mangled).into_owned();\n\n println!(\"Parsing mangled symbol: {{}}\", mangled_str);\n\n\n\n let expected = r#\"{}\"#;\n\n\n\n let sym = match cpp_demangle::Symbol::new(&mangled[..]) {{\n\n Ok(sym) => sym,\n\n Err(_) if mangled_str == expected => return,\n\n Err(e) => panic!(\"Should parse mangled symbol {{}}\", e),\n\n }};\n\n\n\n let mut actual = String::new();\n\n if let Err(e) = write!(&mut actual, \"{{}}\", sym) {{\n\n panic!(\"Error while demangling '{{}}': {{}}\",\n\n mangled_str,\n\n e);\n\n }}\n\n\n", "file_path": "build.rs", "rank": 32, "score": 34481.157585963636 }, { "content": "#[test]\n\nfn test_stackoverflow_does_not_occur_issue_186() {\n\n 
assert_does_not_demangle(\"__ZNSt3__18__bind_rINS_4pairINS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE8cc_errorEEZN5stlab2v15asyncIZNSB_14serial_queue_tclIZN12_GLOBAL__N_114future_adaptorIN10redacteLib12ValueOrErrorIS7_EEZNK10cc_element17rendition_requestEmbE4$_14EEDaNS_6futureIT_EEOT0_EUlSO_E_JNSN_ISJ_EEEEESM_OSO_DpOT0_EUlSU_E_SS_JST_EEENSB_6futureINS_9result_ofIFNS_5decayISQ_E4typeEDpNS11_IT1_E4typeEEE4typeEvEESO_SR_DpOS14_EUlRST_E_JST_EEC1IS1F_JST_EvEESU_SX_\");\n\n}\n\n\n\ndemangles!(\n\n _ZN7mozilla6detail12ListenerImplINS_14AbstractThreadEZNS_20MediaEventSourceImplILNS_14ListenerPolicyE0EJNS_13TimedMetadataEEE15ConnectInternalIS2_NS_12MediaDecoderEMS8_FvOS5_EEENS_8EnableIfIXsr8TakeArgsIT1_EE5valueENS_18MediaEventListenerEE4TypeEPT_PT0_SD_EUlS9_E_JS5_EE17ApplyWithArgsImplISL_EENSC_IXsr8TakeArgsISH_EE5valueEvE4TypeERKSH_S9_,\n\n // This does not match llvm-cxxfilt\n\n \"mozilla::EnableIf<TakeArgs<mozilla::EnableIf<TakeArgs<void (mozilla::MediaDecoder::*)(mozilla::TimedMetadata&&)>::value, mozilla::MediaEventListener>::Type mozilla::MediaEventSourceImpl<(mozilla::ListenerPolicy)0, mozilla::TimedMetadata>::ConnectInternal<mozilla::AbstractThread, mozilla::MediaDecoder, void (mozilla::MediaDecoder::*)(mozilla::TimedMetadata&&)>(mozilla::AbstractThread*, mozilla::MediaDecoder*, void (mozilla::MediaDecoder::*)(mozilla::TimedMetadata&&))::{lambda(mozilla::TimedMetadata&&)#1}>::value, void>::Type mozilla::detail::ListenerImpl<mozilla::AbstractThread, mozilla::EnableIf<TakeArgs<void (mozilla::MediaDecoder::*)(mozilla::TimedMetadata&&)>::value, mozilla::MediaEventListener>::Type mozilla::MediaEventSourceImpl<(mozilla::ListenerPolicy)0, mozilla::TimedMetadata>::ConnectInternal<mozilla::AbstractThread, mozilla::MediaDecoder, void (mozilla::MediaDecoder::*)(mozilla::TimedMetadata&&)>(mozilla::AbstractThread*, mozilla::MediaDecoder*, void (mozilla::MediaDecoder::*)(mozilla::TimedMetadata&&))::{lambda(mozilla::TimedMetadata&&)#1}, 
mozilla::TimedMetadata>::ApplyWithArgsImpl<mozilla::EnableIf<TakeArgs<void (mozilla::MediaDecoder::*)(mozilla::TimedMetadata&&)>::value, mozilla::MediaEventListener>::Type mozilla::MediaEventSourceImpl<(mozilla::ListenerPolicy)0, mozilla::TimedMetadata>::ConnectInternal<mozilla::AbstractThread, mozilla::MediaDecoder, void (mozilla::MediaDecoder::*)(mozilla::TimedMetadata&&)>(mozilla::AbstractThread*, mozilla::MediaDecoder*, void (mozilla::MediaDecoder::*)(mozilla::TimedMetadata&&))::{lambda(mozilla::TimedMetadata&&)#1}>(mozilla::EnableIf<TakeArgs<void (mozilla::MediaDecoder::*)(mozilla::TimedMetadata&&)>::value, mozilla::MediaEventListener>::Type mozilla::MediaEventSourceImpl<(mozilla::ListenerPolicy)0, mozilla::TimedMetadata>::ConnectInternal<mozilla::AbstractThread, mozilla::MediaDecoder, void (mozilla::MediaDecoder::*)(mozilla::TimedMetadata&&)>(mozilla::AbstractThread*, mozilla::MediaDecoder*, void (mozilla::MediaDecoder::*)(mozilla::TimedMetadata&&))::{lambda(mozilla::TimedMetadata&&)#1} const&, mozilla::TimedMetadata&&)\"\n\n);\n\n\n\ndemangles!(\n\n _Z20instantiate_with_intI3FooET_IiEv,\n\n \"Foo<int> instantiate_with_int<Foo>()\"\n\n);\n\ndemangles!(_Z3fooISt6vectorIiEEvv, \"void foo<std::vector<int> >()\");\n\ndemangles!(__ZN3foo3barE3quxS0_, \"foo::bar(qux, qux)\");\n\ndemangles!(__ZN3foo3barE3quxS_, \"foo::bar(qux, foo)\");\n\n\n\ndemangles!(\n\n _ZN4funcI2TyEEN6ResultIT_EES3_,\n", "file_path": "tests/tests.rs", "rank": 33, "score": 33626.32915175456 }, { "content": "//! 
Custom `Error` and `Result` types for the `cpp_demangle` crate.\n\n\n\n#[cfg(feature = \"std\")]\n\nuse std::error;\n\nuse std::fmt;\n\n\n\n/// Errors that can occur while demangling a symbol.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum Error {\n\n /// The mangled symbol ends abruptly.\n\n UnexpectedEnd,\n\n\n\n /// The mangled symbol is not well-formed.\n\n UnexpectedText,\n\n\n\n /// Found a back reference that is out-of-bounds of the substitution\n\n /// table.\n\n BadBackReference,\n\n\n\n /// Found a reference to a template arg that is either out-of-bounds, or in\n", "file_path": "src/error.rs", "rank": 35, "score": 32694.183236243978 }, { "content": "#[cfg(feature = \"logging\")]\n\nmacro_rules! log {\n\n ( $fmt:expr ) => {\n\n println!($fmt);\n\n };\n\n ( $fmt:expr, $($x:tt)* ) => {\n\n println!($fmt, $($x)*);\n\n }\n\n}\n\n\n\n#[cfg(not(feature = \"logging\"))]\n\nmacro_rules! log {\n\n ( $fmt:expr ) => {};\n\n ( $fmt:expr, $($x:tt)* ) => {\n\n if false { let _ = format!($fmt, $($x)*); }\n\n };\n\n}\n", "file_path": "src/logging.rs", "rank": 36, "score": 32693.30010439536 }, { "content": " ),\n\n Error::TooMuchRecursion => {\n\n write!(f, \"encountered too much recursion when demangling symbol\")\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\nimpl error::Error for Error {\n\n fn description(&self) -> &str {\n\n match *self {\n\n Error::UnexpectedEnd => \"mangled symbol ends abruptly\",\n\n Error::UnexpectedText => \"mangled symbol is not well-formed\",\n\n Error::BadBackReference => {\n\n \"back reference that is out-of-bounds of the substitution table\"\n\n }\n\n Error::BadTemplateArgReference => {\n\n \"reference to a template arg that is either out-of-bounds, or in a context \\\n\n without template args\"\n", "file_path": "src/error.rs", "rank": 37, "score": 32678.078797358077 }, { "content": " }\n\n Error::ForwardTemplateArgReference => {\n\n \"reference to a template arg from itself or a later template arg\"\n\n }\n\n 
Error::BadFunctionArgReference => {\n\n \"reference to a function arg that is either out-of-bounds, or in a context \\\n\n without function args\"\n\n }\n\n Error::BadLeafNameReference => {\n\n \"reference to a leaf name in a context where there is no current leaf name\"\n\n }\n\n Error::Overflow => {\n\n \"an overflow or underflow would occur when parsing an integer in a mangled symbol\"\n\n }\n\n Error::TooMuchRecursion => \"encountered too much recursion when demangling symbol\",\n\n }\n\n }\n\n}\n\n\n\n/// A demangling result of `T` or a `cpp_demangle::error::Error`.\n\npub type Result<T> = ::std::result::Result<T, Error>;\n", "file_path": "src/error.rs", "rank": 38, "score": 32676.396778901522 }, { "content": " \"reference to a template arg that is either out-of-bounds, or in a context \\\n\n without template args\"\n\n ),\n\n Error::ForwardTemplateArgReference => write!(\n\n f,\n\n \"reference to a template arg from itself or a later template arg\"\n\n ),\n\n Error::BadFunctionArgReference => write!(\n\n f,\n\n \"reference to a function arg that is either out-of-bounds, or in a context \\\n\n without function args\"\n\n ),\n\n Error::BadLeafNameReference => write!(\n\n f,\n\n \"reference to a leaf name in a context where there is no current leaf name\"\n\n ),\n\n Error::Overflow => write!(\n\n f,\n\n \"an overflow or underflow would occur when parsing an integer in a mangled \\\n\n symbol\"\n", "file_path": "src/error.rs", "rank": 39, "score": 32666.21376262476 }, { "content": " /// a context without template args.\n\n BadTemplateArgReference,\n\n\n\n /// Found a reference to a template arg from within the arg itself (or from\n\n /// within an earlier arg).\n\n ForwardTemplateArgReference,\n\n\n\n /// Found a reference to a function arg that is either out-of-bounds, or in\n\n /// a context without function args.\n\n BadFunctionArgReference,\n\n\n\n /// Found a reference to a leaf name in a context where there is no current\n\n /// leaf name.\n\n 
BadLeafNameReference,\n\n\n\n /// An overflow or underflow would occur when parsing an integer in a\n\n /// mangled symbol.\n\n Overflow,\n\n\n\n /// Encountered too much recursion when demangling symbol.\n\n TooMuchRecursion,\n\n}\n\n\n\n#[test]\n", "file_path": "src/error.rs", "rank": 40, "score": 32661.49072196717 }, { "content": "\n\n /// A `<template-template-param>` production.\n\n TemplateTemplateParam(ast::TemplateTemplateParam),\n\n\n\n /// An `<unresolved-type>` production.\n\n UnresolvedType(ast::UnresolvedType),\n\n\n\n /// A `<prefix>` production.\n\n Prefix(ast::Prefix),\n\n}\n\n\n\nimpl<'subs, W> ast::Demangle<'subs, W> for Substitutable\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n\n fn demangle<'prev, 'ctx>(\n\n &'subs self,\n\n ctx: &'ctx mut ast::DemangleContext<'subs, W>,\n\n scope: Option<ast::ArgScopeStack<'prev, 'subs>>,\n\n ) -> fmt::Result {\n", "file_path": "src/subs.rs", "rank": 41, "score": 32649.951081810384 }, { "content": " non_substitutions: Vec<Substitutable>,\n\n}\n\n\n\nimpl fmt::Debug for SubstitutionTable {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.pad(\"SubstitutionTable \")?;\n\n f.debug_map()\n\n .entries(self.substitutions.iter().enumerate())\n\n .finish()?;\n\n f.pad(\"non_substitutions \")?;\n\n f.debug_map()\n\n .entries(self.non_substitutions.iter().enumerate())\n\n .finish()\n\n }\n\n}\n\n\n\nimpl SubstitutionTable {\n\n /// Construct a new `SubstitutionTable`.\n\n pub fn new() -> SubstitutionTable {\n\n Default::default()\n", "file_path": "src/subs.rs", "rank": 42, "score": 32648.058003898364 }, { "content": "//! 
Types dealing with the substitutions table.\n\n\n\nuse super::DemangleWrite;\n\nuse ast;\n\nuse std::fmt;\n\nuse std::iter::FromIterator;\n\nuse std::ops::Deref;\n\nuse vec::Vec;\n\n\n\n/// An enumeration of all of the types that can end up in the substitution\n\n/// table.\n\n#[doc(hidden)]\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\n#[allow(clippy::large_enum_variant)]\n\npub enum Substitutable {\n\n /// An `<unscoped-template-name>` production.\n\n UnscopedTemplateName(ast::UnscopedTemplateName),\n\n\n\n /// A `<type>` production.\n\n Type(ast::Type),\n", "file_path": "src/subs.rs", "rank": 43, "score": 32646.698374675023 }, { "content": " pub fn contains(&self, idx: usize) -> bool {\n\n idx < self.substitutions.len()\n\n }\n\n\n\n /// Get the type referenced by the given handle, or None if there is no such\n\n /// entry, or there is an entry that is not a type.\n\n pub fn get_type(&self, handle: &ast::TypeHandle) -> Option<&ast::Type> {\n\n if let ast::TypeHandle::BackReference(idx) = *handle {\n\n self.substitutions.get(idx).and_then(|s| match *s {\n\n Substitutable::Type(ref ty) => Some(ty),\n\n _ => None,\n\n })\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Remove the last entry from the substitutions table and return it, or\n\n /// `None` if the table is empty.\n\n pub fn pop(&mut self) -> Option<Substitutable> {\n", "file_path": "src/subs.rs", "rank": 44, "score": 32646.39836981994 }, { "content": " match *self {\n\n Substitutable::UnscopedTemplateName(ref name) => name.demangle(ctx, scope),\n\n Substitutable::Type(ref ty) => ty.demangle(ctx, scope),\n\n Substitutable::TemplateTemplateParam(ref ttp) => ttp.demangle(ctx, scope),\n\n Substitutable::UnresolvedType(ref ty) => ty.demangle(ctx, scope),\n\n Substitutable::Prefix(ref prefix) => prefix.demangle(ctx, scope),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> ast::GetLeafName<'a> for Substitutable {\n\n fn get_leaf_name(&'a self, subs: &'a SubstitutionTable) -> Option<ast::LeafName<'a>> {\n\n match *self {\n\n 
Substitutable::UnscopedTemplateName(ref name) => name.get_leaf_name(subs),\n\n Substitutable::Prefix(ref prefix) => prefix.get_leaf_name(subs),\n\n Substitutable::Type(ref ty) => ty.get_leaf_name(subs),\n\n _ => None,\n\n }\n\n }\n\n}\n", "file_path": "src/subs.rs", "rank": 45, "score": 32643.117622880716 }, { "content": "\n\nimpl ast::IsCtorDtorConversion for Substitutable {\n\n fn is_ctor_dtor_conversion(&self, subs: &SubstitutionTable) -> bool {\n\n match *self {\n\n Substitutable::Prefix(ref prefix) => prefix.is_ctor_dtor_conversion(subs),\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n\n/// The table of substitutable components that we have parsed thus far, and for\n\n/// which there are potential back-references.\n\n#[doc(hidden)]\n\n#[derive(Clone, Default, PartialEq, Eq)]\n\npub struct SubstitutionTable {\n\n substitutions: Vec<Substitutable>,\n\n // There are components which are typically candidates for substitution, but\n\n // in some particular circumstances are not. Instances of such components\n\n // which are not candidates for substitution end up in this part of the\n\n // table. See `<prefix>` parsing for further details.\n", "file_path": "src/subs.rs", "rank": 46, "score": 32640.59149956724 }, { "content": " log!(\"SubstitutionTable::pop @ {}: {:?}\", self.len(), self.last());\n\n self.substitutions.pop()\n\n }\n\n\n\n /// Get the `idx`th entity that is not a candidate for substitution. Panics\n\n /// if `idx` is out of bounds.\n\n pub fn non_substitution(&self, idx: usize) -> &Substitutable {\n\n &self.non_substitutions[idx]\n\n }\n\n\n\n /// Get the `idx`th entity that is not a candidate for substitution. 
Returns\n\n /// `None` if `idx` is out of bounds.\n\n pub fn get_non_substitution(&self, idx: usize) -> Option<&Substitutable> {\n\n self.non_substitutions.get(idx)\n\n }\n\n}\n\n\n\nimpl FromIterator<Substitutable> for SubstitutionTable {\n\n fn from_iter<I: IntoIterator<Item = Substitutable>>(iter: I) -> Self {\n\n SubstitutionTable {\n", "file_path": "src/subs.rs", "rank": 47, "score": 32638.432851458263 }, { "content": " }\n\n\n\n /// Insert a freshly-parsed substitutable component into the table and\n\n /// return the index at which it now lives.\n\n pub fn insert(&mut self, entity: Substitutable) -> usize {\n\n let idx = self.substitutions.len();\n\n log!(\"SubstitutionTable::insert @ {}: {:?}\", idx, entity);\n\n self.substitutions.push(entity);\n\n idx\n\n }\n\n\n\n /// Insert a an entity into the table that is not a candidate for\n\n /// substitution.\n\n pub fn insert_non_substitution(&mut self, entity: Substitutable) -> usize {\n\n let idx = self.non_substitutions.len();\n\n self.non_substitutions.push(entity);\n\n idx\n\n }\n\n\n\n /// Does this substitution table contain a component at the given index?\n", "file_path": "src/subs.rs", "rank": 48, "score": 32634.336695799782 }, { "content": " substitutions: Vec::from_iter(iter),\n\n non_substitutions: vec![],\n\n }\n\n }\n\n}\n\n\n\nimpl Deref for SubstitutionTable {\n\n type Target = [Substitutable];\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.substitutions[..]\n\n }\n\n}\n", "file_path": "src/subs.rs", "rank": 49, "score": 32631.83116185195 }, { "content": "fn assert_does_not_parse(s: &str) {\n\n if let Ok(sym) = cpp_demangle::BorrowedSymbol::new(s.as_bytes()) {\n\n panic!(\"Unexpectedly parsed '{}' as '{}'\", s, sym);\n\n }\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 50, "score": 31035.081733336232 }, { "content": "fn assert_does_not_demangle(s: &str) {\n\n match cpp_demangle::BorrowedSymbol::new(s.as_bytes()) {\n\n Ok(sym) => {\n\n if let Ok(d) = 
sym.demangle(&DemangleOptions::default()) {\n\n panic!(\"Unexpectedly demangled '{}' as '{}'\", s, d);\n\n }\n\n }\n\n Err(e) => {\n\n panic!(\"Failed to parse '{}': {}\", s, e);\n\n }\n\n }\n\n}\n\n\n\nmacro_rules! demangles {\n\n ( $mangled:ident , $demangled:expr ) => {\n\n demangles!($mangled, stringify!($mangled), $demangled);\n\n };\n\n ( $name:ident , $mangled:expr , $demangled:expr ) => {\n\n #[test]\n\n fn $name() {\n", "file_path": "tests/tests.rs", "rank": 51, "score": 31035.081733336232 }, { "content": "fn get_cppfilt() -> &'static str {\n\n if cfg!(not(target_os = \"macos\")) {\n\n return \"c++filt\";\n\n }\n\n\n\n // Prefer `gc++filt` (from the homebrew binutils package) since it is built\n\n // with a newer libiberty than the system `c++filt` (and maybe the system\n\n // `c++filt` will be backed by the LLVM demangler one day).\n\n match process::Command::new(\"gc++filt\").spawn() {\n\n Ok(mut child) => {\n\n child.kill().expect(\"should kill child\");\n\n child.wait().expect(\"should wait on child\");\n\n \"gc++filt\"\n\n }\n\n Err(_) => \"c++filt\",\n\n }\n\n}\n\n\n", "file_path": "tests/libxul.rs", "rank": 52, "score": 29360.22781380828 }, { "content": " P: Debug + Parse + PartialEq,\n\n S: AsRef<[Substitutable]>,\n\n I: AsRef<[u8]>,\n\n {\n\n let input = input.as_ref();\n\n let ctx = ParseContext::new(Default::default());\n\n let mut subs = SubstitutionTable::from_iter(subs.as_ref().iter().cloned());\n\n\n\n match P::parse(&ctx, &mut subs, IndexStr::from(input)) {\n\n Err(ref error) if *error == expected_error => {}\n\n Err(ref error) => {\n\n panic!(\n\n \"Parsing {:?} as {} produced an error of kind {:?}, but we expected kind {:?}\",\n\n String::from_utf8_lossy(input),\n\n production,\n\n error,\n\n expected_error\n\n );\n\n }\n\n Ok((value, tail)) => {\n", "file_path": "src/ast.rs", "rank": 53, "score": 24815.272250596714 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct DataMemberPrefix(SourceName);\n\n\n\nimpl Parse for 
DataMemberPrefix {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(DataMemberPrefix, IndexStr<'b>)> {\n\n try_begin_parse!(\"DataMemberPrefix\", ctx, input);\n\n\n\n let (name, tail) = SourceName::parse(ctx, subs, input)?;\n\n let tail = consume(b\"M\", tail)?;\n\n Ok((DataMemberPrefix(name), tail))\n\n }\n\n}\n\n\n\nimpl<'a> GetLeafName<'a> for DataMemberPrefix {\n\n #[inline]\n\n fn get_leaf_name(&'a self, _: &'a SubstitutionTable) -> Option<LeafName<'a>> {\n", "file_path": "src/ast.rs", "rank": 54, "score": 24814.494606021584 }, { "content": " subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(BareFunctionType, IndexStr<'b>)> {\n\n try_begin_parse!(\"BareFunctionType\", ctx, input);\n\n\n\n let (types, tail) = one_or_more::<TypeHandle>(ctx, subs, input)?;\n\n Ok((BareFunctionType(types), tail))\n\n }\n\n}\n\n\n\nimpl<'subs, W> Demangle<'subs, W> for BareFunctionType\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n\n fn demangle<'prev, 'ctx>(\n\n &'subs self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n scope: Option<ArgScopeStack<'prev, 'subs>>,\n\n ) -> fmt::Result {\n\n let ctx = try_begin_demangle!(self, ctx, scope);\n", "file_path": "src/ast.rs", "rank": 55, "score": 24814.21169932806 }, { "content": "/// The `<pointer-to-member-type>` production.\n\n///\n\n/// ```text\n\n/// <pointer-to-member-type> ::= M <class type> <member type>\n\n/// ```\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct PointerToMemberType(TypeHandle, TypeHandle);\n\n\n\nimpl Parse for PointerToMemberType {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(PointerToMemberType, IndexStr<'b>)> {\n\n try_begin_parse!(\"PointerToMemberType\", ctx, input);\n\n\n\n let tail = consume(b\"M\", input)?;\n\n let (ty1, tail) = TypeHandle::parse(ctx, subs, tail)?;\n\n let (ty2, tail) = TypeHandle::parse(ctx, 
subs, tail)?;\n\n Ok((PointerToMemberType(ty1, ty2), tail))\n", "file_path": "src/ast.rs", "rank": 56, "score": 24813.645554313614 }, { "content": " expected_tail: I2,\n\n expected_new_subs: S2,\n\n ) where\n\n P: Debug + Parse + PartialEq,\n\n S1: AsRef<[Substitutable]>,\n\n S2: AsRef<[Substitutable]>,\n\n I1: AsRef<[u8]>,\n\n I2: AsRef<[u8]>,\n\n {\n\n let ctx = ParseContext::new(Default::default());\n\n let input = input.as_ref();\n\n let expected_tail = expected_tail.as_ref();\n\n\n\n let expected_subs = SubstitutionTable::from_iter(\n\n subs.as_ref()\n\n .iter()\n\n .cloned()\n\n .chain(expected_new_subs.as_ref().iter().cloned()),\n\n );\n\n let mut subs = SubstitutionTable::from_iter(subs.as_ref().iter().cloned());\n", "file_path": "src/ast.rs", "rank": 57, "score": 24813.367080481614 }, { "content": " ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(FunctionType, IndexStr<'b>)> {\n\n try_begin_parse!(\"FunctionType\", ctx, input);\n\n\n\n let (cv_qualifiers, tail) =\n\n if let Ok((cv_qualifiers, tail)) = CvQualifiers::parse(ctx, subs, input) {\n\n (cv_qualifiers, tail)\n\n } else {\n\n (Default::default(), input)\n\n };\n\n\n\n let (transaction_safe, tail) = if let Ok(tail) = consume(b\"Dx\", tail) {\n\n (true, tail)\n\n } else {\n\n (false, tail)\n\n };\n\n\n\n let tail = consume(b\"F\", tail)?;\n", "file_path": "src/ast.rs", "rank": 58, "score": 24813.142258870514 }, { "content": "}\n\n\n\nimpl Parse for VectorType {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(VectorType, IndexStr<'b>)> {\n\n try_begin_parse!(\"VectorType\", ctx, input);\n\n\n\n let tail = consume(b\"Dv\", input)?;\n\n\n\n if let Ok((num, tail)) = parse_number(10, false, tail) {\n\n debug_assert!(num >= 0);\n\n let tail = consume(b\"_\", tail)?;\n\n let (ty, tail) = TypeHandle::parse(ctx, subs, tail)?;\n\n return Ok((VectorType::DimensionNumber(num as _, 
ty), tail));\n\n }\n\n\n\n let tail = consume(b\"_\", tail)?;\n", "file_path": "src/ast.rs", "rank": 59, "score": 24812.711271804725 }, { "content": " self.bytes_written += s.len();\n\n })\n\n }\n\n}\n\n\n\nimpl<'a, W> DemangleContext<'a, W>\n\nwhere\n\n W: 'a + DemangleWrite,\n\n{\n\n /// Construct a new `DemangleContext`.\n\n pub fn new(\n\n subs: &'a SubstitutionTable,\n\n input: &'a [u8],\n\n options: DemangleOptions,\n\n out: &'a mut W,\n\n ) -> DemangleContext<'a, W> {\n\n DemangleContext {\n\n subs: subs,\n\n max_recursion: options.recursion_limit.map(|v| v.get()).unwrap_or(128),\n\n inner: vec![],\n", "file_path": "src/ast.rs", "rank": 60, "score": 24811.895896926948 }, { "content": " _subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(UnnamedTypeName, IndexStr<'b>)> {\n\n try_begin_parse!(\"UnnamedTypeName\", ctx, input);\n\n\n\n let input = consume(b\"Ut\", input)?;\n\n let (number, input) = match parse_number(10, false, input) {\n\n Ok((number, input)) => (Some(number as _), input),\n\n Err(_) => (None, input),\n\n };\n\n let input = consume(b\"_\", input)?;\n\n Ok((UnnamedTypeName(number), input))\n\n }\n\n}\n\n\n\nimpl UnnamedTypeName {\n\n #[inline]\n\n fn starts_with(byte: u8) -> bool {\n\n byte == b'U'\n\n }\n", "file_path": "src/ast.rs", "rank": 61, "score": 24811.664218679965 }, { "content": " }\n\n}\n\n\n\nimpl Parse for TypeHandle {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(TypeHandle, IndexStr<'b>)> {\n\n try_begin_parse!(\"TypeHandle\", ctx, input);\n\n\n\n /// Insert the given type into the substitution table, and return a\n\n /// handle referencing the index in the table where it ended up.\n\n fn insert_and_return_handle<'a, 'b>(\n\n ty: Type,\n\n subs: &'a mut SubstitutionTable,\n\n tail: IndexStr<'b>,\n\n ) -> Result<(TypeHandle, IndexStr<'b>)> {\n\n let ty = Substitutable::Type(ty);\n\n let idx = subs.insert(ty);\n", "file_path": 
"src/ast.rs", "rank": 62, "score": 24811.212301713942 }, { "content": "/// <initializer> ::= pi <expression>* E # parenthesized initialization\n\n/// ```\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct Initializer(Vec<Expression>);\n\n\n\nimpl Parse for Initializer {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(Initializer, IndexStr<'b>)> {\n\n try_begin_parse!(\"Initializer\", ctx, input);\n\n\n\n let tail = consume(b\"pi\", input)?;\n\n let (exprs, tail) = zero_or_more::<Expression>(ctx, subs, tail)?;\n\n let tail = consume(b\"E\", tail)?;\n\n Ok((Initializer(exprs), tail))\n\n }\n\n}\n\n\n", "file_path": "src/ast.rs", "rank": 63, "score": 24810.93965366381 }, { "content": "impl Parse for LambdaSig {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(LambdaSig, IndexStr<'b>)> {\n\n try_begin_parse!(\"LambdaSig\", ctx, input);\n\n\n\n let (types, tail) = if let Ok(tail) = consume(b\"v\", input) {\n\n (vec![], tail)\n\n } else {\n\n one_or_more::<TypeHandle>(ctx, subs, input)?\n\n };\n\n Ok((LambdaSig(types), tail))\n\n }\n\n}\n\n\n\nimpl<'subs, W> Demangle<'subs, W> for LambdaSig\n\nwhere\n\n W: 'subs + DemangleWrite,\n", "file_path": "src/ast.rs", "rank": 64, "score": 24810.72512497221 }, { "content": "//! 
Abstract syntax tree types for mangled symbols.\n\n\n\nuse super::{DemangleNodeType, DemangleOptions, DemangleWrite, ParseOptions};\n\nuse boxed::Box;\n\nuse error::{self, Result};\n\nuse index_str::IndexStr;\n\nuse std::cell::Cell;\n\n#[cfg(feature = \"logging\")]\n\nuse std::cell::RefCell;\n\nuse std::fmt::{self, Write};\n\nuse std::hash::{Hash, Hasher};\n\nuse std::mem;\n\nuse std::ops;\n\nuse std::ptr;\n\nuse string::String;\n\nuse subs::{Substitutable, SubstitutionTable};\n\nuse vec::Vec;\n\n\n", "file_path": "src/ast.rs", "rank": 65, "score": 24810.725574417444 }, { "content": "\n\nimpl Parse for CvQualifiers {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n _subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(CvQualifiers, IndexStr<'b>)> {\n\n try_begin_parse!(\"CvQualifiers\", ctx, input);\n\n\n\n let (restrict, tail) = if let Ok(tail) = consume(b\"r\", input) {\n\n (true, tail)\n\n } else {\n\n (false, input)\n\n };\n\n\n\n let (volatile, tail) = if let Ok(tail) = consume(b\"V\", tail) {\n\n (true, tail)\n\n } else {\n\n (false, tail)\n\n };\n", "file_path": "src/ast.rs", "rank": 66, "score": 24810.341101054466 }, { "content": " extra NonSubstitution(NonSubstitution),\n\n }\n\n}\n\n\n\nimpl Parse for PrefixHandle {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(PrefixHandle, IndexStr<'b>)> {\n\n try_begin_parse!(\"PrefixHandle\", ctx, input);\n\n\n\n #[inline]\n\n fn save(\n\n subs: &mut SubstitutionTable,\n\n prefix: Prefix,\n\n tail_tail: IndexStr<'_>,\n\n ) -> PrefixHandle {\n\n if let Some(b'E') = tail_tail.peek() {\n\n // An `E` means that we just finished parsing a `<nested-name>`\n", "file_path": "src/ast.rs", "rank": 67, "score": 24810.090639172002 }, { "content": "pub enum ArrayType {\n\n /// An array with a number-literal dimension.\n\n DimensionNumber(usize, TypeHandle),\n\n\n\n /// An array with an expression for its dimension.\n\n 
DimensionExpression(Expression, TypeHandle),\n\n\n\n /// An array with no dimension.\n\n NoDimension(TypeHandle),\n\n}\n\n\n\nimpl Parse for ArrayType {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(ArrayType, IndexStr<'b>)> {\n\n try_begin_parse!(\"ArrayType\", ctx, input);\n\n\n\n let tail = consume(b\"A\", input)?;\n", "file_path": "src/ast.rs", "rank": 68, "score": 24810.030123114877 }, { "content": "pub struct TemplateArgs(Vec<TemplateArg>);\n\n\n\nimpl Parse for TemplateArgs {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(TemplateArgs, IndexStr<'b>)> {\n\n try_begin_parse!(\"TemplateArgs\", ctx, input);\n\n\n\n let tail = consume(b\"I\", input)?;\n\n\n\n let (args, tail) = one_or_more::<TemplateArg>(ctx, subs, tail)?;\n\n let tail = consume(b\"E\", tail)?;\n\n Ok((TemplateArgs(args), tail))\n\n }\n\n}\n\n\n\nimpl<'subs, W> Demangle<'subs, W> for TemplateArgs\n\nwhere\n", "file_path": "src/ast.rs", "rank": 69, "score": 24809.853729370734 }, { "content": "/// <source-name> ::= <positive length number> <identifier>\n\n/// ```\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct SourceName(Identifier);\n\n\n\nimpl Parse for SourceName {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(SourceName, IndexStr<'b>)> {\n\n try_begin_parse!(\"SourceName\", ctx, input);\n\n\n\n let (source_name_len, input) = parse_number(10, false, input)?;\n\n debug_assert!(source_name_len >= 0);\n\n if source_name_len == 0 {\n\n return Err(error::Error::UnexpectedText);\n\n }\n\n\n\n let (head, tail) = match input.try_split_at(source_name_len as _) {\n", "file_path": "src/ast.rs", "rank": 70, "score": 24809.77638528423 }, { "content": " end: usize,\n\n}\n\n\n\nimpl Parse for CloneTypeIdentifier {\n\n fn parse<'a, 'b>(\n\n ctx: &'a 
ParseContext,\n\n _subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(CloneTypeIdentifier, IndexStr<'b>)> {\n\n try_begin_parse!(\"CloneTypeIdentifier\", ctx, input);\n\n\n\n if input.is_empty() {\n\n return Err(error::Error::UnexpectedEnd);\n\n }\n\n\n\n let end = input\n\n .as_ref()\n\n .iter()\n\n .map(|&c| c as char)\n\n .take_while(|&c| c == '$' || c == '_' || c.is_digit(36))\n", "file_path": "src/ast.rs", "rank": 71, "score": 24809.74865030145 }, { "content": " (b't', tail) | (_, tail) => {\n\n let (base, tail) = Encoding::parse(ctx, subs, tail)?;\n\n Ok((SpecialName::TransactionClone(Box::new(base)), tail))\n\n }\n\n }\n\n }\n\n _ => Err(error::Error::UnexpectedText),\n\n }\n\n }\n\n}\n\n\n\nimpl<'subs, W> Demangle<'subs, W> for SpecialName\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n\n fn demangle<'prev, 'ctx>(\n\n &'subs self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n scope: Option<ArgScopeStack<'prev, 'subs>>,\n\n ) -> fmt::Result {\n", "file_path": "src/ast.rs", "rank": 72, "score": 24809.74063922428 }, { "content": "\n\nimpl Parse for NestedName {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(NestedName, IndexStr<'b>)> {\n\n try_begin_parse!(\"NestedName\", ctx, input);\n\n\n\n let tail = consume(b\"N\", input)?;\n\n\n\n let (cv_qualifiers, tail) = if let Ok((q, tail)) = CvQualifiers::parse(ctx, subs, tail) {\n\n (q, tail)\n\n } else {\n\n (Default::default(), tail)\n\n };\n\n\n\n let (ref_qualifier, tail) = if let Ok((r, tail)) = RefQualifier::parse(ctx, subs, tail) {\n\n (Some(r), tail)\n\n } else {\n", "file_path": "src/ast.rs", "rank": 73, "score": 24809.73470477918 }, { "content": " subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(Substitution, IndexStr<'b>)> {\n\n try_begin_parse!(\"Substitution\", ctx, input);\n\n\n\n if let Ok((well_known, tail)) = WellKnownComponent::parse(ctx, subs, input) {\n\n return 
Ok((Substitution::WellKnown(well_known), tail));\n\n }\n\n\n\n let tail = consume(b\"S\", input)?;\n\n let (idx, tail) = if let Ok((idx, tail)) = SeqId::parse(ctx, subs, tail) {\n\n (idx.0 + 1, tail)\n\n } else {\n\n (0, tail)\n\n };\n\n\n\n if !subs.contains(idx) {\n\n return Err(error::Error::BadBackReference);\n\n }\n\n\n", "file_path": "src/ast.rs", "rank": 74, "score": 24809.72192687848 }, { "content": "\n\nimpl Parse for FunctionParam {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(FunctionParam, IndexStr<'b>)> {\n\n try_begin_parse!(\"FunctionParam\", ctx, input);\n\n\n\n let tail = consume(b\"f\", input)?;\n\n if tail.is_empty() {\n\n return Err(error::Error::UnexpectedEnd);\n\n }\n\n\n\n let (scope, tail) = if let Ok(tail) = consume(b\"L\", tail) {\n\n parse_number(10, false, tail)?\n\n } else {\n\n (0, tail)\n\n };\n\n\n", "file_path": "src/ast.rs", "rank": 75, "score": 24809.62218689584 }, { "content": "\n\n /// A non-standard, vendor extension type.\n\n ///\n\n /// ```text\n\n /// <builtin-type> ::= u <source-name> # vendor extended type\n\n /// ```\n\n Extension(SourceName),\n\n}\n\n\n\nimpl Parse for BuiltinType {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(BuiltinType, IndexStr<'b>)> {\n\n try_begin_parse!(\"BuiltinType\", ctx, input);\n\n\n\n if let Ok((ty, tail)) = StandardBuiltinType::parse(ctx, subs, input) {\n\n return Ok((BuiltinType::Standard(ty), tail));\n\n }\n", "file_path": "src/ast.rs", "rank": 76, "score": 24809.40688364524 }, { "content": " LocalSourceName(SourceName, Option<Discriminator>),\n\n /// A generated name for an unnamed type.\n\n UnnamedType(UnnamedTypeName),\n\n /// An ABI tag.\n\n ABITag(TaggedName),\n\n /// A closure type name\n\n ClosureType(ClosureTypeName),\n\n}\n\n\n\nimpl Parse for UnqualifiedName {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n 
subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(UnqualifiedName, IndexStr<'b>)> {\n\n try_begin_parse!(\"UnqualifiedName\", ctx, input);\n\n\n\n if let Ok((op, tail)) = OperatorName::parse(ctx, subs, input) {\n\n return Ok((UnqualifiedName::Operator(op), tail));\n\n }\n", "file_path": "src/ast.rs", "rank": 77, "score": 24809.35235584957 }, { "content": "pub struct TaggedName(SourceName);\n\n\n\nimpl Parse for TaggedName {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(TaggedName, IndexStr<'b>)> {\n\n try_begin_parse!(\"TaggedName\", ctx, input);\n\n\n\n let tail = consume(b\"B\", input)?;\n\n let (source_name, tail) = SourceName::parse(ctx, subs, tail)?;\n\n Ok((TaggedName(source_name), tail))\n\n }\n\n}\n\n\n\nimpl<'subs, W> Demangle<'subs, W> for TaggedName\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n", "file_path": "src/ast.rs", "rank": 78, "score": 24809.33641191669 }, { "content": " pub fn new(options: ParseOptions) -> ParseContext {\n\n ParseContext {\n\n max_recursion: options.recursion_limit.map(|v| v.get()).unwrap_or(96),\n\n state: Cell::new(ParseContextState::default()),\n\n }\n\n }\n\n\n\n /// Get the current recursion level for this context.\n\n pub fn recursion_level(&self) -> u32 {\n\n self.state.get().recursion_level\n\n }\n\n\n\n #[inline]\n\n fn enter_recursion(&self) -> error::Result<()> {\n\n let mut state = self.state.get();\n\n let new_recursion_level = state.recursion_level + 1;\n\n\n\n if new_recursion_level >= self.max_recursion {\n\n log!(\"Hit too much recursion at level {}\", self.max_recursion);\n\n Err(error::Error::TooMuchRecursion)\n", "file_path": "src/ast.rs", "rank": 79, "score": 24809.177959056076 }, { "content": "\n\nimpl Parse for ClosureTypeName {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(ClosureTypeName, IndexStr<'b>)> {\n\n 
try_begin_parse!(\"ClosureTypeName\", ctx, input);\n\n\n\n let tail = consume(b\"Ul\", input)?;\n\n let (sig, tail) = LambdaSig::parse(ctx, subs, tail)?;\n\n let tail = consume(b\"E\", tail)?;\n\n let (num, tail) = if let Ok((num, tail)) = parse_number(10, false, tail) {\n\n (Some(num as _), tail)\n\n } else {\n\n (None, tail)\n\n };\n\n let tail = consume(b\"_\", tail)?;\n\n Ok((ClosureTypeName(sig, num), tail))\n\n }\n", "file_path": "src/ast.rs", "rank": 80, "score": 24809.088331504234 }, { "content": "\n\n let tail = consume(b\"u\", input)?;\n\n let (name, tail) = SourceName::parse(ctx, subs, tail)?;\n\n Ok((BuiltinType::Extension(name), tail))\n\n }\n\n}\n\n\n\nimpl<'subs, W> Demangle<'subs, W> for BuiltinType\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n\n fn demangle<'prev, 'ctx>(\n\n &'subs self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n scope: Option<ArgScopeStack<'prev, 'subs>>,\n\n ) -> fmt::Result {\n\n let ctx = try_begin_demangle!(self, ctx, scope);\n\n\n\n match *self {\n\n BuiltinType::Standard(ref ty) => ty.demangle(ctx, scope),\n", "file_path": "src/ast.rs", "rank": 81, "score": 24809.039443870013 }, { "content": "where\n\n W: 'subs + DemangleWrite,\n\n{\n\n fn demangle_as_leaf<'me, 'ctx>(\n\n &'me self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n ) -> fmt::Result {\n\n match *self {\n\n LeafName::SourceName(sn) => sn.demangle(ctx, None),\n\n LeafName::Closure(c) => c.demangle(ctx, None),\n\n LeafName::WellKnownComponent(wkc) => wkc.demangle_as_leaf(ctx),\n\n LeafName::UnnamedType(utn) => utn.demangle_as_leaf(ctx),\n\n }\n\n }\n\n}\n\n\n\n/// Determine whether this AST node is some kind (potentially namespaced) name\n\n/// and if so get its leaf name.\n\npub(crate) trait GetLeafName<'a> {\n\n fn get_leaf_name(&'a self, subs: &'a SubstitutionTable) -> Option<LeafName<'a>>;\n\n}\n\n\n\n/// Determine whether this AST node is a constructor, destructor, or conversion\n\n/// function.\n\npub(crate) trait IsCtorDtorConversion {\n\n fn 
is_ctor_dtor_conversion(&self, subs: &SubstitutionTable) -> bool;\n\n}\n\n\n", "file_path": "src/ast.rs", "rank": 82, "score": 24809.037503369007 }, { "content": " ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(NvOffset, IndexStr<'b>)> {\n\n try_begin_parse!(\"NvOffset\", ctx, input);\n\n\n\n Number::parse(ctx, subs, input).map(|(num, tail)| (NvOffset(num), tail))\n\n }\n\n}\n\n\n\n/// A virtual offset, as described by the <v-offset> production.\n\n///\n\n/// ```text\n\n/// <v-offset> ::= <offset number> _ <virtual offset number>\n\n/// ```\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct VOffset(isize, isize);\n\n\n\nimpl Parse for VOffset {\n\n fn parse<'a, 'b>(\n", "file_path": "src/ast.rs", "rank": 83, "score": 24808.97390623331 }, { "content": " Function(Name, BareFunctionType),\n\n\n\n /// An encoded static variable.\n\n Data(Name),\n\n\n\n /// A special encoding.\n\n Special(SpecialName),\n\n}\n\n\n\nimpl Parse for Encoding {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(Encoding, IndexStr<'b>)> {\n\n try_begin_parse!(\"Encoding\", ctx, input);\n\n\n\n if let Ok((name, tail)) = Name::parse(ctx, subs, input) {\n\n if let Ok((ty, tail)) = BareFunctionType::parse(ctx, subs, tail) {\n\n return Ok((Encoding::Function(name, ty), tail));\n", "file_path": "src/ast.rs", "rank": 84, "score": 24808.936428294757 }, { "content": "pub enum GlobalCtorDtor {\n\n /// A global constructor.\n\n Ctor(Box<MangledName>),\n\n /// A global destructor.\n\n Dtor(Box<MangledName>),\n\n}\n\n\n\nimpl Parse for GlobalCtorDtor {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(GlobalCtorDtor, IndexStr<'b>)> {\n\n try_begin_parse!(\"GlobalCtorDtor\", ctx, input);\n\n\n\n let tail = match input.next_or(error::Error::UnexpectedEnd)? 
{\n\n (b'_', t) | (b'.', t) | (b'$', t) => t,\n\n _ => return Err(error::Error::UnexpectedText),\n\n };\n\n\n", "file_path": "src/ast.rs", "rank": 85, "score": 24808.875164389567 }, { "content": " &'subs self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n scope: Option<ArgScopeStack<'prev, 'subs>>,\n\n ) -> fmt::Result {\n\n FunctionArgSlice::new(&self.0[1..]).demangle(ctx, scope)\n\n }\n\n}\n\n\n\nimpl<'subs, W> DemangleAsInner<'subs, W> for FunctionArgListAndReturnType where\n\n W: 'subs + DemangleWrite\n\n{\n\n}\n\n\n\n/// Define a handle to a AST type that lives inside the substitution table. A\n\n/// handle is always either an index into the substitution table, or it is a\n\n/// reference to a \"well-known\" component.\n\n///\n\n/// This declares:\n\n///\n\n/// - The enum of either a back reference into the substitution table or a\n", "file_path": "src/ast.rs", "rank": 86, "score": 24808.75844174132 }, { "content": "\n\n if let Ok(tail) = consume(b\"St\", input) {\n\n let (name, tail) = UnqualifiedName::parse(ctx, subs, tail)?;\n\n return Ok((UnscopedName::Std(name), tail));\n\n }\n\n\n\n let (name, tail) = UnqualifiedName::parse(ctx, subs, input)?;\n\n Ok((UnscopedName::Unqualified(name), tail))\n\n }\n\n}\n\n\n\nimpl<'subs, W> Demangle<'subs, W> for UnscopedName\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n\n fn demangle<'prev, 'ctx>(\n\n &'subs self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n scope: Option<ArgScopeStack<'prev, 'subs>>,\n\n ) -> fmt::Result {\n", "file_path": "src/ast.rs", "rank": 87, "score": 24808.753441874713 }, { "content": " fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(DestructorName, IndexStr<'b>)> {\n\n try_begin_parse!(\"DestructorName\", ctx, input);\n\n\n\n if let Ok((ty, tail)) = UnresolvedTypeHandle::parse(ctx, subs, input) {\n\n return Ok((DestructorName::Unresolved(ty), tail));\n\n }\n\n\n\n let (name, tail) = SimpleId::parse(ctx, subs, 
input)?;\n\n Ok((DestructorName::Name(name), tail))\n\n }\n\n}\n\n\n\nimpl<'subs, W> Demangle<'subs, W> for DestructorName\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n", "file_path": "src/ast.rs", "rank": 88, "score": 24808.749732616256 }, { "content": " fn demangle<'prev, 'ctx>(\n\n &'subs self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n scope: Option<ArgScopeStack<'prev, 'subs>>,\n\n ) -> fmt::Result {\n\n let ctx = try_begin_demangle!(self, ctx, scope);\n\n\n\n write!(ctx, \"[abi:\")?;\n\n self.0.demangle(ctx, scope)?;\n\n write!(ctx, \"]\")\n\n }\n\n}\n\n\n\nimpl TaggedName {\n\n #[inline]\n\n fn starts_with(byte: u8) -> bool {\n\n byte == b'B'\n\n }\n\n}\n\n\n", "file_path": "src/ast.rs", "rank": 89, "score": 24808.736630177827 }, { "content": "\n\n fn parse_internal<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n from_expr: bool,\n\n ) -> Result<(OperatorName, IndexStr<'b>)> {\n\n try_begin_parse!(\"OperatorName\", ctx, input);\n\n\n\n if let Ok((simple, tail)) = SimpleOperatorName::parse(ctx, subs, input) {\n\n return Ok((OperatorName::Simple(simple), tail));\n\n }\n\n\n\n if let Ok(tail) = consume(b\"cv\", input) {\n\n // If we came through the expression path, we're a cast. 
If not,\n\n // we're a conversion.\n\n let previously_in_conversion = ctx.set_in_conversion(!from_expr);\n\n let parse_result = TypeHandle::parse(ctx, subs, tail);\n\n ctx.set_in_conversion(previously_in_conversion);\n\n let (ty, tail) = parse_result?;\n", "file_path": "src/ast.rs", "rank": 90, "score": 24808.733614247325 }, { "content": " input: IndexStr<'b>,\n\n ) -> Result<(UnresolvedQualifierLevel, IndexStr<'b>)> {\n\n try_begin_parse!(\"UnresolvedQualifierLevel\", ctx, input);\n\n\n\n let (id, tail) = SimpleId::parse(ctx, subs, input)?;\n\n Ok((UnresolvedQualifierLevel(id), tail))\n\n }\n\n}\n\n\n\nimpl<'subs, W> Demangle<'subs, W> for UnresolvedQualifierLevel\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n\n #[inline]\n\n fn demangle<'prev, 'ctx>(\n\n &'subs self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n scope: Option<ArgScopeStack<'prev, 'subs>>,\n\n ) -> fmt::Result {\n\n let ctx = try_begin_demangle!(self, ctx, scope);\n", "file_path": "src/ast.rs", "rank": 91, "score": 24808.63654094539 }, { "content": " ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(UnresolvedTypeHandle, IndexStr<'b>)> {\n\n try_begin_parse!(\"UnresolvedTypeHandle\", ctx, input);\n\n\n\n if let Ok((param, tail)) = TemplateParam::parse(ctx, subs, input) {\n\n let (args, tail) = if let Ok((args, tail)) = TemplateArgs::parse(ctx, subs, tail) {\n\n (Some(args), tail)\n\n } else {\n\n (None, tail)\n\n };\n\n let ty = UnresolvedType::Template(param, args);\n\n let ty = Substitutable::UnresolvedType(ty);\n\n let idx = subs.insert(ty);\n\n let handle = UnresolvedTypeHandle::BackReference(idx);\n\n return Ok((handle, tail));\n\n }\n\n\n\n if let Ok((decltype, tail)) = Decltype::parse(ctx, subs, input) {\n", "file_path": "src/ast.rs", "rank": 92, "score": 24808.475446228524 }, { "content": "/// ```text\n\n/// <unscoped-name> ::= <unqualified-name>\n\n/// ::= St <unqualified-name> # ::std::\n\n/// ```\n\n#[derive(Clone, Debug, PartialEq, 
Eq)]\n\npub enum UnscopedName {\n\n /// An unqualified name.\n\n Unqualified(UnqualifiedName),\n\n\n\n /// A name within the `std::` namespace.\n\n Std(UnqualifiedName),\n\n}\n\n\n\nimpl Parse for UnscopedName {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(UnscopedName, IndexStr<'b>)> {\n\n try_begin_parse!(\"UnscopedName\", ctx, input);\n", "file_path": "src/ast.rs", "rank": 93, "score": 24808.46133945733 }, { "content": " fn get_function_arg(&'a self, _: usize) -> Result<&'a Type> {\n\n Err(error::Error::BadFunctionArgReference)\n\n }\n\n}\n\n\n\nimpl<'subs, W> DemangleAsLeaf<'subs, W> for WellKnownComponent\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n\n fn demangle_as_leaf<'me, 'ctx>(\n\n &'me self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n ) -> fmt::Result {\n\n match *self {\n\n WellKnownComponent::Std => {\n\n panic!(\"should never treat `WellKnownComponent::Std` as a leaf name\")\n\n }\n\n WellKnownComponent::StdAllocator => write!(ctx, \"allocator\"),\n\n WellKnownComponent::StdString1 => write!(ctx, \"basic_string\"),\n\n WellKnownComponent::StdString2 => write!(ctx, \"string\"),\n", "file_path": "src/ast.rs", "rank": 94, "score": 24808.449376602326 }, { "content": "\n\nimpl Parse for TemplateArg {\n\n fn parse<'a, 'b>(\n\n ctx: &'a ParseContext,\n\n subs: &'a mut SubstitutionTable,\n\n input: IndexStr<'b>,\n\n ) -> Result<(TemplateArg, IndexStr<'b>)> {\n\n try_begin_parse!(\"TemplateArg\", ctx, input);\n\n\n\n if let Ok(tail) = consume(b\"X\", input) {\n\n let (expr, tail) = Expression::parse(ctx, subs, tail)?;\n\n let tail = consume(b\"E\", tail)?;\n\n return Ok((TemplateArg::Expression(expr), tail));\n\n }\n\n\n\n if let Ok((expr, tail)) = ExprPrimary::parse(ctx, subs, input) {\n\n return Ok((TemplateArg::SimpleExpression(expr), tail));\n\n }\n\n\n\n if let Ok((ty, tail)) = TypeHandle::parse(ctx, subs, input) {\n", "file_path": "src/ast.rs", "rank": 95, "score": 
24808.40049274833 }, { "content": " let tail = consume(b\"e\", tail)?;\n\n let (name, tail) = Name::parse(ctx, subs, tail)?;\n\n Ok((ClassEnumType::ElaboratedEnum(name), tail))\n\n }\n\n}\n\n\n\nimpl<'subs, W> Demangle<'subs, W> for ClassEnumType\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n\n fn demangle<'prev, 'ctx>(\n\n &'subs self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n scope: Option<ArgScopeStack<'prev, 'subs>>,\n\n ) -> fmt::Result {\n\n let ctx = try_begin_demangle!(self, ctx, scope);\n\n\n\n match *self {\n\n ClassEnumType::Named(ref name) => name.demangle(ctx, scope),\n\n ClassEnumType::ElaboratedStruct(ref name) => {\n", "file_path": "src/ast.rs", "rank": 96, "score": 24808.386505524755 }, { "content": " Some(LeafName::SourceName(&self.0))\n\n }\n\n}\n\n\n\nimpl DataMemberPrefix {\n\n fn starts_with(byte: u8) -> bool {\n\n SourceName::starts_with(byte)\n\n }\n\n}\n\n\n\nimpl<'subs, W> Demangle<'subs, W> for DataMemberPrefix\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n\n #[inline]\n\n fn demangle<'prev, 'ctx>(\n\n &'subs self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n scope: Option<ArgScopeStack<'prev, 'subs>>,\n\n ) -> fmt::Result {\n", "file_path": "src/ast.rs", "rank": 97, "score": 24808.150374823195 }, { "content": " transaction_safe: transaction_safe,\n\n extern_c: extern_c,\n\n bare: bare,\n\n ref_qualifier: ref_qualifier,\n\n };\n\n Ok((func_ty, tail))\n\n }\n\n}\n\n\n\nimpl<'subs, W> Demangle<'subs, W> for FunctionType\n\nwhere\n\n W: 'subs + DemangleWrite,\n\n{\n\n fn demangle<'prev, 'ctx>(\n\n &'subs self,\n\n ctx: &'ctx mut DemangleContext<'subs, W>,\n\n scope: Option<ArgScopeStack<'prev, 'subs>>,\n\n ) -> fmt::Result {\n\n let ctx = try_begin_demangle!(self, ctx, scope);\n\n\n", "file_path": "src/ast.rs", "rank": 98, "score": 24808.127821308786 } ]
Rust
crates/nodes/src/logic_data.rs
gents83/NRG
62743a54ac873a8dea359f3816e24c189a323ebb
use sabi_serialize::{Deserialize, Serialize, SerializeFile}; use crate::{LogicContext, LogicExecution, NodeExecutionType, NodeState, NodeTree, PinId}; #[derive(Default, PartialEq, Eq, Hash, Clone)] struct LinkInfo { node: usize, pin: PinId, } #[derive(Default, Clone)] struct PinInfo { id: PinId, links: Vec<LinkInfo>, } #[derive(Default, Clone)] struct NodeInfo { inputs: Vec<PinInfo>, outputs: Vec<PinInfo>, } #[derive(Default, Serialize, Deserialize, Clone)] #[serde(crate = "sabi_serialize")] pub struct LogicData { #[serde(flatten)] tree: NodeTree, #[serde(skip)] active_nodes: Vec<LinkInfo>, #[serde(skip)] nodes_info: Vec<NodeInfo>, #[serde(skip)] execution_state: Vec<NodeState>, #[serde(skip)] context: LogicContext, } impl SerializeFile for LogicData { fn extension() -> &'static str { "logic" } } impl From<NodeTree> for LogicData { fn from(tree: NodeTree) -> Self { Self { tree, active_nodes: Vec::new(), nodes_info: Vec::new(), execution_state: Vec::new(), context: LogicContext::default(), } } } impl LogicData { pub fn context(&self) -> &LogicContext { &self.context } pub fn context_mut(&mut self) -> &mut LogicContext { &mut self.context } pub fn is_initialized(&self) -> bool { !self.execution_state.is_empty() } pub fn init(&mut self) { let nodes = self.tree.nodes(); nodes.iter().enumerate().for_each(|(node_index, n)| { if !n.node().has_input::<LogicExecution>() && n.node().has_output::<LogicExecution>() { self.active_nodes.push(LinkInfo { node: node_index, pin: PinId::invalid(), }); } let mut node_info = NodeInfo::default(); n.node().inputs().iter().for_each(|(id, _)| { let mut pin_info = PinInfo { id: id.clone(), ..Default::default() }; let links = self.tree.get_links_to_pin(n.name(), id.name()); links.iter().for_each(|l| { if let Some(from_node_index) = self.tree.find_node_index(l.from_node()) { if let Some((from_pin_id, _)) = nodes[from_node_index] .node() .outputs() .iter() .find(|(id, _)| id.name() == l.from_pin()) { let link_info = LinkInfo { node: 
from_node_index, pin: from_pin_id.clone(), }; pin_info.links.push(link_info); } else { eprintln!( "Unable to find output pin {} of node {}", l.from_pin(), nodes[from_node_index].name() ); } } }); node_info.inputs.push(pin_info); }); n.node().outputs().iter().for_each(|(id, _)| { let mut pin_info = PinInfo { id: id.clone(), ..Default::default() }; let links = self.tree.get_links_from_pin(n.name(), id.name()); links.iter().for_each(|l| { if let Some(to_node_index) = self.tree.find_node_index(l.to_node()) { if let Some((to_pin_id, _)) = nodes[to_node_index] .node() .inputs() .iter() .find(|(id, _)| id.name() == l.to_pin()) { let link_info = LinkInfo { node: to_node_index, pin: to_pin_id.clone(), }; pin_info.links.push(link_info); } else { eprintln!( "Unable to find input pin {} of node {}", l.to_pin(), nodes[to_node_index].name() ); } } }); node_info.outputs.push(pin_info); }); self.nodes_info.push(node_info); }); self.execution_state.resize(nodes.len(), NodeState::Active); } pub fn execute(&mut self) { self.execute_active_nodes(self.active_nodes.clone()); } fn execute_active_nodes(&mut self, mut nodes_to_execute: Vec<LinkInfo>) { if nodes_to_execute.is_empty() { return; } let mut new_nodes = Vec::new(); nodes_to_execute.iter().for_each(|l| { let mut nodes = Self::execute_node( &mut self.tree, &self.context, l, &self.nodes_info, &mut self.execution_state, ); new_nodes.append(&mut nodes); }); nodes_to_execute.retain(|link_info| { let node_state = &self.execution_state[link_info.node]; match node_state { NodeState::Active => true, NodeState::Running(_) => { if !self.active_nodes.contains(link_info) { self.active_nodes.push(link_info.clone()); } false } NodeState::Executed(_) => false, } }); new_nodes.iter().for_each(|l| { if self.execution_state[l.node] == NodeState::Active && !nodes_to_execute.contains(l) { nodes_to_execute.push(l.clone()); } }); self.active_nodes.retain(|l| { self.tree.nodes()[l.node].execytion_type() != NodeExecutionType::OneShot || 
self.tree.nodes()[l.node].execytion_type() == NodeExecutionType::Continuous }); self.execute_active_nodes(nodes_to_execute); } fn execute_node( tree: &mut NodeTree, context: &LogicContext, link_info: &LinkInfo, nodes_info: &[NodeInfo], execution_state: &mut [NodeState], ) -> Vec<LinkInfo> { let mut new_nodes_to_execute = Vec::new(); let info = &nodes_info[link_info.node]; info.inputs.iter().for_each(|pin_info| { let node = tree.nodes_mut()[link_info.node].node(); if node.is_input::<LogicExecution>(&pin_info.id) { return; } pin_info.links.iter().for_each(|l| { if execution_state[l.node] == NodeState::Active { let mut nodes = Self::execute_node(tree, context, l, nodes_info, execution_state); new_nodes_to_execute.append(&mut nodes); } let nodes = tree.nodes_mut(); let (from_node, to_node) = if l.node < link_info.node { let (start, end) = nodes.split_at_mut(link_info.node); (start[l.node].node(), end[0].node_mut()) } else { let (start, end) = nodes.split_at_mut(l.node); (end[0].node(), start[link_info.node].node_mut()) }; if let Some(input) = to_node.inputs_mut().get_mut(&pin_info.id) { input.copy_from(from_node, &l.pin); } }); }); let node = &mut tree.nodes_mut()[link_info.node]; execution_state[link_info.node] = node.execute(&link_info.pin, context); match &execution_state[link_info.node] { NodeState::Executed(output_pins) | NodeState::Running(output_pins) => { if let Some(pins) = output_pins { pins.iter().for_each(|pin_id| { info.outputs.iter().for_each(|o| { if pin_id == &o.id { o.links.iter().for_each(|link_info| { new_nodes_to_execute.push(link_info.clone()); }); } }); }); } } _ => {} } new_nodes_to_execute } }
use sabi_serialize::{Deserialize, Serialize, SerializeFile}; use crate::{LogicContext, LogicExecution, NodeExecutionType, NodeState, NodeTree, PinId}; #[derive(Default, PartialEq, Eq, Hash, Clone)] struct LinkInfo { node: usize, pin: PinId, } #[derive(Default, Clone)] struct PinInfo { id: PinId, links: Vec<LinkInfo>, } #[derive(Default, Clone)] struct NodeInfo { inputs: Vec<PinInfo>, outputs: Vec<PinInfo>, } #[derive(Default, Serialize, Deserialize, Clone)] #[serde(crate = "sabi_serialize")] pub struct LogicData { #[serde(flatten)] tree: NodeTree, #[serde(skip)] active_nodes: Vec<LinkInfo>, #[serde(skip)] nodes_info: Vec<NodeInfo>, #[serde(skip)] execution_state: Vec<NodeState>, #[serde(skip)] context: LogicContext, } impl SerializeFile for LogicData { fn extension() -> &'static str { "logic" } } impl From<NodeTree> for LogicData { fn from(tree: NodeTree) -> Self { Self { tree, active_nodes: Vec::new(), nodes_info: Vec::new(), execution_state: Vec::new(), context: LogicContext::default(), } } } impl LogicData { pub fn context(&self) -> &LogicContext { &self.context } pub fn context_mut(&mut self) -> &mut LogicContext { &mut self.context } pub fn is_initialized(&self) -> bool { !self.execution_state.is_empty() } pub fn init(&mut self) { let nodes = self.tree.nodes(); nodes.iter().enumerate().for_each(|(node_index, n)| { if !n.node().has_input::<LogicExecution>() && n.node().has_output::<LogicExecution>() { self.active_nodes.push(LinkInfo { node: node_index, pin: PinId::invalid(), }); } let mut node_info = NodeInfo::default(); n.node().inputs().iter().for_each(|(id, _)| { let mut pin_info = PinInfo { id: id.clone(), ..Default::default() }; let links = self.tree.get_links_to_pin(n.name(), id.name()); links.iter().for_each(|l| { if let Some(from_node_index) = self.tree.find_node_index(l.from_node()) { if let Some((from_pin_id, _)) = nodes[from_node_index] .node() .outputs() .iter() .find(|(id, _)| id.name() == l.from_pin()) { let link_info = LinkInfo { node: 
from_node_index, pin: from_pin_id.clone(), }; pin_info.links.push(link_info); } else { eprintln!( "Unable to find output pin {} of node {}", l.from_pin(), nodes[from_node_index].name() ); } } }); node_info.inputs.push(pin_info); }); n.node().outputs().iter().for_each(|(id, _)| { let mut pin_info = PinInfo { id: id.clone(), ..Default::default() }; let links = self.tree.get_links_from_pin(n.name(), id.name()); links.iter().for_each(|l| { if let Some(to_node_index) = self.tree.find_node_index(l.to_node()) { if let Some((to_pin_id, _)) = nodes[to_node_index] .node() .inputs() .iter() .find(|(id, _)| id.name() == l.to_pin()) { let link_info = LinkInfo { node: to_node_index, pin: to_pin_id.clone(), }; pin_info.links.push(link_info); } else { eprintln!( "Unable to find input pin {} of node {}", l.to_pin(), nodes[to_node_index].name() ); } } }); node_info.outputs.push(pin_info); }); self.nodes_info.push(node_info); }); self.execution_state.resize(nodes.len(), NodeState::Active); } pub fn execute(&mut self) { self.execute_active_nodes(self.active_nodes.clone()); } fn execute_active_nodes(&mut self, mut nodes_to_execute: Vec<LinkInfo>) { if nodes_to_execute.is_empty() { return; } let mut new_nodes = Vec::new(); nodes_to_execute.iter().for_each(|l| { let mut nodes = Self::execute_node( &mut self.tree, &self.context, l, &self.nodes_info, &mut self.execution_state, ); new_nodes.append(&mut nodes); }); nodes_to_execute.retain(|link_info| { let node_state = &self.execution_state[link_info.node]; match node_state { NodeState::Active => true, NodeState::Running(_) => { if !self.active_nodes.contains(link_info) { self.active_nodes.push(link_info.clone()); } false } NodeState::Executed(_) => false, } }); new_nodes.iter().for_each(|l| { if self.execution_state[l.node] == NodeState::Active && !nodes_to_execute.contains(l) { nodes_to_execute.push(l.clone()); } }); self.active_nodes.retain(|l| { self.tree.nodes()[l.node].execytion_type() != NodeExecutionType::OneShot || 
self.tree.nodes()[l.node].execytion_type() == NodeExecutionType::Continuous }); self.execute_active_nodes(nodes_to_execute); }
}
fn execute_node( tree: &mut NodeTree, context: &LogicContext, link_info: &LinkInfo, nodes_info: &[NodeInfo], execution_state: &mut [NodeState], ) -> Vec<LinkInfo> { let mut new_nodes_to_execute = Vec::new(); let info = &nodes_info[link_info.node]; info.inputs.iter().for_each(|pin_info| { let node = tree.nodes_mut()[link_info.node].node(); if node.is_input::<LogicExecution>(&pin_info.id) { return; } pin_info.links.iter().for_each(|l| { if execution_state[l.node] == NodeState::Active { let mut nodes = Self::execute_node(tree, context, l, nodes_info, execution_state); new_nodes_to_execute.append(&mut nodes); } let nodes = tree.nodes_mut(); let (from_node, to_node) = if l.node < link_info.node { let (start, end) = nodes.split_at_mut(link_info.node); (start[l.node].node(), end[0].node_mut()) } else { let (start, end) = nodes.split_at_mut(l.node); (end[0].node(), start[link_info.node].node_mut()) }; if let Some(input) = to_node.inputs_mut().get_mut(&pin_info.id) { input.copy_from(from_node, &l.pin); } }); }); let node = &mut tree.nodes_mut()[link_info.node]; execution_state[link_info.node] = node.execute(&link_info.pin, context); match &execution_state[link_info.node] { NodeState::Executed(output_pins) | NodeState::Running(output_pins) => { if let Some(pins) = output_pins { pins.iter().for_each(|pin_id| { info.outputs.iter().for_each(|o| { if pin_id == &o.id { o.links.iter().for_each(|link_info| { new_nodes_to_execute.push(link_info.clone()); }); } }); }); } } _ => {} } new_nodes_to_execute }
function_block-full_function
[]
Rust
components/resource_metering/src/recorder/cpu.rs
lroolle/tikv
f3f02d7fc6cf7e94abcf8cdb9b9ff52b110a72ba
use crate::localstorage::LocalStorage; use crate::recorder::SubRecorder; use crate::utils; use crate::utils::Stat; use crate::{RawRecord, RawRecords, SharedTagPtr}; use collections::HashMap; use fail::fail_point; use lazy_static::lazy_static; lazy_static! { static ref STAT_TASK_COUNT: prometheus::IntCounter = prometheus::register_int_counter!( "tikv_req_cpu_stat_task_count", "Counter of stat_task call" ) .unwrap(); } #[derive(Default)] pub struct CpuRecorder { thread_stats: HashMap<usize, ThreadStat>, } impl SubRecorder for CpuRecorder { fn tick(&mut self, records: &mut RawRecords, _: &mut HashMap<usize, LocalStorage>) { let records = &mut records.records; self.thread_stats.iter_mut().for_each(|(tid, thread_stat)| { let cur_tag = thread_stat.shared_ptr.take_clone(); fail_point!( "cpu-record-test-filter", cur_tag.as_ref().map_or(false, |t| !t .infos .extra_attachment .starts_with(crate::TEST_TAG_PREFIX)), |_| {} ); if let Some(cur_tag) = cur_tag { if let Ok(cur_stat) = utils::stat_task(utils::process_id(), *tid) { STAT_TASK_COUNT.inc(); let last_stat = &thread_stat.stat; let last_cpu_tick = last_stat.utime.wrapping_add(last_stat.stime); let cur_cpu_tick = cur_stat.utime.wrapping_add(cur_stat.stime); let delta_ticks = cur_cpu_tick.wrapping_sub(last_cpu_tick); if delta_ticks > 0 { let delta_ms = delta_ticks * 1_000 / utils::clock_tick(); let record = records.entry(cur_tag).or_insert_with(RawRecord::default); record.cpu_time += delta_ms as u32; } thread_stat.stat = cur_stat; } } }); } fn cleanup(&mut self) { const THREAD_STAT_LEN_THRESHOLD: usize = 500; if self.thread_stats.capacity() > THREAD_STAT_LEN_THRESHOLD && self.thread_stats.len() < THREAD_STAT_LEN_THRESHOLD / 2 { self.thread_stats.shrink_to(THREAD_STAT_LEN_THRESHOLD); } } fn reset(&mut self) { for (thread_id, stat) in &mut self.thread_stats { stat.stat = utils::stat_task(utils::process_id(), *thread_id).unwrap_or_default(); } } fn thread_created(&mut self, id: usize, shared_ptr: SharedTagPtr) { 
self.thread_stats.insert( id, ThreadStat { shared_ptr, stat: Stat::default(), }, ); } } struct ThreadStat { shared_ptr: SharedTagPtr, stat: Stat, } #[cfg(test)] #[cfg(not(target_os = "linux"))] mod tests { use super::*; #[test] fn test_record() { let mut recorder = CpuRecorder::default(); let mut records = RawRecords::default(); recorder.tick(&mut records, &mut HashMap::default()); assert!(records.records.is_empty()); } } #[cfg(test)] #[cfg(target_os = "linux")] mod tests { use super::*; use crate::{utils, RawRecords, TagInfos}; use std::sync::atomic::AtomicPtr; use std::sync::Arc; fn heavy_job() -> u64 { let m: u64 = rand::random(); let n: u64 = rand::random(); let m = m ^ n; let n = m.wrapping_mul(n); let m = m.wrapping_add(n); let n = m & n; let m = m | n; m.wrapping_sub(n) } #[test] fn test_record() { let info = Arc::new(TagInfos { store_id: 0, region_id: 0, peer_id: 0, extra_attachment: b"abc".to_vec(), }); let shared_ptr = SharedTagPtr { ptr: Arc::new(AtomicPtr::new(Arc::into_raw(info) as _)), }; let mut recorder = CpuRecorder::default(); recorder.thread_created(utils::thread_id(), shared_ptr); let thread_id = utils::thread_id(); let prev_stat = &recorder.thread_stats.get(&thread_id).unwrap().stat; let prev_cpu_ticks = prev_stat.utime.wrapping_add(prev_stat.stime); loop { let stat = utils::stat_task(utils::process_id(), thread_id).unwrap(); let cpu_ticks = stat.utime.wrapping_add(stat.stime); let delta_ms = cpu_ticks.wrapping_sub(prev_cpu_ticks) * 1_000 / utils::clock_tick(); if delta_ms != 0 { break; } heavy_job(); } let mut records = RawRecords::default(); recorder.tick(&mut records, &mut HashMap::default()); assert!(!records.records.is_empty()); } }
use crate::localstorage::LocalStorage; use crate::recorder::SubRecorder; use crate::utils; use crate::utils::Stat; use crate::{RawRecord, RawRecords, SharedTagPtr}; use collections::HashMap; use fail::fail_point; use lazy_static::lazy_static; lazy_static! { static ref STAT_TASK_COUNT: prometheus::IntCounter = prometheus::register_int_counter!( "tikv_req_cpu_stat_task_count", "Counter of stat_task call" ) .unwrap(); } #[derive(Default)] pub struct CpuRecorder { thread_stats: HashMap<usize, ThreadStat>, } impl SubRecorder for CpuRecorder { fn tick(&mut self, records: &mut RawRecords, _: &mut HashMap<usize, LocalStorage>) { let records = &mut records.records; self.thread_stats.iter_mut().for_each(|(tid, thread_stat)| { let cur_tag = thread_stat.shared_ptr.take_clone(); fail_point!( "cpu-record-test-filter", cur_tag.as_ref().map_or(false, |t| !t .infos .extra_attachment .starts_with(crate::TEST_TAG_PREFIX)), |_| {} ); if let Some(cur_tag) = cur_tag { if let Ok(cur_stat) = utils::stat_task(utils::process_id(), *tid) { STAT_TASK_COUNT.inc(); let last_stat = &thread_stat.stat; let last_cpu_tick = last_stat.utime.wrapping_add(last_stat.stime); let cur_cpu_tick = cur_stat.utime.wrapping_add(cur_stat.stime); let delta_ticks = cur_cpu_tick.wrapping_sub(last_cpu_tick); if delta_ticks > 0 { let delta_ms = delta_ticks * 1_000 / utils::clock_tick(); let record = records.entry(cur_tag).or_insert_with(RawRecord::default); record.cpu_time += delta_ms as u32; } thread_stat.stat = cur_stat; } } }); } fn cleanup(&mut self) { const THREAD_STAT_LEN_THRESHOLD: usize = 500; if self.thread_stats.capacity() > THREAD_STAT_LEN_THRESHOLD && self.thread_stats.len() < THREAD_STAT_LEN_THRESHOLD / 2 { self.thread_stats.shrink_to(THREAD_STAT_LEN_THRESHOLD); } } fn reset(&mut self) { for (thread_id, stat) in &mut self.thread_stats { stat.stat = utils::stat_task(utils::process_id(), *thread_id).unwrap_or_default(); } } fn thread_created(&mut self, id: usize, shared_ptr: SharedTagPtr) { 
self.thread_stats.insert( id, ThreadStat { shared_ptr, stat: Stat::default(), }, ); } } struct ThreadStat { shared_ptr: SharedTagPtr, stat: Stat, } #[cfg(test)] #[cfg(not(target_os = "linux"))] mod tests { use super::*; #[test] fn test_record() { let mut recorder = CpuRecorder::default(); let mut records = RawRecords::default(); recorder.tick(&mut records, &mut HashMap::default()); assert!(records.records.is_empty()); } } #[cfg(test)] #[cfg(target_os = "linux")] mod tests { use super::*; use crate::{utils, RawRecords, TagInfos}; use std::sync::atomic::AtomicPtr; use std::sync::Arc;
#[test] fn test_record() { let info = Arc::new(TagInfos { store_id: 0, region_id: 0, peer_id: 0, extra_attachment: b"abc".to_vec(), }); let shared_ptr = SharedTagPtr { ptr: Arc::new(AtomicPtr::new(Arc::into_raw(info) as _)), }; let mut recorder = CpuRecorder::default(); recorder.thread_created(utils::thread_id(), shared_ptr); let thread_id = utils::thread_id(); let prev_stat = &recorder.thread_stats.get(&thread_id).unwrap().stat; let prev_cpu_ticks = prev_stat.utime.wrapping_add(prev_stat.stime); loop { let stat = utils::stat_task(utils::process_id(), thread_id).unwrap(); let cpu_ticks = stat.utime.wrapping_add(stat.stime); let delta_ms = cpu_ticks.wrapping_sub(prev_cpu_ticks) * 1_000 / utils::clock_tick(); if delta_ms != 0 { break; } heavy_job(); } let mut records = RawRecords::default(); recorder.tick(&mut records, &mut HashMap::default()); assert!(!records.records.is_empty()); } }
fn heavy_job() -> u64 { let m: u64 = rand::random(); let n: u64 = rand::random(); let m = m ^ n; let n = m.wrapping_mul(n); let m = m.wrapping_add(n); let n = m & n; let m = m | n; m.wrapping_sub(n) }
function_block-full_function
[ { "content": "#[cfg(target_os = \"linux\")]\n\npub fn stat_task(pid: usize, tid: usize) -> std::io::Result<Stat> {\n\n procinfo::pid::stat_task(pid as _, tid as _).map(Into::into)\n\n}\n\n\n\n/// Get the [Stat] of the thread (tid) in the process (pid).\n", "file_path": "components/resource_metering/src/utils.rs", "rank": 0, "score": 414871.6945349161 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\npub fn stat_task(_pid: usize, _tid: usize) -> std::io::Result<Stat> {\n\n Ok(Stat::default())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_thread_id() {\n\n let id = thread_id();\n\n assert_ne!(id, 0);\n\n std::thread::spawn(move || {\n\n // Two threads should have different ids.\n\n assert_ne!(thread_id(), id);\n\n })\n\n .join()\n\n .unwrap();\n\n }\n\n\n\n #[test]\n\n #[cfg(target_os = \"linux\")]\n\n fn test_thread_ids() {\n\n let ids = thread_ids();\n\n assert!(ids.is_some());\n\n assert!(!ids.unwrap().is_empty());\n\n }\n\n}\n", "file_path": "components/resource_metering/src/utils.rs", "rank": 1, "score": 383275.06008354435 }, { "content": "#[bench]\n\nfn _bench_check_requirement(_: &mut test::Bencher) {\n\n tikv_util::config::check_max_open_fds(4096).unwrap();\n\n}\n", "file_path": "tests/benches/misc/mod.rs", "rank": 2, "score": 381817.39426935423 }, { "content": "/// Connects std tests and custom test framework.\n\npub fn run_test_with_hook(cases: &[&TestDescAndFn], hook: impl TestHook + Send + Clone + 'static) {\n\n crate::setup_for_ci();\n\n let cases: Vec<_> = cases\n\n .iter()\n\n .map(|case| {\n\n let name = case.desc.name.as_slice().to_owned();\n\n let h = hook.clone();\n\n let f = match case.testfn {\n\n TestFn::StaticTestFn(f) => TestFn::DynTestFn(Box::new(move || {\n\n let _watcher = CaseLifeWatcher::new(name, h);\n\n f();\n\n })),\n\n TestFn::StaticBenchFn(f) => TestFn::DynTestFn(Box::new(move || {\n\n let _watcher = CaseLifeWatcher::new(name, h);\n\n bench::run_once(move |b| f(b));\n\n })),\n\n ref f => 
panic!(\"unexpected testfn {:?}\", f),\n\n };\n\n TestDescAndFn {\n\n desc: case.desc.clone(),\n\n testfn: f,\n\n }\n\n })\n\n .collect();\n\n let args = env::args().collect::<Vec<_>>();\n\n test_main(&args, cases, None)\n\n}\n\n\n\nthread_local!(static FS: RefCell<Option<fail::FailScenario<'static>>> = RefCell::new(None));\n\n\n", "file_path": "components/test_util/src/runner.rs", "rank": 3, "score": 377697.2278800625 }, { "content": "#[inline]\n\npub fn decode_u32(data: &mut BytesSlice<'_>) -> Result<u32> {\n\n read_num_bytes(mem::size_of::<u32>(), data, BigEndian::read_u32)\n\n}\n\n\n\n/// Decodes value encoded by `encode_u16` before.\n", "file_path": "components/tikv_util/src/codec/number.rs", "rank": 4, "score": 370661.96942442015 }, { "content": "#[inline]\n\npub fn decode_u32_le(data: &mut BytesSlice<'_>) -> Result<u32> {\n\n read_num_bytes(mem::size_of::<u32>(), data, LittleEndian::read_u32)\n\n}\n\n\n\n/// Decodes value encoded by `encode_i32_le` before.\n", "file_path": "components/tikv_util/src/codec/number.rs", "rank": 5, "score": 366728.13739186624 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\npub fn thread_id() -> usize {\n\n thread_id::get()\n\n}\n\n\n\n/// Get all thread id collections under the current process.\n", "file_path": "components/resource_metering/src/utils.rs", "rank": 6, "score": 346418.995189075 }, { "content": "/// Gets the value of `TIKV_BENCH_LEVEL`. 
The larger value it is, the more comprehensive benchmarks\n\n/// will be.\n\npub fn bench_level() -> usize {\n\n if let Ok(s) = std::env::var(\"TIKV_BENCH_LEVEL\") {\n\n s.parse::<usize>().unwrap()\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "tests/benches/coprocessor_executors/util/mod.rs", "rank": 7, "score": 346011.7767523279 }, { "content": "/// A simple helper function to build the DAG handler.\n\npub fn build_dag_handler<TargetTxnStore: TxnStore + 'static>(\n\n executors: &[PbExecutor],\n\n ranges: &[KeyRange],\n\n store: &Store<RocksEngine>,\n\n) -> Box<dyn RequestHandler> {\n\n use tipb::DagRequest;\n\n\n\n let mut dag = DagRequest::default();\n\n dag.set_executors(executors.to_vec().into());\n\n\n\n tikv::coprocessor::dag::DagHandlerBuilder::new(\n\n black_box(dag),\n\n black_box(ranges.to_vec()),\n\n black_box(ToTxnStore::<TargetTxnStore>::to_store(store)),\n\n tikv_util::deadline::Deadline::from_now(std::time::Duration::from_secs(10)),\n\n 64,\n\n false,\n\n false,\n\n )\n\n .build()\n", "file_path": "tests/benches/coprocessor_executors/util/mod.rs", "rank": 8, "score": 343306.37623683567 }, { "content": "pub fn bench<M>(c: &mut criterion::Criterion<M>)\n\nwhere\n\n M: Measurement + 'static,\n\n{\n\n let mut inputs = vec![];\n\n\n\n let mut rows_options = vec![5000];\n\n if crate::util::bench_level() >= 1 {\n\n rows_options.push(5);\n\n }\n\n if crate::util::bench_level() >= 2 {\n\n rows_options.push(1);\n\n }\n\n let bencher_options: Vec<Box<dyn util::TopNBencher<M>>> = vec![Box::new(util::BatchBencher)];\n\n\n\n for rows in &rows_options {\n\n for bencher in &bencher_options {\n\n inputs.push(Input {\n\n src_rows: *rows,\n\n bencher: bencher.box_clone(),\n", "file_path": "tests/benches/coprocessor_executors/top_n/mod.rs", "rank": 9, "score": 341143.2192473582 }, { "content": "pub fn bench<M>(c: &mut criterion::Criterion<M>)\n\nwhere\n\n M: Measurement + 'static,\n\n{\n\n let mut inputs = vec![];\n\n\n\n let mut rows_options = vec![5000];\n\n if 
crate::util::bench_level() >= 1 {\n\n rows_options.push(5);\n\n }\n\n if crate::util::bench_level() >= 2 {\n\n rows_options.push(1);\n\n }\n\n let bencher_options: Vec<Box<dyn util::SelectionBencher<M>>> =\n\n vec![Box::new(util::BatchBencher)];\n\n\n\n for rows in &rows_options {\n\n for bencher in &bencher_options {\n\n inputs.push(Input {\n\n src_rows: *rows,\n", "file_path": "tests/benches/coprocessor_executors/selection/mod.rs", "rank": 10, "score": 341143.21924735815 }, { "content": "pub fn bench<M>(c: &mut criterion::Criterion<M>)\n\nwhere\n\n M: Measurement + 'static,\n\n{\n\n let mut inputs = vec![];\n\n\n\n let mut rows_options = vec![5000];\n\n if crate::util::bench_level() >= 1 {\n\n rows_options.push(5);\n\n }\n\n if crate::util::bench_level() >= 2 {\n\n rows_options.push(1);\n\n }\n\n let mut bencher_options: Vec<Box<dyn util::IntegratedBencher<M>>> = vec![\n\n Box::new(util::DAGBencher::<RocksStore>::new(false)),\n\n Box::new(util::DAGBencher::<RocksStore>::new(true)),\n\n ];\n\n if crate::util::bench_level() >= 2 {\n\n let mut additional_inputs: Vec<Box<dyn util::IntegratedBencher<M>>> = vec![\n\n Box::new(util::BatchBencher::<MemStore>::new()),\n", "file_path": "tests/benches/coprocessor_executors/integrated/mod.rs", "rank": 11, "score": 341143.21924735815 }, { "content": "/// Writes log message to decorator. See [log-message](https://github.com/tikv/rfcs/blob/master/text/2018-12-19-unified-log-format.md#log-message-section)\n\nfn write_log_msg(decorator: &mut dyn RecordDecorator, record: &Record<'_>) -> io::Result<()> {\n\n decorator.start_whitespace()?;\n\n write!(decorator, \" \")?;\n\n\n\n decorator.start_msg()?;\n\n write!(decorator, \"[\")?;\n\n let msg = format!(\"{}\", record.msg());\n\n formatter::write_escaped_str(decorator, &msg)?;\n\n write!(decorator, \"]\")?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "components/tikv_util/src/logger/mod.rs", "rank": 12, "score": 336663.288518501 }, { "content": "/// Writes log header to decorator. 
See [log-header](https://github.com/tikv/rfcs/blob/master/text/2018-12-19-unified-log-format.md#log-header-section)\n\nfn write_log_header(decorator: &mut dyn RecordDecorator, record: &Record<'_>) -> io::Result<()> {\n\n decorator.start_timestamp()?;\n\n write!(\n\n decorator,\n\n \"[{}]\",\n\n chrono::Local::now().format(TIMESTAMP_FORMAT)\n\n )?;\n\n\n\n decorator.start_whitespace()?;\n\n write!(decorator, \" \")?;\n\n\n\n decorator.start_level()?;\n\n write!(decorator, \"[{}]\", get_unified_log_level(record.level()))?;\n\n\n\n decorator.start_whitespace()?;\n\n write!(decorator, \" \")?;\n\n\n\n // Writes source file info.\n\n decorator.start_msg()?; // There is no `start_file()` or `start_line()`.\n\n if let Some(path) = Path::new(record.file())\n", "file_path": "components/tikv_util/src/logger/mod.rs", "rank": 13, "score": 336663.288518501 }, { "content": "#[bench]\n\nfn bench_async_write(b: &mut test::Bencher) {\n\n let leader = util::new_peer(2, 3);\n\n let mut region = Region::default();\n\n region.set_id(1);\n\n region.set_start_key(vec![]);\n\n region.set_end_key(vec![]);\n\n region.mut_peers().push(leader.clone());\n\n region.mut_region_epoch().set_version(2);\n\n region.mut_region_epoch().set_conf_ver(5);\n\n let (_tmp, db) = new_engine();\n\n let kv = RaftKv::new(\n\n SyncBenchRouter::new(region.clone(), db.clone()),\n\n RocksEngine::from_db(db),\n\n );\n\n\n\n let mut ctx = Context::default();\n\n ctx.set_region_id(region.get_id());\n\n ctx.set_region_epoch(region.get_region_epoch().clone());\n\n ctx.set_peer(leader);\n\n b.iter(|| {\n", "file_path": "tests/benches/misc/raftkv/mod.rs", "rank": 14, "score": 336079.1372280927 }, { "content": "#[bench]\n\nfn bench_async_snapshot(b: &mut test::Bencher) {\n\n let leader = util::new_peer(2, 3);\n\n let mut region = Region::default();\n\n region.set_id(1);\n\n region.set_start_key(vec![]);\n\n region.set_end_key(vec![]);\n\n region.mut_peers().push(leader.clone());\n\n 
region.mut_region_epoch().set_version(2);\n\n region.mut_region_epoch().set_conf_ver(5);\n\n let (_tmp, db) = new_engine();\n\n let kv = RaftKv::new(\n\n SyncBenchRouter::new(region.clone(), db.clone()),\n\n RocksEngine::from_db(db),\n\n );\n\n\n\n let mut ctx = Context::default();\n\n ctx.set_region_id(region.get_id());\n\n ctx.set_region_epoch(region.get_region_epoch().clone());\n\n ctx.set_peer(leader);\n\n b.iter(|| {\n", "file_path": "tests/benches/misc/raftkv/mod.rs", "rank": 15, "score": 336079.1372280927 }, { "content": "pub fn bench<M>(c: &mut criterion::Criterion<M>)\n\nwhere\n\n M: Measurement + 'static,\n\n{\n\n let mut inputs = vec![\n\n Input::new(util::BatchTableScanNext1024Bencher::<MemStore>::new()),\n\n Input::new(util::TableScanDAGBencher::<RocksStore>::new(false, ROWS)),\n\n Input::new(util::TableScanDAGBencher::<RocksStore>::new(true, ROWS)),\n\n ];\n\n if crate::util::bench_level() >= 2 {\n\n let mut additional_inputs = vec![\n\n Input::new(util::BatchTableScanNext1024Bencher::<RocksStore>::new()),\n\n Input::new(util::TableScanDAGBencher::<MemStore>::new(false, ROWS)),\n\n Input::new(util::TableScanDAGBencher::<MemStore>::new(true, ROWS)),\n\n ];\n\n inputs.append(&mut additional_inputs);\n\n }\n\n\n\n let mut cases = vec![\n\n BenchCase::new(\"table_scan_primary_key\", bench_table_scan_primary_key),\n", "file_path": "tests/benches/coprocessor_executors/table_scan/mod.rs", "rank": 16, "score": 335682.1449879195 }, { "content": "pub fn bench<M>(c: &mut criterion::Criterion<M>)\n\nwhere\n\n M: Measurement + 'static,\n\n{\n\n let mut inputs = vec![];\n\n\n\n let mut rows_options = vec![5000];\n\n if crate::util::bench_level() >= 1 {\n\n rows_options.push(5);\n\n }\n\n if crate::util::bench_level() >= 2 {\n\n rows_options.push(1);\n\n }\n\n let bencher_options: Vec<Box<dyn util::HashAggrBencher<M>>> =\n\n vec![Box::new(util::BatchBencher)];\n\n\n\n for rows in &rows_options {\n\n for bencher in &bencher_options {\n\n inputs.push(Input {\n\n 
src_rows: *rows,\n", "file_path": "tests/benches/coprocessor_executors/hash_aggr/mod.rs", "rank": 17, "score": 335682.1449879195 }, { "content": "pub fn bench<M>(c: &mut criterion::Criterion<M>)\n\nwhere\n\n M: Measurement + 'static,\n\n{\n\n let mut inputs = vec![\n\n Input::new(util::BatchIndexScanNext1024Bencher::<MemStore>::new()),\n\n Input::new(util::IndexScanDAGBencher::<RocksStore>::new(false, ROWS)),\n\n Input::new(util::IndexScanDAGBencher::<RocksStore>::new(true, ROWS)),\n\n ];\n\n if crate::util::bench_level() >= 2 {\n\n let mut additional_inputs = vec![\n\n Input::new(util::BatchIndexScanNext1024Bencher::<RocksStore>::new()),\n\n Input::new(util::IndexScanDAGBencher::<MemStore>::new(false, ROWS)),\n\n Input::new(util::IndexScanDAGBencher::<MemStore>::new(true, ROWS)),\n\n ];\n\n inputs.append(&mut additional_inputs);\n\n }\n\n\n\n let mut cases = vec![\n\n BenchCase::new(\"index_scan_primary_key\", bench_index_scan_primary_key),\n", "file_path": "tests/benches/coprocessor_executors/index_scan/mod.rs", "rank": 18, "score": 335682.1449879195 }, { "content": "pub fn bench<M>(c: &mut criterion::Criterion<M>)\n\nwhere\n\n M: Measurement + 'static,\n\n{\n\n let mut inputs = vec![];\n\n\n\n let mut rows_options = vec![5000];\n\n if crate::util::bench_level() >= 1 {\n\n rows_options.push(5);\n\n }\n\n if crate::util::bench_level() >= 2 {\n\n rows_options.push(1);\n\n }\n\n let bencher_options: Vec<Box<dyn util::StreamAggrBencher<M>>> =\n\n vec![Box::new(util::BatchBencher)];\n\n\n\n for rows in &rows_options {\n\n for bencher in &bencher_options {\n\n inputs.push(Input {\n\n src_rows: *rows,\n", "file_path": "tests/benches/coprocessor_executors/stream_aggr/mod.rs", "rank": 19, "score": 335682.1449879195 }, { "content": "pub fn bench<M>(c: &mut criterion::Criterion<M>)\n\nwhere\n\n M: Measurement + 'static,\n\n{\n\n let mut inputs = vec![];\n\n\n\n let mut rows_options = vec![5000];\n\n if crate::util::bench_level() >= 1 {\n\n rows_options.push(5);\n\n }\n\n if 
crate::util::bench_level() >= 2 {\n\n rows_options.push(1);\n\n }\n\n let bencher_options: Vec<Box<dyn util::SimpleAggrBencher<M>>> =\n\n vec![Box::new(util::BatchBencher)];\n\n\n\n for rows in &rows_options {\n\n for bencher in &bencher_options {\n\n inputs.push(Input {\n\n src_rows: *rows,\n", "file_path": "tests/benches/coprocessor_executors/simple_aggr/mod.rs", "rank": 20, "score": 335682.1449879195 }, { "content": "pub fn test_rollback() {\n\n let (cluster, client, ctx) = must_new_and_configure_cluster_and_kv_client(|cluster| {\n\n cluster.cfg.raft_store.pd_store_heartbeat_tick_interval = ReadableDuration::millis(50);\n\n });\n\n let key = b\"key2\".to_vec();\n\n let store_id = 1;\n\n put(&cluster, &client, &ctx, store_id, key.clone());\n\n let start_ts = block_on(cluster.pd_client.get_tso()).unwrap();\n\n\n\n let mut rollback_req = BatchRollbackRequest::default();\n\n rollback_req.set_context(ctx.clone());\n\n rollback_req.start_version = start_ts.into_inner();\n\n rollback_req.set_keys(vec![key].into_iter().collect());\n\n let rollback_resp = client.kv_batch_rollback(&rollback_req).unwrap();\n\n assert!(\n\n !rollback_resp.has_region_error(),\n\n \"{:?}\",\n\n rollback_resp.get_region_error()\n\n );\n\n assert!(\n", "file_path": "tests/integrations/raftstore/test_stats.rs", "rank": 21, "score": 332228.2368281018 }, { "content": "#[bench]\n\nfn bench_async_snapshots_noop(b: &mut test::Bencher) {\n\n let (_dir, db) = new_engine();\n\n let snapshot = RocksSnapshot::new(Arc::clone(&db));\n\n let resp = ReadResponse {\n\n response: RaftCmdResponse::default(),\n\n snapshot: Some(RegionSnapshot::from_snapshot(\n\n Arc::new(snapshot),\n\n Arc::new(Region::default()),\n\n )),\n\n txn_extra_op: TxnExtraOp::Noop,\n\n };\n\n\n\n b.iter(|| {\n\n let cb1: EngineCallback<RegionSnapshot<RocksSnapshot>> = Box::new(\n\n move |(_, res): (CbContext, EngineResult<RegionSnapshot<RocksSnapshot>>)| {\n\n assert!(res.is_ok());\n\n },\n\n );\n\n let cb2: 
EngineCallback<CmdRes<RocksSnapshot>> = Box::new(\n\n move |(ctx, res): (CbContext, EngineResult<CmdRes<RocksSnapshot>>)| {\n", "file_path": "tests/benches/misc/raftkv/mod.rs", "rank": 22, "score": 331455.8732759777 }, { "content": "pub fn case_receiver_shutdown(test_suite: &mut TestSuite) {\n\n test_suite.reset();\n\n let port = alloc_port();\n\n test_suite.start_receiver_at(port);\n\n test_suite.cfg_enabled(true);\n\n test_suite.cfg_max_resource_groups(5);\n\n test_suite.cfg_receiver_address(format!(\"127.0.0.1:{}\", port));\n\n\n\n // Workload\n\n // [req-{1..5} * 10, req-{6..10} * 1]\n\n let mut wl = iter::repeat(1..=5)\n\n .take(10)\n\n .flatten()\n\n .chain(6..=10)\n\n .map(|n| format!(\"req-{}\", n))\n\n .collect::<Vec<_>>();\n\n wl.shuffle(&mut rand::thread_rng());\n\n test_suite.setup_workload(wl);\n\n\n\n // | Receiver Alive |\n", "file_path": "tests/integrations/resource_metering/test_receiver.rs", "rank": 23, "score": 330150.3795590665 }, { "content": "pub fn case_precision(test_suite: &mut TestSuite) {\n\n test_suite.reset();\n\n let port = alloc_port();\n\n test_suite.start_receiver_at(port);\n\n test_suite.cfg_report_receiver_interval(\"10s\");\n\n test_suite.cfg_enabled(true);\n\n test_suite.cfg_receiver_address(format!(\"127.0.0.1:{}\", port));\n\n\n\n // Workload\n\n // [req-1]\n\n test_suite.setup_workload(vec![\"req-1\"]);\n\n\n\n // | Precision |\n\n // | 1s |\n\n sleep(test_suite.get_current_cfg().report_receiver_interval.0 + ONE_SEC);\n\n let res = test_suite.fetch_reported_cpu_time();\n\n let (secs, _) = res.get(\"req-1\").unwrap();\n\n for (l, r) in secs.iter().zip({\n\n let mut next_secs = secs.iter();\n\n next_secs.next();\n", "file_path": "tests/integrations/resource_metering/test_dynamic_config.rs", "rank": 24, "score": 330150.3795590665 }, { "content": "pub fn case_receiver_blocking(test_suite: &mut TestSuite) {\n\n test_suite.reset();\n\n let port = alloc_port();\n\n test_suite.start_receiver_at(port);\n\n 
test_suite.cfg_enabled(true);\n\n test_suite.cfg_max_resource_groups(5);\n\n test_suite.cfg_receiver_address(format!(\"127.0.0.1:{}\", port));\n\n\n\n // Workload\n\n // [req-{1..5} * 10, req-{6..10} * 1]\n\n let mut wl = iter::repeat(1..=5)\n\n .take(10)\n\n .flatten()\n\n .chain(6..=10)\n\n .map(|n| format!(\"req-{}\", n))\n\n .collect::<Vec<_>>();\n\n wl.shuffle(&mut rand::thread_rng());\n\n test_suite.setup_workload(wl);\n\n\n\n // | Block Receiver |\n", "file_path": "tests/integrations/resource_metering/test_receiver.rs", "rank": 25, "score": 330150.3795590665 }, { "content": "pub fn case_enable(test_suite: &mut TestSuite) {\n\n test_suite.reset();\n\n let port = alloc_port();\n\n test_suite.start_receiver_at(port);\n\n\n\n // Workload\n\n // [req-1, req-2]\n\n test_suite.setup_workload(vec![\"req-1\", \"req-2\"]);\n\n\n\n // | Address | Enabled |\n\n // | x | o |\n\n test_suite.cfg_enabled(true);\n\n sleep(test_suite.get_current_cfg().report_receiver_interval.0 + ONE_SEC);\n\n assert!(test_suite.fetch_reported_cpu_time().is_empty());\n\n\n\n // Workload\n\n // []\n\n test_suite.cancel_workload();\n\n\n\n // | Address | Enabled |\n", "file_path": "tests/integrations/resource_metering/test_dynamic_config.rs", "rank": 26, "score": 330150.3795590665 }, { "content": "#[bench]\n\nfn bench_record_prefix_start_with(b: &mut Bencher) {\n\n let key: &[u8] = b\"_rabc\";\n\n b.iter(|| {\n\n let n = black_box(1000);\n\n (0..n).all(|_| black_box(key.starts_with(RECORD_PREFIX_SEP)))\n\n });\n\n}\n\n\n", "file_path": "tests/benches/misc/coprocessor/codec/mod.rs", "rank": 27, "score": 330060.16126065404 }, { "content": "fn test_region_info_accessor_impl(cluster: &mut Cluster<NodeCluster>, c: &RegionInfoAccessor) {\n\n for i in 0..9 {\n\n let k = format!(\"k{}\", i).into_bytes();\n\n let v = format!(\"v{}\", i).into_bytes();\n\n cluster.must_put(&k, &v);\n\n }\n\n\n\n let pd_client = Arc::clone(&cluster.pd_client);\n\n\n\n let init_regions = dump(c);\n\n 
check_region_ranges(&init_regions, &[(&b\"\"[..], &b\"\"[..])]);\n\n assert_eq!(init_regions[0].0, cluster.get_region(b\"k1\"));\n\n\n\n // Split\n\n {\n\n let r1 = cluster.get_region(b\"k1\");\n\n cluster.must_split(&r1, b\"k1\");\n\n let r2 = cluster.get_region(b\"k4\");\n\n cluster.must_split(&r2, b\"k4\");\n\n let r3 = cluster.get_region(b\"k2\");\n", "file_path": "tests/integrations/raftstore/test_region_info_accessor.rs", "rank": 28, "score": 328730.44684814196 }, { "content": "pub fn case_alter_receiver_addr(test_suite: &mut TestSuite) {\n\n test_suite.reset();\n\n let port = alloc_port();\n\n test_suite.start_receiver_at(port);\n\n test_suite.cfg_enabled(true);\n\n test_suite.cfg_max_resource_groups(5);\n\n\n\n // Workload\n\n // [req-{1..5} * 10, req-{6..10} * 1]\n\n let mut wl = iter::repeat(1..=5)\n\n .take(10)\n\n .flatten()\n\n .chain(6..=10)\n\n .map(|n| format!(\"req-{}\", n))\n\n .collect::<Vec<_>>();\n\n wl.shuffle(&mut rand::thread_rng());\n\n test_suite.setup_workload(wl);\n\n\n\n // | Address | Enabled |\n\n // | x | o |\n", "file_path": "tests/integrations/resource_metering/test_receiver.rs", "rank": 29, "score": 326474.90077217575 }, { "content": "pub fn case_report_interval(test_suite: &mut TestSuite) {\n\n test_suite.reset();\n\n let port = alloc_port();\n\n test_suite.start_receiver_at(port);\n\n test_suite.cfg_enabled(true);\n\n test_suite.cfg_receiver_address(format!(\"127.0.0.1:{}\", port));\n\n\n\n // Workload\n\n // [req-1, req-2]\n\n test_suite.setup_workload(vec![\"req-1\", \"req-2\"]);\n\n\n\n // | Report Interval |\n\n // | 15s |\n\n test_suite.cfg_report_receiver_interval(\"15s\");\n\n test_suite.flush_receiver();\n\n\n\n sleep(Duration::from_secs(5));\n\n assert!(test_suite.fetch_reported_cpu_time().is_empty());\n\n sleep(Duration::from_secs(5));\n\n assert!(test_suite.fetch_reported_cpu_time().is_empty());\n", "file_path": "tests/integrations/resource_metering/test_dynamic_config.rs", "rank": 30, "score": 326474.90077217575 }, { 
"content": "pub fn new_server_cluster(id: u64, count: usize) -> Cluster<ServerCluster> {\n\n let pd_client = Arc::new(TestPdClient::new(id, false));\n\n let sim = Arc::new(RwLock::new(ServerCluster::new(Arc::clone(&pd_client))));\n\n Cluster::new(id, count, sim, pd_client)\n\n}\n\n\n", "file_path": "components/test_raftstore/src/server.rs", "rank": 31, "score": 326460.49461396056 }, { "content": "pub fn new_node_cluster(id: u64, count: usize) -> Cluster<NodeCluster> {\n\n let pd_client = Arc::new(TestPdClient::new(id, false));\n\n let sim = Arc::new(RwLock::new(NodeCluster::new(Arc::clone(&pd_client))));\n\n Cluster::new(id, count, sim, pd_client)\n\n}\n\n\n", "file_path": "components/test_raftstore/src/node.rs", "rank": 32, "score": 326460.49461396056 }, { "content": "#[bench]\n\nfn bench_record_prefix_littleendian_check(b: &mut Bencher) {\n\n let key: &[u8] = b\"_rabc\";\n\n let prefix: u16 = LittleEndian::read_u16(RECORD_PREFIX_SEP);\n\n b.iter(|| {\n\n let n = black_box(1000);\n\n (0..n).all(|_| black_box(key.len() > 2 && LittleEndian::read_u16(key) == prefix))\n\n });\n\n}\n", "file_path": "tests/benches/misc/coprocessor/codec/mod.rs", "rank": 33, "score": 324335.4757464562 }, { "content": "#[bench]\n\nfn bench_record_prefix_equal_check(b: &mut Bencher) {\n\n let key: &[u8] = b\"_rabc\";\n\n b.iter(|| {\n\n let n = black_box(1000);\n\n (0..n).all(|_| {\n\n black_box(\n\n key.len() > 2 && key[0] == RECORD_PREFIX_SEP[0] && key[1] == RECORD_PREFIX_SEP[1],\n\n )\n\n })\n\n });\n\n}\n\n\n", "file_path": "tests/benches/misc/coprocessor/codec/mod.rs", "rank": 34, "score": 324335.4757464562 }, { "content": "#[bench]\n\nfn bench_record_prefix_bigendian_check(b: &mut Bencher) {\n\n let key: &[u8] = b\"_rabc\";\n\n let prefix: u16 = BigEndian::read_u16(RECORD_PREFIX_SEP);\n\n b.iter(|| {\n\n let n = black_box(1000);\n\n (0..n).all(|_| black_box(key.len() > 2 && BigEndian::read_u16(key) == prefix))\n\n });\n\n}\n\n\n", "file_path": 
"tests/benches/misc/coprocessor/codec/mod.rs", "rank": 35, "score": 324335.4757464562 }, { "content": "#[bench]\n\nfn bench_get_txn_commit_record_100(c: &mut test::Bencher) {\n\n bench_get_txn_commit_record(c, 100);\n\n}\n\n\n", "file_path": "tests/benches/misc/storage/mvcc_reader.rs", "rank": 36, "score": 323241.46433459676 }, { "content": "#[bench]\n\nfn bench_get_txn_commit_record_5(c: &mut test::Bencher) {\n\n bench_get_txn_commit_record(c, 5);\n\n}\n", "file_path": "tests/benches/misc/storage/mvcc_reader.rs", "rank": 37, "score": 323241.4643345968 }, { "content": "pub fn case_max_resource_groups(test_suite: &mut TestSuite) {\n\n test_suite.reset();\n\n let port = alloc_port();\n\n test_suite.start_receiver_at(port);\n\n test_suite.cfg_enabled(true);\n\n test_suite.cfg_receiver_address(format!(\"127.0.0.1:{}\", port));\n\n\n\n // Workload\n\n // [req-{1..3} * 10, req-{4..5} * 1]\n\n let mut wl = iter::repeat(1..=3)\n\n .take(10)\n\n .flatten()\n\n .chain(4..=5)\n\n .map(|n| format!(\"req-{}\", n))\n\n .collect::<Vec<_>>();\n\n wl.shuffle(&mut rand::thread_rng());\n\n test_suite.setup_workload(wl);\n\n\n\n // | Max Resource Groups |\n\n // | 5000 |\n", "file_path": "tests/integrations/resource_metering/test_dynamic_config.rs", "rank": 38, "score": 322937.2038631118 }, { "content": "type BenchFn<M, I> = Box<dyn Fn(&mut criterion::Bencher<M>, &I) + 'static>;\n\n\n", "file_path": "tests/benches/coprocessor_executors/util/mod.rs", "rank": 39, "score": 322321.54639155563 }, { "content": "pub fn init() {\n\n INIT.call_once(test_util::setup_for_ci);\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ClientReceiver {\n\n receiver: Arc<Mutex<Option<ClientDuplexReceiver<ChangeDataEvent>>>>,\n\n}\n\n\n\nimpl ClientReceiver {\n\n pub fn replace(\n\n &self,\n\n rx: Option<ClientDuplexReceiver<ChangeDataEvent>>,\n\n ) -> Option<ClientDuplexReceiver<ChangeDataEvent>> {\n\n std::mem::replace(&mut *self.receiver.lock().unwrap(), rx)\n\n }\n\n}\n\n\n", "file_path": 
"components/cdc/tests/mod.rs", "rank": 40, "score": 321809.07625195873 }, { "content": "pub fn new_incompatible_node_cluster(id: u64, count: usize) -> Cluster<NodeCluster> {\n\n let pd_client = Arc::new(TestPdClient::new(id, true));\n\n let sim = Arc::new(RwLock::new(NodeCluster::new(Arc::clone(&pd_client))));\n\n Cluster::new(id, count, sim, pd_client)\n\n}\n", "file_path": "components/test_raftstore/src/node.rs", "rank": 41, "score": 321660.5211035974 }, { "content": "pub fn new_incompatible_server_cluster(id: u64, count: usize) -> Cluster<ServerCluster> {\n\n let pd_client = Arc::new(TestPdClient::new(id, true));\n\n let sim = Arc::new(RwLock::new(ServerCluster::new(Arc::clone(&pd_client))));\n\n Cluster::new(id, count, sim, pd_client)\n\n}\n\n\n", "file_path": "components/test_raftstore/src/server.rs", "rank": 42, "score": 321660.5211035974 }, { "content": "/// Creates a callback that is automatically called on drop if it's not called\n\n/// explicitly.\n\n///\n\n/// Note that leaking the callback can cause it to be never called but it\n\n/// rarely happens.\n\n///\n\n/// Also note that because `callback` and `arg_on_drop` may be called in the `drop`\n\n/// method, do not panic inside them or use `safe_panic` instead.\n\npub fn must_call<T: Send + 'static>(\n\n callback: impl FnOnce(T) + Send + 'static,\n\n arg_on_drop: impl FnOnce() -> T + Send + 'static,\n\n) -> Callback<T> {\n\n let mut must_call = MustCall {\n\n callback: Some(callback),\n\n arg_on_drop: Some(arg_on_drop),\n\n _phantom: PhantomData,\n\n };\n\n Box::new(move |arg: T| {\n\n let callback = must_call.callback.take().unwrap();\n\n callback(arg);\n\n })\n\n}\n\n\n\npub struct MustCall<T, C, A>\n\nwhere\n\n C: FnOnce(T),\n\n A: FnOnce() -> T,\n\n{\n", "file_path": "components/tikv_util/src/callback.rs", "rank": 43, "score": 320588.0573019332 }, { "content": "fn execute<M: criterion::measurement::Measurement + 'static>(c: &mut criterion::Criterion<M>) {\n\n util::fixture::bench(c);\n\n 
table_scan::bench(c);\n\n index_scan::bench(c);\n\n selection::bench(c);\n\n simple_aggr::bench(c);\n\n hash_aggr::bench(c);\n\n stream_aggr::bench(c);\n\n top_n::bench(c);\n\n integrated::bench(c);\n\n\n\n c.final_summary();\n\n}\n\n\n", "file_path": "tests/benches/coprocessor_executors/mod.rs", "rank": 44, "score": 319375.98837559967 }, { "content": "/// system_info collects system related information, e.g: kernel\n\npub fn system_info(collector: &mut Vec<ServerInfoItem>) {\n\n // sysctl\n\n let sysctl = get_sysctl_list();\n\n let mut pairs = vec![];\n\n for (key, val) in sysctl.into_iter() {\n\n let mut pair = ServerInfoPair::default();\n\n pair.set_key(key);\n\n pair.set_value(val);\n\n pairs.push(pair);\n\n }\n\n // Sort pairs by key to make result stable\n\n pairs.sort_by(|a, b| a.get_key().cmp(b.get_key()));\n\n let mut item = ServerInfoItem::default();\n\n item.set_tp(\"system\".to_string());\n\n item.set_name(\"sysctl\".to_string());\n\n item.set_pairs(pairs.into());\n\n collector.push(item);\n\n if let Some(item) = get_transparent_hugepage() {\n\n collector.push(item);\n\n }\n\n}\n\n\n", "file_path": "src/server/service/diagnostics/sys.rs", "rank": 45, "score": 316098.7753902153 }, { "content": "/// hardware_info collects CPU/Memory/Network/Disk hardware information\n\npub fn hardware_info(collector: &mut Vec<ServerInfoItem>) {\n\n cpu_hardware_info(collector);\n\n mem_hardware_info(collector);\n\n disk_hardware_info(collector);\n\n nic_hardware_info(collector);\n\n}\n\n\n", "file_path": "src/server/service/diagnostics/sys.rs", "rank": 46, "score": 316098.6401696672 }, { "content": "#[allow(dead_code)]\n\npub fn process_info(collector: &mut Vec<ServerInfoItem>) {\n\n let mut system = SYS_INFO.lock().unwrap();\n\n system.refresh_processes();\n\n let processes = system.get_processes();\n\n for (pid, p) in processes.iter() {\n\n if p.cmd().is_empty() {\n\n continue;\n\n }\n\n let mut pairs = vec![];\n\n let infos = vec![\n\n (\"executable\", 
format!(\"{:?}\", p.exe())),\n\n (\"cmd\", p.cmd().join(\" \")),\n\n (\"cwd\", format!(\"{:?}\", p.cwd())),\n\n (\"start-time\", p.start_time().to_string()),\n\n (\"memory\", p.memory().to_string()),\n\n (\"status\", p.status().to_string().to_owned()),\n\n (\"cpu-usage\", p.cpu_usage().to_string()),\n\n ];\n\n for (key, val) in infos.into_iter() {\n\n let mut pair = ServerInfoPair::default();\n", "file_path": "src/server/service/diagnostics/sys.rs", "rank": 47, "score": 316091.894835147 }, { "content": "pub fn init() {\n\n INIT.call_once(test_util::setup_for_ci);\n\n}\n\n\n\npub struct TestSuite {\n\n pub cluster: Cluster<ServerCluster>,\n\n pub endpoints: HashMap<u64, LazyWorker<Task<RocksSnapshot>>>,\n\n pub obs: HashMap<u64, Observer<RocksEngine>>,\n\n tikv_cli: HashMap<u64, TikvClient>,\n\n concurrency_managers: HashMap<u64, ConcurrencyManager>,\n\n\n\n env: Arc<Environment>,\n\n}\n\n\n\nimpl TestSuite {\n\n pub fn new(count: usize) -> Self {\n\n let mut cluster = new_server_cluster(1, count);\n\n // Increase the Raft tick interval to make this test case running reliably.\n\n configure_for_lease_read(&mut cluster, Some(100), None);\n\n Self::with_cluster(count, cluster)\n", "file_path": "components/resolved_ts/tests/mod.rs", "rank": 48, "score": 315656.5843919154 }, { "content": "#[rpn_fn(nullable, varg, min_args = 2, capture = [ctx])]\n\n#[inline]\n\npub fn least_time(mut ctx: &mut EvalContext, args: &[Option<BytesRef>]) -> Result<Option<Bytes>> {\n\n // Max datetime range defined at https://dev.mysql.com/doc/refman/8.0/en/datetime.html\n\n let mut least = Some(Time::parse_datetime(\n\n &mut ctx,\n\n \"9999-12-31 23:59:59\",\n\n 0,\n\n true,\n\n )?);\n\n for arg in args {\n\n match arg {\n\n Some(arg_val) => {\n\n let s = match str::from_utf8(arg_val) {\n\n Ok(s) => s,\n\n Err(err) => {\n\n return ctx\n\n .handle_invalid_time_error(Error::Encoding(err))\n\n .map(|_| Ok(None))?;\n\n }\n\n };\n\n match Time::parse_datetime(ctx, s, Time::parse_fsp(s), true) {\n", 
"file_path": "components/tidb_query_expr/src/impl_compare.rs", "rank": 49, "score": 314595.2569199774 }, { "content": "pub fn calc_data_crc32(data: &[u8]) -> u32 {\n\n let mut digest = crc32fast::Hasher::new();\n\n digest.update(data);\n\n digest.finalize()\n\n}\n\n\n", "file_path": "components/test_sst_importer/src/lib.rs", "rank": 50, "score": 310104.2533028495 }, { "content": "fn bench_writebatch_impl(b: &mut Bencher, batch_keys: usize) {\n\n let path = Builder::new()\n\n .prefix(\"/tmp/rocksdb_write_batch_bench\")\n\n .tempdir()\n\n .unwrap();\n\n let db = Arc::new(DB::open_default(path.path().to_str().unwrap()).unwrap());\n\n let key_count = 1 << 13;\n\n let round = key_count / batch_keys;\n\n b.iter(|| {\n\n writebatch(&db, round, batch_keys);\n\n });\n\n}\n\n\n", "file_path": "tests/benches/misc/writebatch/bench_writebatch.rs", "rank": 51, "score": 309953.5903375172 }, { "content": "#[allow(clippy::type_complexity)]\n\npub fn new_event_feed(\n\n client: &ChangeDataClient,\n\n) -> (\n\n ClientDuplexSender<ChangeDataRequest>,\n\n ClientReceiver,\n\n Box<dyn Fn(bool) -> ChangeDataEvent + Send>,\n\n) {\n\n let (req_tx, resp_rx) = client.event_feed().unwrap();\n\n let event_feed_wrap = Arc::new(Mutex::new(Some(resp_rx)));\n\n let event_feed_wrap_clone = event_feed_wrap.clone();\n\n\n\n let receive_event = move |keep_resolved_ts: bool| loop {\n\n let mut events;\n\n {\n\n let mut event_feed = event_feed_wrap_clone.lock().unwrap();\n\n events = event_feed.take();\n\n }\n\n let mut events_rx = if let Some(events_rx) = events.as_mut() {\n\n events_rx\n\n } else {\n", "file_path": "components/cdc/tests/mod.rs", "rank": 52, "score": 309821.91905239783 }, { "content": "/// `add` adds a and b and carry, stores the sum and new carry.\n\nfn add(a: u32, b: u32, carry: &mut u32, res: &mut u32) {\n\n let sum = a + b + *carry;\n\n if sum >= WORD_BASE {\n\n *res = sum - WORD_BASE;\n\n *carry = 1;\n\n } else {\n\n *res = sum;\n\n *carry = 0;\n\n }\n\n}\n\n\n", "file_path": 
"components/tidb_query_datatype/src/codec/mysql/decimal.rs", "rank": 53, "score": 308882.2735776366 }, { "content": "fn bench_get_txn_commit_record(b: &mut test::Bencher, n: u64) {\n\n let key = Key::from_raw(&table::encode_row_key(1, 0));\n\n let store = prepare_mvcc_data(&key, n);\n\n b.iter(|| {\n\n let mut mvcc_reader = SnapshotReader::new(\n\n 1.into(),\n\n store.get_engine().snapshot(Default::default()).unwrap(),\n\n true,\n\n );\n\n mvcc_reader\n\n .get_txn_commit_record(&key)\n\n .unwrap()\n\n .unwrap_single_record();\n\n });\n\n}\n\n\n", "file_path": "tests/benches/misc/storage/mvcc_reader.rs", "rank": 54, "score": 306859.9865887629 }, { "content": "/// Round each component.\n\n/// ```ignore\n\n/// let mut parts = [2019, 12, 1, 23, 59, 59, 1000000];\n\n/// round_components(&mut parts);\n\n/// assert_eq!([2019, 12, 2, 0, 0, 0, 0], parts);\n\n/// ```\n\n/// When year, month or day is zero, there can not have a carry.\n\n/// e.g.: `\"1998-11-00 23:59:59.999\" (fsp = 2, round = true)`, in `hms` it contains a carry,\n\n/// however, the `day` is 0, which is invalid in `MySQL`. When thoese cases encountered, return\n\n/// None.\n\nfn round_components(parts: &mut [u32]) -> Option<()> {\n\n debug_assert_eq!(parts.len(), 7);\n\n let modulus = [\n\n std::u32::MAX,\n\n 12,\n\n last_day_of_month(parts[0], parts[1]),\n\n // hms[.fraction]\n\n 24,\n\n 60,\n\n 60,\n\n 1_000_000,\n\n ];\n\n for i in (1..=6).rev() {\n\n let is_ymd = u32::from(i < 3);\n\n if parts[i] >= modulus[i] + is_ymd {\n\n parts[i] -= modulus[i];\n\n if i < 4 && parts[i - 1] == 0 || parts[i - 1] > modulus[i - 1] {\n\n return None;\n\n }\n\n parts[i - 1] += 1;\n\n }\n\n }\n\n Some(())\n\n}\n\n\n", "file_path": "components/tidb_query_datatype/src/codec/mysql/time/mod.rs", "rank": 55, "score": 303658.89264667383 }, { "content": "#[inline]\n\npub fn start(_name: impl AsRef<str>) -> bool {\n\n // Do nothing\n\n false\n\n}\n\n\n\n/// Stop profiling. 
Always returns false if `profiling` feature is not enabled.\n", "file_path": "components/profiler/src/profiler_dummy.rs", "rank": 56, "score": 303142.670867526 }, { "content": "#[inline]\n\npub fn start(name: impl AsRef<str>) -> bool {\n\n let mut profiler = ACTIVE_PROFILER.lock().unwrap();\n\n\n\n // Profiling in progress.\n\n if *profiler != Profiler::None {\n\n return false;\n\n }\n\n\n\n if valgrind_request::running_on_valgrind() != 0 {\n\n *profiler = Profiler::CallGrind;\n\n CallgrindClientRequest::start();\n\n } else {\n\n *profiler = Profiler::GPerfTools;\n\n gperftools::PROFILER\n\n .lock()\n\n .unwrap()\n\n .start(name.as_ref())\n\n .unwrap();\n\n }\n\n\n\n true\n\n}\n\n\n\n/// Stop profiling. Returns false if failed, i.e. there is no profiling in progress.\n\n///\n\n/// When `profiling` feature is not enabled, this function will do nothing and there is totally\n\n/// zero cost.\n", "file_path": "components/profiler/src/profiler_unix.rs", "rank": 57, "score": 303142.670867526 }, { "content": "#[inline]\n\npub fn read_slice<'a>(data: &mut BytesSlice<'a>, size: usize) -> Result<BytesSlice<'a>> {\n\n if data.len() >= size {\n\n let buf = &data[0..size];\n\n *data = &data[size..];\n\n Ok(buf)\n\n } else {\n\n Err(Error::unexpected_eof())\n\n }\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum Error {\n\n #[error(\"{0}\")]\n\n Io(#[from] io::Error),\n\n #[error(\"bad format key(length)\")]\n\n KeyLength,\n\n #[error(\"bad format key(padding)\")]\n\n KeyPadding,\n\n #[error(\"key not found\")]\n\n KeyNotFound,\n", "file_path": "components/tikv_util/src/codec/mod.rs", "rank": 58, "score": 300796.20176606777 }, { "content": "pub fn cpu_total(state: &pid::Stat) -> f64 {\n\n (state.utime + state.stime) as f64 / *CLK_TCK\n\n}\n\n\n", "file_path": "components/tikv_util/src/metrics/threads_linux.rs", "rank": 59, "score": 300381.2044091511 }, { "content": "pub fn offset_for_column(cols: &[ColumnInfo], col_id: i64) -> i64 {\n\n for (offset, column) in 
cols.iter().enumerate() {\n\n if column.get_column_id() == col_id {\n\n return offset as i64;\n\n }\n\n }\n\n 0_i64\n\n}\n", "file_path": "components/test_coprocessor/src/util.rs", "rank": 60, "score": 300016.5391853083 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\npub fn process_id() -> usize {\n\n std::process::id() as _\n\n}\n\n\n\n/// Gets the ID of the current thread.\n", "file_path": "components/resource_metering/src/utils.rs", "rank": 61, "score": 299944.5584556464 }, { "content": "// The `to_string()` function of `slog::Level` produces values like `erro` and `trce` instead of\n\n// the full words. This produces the full word.\n\npub fn get_string_by_level(lv: Level) -> &'static str {\n\n match lv {\n\n Level::Critical => \"critical\",\n\n Level::Error => \"error\",\n\n Level::Warning => \"warning\",\n\n Level::Debug => \"debug\",\n\n Level::Trace => \"trace\",\n\n Level::Info => \"info\",\n\n }\n\n}\n\n\n", "file_path": "components/tikv_util/src/logger/mod.rs", "rank": 62, "score": 299870.0178113303 }, { "content": "/// Gets the first encoded bytes' length in compactly encoded data.\n\n///\n\n/// Compact-encoding includes a VarInt encoded length prefix (1 ~ 9 bytes) and N bytes payload.\n\n/// This function gets the total bytes length of compact-encoded data, including the length prefix.\n\n///\n\n/// Note:\n\n/// - This function won't check whether the bytes are encoded correctly.\n\n/// - There can be multiple compact-encoded data, placed one by one. 
This function only returns\n\n/// the length of the first one.\n\npub fn encoded_compact_len(mut encoded: &[u8]) -> usize {\n\n let last_encoded = encoded.as_ptr() as usize;\n\n let total_len = encoded.len();\n\n let vn = match number::decode_var_i64(&mut encoded) {\n\n Ok(vn) => vn as usize,\n\n Err(e) => {\n\n debug!(\"failed to decode bytes' length: {:?}\", e);\n\n return total_len;\n\n }\n\n };\n\n vn + (encoded.as_ptr() as usize - last_encoded)\n\n}\n\n\n", "file_path": "components/tikv_util/src/codec/bytes.rs", "rank": 63, "score": 299737.448649558 }, { "content": "pub fn next_id() -> i64 {\n\n ID_GENERATOR.fetch_add(1, Ordering::Relaxed) as i64\n\n}\n\n\n", "file_path": "components/test_coprocessor/src/util.rs", "rank": 64, "score": 298754.2995382128 }, { "content": "#[rpn_fn(nullable, capture = [ctx])]\n\n#[inline]\n\npub fn password(ctx: &mut EvalContext, input: Option<BytesRef>) -> Result<Option<Bytes>> {\n\n ctx.warnings.append_warning(Error::Other(box_err!(\n\n \"Warning: Deprecated syntax PASSWORD\"\n\n )));\n\n match input {\n\n Some(bytes) => {\n\n if bytes.is_empty() {\n\n Ok(Some(Vec::new()))\n\n } else {\n\n let hash1 = hex_digest(MessageDigest::sha1(), bytes)?;\n\n let mut hash2 = hex_digest(MessageDigest::sha1(), hash1.as_slice())?;\n\n hash2.insert(0, b'*');\n\n Ok(Some(hash2))\n\n }\n\n }\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "components/tidb_query_expr/src/impl_encryption.rs", "rank": 65, "score": 295668.29397741146 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\npub fn record_global_memory_usage() {\n\n GLOBAL_MEMORY_USAGE.store(0, Ordering::Release);\n\n}\n\n\n", "file_path": "components/tikv_util/src/sys/mod.rs", "rank": 66, "score": 295142.77116024855 }, { "content": "fn test_simple_store_stats<T: Simulator>(cluster: &mut Cluster<T>) {\n\n let pd_client = Arc::clone(&cluster.pd_client);\n\n\n\n cluster.cfg.raft_store.pd_store_heartbeat_tick_interval = ReadableDuration::millis(20);\n\n cluster.run();\n\n\n\n // wait 
store reports stats.\n\n for _ in 0..100 {\n\n sleep_ms(20);\n\n\n\n if pd_client.get_store_stats(1).is_some() {\n\n break;\n\n }\n\n }\n\n\n\n let engine = cluster.get_engine(1);\n\n let raft_engine = cluster.get_raft_engine(1);\n\n raft_engine.flush(true).unwrap();\n\n engine.flush(true).unwrap();\n\n let last_stats = pd_client.get_store_stats(1).unwrap();\n", "file_path": "tests/integrations/raftstore/test_stats.rs", "rank": 67, "score": 294889.0152724751 }, { "content": "pub fn new_sst_meta(crc32: u32, length: u64) -> SstMeta {\n\n let mut m = SstMeta::default();\n\n m.set_uuid(Uuid::new_v4().as_bytes().to_vec());\n\n m.set_crc32(crc32);\n\n m.set_length(length);\n\n m\n\n}\n\n\n", "file_path": "tests/integrations/import/util.rs", "rank": 68, "score": 293572.27204507135 }, { "content": "/// `sub` subtracts rhs and carry from lhs, store the diff and new carry.\n\nfn sub(lhs: u32, rhs: u32, carry: &mut i32, res: &mut u32) {\n\n let diff = lhs as i32 - rhs as i32 - *carry;\n\n if diff < 0 {\n\n *carry = 1;\n\n *res = (diff + WORD_BASE as i32) as u32;\n\n } else {\n\n *carry = 0;\n\n *res = diff as u32;\n\n }\n\n}\n\n\n", "file_path": "components/tidb_query_datatype/src/codec/mysql/decimal.rs", "rank": 69, "score": 293305.8218927447 }, { "content": "/// `sub2` subtracts rhs and carry from lhs, stores the diff and new carry.\n\n/// the new carry may be 2.\n\nfn sub2(lhs: u32, rhs: u32, carry: &mut i32, res: &mut u32) {\n\n let mut diff = lhs as i32 - rhs as i32 - *carry;\n\n if diff < -(WORD_BASE as i32) {\n\n *carry = 2;\n\n diff += WORD_BASE as i32 + WORD_BASE as i32;\n\n } else if diff < 0 {\n\n *carry = 1;\n\n diff += WORD_BASE as i32;\n\n } else {\n\n *carry = 0;\n\n }\n\n *res = diff as u32;\n\n}\n\n\n", "file_path": "components/tidb_query_datatype/src/codec/mysql/decimal.rs", "rank": 70, "score": 293305.8218927447 }, { "content": "/// Checks whether our test utilities themselves are fast enough.\n\npub fn bench<M>(c: &mut criterion::Criterion<M>)\n\nwhere\n\n 
M: Measurement + 'static,\n\n{\n\n if crate::util::bench_level() >= 1 {\n\n c.bench_function(\n\n \"util_batch_fixture_executor_next_1024\",\n\n bench_util_batch_fixture_executor_next_1024::<M>,\n\n );\n\n }\n\n}\n", "file_path": "tests/benches/coprocessor_executors/util/fixture.rs", "rank": 71, "score": 292207.6011667974 }, { "content": "#[rpn_fn(nullable, capture = [ctx])]\n\n#[inline]\n\npub fn uncompressed_length(ctx: &mut EvalContext, arg: Option<BytesRef>) -> Result<Option<Int>> {\n\n use byteorder::{ByteOrder, LittleEndian};\n\n Ok(arg.as_ref().map(|s| {\n\n if s.is_empty() {\n\n 0\n\n } else if s.len() <= 4 {\n\n ctx.warnings.append_warning(Error::zlib_data_corrupted());\n\n 0\n\n } else {\n\n Int::from(LittleEndian::read_u32(&s[0..4]))\n\n }\n\n }))\n\n}\n\n\n", "file_path": "components/tidb_query_expr/src/impl_encryption.rs", "rank": 72, "score": 291579.91441567603 }, { "content": "#[rpn_fn(nullable, varg, min_args = 2, capture = [ctx])]\n\n#[inline]\n\npub fn greatest_time(ctx: &mut EvalContext, args: &[Option<BytesRef>]) -> Result<Option<Bytes>> {\n\n let mut greatest = None;\n\n for arg in args {\n\n match arg {\n\n Some(arg_val) => {\n\n let s = match str::from_utf8(arg_val) {\n\n Ok(s) => s,\n\n Err(err) => {\n\n return ctx\n\n .handle_invalid_time_error(Error::Encoding(err))\n\n .map(|_| Ok(None))?;\n\n }\n\n };\n\n match Time::parse_datetime(ctx, s, Time::parse_fsp(s), true) {\n\n Ok(t) => greatest = max(greatest, Some(t)),\n\n Err(_) => {\n\n return ctx\n\n .handle_invalid_time_error(Error::invalid_time_format(&s))\n\n .map(|_| Ok(None))?;\n\n }\n", "file_path": "components/tidb_query_expr/src/impl_compare.rs", "rank": 73, "score": 291579.70935657166 }, { "content": "pub fn memory_usage_reaches_high_water(usage: &mut u64) -> bool {\n\n fail_point!(\"memory_usage_reaches_high_water\", |_| true);\n\n *usage = get_global_memory_usage();\n\n *usage >= MEMORY_USAGE_HIGH_WATER.load(Ordering::Acquire)\n\n}\n\n\n", "file_path": 
"components/tikv_util/src/sys/mod.rs", "rank": 74, "score": 291094.1676107567 }, { "content": "fn create_file_info(id: u64, method: EncryptionMethod) -> FileInfo {\n\n FileInfo {\n\n key_id: id,\n\n method: compat(method),\n\n ..Default::default()\n\n }\n\n}\n", "file_path": "tests/failpoints/cases/test_encryption.rs", "rank": 75, "score": 291078.9968331639 }, { "content": "/// Calculates crc32 and decrypted size for a given reader.\n\npub fn calc_crc32_and_size<R: Read>(reader: &mut R) -> io::Result<(u32, u64)> {\n\n let mut digest = crc32fast::Hasher::new();\n\n let (mut buf, mut fsize) = (vec![0; DIGEST_BUFFER_SIZE], 0);\n\n loop {\n\n match reader.read(&mut buf[..]) {\n\n Ok(0) => {\n\n return Ok((digest.finalize(), fsize as u64));\n\n }\n\n Ok(n) => {\n\n digest.update(&buf[..n]);\n\n fsize += n;\n\n }\n\n Err(ref e) if e.kind() == ErrorKind::Interrupted => {}\n\n Err(err) => return Err(err),\n\n }\n\n }\n\n}\n\n\n", "file_path": "components/file_system/src/lib.rs", "rank": 76, "score": 290624.0277941283 }, { "content": "pub fn tls_collect_scan_details(cmd: &'static str, stats: &Statistics) {\n\n TLS_SCHED_METRICS.with(|m| {\n\n m.borrow_mut()\n\n .local_scan_details\n\n .entry(cmd)\n\n .or_insert_with(Default::default)\n\n .add(stats);\n\n });\n\n}\n\n\n", "file_path": "src/storage/txn/sched_pool.rs", "rank": 77, "score": 290378.6623965822 }, { "content": "pub fn find_peer_mut(region: &mut metapb::Region, store_id: u64) -> Option<&mut metapb::Peer> {\n\n region\n\n .mut_peers()\n\n .iter_mut()\n\n .find(|p| p.get_store_id() == store_id)\n\n}\n\n\n", "file_path": "components/raftstore/src/store/util.rs", "rank": 78, "score": 289317.0722964992 }, { "content": "/// `skip_n_datum_slices` skip `n` datum slices within `buf`\n\n/// and advances the buffer pointer.\n\n/// If the datum buffer contains less than `n` slices, an error will be returned.\n\npub fn skip_n(buf: &mut &[u8], n: usize) -> Result<()> {\n\n let origin = *buf;\n\n for i in 0..n {\n\n if 
buf.is_empty() {\n\n return Err(box_err!(\n\n \"The {}th slice are missing in the datum buffer: {}\",\n\n i,\n\n log_wrappers::Value::value(origin)\n\n ));\n\n }\n\n let (_, remaining) = split_datum(buf, false)?;\n\n *buf = remaining;\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::codec::mysql::{Decimal, Duration, Time, MAX_FSP};\n", "file_path": "components/tidb_query_datatype/src/codec/datum.rs", "rank": 79, "score": 288885.04655769566 }, { "content": "pub fn bench_mvcc<E: Engine, F: EngineFactory<E>>(c: &mut Criterion, configs: &[BenchConfig<F>]) {\n\n let mut group = c.benchmark_group(\"mvcc\");\n\n for config in configs {\n\n group.bench_with_input(format!(\"prewrite/{:?}\", config), config, mvcc_prewrite);\n\n group.bench_with_input(format!(\"commit/{:?}\", config), config, mvcc_commit);\n\n group.bench_with_input(\n\n format!(\"rollback_prewrote/{:?}\", config),\n\n config,\n\n mvcc_rollback_prewrote,\n\n );\n\n group.bench_with_input(\n\n format!(\"rollback_conflict/{:?}\", config),\n\n config,\n\n mvcc_rollback_conflict,\n\n );\n\n group.bench_with_input(\n\n format!(\"rollback_non_prewrote/{:?}\", config),\n\n config,\n\n mvcc_rollback_non_prewrote,\n\n );\n", "file_path": "tests/benches/hierarchy/mvcc/mod.rs", "rank": 80, "score": 287944.7869017943 }, { "content": "pub fn bench_txn<E: Engine, F: EngineFactory<E>>(c: &mut Criterion, configs: &[BenchConfig<F>]) {\n\n let mut group = c.benchmark_group(\"txn\");\n\n for config in configs {\n\n group.bench_with_input(format!(\"prewrite/{:?}\", config), config, txn_prewrite);\n\n group.bench_with_input(format!(\"commit/{:?}\", config), config, txn_commit);\n\n group.bench_with_input(\n\n format!(\"rollback_prewrote/{:?}\", config),\n\n config,\n\n txn_rollback_prewrote,\n\n );\n\n group.bench_with_input(\n\n format!(\"rollback_conflict/{:?}\", config),\n\n config,\n\n txn_rollback_conflict,\n\n );\n\n group.bench_with_input(\n\n format!(\"rollback_non_prewrote/{:?}\", 
config),\n\n config,\n\n txn_rollback_non_prewrote,\n\n );\n\n }\n\n group.finish();\n\n}\n", "file_path": "tests/benches/hierarchy/txn/mod.rs", "rank": 81, "score": 287944.7869017943 }, { "content": "pub fn bench_engine<E: Engine, F: EngineFactory<E>>(c: &mut Criterion, configs: &[BenchConfig<F>]) {\n\n let mut group = c.benchmark_group(\"engine\");\n\n for config in configs {\n\n group.bench_with_input(\n\n format!(\"get(exclude snapshot)/{:?}\", config),\n\n config,\n\n bench_engine_get,\n\n );\n\n group.bench_with_input(format!(\"put/{:?}\", config), config, bench_engine_put);\n\n group.bench_with_input(\n\n format!(\"snapshot/{:?}\", config),\n\n config,\n\n bench_engine_snapshot,\n\n );\n\n }\n\n group.finish();\n\n}\n", "file_path": "tests/benches/hierarchy/engine/mod.rs", "rank": 82, "score": 287944.7869017943 }, { "content": "pub fn configure_for_merge<T: Simulator>(cluster: &mut Cluster<T>) {\n\n // Avoid log compaction which will prevent merge.\n\n cluster.cfg.raft_store.raft_log_gc_threshold = 1000;\n\n cluster.cfg.raft_store.raft_log_gc_count_limit = 1000;\n\n cluster.cfg.raft_store.raft_log_gc_size_limit = ReadableSize::mb(20);\n\n // Make merge check resume quickly.\n\n cluster.cfg.raft_store.merge_check_tick_interval = ReadableDuration::millis(100);\n\n // When isolated, follower relies on stale check tick to detect failure leader,\n\n // choose a smaller number to make it recover faster.\n\n cluster.cfg.raft_store.peer_stale_state_check_interval = ReadableDuration::millis(500);\n\n}\n\n\n", "file_path": "components/test_raftstore/src/util.rs", "rank": 83, "score": 287834.2311538547 }, { "content": "pub fn configure_for_hibernate<T: Simulator>(cluster: &mut Cluster<T>) {\n\n // Uses long check interval to make leader keep sleeping during tests.\n\n cluster.cfg.raft_store.abnormal_leader_missing_duration = ReadableDuration::secs(20);\n\n cluster.cfg.raft_store.max_leader_missing_duration = ReadableDuration::secs(40);\n\n 
cluster.cfg.raft_store.peer_stale_state_check_interval = ReadableDuration::secs(10);\n\n}\n\n\n", "file_path": "components/test_raftstore/src/util.rs", "rank": 84, "score": 287834.2311538547 }, { "content": "pub fn configure_for_encryption<T: Simulator>(cluster: &mut Cluster<T>) {\n\n let manifest_dir = Path::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n let master_key_file = manifest_dir.join(\"src/master-key.data\");\n\n\n\n let cfg = &mut cluster.cfg.security.encryption;\n\n cfg.data_encryption_method = EncryptionMethod::Aes128Ctr;\n\n cfg.data_key_rotation_period = ReadableDuration(Duration::from_millis(100));\n\n cfg.master_key = MasterKeyConfig::File {\n\n config: FileConfig {\n\n path: master_key_file.to_str().unwrap().to_owned(),\n\n },\n\n }\n\n}\n\n\n", "file_path": "components/test_raftstore/src/util.rs", "rank": 85, "score": 287834.2311538547 }, { "content": "pub fn configure_for_snapshot<T: Simulator>(cluster: &mut Cluster<T>) {\n\n // Truncate the log quickly so that we can force sending snapshot.\n\n cluster.cfg.raft_store.raft_log_gc_tick_interval = ReadableDuration::millis(20);\n\n cluster.cfg.raft_store.raft_log_gc_count_limit = 2;\n\n cluster.cfg.raft_store.merge_max_log_gap = 1;\n\n cluster.cfg.raft_store.snap_mgr_gc_tick_interval = ReadableDuration::millis(50);\n\n}\n\n\n", "file_path": "components/test_raftstore/src/util.rs", "rank": 86, "score": 287834.2311538547 }, { "content": "fn counter_closure(counter: &Arc<AtomicUsize>) -> Message {\n\n let c = counter.clone();\n\n Message::Callback(Box::new(move |_: &Handler, _: &mut Runner| {\n\n c.fetch_add(1, Ordering::SeqCst);\n\n }))\n\n}\n\n\n", "file_path": "components/batch-system/tests/cases/router.rs", "rank": 87, "score": 287587.31382943527 }, { "content": "fn bench_dense_detect_with_cleanup(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"bench_dense_detect_with_cleanup\");\n\n\n\n let ttls = vec![1, 3, 5, 10, 100, 500, 1_000, 3_000];\n\n for ttl in &ttls {\n\n let config = Config 
{\n\n n: 10,\n\n range: 1000,\n\n ttl: Duration::from_millis(*ttl),\n\n };\n\n group.bench_with_input(format!(\"{:?}\", &config), &config, bench_detect);\n\n }\n\n group.finish();\n\n}\n\n\n", "file_path": "tests/benches/deadlock_detector/mod.rs", "rank": 88, "score": 284280.445560234 }, { "content": "pub fn configure_for_request_snapshot<T: Simulator>(cluster: &mut Cluster<T>) {\n\n // We don't want to generate snapshots due to compact log.\n\n cluster.cfg.raft_store.raft_log_gc_threshold = 1000;\n\n cluster.cfg.raft_store.raft_log_gc_count_limit = 1000;\n\n cluster.cfg.raft_store.raft_log_gc_size_limit = ReadableSize::mb(20);\n\n}\n\n\n", "file_path": "components/test_raftstore/src/util.rs", "rank": 89, "score": 283658.70272525965 }, { "content": "pub fn configure_for_transfer_leader<T: Simulator>(cluster: &mut Cluster<T>) {\n\n cluster.cfg.raft_store.raft_reject_transfer_leader_duration = ReadableDuration::secs(1);\n\n}\n\n\n", "file_path": "components/test_raftstore/src/util.rs", "rank": 90, "score": 283658.70272525965 }, { "content": "pub fn configure_for_disable_titan<T: Simulator>(cluster: &mut Cluster<T>) {\n\n cluster.cfg.rocksdb.titan.enabled = false;\n\n}\n\n\n", "file_path": "components/test_raftstore/src/util.rs", "rank": 91, "score": 283658.70272525965 }, { "content": "pub fn build_read_pool_for_test<E: Engine>(\n\n config: &CoprReadPoolConfig,\n\n engine: E,\n\n) -> Vec<FuturePool> {\n\n let configs: Vec<Config> = config.to_yatp_pool_configs();\n\n assert_eq!(configs.len(), 3);\n\n\n\n configs\n\n .into_iter()\n\n .map(|config| {\n\n let engine = Arc::new(Mutex::new(engine.clone()));\n\n YatpPoolBuilder::new(DefaultTicker::default())\n\n .config(config)\n\n .after_start(move || {\n\n set_tls_engine(engine.lock().unwrap().clone());\n\n set_io_type(IOType::ForegroundRead);\n\n })\n\n // Safety: we call `set_` and `destroy_` with the same engine type.\n\n .before_stop(|| unsafe { destroy_tls_engine::<E>() })\n\n .build_future_pool()\n\n })\n\n 
.collect()\n\n}\n", "file_path": "src/coprocessor/readpool_impl.rs", "rank": 92, "score": 282715.6320271391 }, { "content": "pub fn ignore_merge_target_integrity<T: Simulator>(cluster: &mut Cluster<T>) {\n\n cluster.cfg.raft_store.dev_assert = false;\n\n cluster.pd_client.ignore_merge_target_integrity();\n\n}\n\n\n", "file_path": "components/test_raftstore/src/util.rs", "rank": 93, "score": 279661.93639150594 }, { "content": "#[bench]\n\nfn bench_table_prefix_start_with(b: &mut Bencher) {\n\n let key: &[u8] = b\"tabc\";\n\n b.iter(|| {\n\n let n = black_box(1000);\n\n (0..n).all(|_| black_box(key.starts_with(TABLE_PREFIX)))\n\n });\n\n}\n\n\n", "file_path": "tests/benches/misc/coprocessor/codec/mod.rs", "rank": 94, "score": 279493.4647546955 }, { "content": "#[bench]\n\nfn bench_table_prefix_check(b: &mut Bencher) {\n\n let key: &[u8] = b\"tabc\";\n\n b.iter(|| {\n\n let n = black_box(1000);\n\n (0..n).all(|_| black_box(key.len() > 1 && key[0] == TABLE_PREFIX[0]))\n\n });\n\n}\n\n\n", "file_path": "tests/benches/misc/coprocessor/codec/mod.rs", "rank": 95, "score": 279493.4647546955 }, { "content": "fn bench_dense_detect_without_cleanup(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"bench_dense_detect_without_cleanup\");\n\n\n\n let ranges = vec![\n\n 10,\n\n 100,\n\n 1_000,\n\n 10_000,\n\n 100_000,\n\n 1_000_000,\n\n 10_000_000,\n\n 100_000_000,\n\n ];\n\n for range in ranges {\n\n let config = Config {\n\n n: 10,\n\n range,\n\n ttl: Duration::from_secs(100000000),\n\n };\n\n group.bench_with_input(format!(\"{:?}\", &config), &config, bench_detect);\n\n }\n\n}\n\n\n", "file_path": "tests/benches/deadlock_detector/mod.rs", "rank": 96, "score": 279493.4647546955 }, { "content": "\n\n#[cfg(not(target_os = \"linux\"))]\n\npub use self::notlinux::{self_thread_inspector, Impl as ThreadInspectorImpl};\n\n\n\n#[cfg(target_os = \"linux\")]\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::io::Write;\n\n\n\n fn page_size() -> u64 {\n\n unsafe { 
libc::sysconf(libc::_SC_PAGE_SIZE) as u64 }\n\n }\n\n\n\n #[test]\n\n fn test_thread_inspector_io_stat() {\n\n let inspector = self_thread_inspector().unwrap();\n\n let io1 = inspector.io_stat().unwrap().unwrap();\n\n\n\n let mut f = tempfile::tempfile().unwrap();\n", "file_path": "components/tikv_util/src/sys/inspector.rs", "rank": 97, "score": 58.229628721758004 }, { "content": " pub fn set_remain(&mut self, remain: usize) {\n\n self.keep_remain = remain;\n\n }\n\n}\n\n\n\nimpl SoftLimitByCpu<RefCell<ThreadInfoStatistics>> {\n\n pub fn with_remain(remain: usize) -> Self {\n\n let total = SysQuota::cpu_cores_quota();\n\n let metrics = RefCell::new(ThreadInfoStatistics::new());\n\n Self {\n\n metrics,\n\n total_time: total,\n\n keep_remain: remain,\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod softlimit_test {\n\n use std::{\n", "file_path": "components/backup/src/softlimit.rs", "rank": 98, "score": 56.6264969602417 } ]
Rust
alvr/experiments/graphics_tests/src/compositor/convert.rs
glegoo/ALVR
76d087fdb05f2035486626551c9ab9022ba645c6
use super::{Compositor, Context, Swapchain, TextureType}; use alvr_common::prelude::*; use ash::{extensions::khr, vk}; use openxr_sys as sys; use std::{ffi::CStr, slice}; use wgpu::{ DeviceDescriptor, Extent3d, Features, Texture, TextureDescriptor, TextureDimension, TextureFormat, TextureUsages, }; use wgpu_hal as hal; pub const TARGET_VULKAN_VERSION: u32 = vk::make_api_version(1, 0, 0, 0); pub fn get_vulkan_instance_extensions( entry: &ash::Entry, version: u32, ) -> StrResult<Vec<&'static CStr>> { let mut flags = hal::InstanceFlags::empty(); if cfg!(debug_assertions) { flags |= hal::InstanceFlags::VALIDATION; flags |= hal::InstanceFlags::DEBUG; } trace_err!(<hal::api::Vulkan as hal::Api>::Instance::required_extensions(entry, version, flags)) } pub fn create_vulkan_instance( entry: &ash::Entry, info: &vk::InstanceCreateInfo, ) -> StrResult<ash::Instance> { let mut extensions_ptrs = get_vulkan_instance_extensions(entry, unsafe { (*info.p_application_info).api_version })? .iter() .map(|x| x.as_ptr()) .collect::<Vec<_>>(); extensions_ptrs.extend_from_slice(unsafe { slice::from_raw_parts( info.pp_enabled_extension_names, info.enabled_extension_count as _, ) }); unsafe { trace_err!(entry.create_instance( &vk::InstanceCreateInfo { enabled_extension_count: extensions_ptrs.len() as _, pp_enabled_extension_names: extensions_ptrs.as_ptr(), ..*info }, None, )) } } pub fn get_vulkan_graphics_device( instance: &ash::Instance, adapter_index: Option<usize>, ) -> StrResult<vk::PhysicalDevice> { let mut physical_devices = unsafe { trace_err!(instance.enumerate_physical_devices())? 
}; Ok(physical_devices.remove(adapter_index.unwrap_or(0))) } pub fn get_vulkan_device_extensions(version: u32) -> Vec<&'static CStr> { let mut extensions = vec![khr::Swapchain::name()]; if version < vk::API_VERSION_1_1 { extensions.push(vk::KhrMaintenance1Fn::name()); extensions.push(vk::KhrMaintenance2Fn::name()); } extensions } pub fn create_vulkan_device( entry: &ash::Entry, version: u32, instance: &ash::Instance, physical_device: vk::PhysicalDevice, create_info: &vk::DeviceCreateInfo, ) -> StrResult<ash::Device> { let mut extensions_ptrs = get_vulkan_device_extensions(version) .iter() .map(|x| x.as_ptr()) .collect::<Vec<_>>(); extensions_ptrs.extend_from_slice(unsafe { slice::from_raw_parts( create_info.pp_enabled_extension_names, create_info.enabled_extension_count as _, ) }); let mut features = if !create_info.p_enabled_features.is_null() { unsafe { *create_info.p_enabled_features } } else { vk::PhysicalDeviceFeatures::default() }; features.robust_buffer_access = true as _; features.independent_blend = true as _; features.sample_rate_shading = true as _; unsafe { trace_err!(instance.create_device( physical_device, &vk::DeviceCreateInfo { enabled_extension_count: extensions_ptrs.len() as _, pp_enabled_extension_names: extensions_ptrs.as_ptr(), p_enabled_features: &features as *const _, ..*create_info }, None )) } } impl Context { pub fn from_vulkan( owned: bool, entry: ash::Entry, version: u32, vk_instance: ash::Instance, adapter_index: Option<usize>, vk_device: ash::Device, queue_family_index: u32, queue_index: u32, ) -> StrResult<Self> { let mut flags = hal::InstanceFlags::empty(); if cfg!(debug_assertions) { flags |= hal::InstanceFlags::VALIDATION; flags |= hal::InstanceFlags::DEBUG; }; let extensions = get_vulkan_instance_extensions(&entry, version)?; let instance = unsafe { trace_err!(<hal::api::Vulkan as hal::Api>::Instance::from_raw( entry, vk_instance.clone(), version, extensions, flags, owned.then(|| Box::new(()) as _) ))? 
}; let physical_device = get_vulkan_graphics_device(&vk_instance, adapter_index)?; let exposed_adapter = trace_none!(instance.expose_adapter(physical_device))?; let open_device = unsafe { trace_err!(exposed_adapter.adapter.device_from_raw( vk_device, owned, &get_vulkan_device_extensions(version), queue_family_index, queue_index, ))? }; #[cfg(not(target_os = "macos"))] { let instance = unsafe { wgpu::Instance::from_hal::<hal::api::Vulkan>(instance) }; let adapter = unsafe { instance.create_adapter_from_hal(exposed_adapter) }; let (device, queue) = unsafe { trace_err!(adapter.create_device_from_hal( open_device, &DeviceDescriptor { label: None, features: Features::PUSH_CONSTANTS, limits: adapter.limits(), }, None, ))? }; Ok(Self { instance, device, queue, }) } #[cfg(target_os = "macos")] unimplemented!() } pub fn new(adapter_index: Option<usize>) -> StrResult<Self> { let entry = unsafe { trace_err!(ash::Entry::new())? }; let vk_instance = trace_err!(create_vulkan_instance( &entry, &vk::InstanceCreateInfo::builder() .application_info( &vk::ApplicationInfo::builder().api_version(TARGET_VULKAN_VERSION) ) .build() ))?; let physical_device = get_vulkan_graphics_device(&vk_instance, adapter_index)?; let queue_family_index = unsafe { vk_instance .get_physical_device_queue_family_properties(physical_device) .into_iter() .enumerate() .find_map(|(queue_family_index, info)| { if info.queue_flags.contains(vk::QueueFlags::GRAPHICS) { Some(queue_family_index as u32) } else { None } }) .unwrap() }; let queue_index = 0; let vk_device = trace_err!(create_vulkan_device( &entry, TARGET_VULKAN_VERSION, &vk_instance, physical_device, &vk::DeviceCreateInfo::builder().queue_create_infos(&[ vk::DeviceQueueCreateInfo::builder() .queue_family_index(queue_family_index) .queue_priorities(&[1.0]) .build() ]) ))?; Self::from_vulkan( true, entry, TARGET_VULKAN_VERSION, vk_instance, adapter_index, vk_device, queue_family_index, queue_index, ) } } pub enum SwapchainCreateData { #[cfg(target_os = 
"linux")] External { images: Vec<vk::Image>, vk_usage: vk::ImageUsageFlags, vk_format: vk::Format, hal_usage: hal::TextureUses, }, Count(Option<usize>), } pub struct SwapchainCreateInfo { usage: sys::SwapchainUsageFlags, format: TextureFormat, sample_count: u32, width: u32, height: u32, texture_type: TextureType, mip_count: u32, } impl Compositor { pub fn create_swapchain( &self, data: SwapchainCreateData, info: SwapchainCreateInfo, ) -> StrResult<Swapchain> { let wgpu_usage = { let mut wgpu_usage = TextureUsages::TEXTURE_BINDING; if info .usage .contains(sys::SwapchainUsageFlags::COLOR_ATTACHMENT) { wgpu_usage |= TextureUsages::RENDER_ATTACHMENT; } if info .usage .contains(sys::SwapchainUsageFlags::DEPTH_STENCIL_ATTACHMENT) { wgpu_usage |= TextureUsages::RENDER_ATTACHMENT; } if info.usage.contains(sys::SwapchainUsageFlags::TRANSFER_SRC) { wgpu_usage |= TextureUsages::COPY_SRC; } if info.usage.contains(sys::SwapchainUsageFlags::TRANSFER_DST) { wgpu_usage |= TextureUsages::COPY_DST; } wgpu_usage }; let depth_or_array_layers = match info.texture_type { TextureType::D2 { array_size } => array_size, TextureType::Cubemap => 6, }; let texture_descriptor = TextureDescriptor { label: None, size: Extent3d { width: info.width, height: info.height, depth_or_array_layers, }, mip_level_count: info.mip_count, sample_count: info.sample_count, dimension: TextureDimension::D2, format: info.format, usage: wgpu_usage, }; let textures = match data { #[cfg(target_os = "linux")] SwapchainCreateData::External { images, vk_usage, vk_format, hal_usage, } => images .into_iter() .map(|vk_image| { let hal_texture = unsafe { <hal::api::Vulkan as hal::Api>::Device::texture_from_raw( vk_image, &hal::TextureDescriptor { label: None, size: Extent3d { width, height, depth_or_array_layers: array_size, }, mip_level_count: mip_count, sample_count, dimension: TextureDimension::D2, format, usage: hal_usage, memory_flags: hal::MemoryFlags::empty(), }, None, ) }; unsafe { self.context .device 
.create_texture_from_hal::<hal::api::Vulkan>( hal_texture, &texture_descriptor, ) } }) .collect(), SwapchainCreateData::Count(count) => (0..count.unwrap_or(2)) .map(|_| self.context.device.create_texture(&texture_descriptor)) .collect(), }; let array_size = match info.texture_type { TextureType::D2 { array_size } => array_size, TextureType::Cubemap => 1, }; Ok(self.inner_create_swapchain(textures, array_size)) } } #[cfg(not(target_os = "macos"))] pub fn to_vulkan_images(textures: &[Texture]) -> Vec<vk::Image> { textures .iter() .map(|tex| unsafe { let hal_texture = tex.as_hal::<hal::api::Vulkan>(); hal_texture.as_inner().unwrap().raw_handle() }) .collect() }
use super::{Compositor, Context, Swapchain, TextureType}; use alvr_common::prelude::*; use ash::{extensions::khr, vk}; use openxr_sys as sys; use std::{ffi::CStr, slice}; use wgpu::{ DeviceDescriptor, Extent3d, Features, Texture, TextureDescriptor, TextureDimension, TextureFormat, TextureUsages, }; use wgpu_hal as hal; pub const TARGET_VULKAN_VERSION: u32 = vk::make_api_version(1, 0, 0, 0); pub fn get_vulkan_instance_extensions( entry: &ash::Entry, version: u32, ) -> StrResult<Vec<&'static CStr>> { let mut flags = hal::InstanceFlags::empty(); if cfg!(debug_assertions) { flags |= hal::InstanceFlags::VALIDATION; flags |= hal::InstanceFlags::DEBUG; } trace_err!(<hal::api::Vulkan as hal::Api>::Instance::required_extensions(entry, version, flags)) } pub fn create_vulkan_instance( entry: &ash::Entry, info: &vk::InstanceCreateInfo, ) -> StrResult<ash::Instance> { let mut extensions_ptrs = get_vulkan_instance_extensions(entry, unsafe { (*info.p_application_info).api_version })? .iter() .map(|x| x.as_ptr()) .collect::<Vec<_>>(); extensions_ptrs.extend_from_slice(unsafe { slice::from_raw_parts( info.pp_enabled_extension_names, info.enabled_extension_count as _, ) }); unsafe { trace_err!(entry.create_instance( &vk::InstanceCreateInfo { enabled_extension_count: extensions_ptrs.len() as _, pp_enabled_extension_names: extensions_ptrs.as_ptr(), ..*info }, None, )) } } pub fn get_vulkan_graphics_device( instance: &ash::Instance, adapter_index: Option<usize>, ) -> StrResult<vk::PhysicalDevice> { let mut physical_devices = unsafe { trace_err!(instance.enumerate_physical_devices())? 
}; Ok(physical_devices.remove(adapter_index.unwrap_or(0))) } pub fn get_vulkan_device_extensions(version: u32) -> Vec<&'static CStr> { let mut extensions = vec![khr::Swapchain::name()]; if version < vk::API_VERSION_1_1 { extensions.push(vk::KhrMaintenance1Fn::name()); extensions.push(vk::KhrMaintenance2Fn::name()); } extensions } pub fn create_vulkan_device( entry: &ash::Entry, version: u32, instance: &ash::Instance, physical_device: vk::PhysicalDevice, create_info: &vk::DeviceCreateInfo, ) -> StrResult<ash::Device> { let mut extensions_ptrs = get_vulkan_device_extensions(version) .iter() .map(|x| x.as_ptr()) .collect::<Vec<_>>(); extensions_ptrs.extend_from_slice(unsafe { slice::from_raw_parts( create_info.pp_enabled_extension_names, create_info.enabled_extension_count as _, ) }); let mut features = if !create_info.p_enabled_features.is_null() { unsafe { *create_info.p_enabled_features } } else { vk::PhysicalDeviceFeatures::default() }; features.robust_buffer_access = true as _; features.independent_blend = true as _; features.sample_rate_shading = true as _; unsafe { trace_err!(instance.create_device( physical_device, &vk::DeviceCreateInfo { enabled_extension_count: extensions_ptrs.len() as _, pp_enabled_extension_names: extensions_ptrs.as_ptr(), p_enabled_features: &features as *const _, ..*create_info }, None )) } } impl Context {
pub fn new(adapter_index: Option<usize>) -> StrResult<Self> { let entry = unsafe { trace_err!(ash::Entry::new())? }; let vk_instance = trace_err!(create_vulkan_instance( &entry, &vk::InstanceCreateInfo::builder() .application_info( &vk::ApplicationInfo::builder().api_version(TARGET_VULKAN_VERSION) ) .build() ))?; let physical_device = get_vulkan_graphics_device(&vk_instance, adapter_index)?; let queue_family_index = unsafe { vk_instance .get_physical_device_queue_family_properties(physical_device) .into_iter() .enumerate() .find_map(|(queue_family_index, info)| { if info.queue_flags.contains(vk::QueueFlags::GRAPHICS) { Some(queue_family_index as u32) } else { None } }) .unwrap() }; let queue_index = 0; let vk_device = trace_err!(create_vulkan_device( &entry, TARGET_VULKAN_VERSION, &vk_instance, physical_device, &vk::DeviceCreateInfo::builder().queue_create_infos(&[ vk::DeviceQueueCreateInfo::builder() .queue_family_index(queue_family_index) .queue_priorities(&[1.0]) .build() ]) ))?; Self::from_vulkan( true, entry, TARGET_VULKAN_VERSION, vk_instance, adapter_index, vk_device, queue_family_index, queue_index, ) } } pub enum SwapchainCreateData { #[cfg(target_os = "linux")] External { images: Vec<vk::Image>, vk_usage: vk::ImageUsageFlags, vk_format: vk::Format, hal_usage: hal::TextureUses, }, Count(Option<usize>), } pub struct SwapchainCreateInfo { usage: sys::SwapchainUsageFlags, format: TextureFormat, sample_count: u32, width: u32, height: u32, texture_type: TextureType, mip_count: u32, } impl Compositor { pub fn create_swapchain( &self, data: SwapchainCreateData, info: SwapchainCreateInfo, ) -> StrResult<Swapchain> { let wgpu_usage = { let mut wgpu_usage = TextureUsages::TEXTURE_BINDING; if info .usage .contains(sys::SwapchainUsageFlags::COLOR_ATTACHMENT) { wgpu_usage |= TextureUsages::RENDER_ATTACHMENT; } if info .usage .contains(sys::SwapchainUsageFlags::DEPTH_STENCIL_ATTACHMENT) { wgpu_usage |= TextureUsages::RENDER_ATTACHMENT; } if 
info.usage.contains(sys::SwapchainUsageFlags::TRANSFER_SRC) { wgpu_usage |= TextureUsages::COPY_SRC; } if info.usage.contains(sys::SwapchainUsageFlags::TRANSFER_DST) { wgpu_usage |= TextureUsages::COPY_DST; } wgpu_usage }; let depth_or_array_layers = match info.texture_type { TextureType::D2 { array_size } => array_size, TextureType::Cubemap => 6, }; let texture_descriptor = TextureDescriptor { label: None, size: Extent3d { width: info.width, height: info.height, depth_or_array_layers, }, mip_level_count: info.mip_count, sample_count: info.sample_count, dimension: TextureDimension::D2, format: info.format, usage: wgpu_usage, }; let textures = match data { #[cfg(target_os = "linux")] SwapchainCreateData::External { images, vk_usage, vk_format, hal_usage, } => images .into_iter() .map(|vk_image| { let hal_texture = unsafe { <hal::api::Vulkan as hal::Api>::Device::texture_from_raw( vk_image, &hal::TextureDescriptor { label: None, size: Extent3d { width, height, depth_or_array_layers: array_size, }, mip_level_count: mip_count, sample_count, dimension: TextureDimension::D2, format, usage: hal_usage, memory_flags: hal::MemoryFlags::empty(), }, None, ) }; unsafe { self.context .device .create_texture_from_hal::<hal::api::Vulkan>( hal_texture, &texture_descriptor, ) } }) .collect(), SwapchainCreateData::Count(count) => (0..count.unwrap_or(2)) .map(|_| self.context.device.create_texture(&texture_descriptor)) .collect(), }; let array_size = match info.texture_type { TextureType::D2 { array_size } => array_size, TextureType::Cubemap => 1, }; Ok(self.inner_create_swapchain(textures, array_size)) } } #[cfg(not(target_os = "macos"))] pub fn to_vulkan_images(textures: &[Texture]) -> Vec<vk::Image> { textures .iter() .map(|tex| unsafe { let hal_texture = tex.as_hal::<hal::api::Vulkan>(); hal_texture.as_inner().unwrap().raw_handle() }) .collect() }
pub fn from_vulkan( owned: bool, entry: ash::Entry, version: u32, vk_instance: ash::Instance, adapter_index: Option<usize>, vk_device: ash::Device, queue_family_index: u32, queue_index: u32, ) -> StrResult<Self> { let mut flags = hal::InstanceFlags::empty(); if cfg!(debug_assertions) { flags |= hal::InstanceFlags::VALIDATION; flags |= hal::InstanceFlags::DEBUG; }; let extensions = get_vulkan_instance_extensions(&entry, version)?; let instance = unsafe { trace_err!(<hal::api::Vulkan as hal::Api>::Instance::from_raw( entry, vk_instance.clone(), version, extensions, flags, owned.then(|| Box::new(()) as _) ))? }; let physical_device = get_vulkan_graphics_device(&vk_instance, adapter_index)?; let exposed_adapter = trace_none!(instance.expose_adapter(physical_device))?; let open_device = unsafe { trace_err!(exposed_adapter.adapter.device_from_raw( vk_device, owned, &get_vulkan_device_extensions(version), queue_family_index, queue_index, ))? }; #[cfg(not(target_os = "macos"))] { let instance = unsafe { wgpu::Instance::from_hal::<hal::api::Vulkan>(instance) }; let adapter = unsafe { instance.create_adapter_from_hal(exposed_adapter) }; let (device, queue) = unsafe { trace_err!(adapter.create_device_from_hal( open_device, &DeviceDescriptor { label: None, features: Features::PUSH_CONSTANTS, limits: adapter.limits(), }, None, ))? }; Ok(Self { instance, device, queue, }) } #[cfg(target_os = "macos")] unimplemented!() }
function_block-full_function
[ { "content": "#[cfg(target_os = \"macos\")]\n\npub fn get_screen_size() -> StrResult<(u32, u32)> {\n\n Ok((0, 0))\n\n}\n", "file_path": "alvr/server/src/graphics_info.rs", "rank": 0, "score": 257298.96087876664 }, { "content": "pub fn version() -> String {\n\n let manifest_path = packages_dir().join(\"common\").join(\"Cargo.toml\");\n\n println!(\"cargo:rerun-if-changed={}\", manifest_path.to_string_lossy());\n\n\n\n let manifest = fs::read_to_string(manifest_path).unwrap();\n\n let (_, version, _) = split_string(&manifest, \"version = \\\"\", '\\\"');\n\n\n\n version\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 1, "score": 240856.36824349643 }, { "content": "// accept semver-compatible versions\n\n// Note: by not having to set the requirement manually, the major version is constrained to be\n\n// bumped when the packet layouts or some critical behaviour has changed.\n\npub fn is_version_compatible(other_version: &Version) -> bool {\n\n if other_version.pre != Prerelease::EMPTY\n\n || other_version.build != BuildMetadata::EMPTY\n\n || ALVR_VERSION.pre != Prerelease::EMPTY\n\n || ALVR_VERSION.build != BuildMetadata::EMPTY\n\n {\n\n *other_version == *ALVR_VERSION\n\n } else {\n\n other_version.major == ALVR_VERSION.major\n\n }\n\n}\n", "file_path": "alvr/common/src/lib.rs", "rank": 4, "score": 219467.88912240913 }, { "content": "// Due to the nature of immediate mode GUIs, the parent containers cannot conditionally render based\n\n// on the presence of the child container, so it is the child responsibility to format the container\n\npub fn container<R>(ui: &mut Ui, content: impl FnOnce(&mut Ui) -> R) -> R {\n\n ui.horizontal(|ui| {\n\n // Indentation\n\n ui.add_space(20_f32);\n\n\n\n content(ui)\n\n })\n\n .inner\n\n}\n\n\n", "file_path": "alvr/experiments/gui/src/dashboard/components/settings_controls/mod.rs", "rank": 5, "score": 218722.87541870566 }, { "content": "fn get_slicing_layout(combined_size: (u32, u32), slice_count: usize) -> 
SlicingLayout {\n\n // only 1 or 2 slices are handled for now.\n\n // todo: port complete algorithm from zarik5/bridgevr-dev. It can also split vertically after\n\n // a certain slice count.\n\n if slice_count == 1 {\n\n SlicingLayout {\n\n slice_width: combined_size.0 as i32,\n\n slice_height: combined_size.1 as i32,\n\n columns: 1,\n\n }\n\n } else if slice_count == 2 {\n\n SlicingLayout {\n\n slice_width: combined_size.0 as i32 / 2,\n\n slice_height: combined_size.1 as i32,\n\n columns: 2,\n\n }\n\n } else {\n\n unimplemented!()\n\n }\n\n}\n\n\n", "file_path": "alvr/experiments/graphics_tests/src/compositor/slicing.rs", "rank": 6, "score": 201438.3457479465 }, { "content": "pub fn bump_version(maybe_version: Option<String>, is_nightly: bool) {\n\n let mut version = maybe_version.unwrap_or_else(version);\n\n\n\n if is_nightly {\n\n version = format!(\"{}+nightly.{}\", version, date_utc_yyyymmdd());\n\n }\n\n\n\n for dir_name in [\n\n \"audio\",\n\n \"client\",\n\n \"commands\",\n\n \"common\",\n\n \"filesystem\",\n\n \"launcher\",\n\n \"server\",\n\n \"session\",\n\n \"sockets\",\n\n \"vrcompositor-wrapper\",\n\n \"vulkan-layer\",\n\n \"xtask\",\n\n ] {\n\n bump_cargo_version(dir_name, &version);\n\n }\n\n bump_client_gradle_version(&version, is_nightly);\n\n bump_rpm_spec_version(&version, is_nightly);\n\n\n\n println!(\"Git tag:\\nv{}\", version);\n\n}\n", "file_path": "alvr/xtask/src/version.rs", "rank": 9, "score": 187001.29563533718 }, { "content": "pub fn switch(ui: &mut Ui, on: &mut bool) -> Response {\n\n let desired_size = ui.spacing().interact_size.y * egui::vec2(2.0, 1.0);\n\n let (rect, mut response) = ui.allocate_exact_size(desired_size, Sense::click());\n\n if response.clicked() {\n\n *on = !*on;\n\n response.mark_changed();\n\n }\n\n response.widget_info(|| WidgetInfo::selected(WidgetType::Checkbox, *on, \"\"));\n\n\n\n let how_on = ui.ctx().animate_bool(response.id, *on);\n\n let visuals = ui.style().interact_selectable(&response, *on);\n\n let 
rect = rect.expand(visuals.expansion);\n\n let radius = 0.5 * rect.height();\n\n ui.painter()\n\n .rect(rect, radius, visuals.bg_fill, visuals.bg_stroke);\n\n let circle_x = egui::lerp((rect.left() + radius)..=(rect.right() - radius), how_on);\n\n let center = egui::pos2(circle_x, rect.center().y);\n\n ui.painter()\n\n .circle(center, 0.75 * radius, visuals.bg_fill, visuals.fg_stroke);\n\n\n\n response\n\n}\n", "file_path": "alvr/experiments/gui/src/dashboard/basic_components/switch.rs", "rank": 10, "score": 182919.45346687204 }, { "content": "pub fn get_gpu_names() -> Vec<String> {\n\n let instance = wgpu::Instance::new(wgpu::Backends::PRIMARY);\n\n let adapters = instance.enumerate_adapters(wgpu::Backends::PRIMARY);\n\n\n\n adapters\n\n .into_iter()\n\n .map(|a| a.get_info().name)\n\n .collect::<Vec<_>>()\n\n}\n\n\n", "file_path": "alvr/server/src/graphics_info.rs", "rank": 11, "score": 180557.1695782548 }, { "content": "pub fn get_sample_rate(device: &AudioDevice) -> StrResult<u32> {\n\n let maybe_config_range = trace_err!(device.inner.supported_output_configs())?.next();\n\n let config = if let Some(config) = maybe_config_range {\n\n config\n\n } else {\n\n trace_none!(trace_err!(device.inner.supported_input_configs())?.next())?\n\n };\n\n\n\n // Assumption: device is in shared mode: this means that there is one and fixed sample rate,\n\n // format and channel count\n\n Ok(config.min_sample_rate().0)\n\n}\n\n\n\npub async fn record_audio_loop(\n\n device: AudioDevice,\n\n channels_count: u16,\n\n sample_rate: u32,\n\n #[cfg_attr(not(windows), allow(unused_variables))] mute: bool,\n\n mut sender: StreamSender<(), AUDIO>,\n\n) -> StrResult {\n", "file_path": "alvr/audio/src/lib.rs", "rank": 12, "score": 175072.71442853057 }, { "content": "// this will not kill the child process \"ALVR launcher\"\n\npub fn kill_steamvr() {\n\n let mut system = System::new_with_specifics(RefreshKind::new().with_processes());\n\n system.refresh_processes();\n\n\n\n // first kill 
vrmonitor, then kill vrserver if it is hung.\n\n\n\n for process in system.process_by_name(&afs::exec_fname(\"vrmonitor\")) {\n\n #[cfg(not(windows))]\n\n process.kill(sysinfo::Signal::Term);\n\n #[cfg(windows)]\n\n kill_process(process.pid());\n\n }\n\n\n\n thread::sleep(Duration::from_secs(1));\n\n\n\n for process in system.process_by_name(&afs::exec_fname(\"vrserver\")) {\n\n #[cfg(not(windows))]\n\n process.kill(sysinfo::Signal::Term);\n\n #[cfg(windows)]\n\n kill_process(process.pid());\n\n }\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 13, "score": 159241.67555424006 }, { "content": "pub fn fix_steamvr() {\n\n // If ALVR driver does not start use a more destructive approach: delete openvrpaths.vrpath then recreate it\n\n if let Ok(path) = alvr_commands::openvr_source_file_path() {\n\n fs::remove_file(path).ok();\n\n\n\n maybe_launch_steamvr();\n\n thread::sleep(Duration::from_secs(5));\n\n kill_steamvr();\n\n thread::sleep(Duration::from_secs(5));\n\n }\n\n\n\n unblock_alvr_addon().ok();\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 14, "score": 159241.67555424006 }, { "content": "pub fn build_server(\n\n is_release: bool,\n\n experiements: bool,\n\n fetch_crates: bool,\n\n bundle_ffmpeg: bool,\n\n root: Option<String>,\n\n) {\n\n // Always use CustomRoot for contructing the build directory. 
The actual runtime layout is respected\n\n let layout = Layout::new(&afs::server_build_dir());\n\n\n\n let build_type = if is_release { \"release\" } else { \"debug\" };\n\n let build_flag = if is_release { \"--release\" } else { \"\" };\n\n\n\n let mut server_features: Vec<&str> = vec![];\n\n let mut launcher_features: Vec<&str> = vec![];\n\n\n\n if bundle_ffmpeg {\n\n server_features.push(\"bundled_ffmpeg\");\n\n }\n\n if server_features.is_empty() {\n", "file_path": "alvr/xtask/src/main.rs", "rank": 15, "score": 159241.67555424006 }, { "content": "pub fn shutdown_runtime() {\n\n if let Some(window) = MAYBE_WINDOW.lock().take() {\n\n window.close();\n\n }\n\n\n\n SHUTDOWN_NOTIFIER.notify_waiters();\n\n\n\n if let Some(runtime) = MAYBE_RUNTIME.lock().take() {\n\n runtime.shutdown_background();\n\n // shutdown_background() is non blocking and it does not guarantee that every internal\n\n // thread is terminated in a timely manner. Using shutdown_background() instead of just\n\n // dropping the runtime has the benefit of giving SteamVR a chance to clean itself as\n\n // much as possible before the process is killed because of alvr_launcher timeout.\n\n }\n\n}\n\n\n", "file_path": "alvr/server/src/lib.rs", "rank": 16, "score": 159241.67555424006 }, { "content": "pub fn invoke_installer() {\n\n try_close_steamvr_gracefully();\n\n\n\n spawn_no_window(Command::new(afs::installer_path()).arg(\"-q\"));\n\n\n\n // delete crash_log.txt (take advantage of the occasion to do some routine cleaning)\n\n fs::remove_file(\n\n afs::filesystem_layout_from_launcher_exe(&env::current_exe().unwrap()).crash_log(),\n\n )\n\n .ok();\n\n}\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 17, "score": 159241.67555424006 }, { "content": "pub fn restart_steamvr() {\n\n try_close_steamvr_gracefully();\n\n\n\n if logging::show_err(maybe_register_alvr_driver()).is_some() {\n\n maybe_launch_steamvr();\n\n }\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 18, "score": 
159241.67555424006 }, { "content": "// todo: use a custom widget\n\npub fn button_group_clicked(ui: &mut Ui, options: &[LocalizedId], selection: &mut String) -> bool {\n\n let mut clicked = false;\n\n for id in options {\n\n if ui\n\n .selectable_value(selection, (**id).clone(), &id.trans)\n\n .clicked()\n\n {\n\n *selection = (**id).to_owned();\n\n clicked = true;\n\n }\n\n }\n\n\n\n clicked\n\n}\n", "file_path": "alvr/experiments/gui/src/dashboard/basic_components/button_group.rs", "rank": 19, "score": 159097.75513158803 }, { "content": "pub fn align_to_32(size: (i32, i32)) -> (i32, i32) {\n\n (\n\n (size.0 as f32 / 32_f32).ceil() as i32 * 32,\n\n (size.1 as f32 / 32_f32).ceil() as i32 * 32,\n\n )\n\n}\n\n\n\npub enum AlignmentDirection {\n\n Input,\n\n Output,\n\n}\n\n\n\n// Merge k slices then split the result into n slices\n\n// Slices are assumed to be packed and unpacked by this same pass, following a particular layout\n\n// determined by the number of slices and the shape of the reconstructed frame.\n\npub struct SlicingPass {\n\n inputs: Vec<TextureView>,\n\n pipeline: RenderPipeline,\n\n bind_group: BindGroup,\n\n input_slicing_layout: SlicingLayout,\n", "file_path": "alvr/experiments/graphics_tests/src/compositor/slicing.rs", "rank": 20, "score": 158588.86499340297 }, { "content": "pub fn remove_build_dir() {\n\n let build_dir = afs::build_dir();\n\n fs::remove_dir_all(&build_dir).ok();\n\n}\n\n\n", "file_path": "alvr/xtask/src/main.rs", "rank": 21, "score": 156026.95611729805 }, { "content": "pub fn notify_application_update() {\n\n notify_shutdown_driver();\n\n\n\n alvr_commands::invoke_application_update(&FILESYSTEM_LAYOUT.launcher_exe()).ok();\n\n}\n\n\n\npub enum ClientListAction {\n\n AddIfMissing { display_name: String },\n\n TrustAndMaybeAddIp(Option<IpAddr>),\n\n RemoveIpOrEntry(Option<IpAddr>),\n\n}\n\n\n\npub async fn update_client_list(hostname: String, action: ClientListAction) {\n\n let mut client_connections = 
SESSION_MANAGER.lock().get().client_connections.clone();\n\n\n\n let maybe_client_entry = client_connections.entry(hostname);\n\n\n\n let mut updated = false;\n\n match action {\n\n ClientListAction::AddIfMissing { display_name } => {\n", "file_path": "alvr/server/src/lib.rs", "rank": 22, "score": 156026.95611729805 }, { "content": "pub fn notify_shutdown_driver() {\n\n thread::spawn(|| {\n\n RESTART_NOTIFIER.notify_waiters();\n\n\n\n // give time to the control loop to send the restart packet (not crucial)\n\n thread::sleep(Duration::from_millis(100));\n\n\n\n shutdown_runtime();\n\n\n\n unsafe { ShutdownSteamvr() };\n\n });\n\n}\n\n\n", "file_path": "alvr/server/src/lib.rs", "rank": 23, "score": 156026.95611729805 }, { "content": "// Avoid Oculus link popups when debugging the client\n\npub fn kill_oculus_processes() {\n\n command::run_without_shell(\n\n \"powershell\",\n\n &[\n\n \"Start-Process\",\n\n \"taskkill\",\n\n \"-ArgumentList\",\n\n \"\\\"/F /IM OVR* /T\\\"\",\n\n \"-Verb\",\n\n \"runAs\",\n\n ],\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "alvr/xtask/src/main.rs", "rank": 24, "score": 156026.95611729805 }, { "content": "pub fn maybe_launch_steamvr() {\n\n let mut system = System::new_with_specifics(RefreshKind::new().with_processes());\n\n system.refresh_processes();\n\n\n\n if system\n\n .process_by_name(&afs::exec_fname(\"vrserver\"))\n\n .is_empty()\n\n {\n\n #[cfg(windows)]\n\n spawn_no_window(Command::new(\"cmd\").args(&[\"/C\", \"start\", \"steam://rungameid/250820\"]));\n\n #[cfg(not(windows))]\n\n spawn_no_window(Command::new(\"steam\").args(&[\"steam://rungameid/250820\"]));\n\n }\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 25, "score": 156026.95611729805 }, { "content": "pub fn init_logging() {\n\n #[cfg(target_os = \"android\")]\n\n android_logger::init_once(\n\n android_logger::Config::default()\n\n .with_tag(\"[ALVR NATIVE-RUST]\")\n\n .with_min_level(log::Level::Info),\n\n );\n\n\n\n 
alvr_common::logging::set_panic_hook();\n\n}\n", "file_path": "alvr/client/src/logging_backend.rs", "rank": 26, "score": 156026.95611729805 }, { "content": "pub fn notify_restart_driver() {\n\n notify_shutdown_driver();\n\n\n\n alvr_commands::restart_steamvr(&FILESYSTEM_LAYOUT.launcher_exe()).ok();\n\n}\n\n\n", "file_path": "alvr/server/src/lib.rs", "rank": 27, "score": 156026.95611729805 }, { "content": "pub fn set_panic_hook() {\n\n std::panic::set_hook(Box::new(|panic_info| {\n\n let message = panic_info\n\n .payload()\n\n .downcast_ref::<&str>()\n\n .unwrap_or(&\"Unavailable\");\n\n let err_str = format!(\n\n \"Message: {:?}\\nBacktrace:\\n{:?}\",\n\n message,\n\n backtrace::Backtrace::new()\n\n );\n\n\n\n log::error!(\"{}\", err_str);\n\n\n\n #[cfg(windows)]\n\n std::thread::spawn(move || {\n\n msgbox::create(\"ALVR panicked\", &err_str, msgbox::IconType::Error).ok();\n\n });\n\n }))\n\n}\n\n\n", "file_path": "alvr/common/src/logging.rs", "rank": 28, "score": 156026.95611729805 }, { "content": "#[inline]\n\npub fn get_next_frame_batch(\n\n sample_buffer: &mut VecDeque<f32>,\n\n channels_count: usize,\n\n batch_frames_count: usize,\n\n) -> Vec<f32> {\n\n if sample_buffer.len() / channels_count >= batch_frames_count {\n\n let mut batch = sample_buffer\n\n .drain(0..batch_frames_count * channels_count)\n\n .collect::<Vec<_>>();\n\n\n\n if sample_buffer.len() / channels_count < batch_frames_count {\n\n // Render fade-out. It is completely contained in the current batch\n\n for f in 0..batch_frames_count {\n\n let volume = 1. 
- f as f32 / batch_frames_count as f32;\n\n for c in 0..channels_count {\n\n batch[f * channels_count + c] *= volume;\n\n }\n\n }\n\n }\n\n // fade-ins and cross-fades are rendered in the receive loop directly inside sample_buffer.\n", "file_path": "alvr/audio/src/lib.rs", "rank": 29, "score": 152997.22454900946 }, { "content": " enum class FormatFeatureFlagBits : VkFormatFeatureFlags\n\n {\n\n eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,\n\n eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,\n\n eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,\n\n eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,\n\n eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,\n\n eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,\n\n eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,\n\n eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,\n\n eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,\n\n eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,\n\n eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,\n\n eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,\n\n eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,\n\n eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,\n\n eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,\n\n eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,\n\n eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,\n\n eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 30, "score": 152430.37030454513 }, { "content": " enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags\n\n {\n\n eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,\n\n eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,\n\n 
eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,\n\n eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,\n\n eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,\n\n eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,\n\n eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,\n\n eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,\n\n ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value )\n\n {\n\n switch ( value )\n\n {\n\n case SubgroupFeatureFlagBits::eBasic : return \"Basic\";\n\n case SubgroupFeatureFlagBits::eVote : return \"Vote\";\n\n case SubgroupFeatureFlagBits::eArithmetic : return \"Arithmetic\";\n\n case SubgroupFeatureFlagBits::eBallot : return \"Ballot\";\n\n case SubgroupFeatureFlagBits::eShuffle : return \"Shuffle\";\n\n case SubgroupFeatureFlagBits::eShuffleRelative : return \"ShuffleRelative\";\n\n case SubgroupFeatureFlagBits::eClustered : return \"Clustered\";\n\n case SubgroupFeatureFlagBits::eQuad : return \"Quad\";\n\n case SubgroupFeatureFlagBits::ePartitionedNV : return \"PartitionedNV\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 31, "score": 152430.37030454513 }, { "content": "pub fn modal(\n\n ui: &mut Ui,\n\n title: &str,\n\n content: impl FnOnce(&mut Ui, f32), // arg 2: available width\n\n do_not_show_again: Option<&mut bool>,\n\n visible: &mut bool,\n\n t: &SharedTranslation,\n\n) -> Option<ModalResponse> {\n\n let mut response = None;\n\n if *visible {\n\n Window::new(title)\n\n .collapsible(false)\n\n .resizable(false)\n\n .default_width(200_f32)\n\n .anchor(Align2::CENTER_CENTER, (0_f32, 0_f32))\n\n .show(ui.ctx(), |ui| {\n\n ui.vertical_centered_justified(|ui| {\n\n ui.add_space(10_f32);\n\n content(ui, ui.available_width() - 8_f32); // extra offset to avoid window resizing. 
todo: find origin\n\n ui.add_space(10_f32);\n", "file_path": "alvr/experiments/gui/src/dashboard/basic_components/modal.rs", "rank": 32, "score": 150136.9599979623 }, { "content": " enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR\n\n {\n\n eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,\n\n eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,\n\n eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,\n\n eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,\n\n eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV};\n\n using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR;\n\n\n\n VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value )\n\n {\n\n switch ( value )\n\n {\n\n case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable : return \"TriangleFacingCullDisable\";\n\n case GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise : return \"TriangleFrontCounterclockwise\";\n\n case GeometryInstanceFlagBitsKHR::eForceOpaque : return \"ForceOpaque\";\n\n case GeometryInstanceFlagBitsKHR::eForceNoOpaque : return \"ForceNoOpaque\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 34, "score": 147270.0403577539 }, { "content": " enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags\n\n {\n\n eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,\n\n eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT};\n\n using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits;\n\n\n\n VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value )\n\n {\n\n switch ( value )\n\n {\n\n case ExternalSemaphoreFeatureFlagBits::eExportable : return \"Exportable\";\n\n case 
ExternalSemaphoreFeatureFlagBits::eImportable : return \"Importable\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 35, "score": 147265.7034294076 }, { "content": " enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags\n\n {\n\n eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,\n\n eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT};\n\n using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits;\n\n\n\n VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value )\n\n {\n\n switch ( value )\n\n {\n\n case ExternalFenceFeatureFlagBits::eExportable : return \"Exportable\";\n\n case ExternalFenceFeatureFlagBits::eImportable : return \"Importable\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 36, "score": 147265.7034294076 }, { "content": " enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags\n\n {\n\n eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,\n\n eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,\n\n eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,\n\n eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT};\n\n using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits;\n\n\n\n VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value )\n\n {\n\n switch ( value )\n\n {\n\n case PeerMemoryFeatureFlagBits::eCopySrc : return \"CopySrc\";\n\n case PeerMemoryFeatureFlagBits::eCopyDst : return \"CopyDst\";\n\n case PeerMemoryFeatureFlagBits::eGenericSrc : return \"GenericSrc\";\n\n case PeerMemoryFeatureFlagBits::eGenericDst : return \"GenericDst\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( 
static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 37, "score": 147265.7034294076 }, { "content": " enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags\n\n {\n\n eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,\n\n eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,\n\n eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT};\n\n using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits;\n\n\n\n VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value )\n\n {\n\n switch ( value )\n\n {\n\n case ExternalMemoryFeatureFlagBits::eDedicatedOnly : return \"DedicatedOnly\";\n\n case ExternalMemoryFeatureFlagBits::eExportable : return \"Exportable\";\n\n case ExternalMemoryFeatureFlagBits::eImportable : return \"Importable\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 38, "score": 147265.7034294076 }, { "content": " enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR\n\n {\n\n eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,\n\n eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR,\n\n eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value )\n\n {\n\n switch ( value )\n\n {\n\n case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions : return \"SplitInstanceBindRegions\";\n\n case SwapchainCreateFlagBitsKHR::eProtected : return \"Protected\";\n\n case SwapchainCreateFlagBitsKHR::eMutableFormat : return \"MutableFormat\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": 
"alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 39, "score": 147251.67086973923 }, { "content": "pub fn is_steamvr_running() -> bool {\n\n let mut system = System::new_with_specifics(RefreshKind::new().with_processes());\n\n system.refresh_processes();\n\n\n\n !system\n\n .process_by_name(&afs::exec_fname(\"vrserver\"))\n\n .is_empty()\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 40, "score": 147047.76809689926 }, { "content": "// Use this when there is no way of determining the current path. The reulting Layout paths will\n\n// be invalid, expect for the ones that disregard the relative path (for example the config dir) and\n\n// the ones that have been overridden.\n\npub fn filesystem_layout_from_invalid() -> Layout {\n\n LAYOUT.clone().unwrap_or_else(|| Layout::new(Path::new(\"\")))\n\n}\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 42, "score": 144023.999955199 }, { "content": "pub fn check_steamvr_installation() -> bool {\n\n alvr_commands::openvr_source_file_path().is_ok()\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 43, "score": 144018.0365286107 }, { "content": "pub fn workspace_dir() -> PathBuf {\n\n Path::new(env!(\"CARGO_MANIFEST_DIR\")).join(\"../..\")\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 44, "score": 144018.0365286107 }, { "content": "pub fn target_dir() -> PathBuf {\n\n Path::new(env!(\"OUT_DIR\")).join(\"../../../..\")\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 45, "score": 144018.0365286107 }, { "content": "pub fn build_dir() -> PathBuf {\n\n workspace_dir().join(\"build\")\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 46, "score": 144018.0365286107 }, { "content": "pub fn installer_path() -> PathBuf {\n\n env::temp_dir().join(exec_fname(\"alvr_installer\"))\n\n}\n\n\n\n// Layout of the ALVR installation. 
All paths are absolute\n\n#[derive(Clone)]\n\npub struct Layout {\n\n // directory containing the launcher executable\n\n pub executables_dir: PathBuf,\n\n // (linux only) directory where alvr_vulkan_layer.so is saved\n\n pub libraries_dir: PathBuf,\n\n // parent directory of resources like the dashboard and presets folders\n\n pub static_resources_dir: PathBuf,\n\n // directory for storing configuration files (session.json)\n\n pub config_dir: PathBuf,\n\n // directory for storing log\n\n pub log_dir: PathBuf,\n\n // directory to register in openVR driver path\n\n pub openvr_driver_root_dir: PathBuf,\n\n // (linux only) parent directory of the executable to wrap vrcompositor\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 47, "score": 144018.0365286107 }, { "content": "pub fn deps_dir() -> PathBuf {\n\n workspace_dir().join(\"deps\")\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 48, "score": 144018.0365286107 }, { "content": "pub fn date_utc_yyyymmdd() -> String {\n\n let output = if cfg!(windows) {\n\n Command::new(\"powershell\")\n\n .arg(\"(Get-Date).ToUniversalTime().ToString(\\\"yyyy.MM.dd\\\")\")\n\n .output()\n\n .unwrap()\n\n } else {\n\n Command::new(\"date\")\n\n .args(&[\"-u\", \"+%Y.%m.%d\"])\n\n .output()\n\n .unwrap()\n\n };\n\n\n\n String::from_utf8_lossy(&output.stdout)\n\n .as_ref()\n\n .to_owned()\n\n .replace('\\r', \"\")\n\n .replace('\\n', \"\")\n\n}\n", "file_path": "alvr/xtask/src/command.rs", "rank": 49, "score": 144018.0365286107 }, { "content": " enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV\n\n {\n\n eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,\n\n eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,\n\n eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV};\n\n\n\n VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value )\n\n {\n\n switch ( value )\n\n {\n\n case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly : return 
\"DedicatedOnly\";\n\n case ExternalMemoryFeatureFlagBitsNV::eExportable : return \"Exportable\";\n\n case ExternalMemoryFeatureFlagBitsNV::eImportable : return \"Importable\";\n\n default: return \"invalid ( \" + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + \" )\";\n\n }\n\n }\n\n\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vulkan.hpp", "rank": 50, "score": 142447.17606224772 }, { "content": "pub fn create_setting_container(\n\n schema: SchemaNode,\n\n session_fragment: json::Value,\n\n trans_path: &str,\n\n trans: &TranslationBundle,\n\n) -> Box<dyn SettingContainer> {\n\n match schema {\n\n SchemaNode::Section { entries } => {\n\n Box::new(Section::new(entries, session_fragment, trans_path, trans))\n\n }\n\n SchemaNode::Choice { default, variants } => Box::new(ChoiceContainer::new(\n\n variants,\n\n session_fragment,\n\n trans_path,\n\n trans,\n\n )),\n\n SchemaNode::Optional {\n\n default_set,\n\n content,\n\n } => Box::new(EmptyContainer),\n", "file_path": "alvr/experiments/gui/src/dashboard/components/settings_controls/mod.rs", "rank": 51, "score": 142441.7884979411 }, { "content": "pub fn create_setting_control(\n\n schema: SchemaNode,\n\n session_fragment: json::Value,\n\n trans_path: &str,\n\n trans: &TranslationBundle,\n\n) -> Box<dyn SettingControl> {\n\n match schema {\n\n SchemaNode::Choice { default, variants } => Box::new(ChoiceControl::new(\n\n default,\n\n variants,\n\n session_fragment,\n\n trans_path,\n\n trans,\n\n )),\n\n SchemaNode::Optional {\n\n default_set,\n\n content,\n\n } => Box::new(EmptyControl),\n\n SchemaNode::Switch {\n\n default_enabled,\n", "file_path": "alvr/experiments/gui/src/dashboard/components/settings_controls/mod.rs", "rank": 52, "score": 142441.7884979411 }, { "content": "pub fn split_string(source: &str, start_pattern: &str, end: char) -> (String, String, String) {\n\n let start_idx = source.find(start_pattern).unwrap() + start_pattern.len();\n\n let end_idx = 
start_idx + source[start_idx..].find(end).unwrap();\n\n\n\n (\n\n source[..start_idx].to_owned(),\n\n source[start_idx..end_idx].to_owned(),\n\n source[end_idx..].to_owned(),\n\n )\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 53, "score": 142005.36995889703 }, { "content": "pub fn server_build_dir() -> PathBuf {\n\n let server_build_dir = if cfg!(windows) {\n\n \"alvr_server_windows\"\n\n } else if cfg!(target_os = \"linux\") {\n\n \"alvr_server_linux\"\n\n } else if cfg!(target_os = \"macos\") {\n\n \"alvr_server_macos\"\n\n } else {\n\n unimplemented!()\n\n };\n\n\n\n build_dir().join(server_build_dir)\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 54, "score": 141157.7719775635 }, { "content": "pub fn unblock_alvr_addon() -> StrResult {\n\n let config_path = alvr_commands::steam_config_dir()?.join(\"steamvr.vrsettings\");\n\n\n\n let mut fields_ref: json::Map<String, json::Value> = trace_err!(json::from_str(&trace_err!(\n\n fs::read_to_string(&config_path)\n\n )?))?;\n\n\n\n fields_ref.remove(\"driver_alvr_server\");\n\n\n\n trace_err!(fs::write(\n\n config_path,\n\n trace_err!(json::to_string_pretty(&fields_ref))?\n\n ))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 55, "score": 141157.7719775635 }, { "content": "pub fn session_settings_default() -> SettingsDefault {\n\n SettingsDefault {\n\n video: VideoDescDefault {\n\n adapter_index: 0,\n\n preferred_fps: 72.,\n\n render_resolution: FrameSizeDefault {\n\n variant: FrameSizeDefaultVariant::Scale,\n\n Scale: 0.75,\n\n Absolute: FrameSizeAbsoluteDefault {\n\n width: 2880,\n\n height: 1600,\n\n },\n\n },\n\n recommended_target_resolution: FrameSizeDefault {\n\n variant: FrameSizeDefaultVariant::Scale,\n\n Scale: 0.75,\n\n Absolute: FrameSizeAbsoluteDefault {\n\n width: 2880,\n\n height: 1600,\n\n },\n", "file_path": "alvr/session/src/settings.rs", "rank": 56, "score": 141157.7719775635 }, { "content": "fn align32(value: f32) -> u32 {\n\n 
((value / 32.).floor() * 32.) as u32\n\n}\n\n\n", "file_path": "alvr/server/src/connection.rs", "rank": 57, "score": 140819.4571580209 }, { "content": "fn gui() -> impl Widget<View> {\n\n ViewSwitcher::new(\n\n |view: &View, _| view.clone(),\n\n |view, _, _| match view {\n\n View::RequirementsCheck { steamvr } => Box::new(\n\n Flex::row()\n\n .with_default_spacer()\n\n .with_flex_child(\n\n Flex::column()\n\n .cross_axis_alignment(CrossAxisAlignment::Start)\n\n .with_flex_spacer(1.0)\n\n .with_child(\n\n Label::new(steamvr.clone())\n\n .with_line_break_mode(LineBreaking::WordWrap),\n\n )\n\n .with_default_spacer()\n\n .with_flex_spacer(1.5),\n\n FlexParams::new(1.0, None),\n\n )\n\n .with_default_spacer(),\n", "file_path": "alvr/launcher/src/main.rs", "rank": 58, "score": 140783.66042652068 }, { "content": "pub fn run_as_shell_in(workdir: &Path, shell: &str, shell_flag: &str, cmd: &str) -> StrResult {\n\n println!(\"\\n> {}\", cmd);\n\n\n\n let output = trace_err!(trace_err!(Command::new(shell)\n\n .args(&[shell_flag, cmd])\n\n .stdout(Stdio::inherit())\n\n .current_dir(workdir)\n\n .spawn())?\n\n .wait_with_output())?;\n\n\n\n if output.status.success() {\n\n Ok(())\n\n } else {\n\n fmt_e!(\n\n \"Command failed: {}\",\n\n String::from_utf8_lossy(&output.stderr)\n\n )\n\n }\n\n}\n\n\n", "file_path": "alvr/xtask/src/command.rs", "rank": 59, "score": 139097.15657065104 }, { "content": "pub fn maybe_register_alvr_driver() -> StrResult {\n\n let alvr_driver_dir = afs::filesystem_layout_from_launcher_exe(&env::current_exe().unwrap())\n\n .openvr_driver_root_dir;\n\n\n\n let driver_registered = alvr_commands::get_driver_dir_from_registered()\n\n .ok()\n\n .filter(|dir| *dir == alvr_driver_dir)\n\n .is_some();\n\n\n\n if !driver_registered {\n\n let paths_backup = match alvr_commands::get_registered_drivers() {\n\n Ok(paths) => paths,\n\n Err(e) => {\n\n return fmt_e!(\n\n \"{}\\n{}\\n\\n({})\",\n\n \"Failed to load registered drivers.\",\n\n \"Please reset the drivers 
installation with the apposite button on the launcher.\",\n\n e\n\n )\n\n }\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 60, "score": 138453.14266082377 }, { "content": "pub fn tabs<R>(\n\n ui: &mut Ui,\n\n tabs: &[LocalizedId],\n\n selected_tab: &mut String,\n\n content: impl FnOnce(&mut Ui) -> R,\n\n right_slot: impl FnOnce(&mut Ui),\n\n) -> R {\n\n ui.with_layout(\n\n Layout::top_down(Align::LEFT).with_cross_justify(true),\n\n |ui| {\n\n ui.with_layout(Layout::left_to_right().with_cross_align(Align::TOP), |ui| {\n\n for id in tabs {\n\n ui.selectable_value(selected_tab, (**id).clone(), &id.trans);\n\n }\n\n\n\n ui.with_layout(\n\n Layout::right_to_left().with_cross_align(Align::TOP),\n\n right_slot,\n\n );\n\n });\n\n\n\n ui.separator();\n\n\n\n content(ui)\n\n },\n\n )\n\n .inner\n\n}\n", "file_path": "alvr/experiments/gui/src/dashboard/basic_components/tabs.rs", "rank": 61, "score": 138453.14266082377 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn maybe_wrap_vrcompositor_launcher() -> StrResult {\n\n let steamvr_bin_dir = alvr_commands::steamvr_root_dir()?\n\n .join(\"bin\")\n\n .join(\"linux64\");\n\n let real_launcher_path = steamvr_bin_dir.join(\"vrcompositor.real\");\n\n let launcher_path = steamvr_bin_dir.join(\"vrcompositor\");\n\n\n\n // In case of SteamVR update, vrcompositor will be restored\n\n match fs::read_link(&launcher_path) {\n\n Err(_) => match fs::metadata(&launcher_path) {\n\n Err(_) => (), //file does not exist, do nothing\n\n Ok(_) => {\n\n trace_err!(fs::rename(&launcher_path, &real_launcher_path))?;\n\n }\n\n },\n\n Ok(_) => trace_err!(fs::remove_file(&launcher_path))?, // recreate the link\n\n };\n\n\n\n trace_err!(std::os::unix::fs::symlink(\n\n afs::filesystem_layout_from_launcher_exe(&env::current_exe().unwrap())\n\n .vrcompositor_wrapper(),\n\n &launcher_path\n\n ))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 62, "score": 138453.14266082377 }, { "content": 
"#[cfg(not(windows))]\n\nfn spawn_no_window(command: &mut Command) {\n\n command.spawn().ok();\n\n}\n\n\n", "file_path": "alvr/launcher/src/commands.rs", "rank": 63, "score": 137788.79075615987 }, { "content": "fn mbits_to_bytes(value: u64) -> u32 {\n\n (value * 1024 * 1024 / 8) as u32\n\n}\n\n\n", "file_path": "alvr/server/src/connection.rs", "rank": 64, "score": 137788.79075615987 }, { "content": "pub fn publish_client(is_nightly: bool) {\n\n build_client(!is_nightly, is_nightly, false);\n\n build_client(!is_nightly, is_nightly, true);\n\n}\n\n\n", "file_path": "alvr/xtask/src/main.rs", "rank": 65, "score": 137739.6644307641 }, { "content": "pub fn log_event(id: Event) {\n\n log::info!(\"#{}#\", serde_json::to_string(&id).unwrap());\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! fmt_e {\n\n ($($args:tt)+) => {\n\n Err(format!($($args)+))\n\n };\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! trace_str {\n\n () => {\n\n format!(\"At {}:{}\", file!(), line!())\n\n };\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
trace_err {\n", "file_path": "alvr/common/src/logging.rs", "rank": 66, "score": 137739.6644307641 }, { "content": "pub fn build_deps(target_os: &str) {\n\n if target_os == \"windows\" {\n\n command::run(\"cargo install wasm-pack\").unwrap();\n\n } else if target_os == \"android\" {\n\n command::run(\"rustup target add aarch64-linux-android\").unwrap();\n\n build_rust_android_gradle();\n\n } else {\n\n println!(\"Nothing to do for {}!\", target_os)\n\n }\n\n}\n", "file_path": "alvr/xtask/src/dependencies.rs", "rank": 67, "score": 134879.39987971686 }, { "content": " uint32_t version;\n", "file_path": "alvr/server/cpp/platform/win32/amf/include/core/vulkan/vk_layer.h", "rank": 68, "score": 133935.14516548076 }, { "content": "fn bump_client_gradle_version(new_version: &str, is_nightly: bool) {\n\n let gradle_file_path = afs::workspace_dir()\n\n .join(\"alvr/client/android/app\")\n\n .join(\"build.gradle\");\n\n let file_content = fs::read_to_string(&gradle_file_path).unwrap();\n\n\n\n // Replace versionName\n\n let (file_start, _, file_end) = split_string(&file_content, \"versionName \\\"\", '\\\"');\n\n let file_content = format!(\"{}{}{}\", file_start, new_version, file_end);\n\n\n\n let file_content = if !is_nightly {\n\n // Replace versionCode\n\n let (file_start, old_version_code_string, file_end) =\n\n split_string(&file_content, \"versionCode \", '\\n');\n\n format!(\n\n \"{}{}{}\",\n\n file_start,\n\n old_version_code_string.parse::<usize>().unwrap() + 1,\n\n file_end\n\n )\n\n } else {\n\n file_content\n\n };\n\n\n\n fs::write(gradle_file_path, file_content).unwrap();\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 69, "score": 132099.03405772668 }, { "content": "fn bump_rpm_spec_version(new_version: &str, is_nightly: bool) {\n\n let spec_path = afs::workspace_dir().join(\"packaging/rpm/alvr.spec\");\n\n let spec = fs::read_to_string(&spec_path).unwrap();\n\n\n\n // If there's a '-', split the version around it\n\n let (version_start, 
version_end) = {\n\n if new_version.contains('-') {\n\n let (_, tmp_start, mut tmp_end) = split_string(new_version, \"\", '-');\n\n tmp_end.remove(0);\n\n (\n\n tmp_start,\n\n if is_nightly {\n\n format!(\"0.0.1{}+nightly.{}\", tmp_end, date_utc_yyyymmdd())\n\n } else {\n\n format!(\"0.0.1{}\", tmp_end)\n\n },\n\n )\n\n } else {\n\n (new_version.to_string(), \"1.0.0\".to_string())\n\n }\n", "file_path": "alvr/xtask/src/version.rs", "rank": 70, "score": 132099.03405772668 }, { "content": "fn packages_dir() -> PathBuf {\n\n Path::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .parent()\n\n .unwrap()\n\n .into()\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 71, "score": 130668.83097815543 }, { "content": "#[cfg(target_os = \"linux\")]\n\nfn do_ffmpeg_pkg_config(build: &mut cc::Build) {\n\n let ffmpeg_path = env::var(\"CARGO_MANIFEST_DIR\").unwrap() + \"/../../deps/linux/FFmpeg-n4.4/\";\n\n\n\n #[cfg(feature = \"bundled_ffmpeg\")]\n\n {\n\n for lib in vec![\"libavutil\", \"libavfilter\", \"libavcodec\", \"libswscale\"] {\n\n let path = ffmpeg_path.clone() + lib;\n\n env::set_var(\n\n \"PKG_CONFIG_PATH\",\n\n env::var(\"PKG_CONFIG_PATH\").map_or(path.clone(), |old| format!(\"{}:{}\", path, old)),\n\n );\n\n }\n\n }\n\n\n\n let pkg = pkg_config::Config::new()\n\n .cargo_metadata(cfg!(not(feature = \"bundled_ffmpeg\")))\n\n .to_owned();\n\n let avutil = pkg.probe(\"libavutil\").unwrap();\n\n let avfilter = pkg.probe(\"libavfilter\").unwrap();\n\n let avcodec = pkg.probe(\"libavcodec\").unwrap();\n", "file_path": "alvr/server/build.rs", "rank": 72, "score": 130288.54192611459 }, { "content": "pub fn zip(source: &Path) -> StrResult {\n\n if cfg!(windows) {\n\n run_without_shell(\n\n \"powershell\",\n\n &[\n\n \"Compress-Archive\",\n\n &source.to_string_lossy(),\n\n &format!(\"{}.zip\", source.to_string_lossy()),\n\n ],\n\n )\n\n } else {\n\n run_without_shell(\n\n \"zip\",\n\n &[\n\n \"-r\",\n\n &format!(\"{}.zip\", source.to_string_lossy()),\n\n 
&source.to_string_lossy(),\n\n ],\n\n )\n\n }\n\n}\n\n\n", "file_path": "alvr/xtask/src/command.rs", "rank": 73, "score": 130242.59511515737 }, { "content": "#[cfg(windows)]\n\npub fn exec_fname(name: &str) -> String {\n\n format!(\"{}.exe\", name)\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 74, "score": 130242.59511515737 }, { "content": "pub fn run(cmd: &str) -> StrResult {\n\n run_in(&env::current_dir().unwrap(), cmd)\n\n}\n\n\n", "file_path": "alvr/xtask/src/command.rs", "rank": 75, "score": 130242.59511515737 }, { "content": "#[cfg(target_os = \"macos\")]\n\npub fn dynlib_fname(name: &str) -> String {\n\n format!(\"lib{}.dylib\", name)\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 76, "score": 130242.59511515737 }, { "content": "fn bump_cargo_version(crate_dir_name: &str, new_version: &str) {\n\n let manifest_path = packages_dir().join(crate_dir_name).join(\"Cargo.toml\");\n\n\n\n let manifest = fs::read_to_string(&manifest_path).unwrap();\n\n\n\n let (file_start, _, file_end) = split_string(&manifest, \"version = \\\"\", '\\\"');\n\n let manifest = format!(\"{}{}{}\", file_start, new_version, file_end);\n\n\n\n fs::write(manifest_path, manifest).unwrap();\n\n}\n\n\n", "file_path": "alvr/xtask/src/version.rs", "rank": 77, "score": 130017.4058554035 }, { "content": "pub fn steam_config_dir() -> StrResult<PathBuf> {\n\n get_single_openvr_path(\"config\")\n\n}\n", "file_path": "alvr/commands/src/openvrpaths.rs", "rank": 78, "score": 129613.41008397835 }, { "content": "pub fn steamvr_root_dir() -> StrResult<PathBuf> {\n\n get_single_openvr_path(\"runtime\")\n\n}\n\n\n", "file_path": "alvr/commands/src/openvrpaths.rs", "rank": 79, "score": 129613.41008397835 }, { "content": "pub fn show_e<E: Display>(e: E) {\n\n show_e_block(e, false);\n\n}\n\n\n", "file_path": "alvr/common/src/logging.rs", "rank": 80, "score": 129538.24256519851 }, { "content": "pub fn show_w<W: Display>(w: W) {\n\n log::warn!(\"{}\", w);\n\n\n\n // GDK 
crashes because of initialization in multiple thread\n\n #[cfg(windows)]\n\n std::thread::spawn({\n\n let warn_string = w.to_string();\n\n move || {\n\n msgbox::create(\n\n \"ALVR encountered a non-fatal error\",\n\n &warn_string,\n\n msgbox::IconType::Info,\n\n )\n\n .ok();\n\n }\n\n });\n\n}\n\n\n", "file_path": "alvr/common/src/logging.rs", "rank": 81, "score": 129538.24256519851 }, { "content": "#[proc_macro_derive(SettingsSchema, attributes(schema))]\n\npub fn create_settings_schema_fn_and_default_ty(input: TokenStream) -> TokenStream {\n\n let input = syn::parse_macro_input!(input as DeriveInput);\n\n\n\n match schema(input) {\n\n Ok(tokens) => tokens.into(),\n\n Err(e) => e,\n\n }\n\n}\n", "file_path": "alvr/settings-schema-derive/src/lib.rs", "rank": 82, "score": 128748.52260943047 }, { "content": "pub fn set_game_output_audio_device_id(id: String) {\n\n set_property(\n\n vr::k_unTrackedDeviceIndex_Hmd,\n\n vr::Prop_Audio_DefaultPlaybackDeviceId_String,\n\n OpenvrPropValue::String(id),\n\n );\n\n}\n\n\n", "file_path": "alvr/server/src/openvr.rs", "rank": 83, "score": 127184.22837969572 }, { "content": "pub fn get_driver_dir() -> StrResult<PathBuf> {\n\n get_driver_dir_from_registered()\n\n .map_err(|e| format!(\"ALVR driver path not stored and not registered ({})\", e))\n\n}\n\n\n", "file_path": "alvr/commands/src/openvr_drivers.rs", "rank": 84, "score": 127184.22837969572 }, { "content": "pub fn openvr_source_file_path() -> StrResult<PathBuf> {\n\n let path = trace_none!(if cfg!(windows) {\n\n dirs::cache_dir()\n\n } else {\n\n dirs::config_dir()\n\n })?\n\n .join(\"openvr/openvrpaths.vrpath\");\n\n\n\n if path.exists() {\n\n Ok(path)\n\n } else {\n\n fmt_e!(\"{} does not exist\", path.to_string_lossy())\n\n }\n\n}\n\n\n\npub(crate) fn load_openvr_paths_json() -> StrResult<json::Value> {\n\n let file = trace_err!(File::open(openvr_source_file_path()?))?;\n\n\n\n let mut file_content_decoded = String::new();\n\n 
trace_err!(DecodeReaderBytes::new(&file).read_to_string(&mut file_content_decoded))?;\n", "file_path": "alvr/commands/src/openvrpaths.rs", "rank": 85, "score": 127184.22837969572 }, { "content": "pub fn set_headset_microphone_audio_device_id(id: String) {\n\n set_property(\n\n vr::k_unTrackedDeviceIndex_Hmd,\n\n vr::Prop_Audio_DefaultRecordingDeviceId_String,\n\n OpenvrPropValue::String(id),\n\n );\n\n}\n", "file_path": "alvr/server/src/openvr.rs", "rank": 86, "score": 127184.22837969572 }, { "content": "pub fn get_devices_list() -> StrResult<AudioDevicesList> {\n\n let host = cpal::default_host();\n\n\n\n let output = trace_err!(host.output_devices())?\n\n .filter_map(|d| d.name().ok())\n\n .collect::<Vec<_>>();\n\n let input = trace_err!(host.input_devices())?\n\n .filter_map(|d| d.name().ok())\n\n .collect::<Vec<_>>();\n\n\n\n Ok(AudioDevicesList { output, input })\n\n}\n\n\n\npub enum AudioDeviceType {\n\n Output,\n\n Input,\n\n\n\n // for the virtual microphone devices, input and output labels are swapped\n\n VirtualMicrophoneInput,\n\n VirtualMicrophoneOutput { matching_input_device_name: String },\n", "file_path": "alvr/audio/src/lib.rs", "rank": 87, "score": 127184.22837969572 }, { "content": "pub fn show_e_blocking<E: Display>(e: E) {\n\n show_e_block(e, true);\n\n}\n\n\n", "file_path": "alvr/common/src/logging.rs", "rank": 88, "score": 126677.9780141513 }, { "content": "// The path should include the executable file name\n\n// The path argument is used only if ALVR is built as portable\n\npub fn filesystem_layout_from_launcher_exe(path: &Path) -> Layout {\n\n LAYOUT.clone().unwrap_or_else(|| {\n\n let root = if cfg!(any(windows, target_os = \"macos\")) {\n\n path.parent().unwrap().to_owned()\n\n } else if cfg!(target_os = \"linux\") {\n\n // FHS path is expected\n\n path.parent().unwrap().parent().unwrap().to_owned()\n\n } else {\n\n unimplemented!()\n\n };\n\n\n\n Layout::new(&root)\n\n })\n\n}\n\n\n", "file_path": "alvr/filesystem/src/lib.rs", "rank": 
89, "score": 124983.00598283297 }, { "content": "pub fn restart_steamvr(launcher_path: &Path) -> StrResult {\n\n invoke_launcher(launcher_path, \"--restart-steamvr\")\n\n}\n\n\n", "file_path": "alvr/commands/src/launcher.rs", "rank": 90, "score": 124976.60531941884 }, { "content": "pub fn build_ffmpeg_linux() -> std::path::PathBuf {\n\n // dependencies: build-essential pkg-config nasm libva-dev libdrm-dev libvulkan-dev libx264-dev libx265-dev\n\n\n\n let download_path = afs::deps_dir().join(\"linux\");\n\n let ffmpeg_path = download_path.join(\"FFmpeg-n4.4\");\n\n if !ffmpeg_path.exists() {\n\n download_and_extract_zip(\n\n \"https://codeload.github.com/FFmpeg/FFmpeg/zip/n4.4\",\n\n &download_path,\n\n );\n\n }\n\n\n\n bash_in(\n\n &ffmpeg_path,\n\n &format!(\n\n \"./configure {} {} {} {} {} {} {} {} {} {}\",\n\n \"--enable-gpl --enable-version3\",\n\n \"--disable-static --enable-shared\",\n\n \"--disable-programs\",\n\n \"--disable-doc\",\n", "file_path": "alvr/xtask/src/dependencies.rs", "rank": 91, "score": 124976.60531941884 }, { "content": "pub fn get_driver_dir_from_registered() -> StrResult<PathBuf> {\n\n for dir in get_registered_drivers()? 
{\n\n let maybe_driver_name = || -> StrResult<_> {\n\n let manifest_string =\n\n trace_err!(fs::read_to_string(dir.join(\"driver.vrdrivermanifest\")))?;\n\n let mut manifest_map = trace_err!(json::from_str::<HashMap<String, json::Value>>(\n\n &manifest_string\n\n ))?;\n\n\n\n trace_none!(manifest_map.remove(\"name\"))\n\n }();\n\n\n\n if maybe_driver_name == Ok(json::Value::String(\"alvr_server\".to_owned())) {\n\n return Ok(dir);\n\n }\n\n }\n\n fmt_e!(\"ALVR driver path not registered\")\n\n}\n\n\n", "file_path": "alvr/commands/src/openvr_drivers.rs", "rank": 92, "score": 124877.2511997872 }, { "content": "pub fn map_fragment<T: Serialize>(\n\n res: Option<SettingsResponse>,\n\n map: impl FnOnce(json::Value) -> T,\n\n) -> Option<SettingsResponse> {\n\n match res {\n\n Some(SettingsResponse::SessionFragment(fragment)) => {\n\n Some(super::into_fragment(map(fragment)))\n\n }\n\n res => res,\n\n }\n\n}\n\n\n\npub struct SettingsContext {\n\n pub advanced: bool,\n\n pub view_width: f32,\n\n pub t: Arc<SharedTranslation>,\n\n}\n\n\n", "file_path": "alvr/experiments/gui/src/dashboard/components/settings_controls/mod.rs", "rank": 93, "score": 124877.2511997872 }, { "content": "// Errors:\n\n// 1: firewall rule is already set\n\n// 126: pkexec request dismissed\n\n// other: command failed\n\npub fn firewall_rules(add: bool) -> Result<(), i32> {\n\n let exit_status;\n\n\n\n if cfg!(target_os = \"linux\") {\n\n let action = if add { \"add\" } else { \"remove\" };\n\n // run as normal user since we use pkexec to sudo\n\n exit_status = Command::new(\"bash\")\n\n .arg(\"/usr/libexec/alvr/alvr_fw_config.sh\")\n\n .arg(action)\n\n .status()\n\n .map_err(|_| -1)?;\n\n } else {\n\n let script_path = env::temp_dir().join(\"alvr_firewall_rules.bat\");\n\n let firewall_rules_script_content = if add {\n\n format!(\n\n \"{}\\n{}\",\n\n netsh_add_rule_command_string(\n\n \"SteamVR ALVR vrserver\",\n\n &crate::steamvr_root_dir()\n\n .map_err(|_| -1)?\n", "file_path": 
"alvr/commands/src/firewall.rs", "rank": 94, "score": 123973.34869741155 } ]
Rust
gtk/src/auto/text_tag_table.rs
pop-os/gtk-rs
0a0e50a2f5ea8f816c005bd8c3d145a5a9581d8c
use crate::Buildable; use crate::TextTag; use glib::object::Cast; use glib::object::IsA; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; glib::wrapper! { pub struct TextTagTable(Object<ffi::GtkTextTagTable, ffi::GtkTextTagTableClass>) @implements Buildable; match fn { get_type => || ffi::gtk_text_tag_table_get_type(), } } impl TextTagTable { pub fn new() -> TextTagTable { assert_initialized_main_thread!(); unsafe { from_glib_full(ffi::gtk_text_tag_table_new()) } } } impl Default for TextTagTable { fn default() -> Self { Self::new() } } pub const NONE_TEXT_TAG_TABLE: Option<&TextTagTable> = None; pub trait TextTagTableExt: 'static { fn add<P: IsA<TextTag>>(&self, tag: &P) -> bool; fn foreach<P: FnMut(&TextTag)>(&self, func: P); fn get_size(&self) -> i32; fn lookup(&self, name: &str) -> Option<TextTag>; fn remove<P: IsA<TextTag>>(&self, tag: &P); fn connect_tag_added<F: Fn(&Self, &TextTag) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_tag_changed<F: Fn(&Self, &TextTag, bool) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_tag_removed<F: Fn(&Self, &TextTag) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<TextTagTable>> TextTagTableExt for O { fn add<P: IsA<TextTag>>(&self, tag: &P) -> bool { unsafe { from_glib(ffi::gtk_text_tag_table_add( self.as_ref().to_glib_none().0, tag.as_ref().to_glib_none().0, )) } } fn foreach<P: FnMut(&TextTag)>(&self, func: P) { let func_data: P = func; unsafe extern "C" fn func_func<P: FnMut(&TextTag)>( tag: *mut ffi::GtkTextTag, data: glib::ffi::gpointer, ) { let tag = from_glib_borrow(tag); let callback: *mut P = data as *const _ as usize as *mut P; (*callback)(&tag); } let func = Some(func_func::<P> as _); let super_callback0: &P = &func_data; unsafe { ffi::gtk_text_tag_table_foreach( self.as_ref().to_glib_none().0, func, super_callback0 as *const _ as usize as *mut _, ); } } fn get_size(&self) -> i32 { 
unsafe { ffi::gtk_text_tag_table_get_size(self.as_ref().to_glib_none().0) } } fn lookup(&self, name: &str) -> Option<TextTag> { unsafe { from_glib_none(ffi::gtk_text_tag_table_lookup( self.as_ref().to_glib_none().0, name.to_glib_none().0, )) } } fn remove<P: IsA<TextTag>>(&self, tag: &P) { unsafe { ffi::gtk_text_tag_table_remove( self.as_ref().to_glib_none().0, tag.as_ref().to_glib_none().0, ); } } fn connect_tag_added<F: Fn(&Self, &TextTag) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn tag_added_trampoline<P, F: Fn(&P, &TextTag) + 'static>( this: *mut ffi::GtkTextTagTable, tag: *mut ffi::GtkTextTag, f: glib::ffi::gpointer, ) where P: IsA<TextTagTable>, { let f: &F = &*(f as *const F); f( &TextTagTable::from_glib_borrow(this).unsafe_cast_ref(), &from_glib_borrow(tag), ) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"tag-added\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( tag_added_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_tag_changed<F: Fn(&Self, &TextTag, bool) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn tag_changed_trampoline<P, F: Fn(&P, &TextTag, bool) + 'static>( this: *mut ffi::GtkTextTagTable, tag: *mut ffi::GtkTextTag, size_changed: glib::ffi::gboolean, f: glib::ffi::gpointer, ) where P: IsA<TextTagTable>, { let f: &F = &*(f as *const F); f( &TextTagTable::from_glib_borrow(this).unsafe_cast_ref(), &from_glib_borrow(tag), from_glib(size_changed), ) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"tag-changed\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( tag_changed_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_tag_removed<F: Fn(&Self, &TextTag) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn tag_removed_trampoline<P, F: Fn(&P, &TextTag) + 'static>( this: *mut ffi::GtkTextTagTable, tag: *mut ffi::GtkTextTag, f: 
glib::ffi::gpointer, ) where P: IsA<TextTagTable>, { let f: &F = &*(f as *const F); f( &TextTagTable::from_glib_borrow(this).unsafe_cast_ref(), &from_glib_borrow(tag), ) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"tag-removed\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( tag_removed_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } } impl fmt::Display for TextTagTable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("TextTagTable") } }
use crate::Buildable; use crate::TextTag; use glib::object::Cast; use glib::object::IsA; use glib::signal::connect_raw; use glib::signal::SignalHandlerId; use glib::translate::*; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; glib::wrapper! { pub struct TextTagTable(Object<ffi::GtkTextTagTable, ffi::GtkTextTagTableClass>) @implements Buildable; match fn { get_type => || ffi::gtk_text_tag_table_get_type(), } } impl TextTagTable { pub fn new() -> TextTagTable { assert_initialized_main_thread!(); unsafe { from_glib_full(ffi::gtk_text_tag_table_new()) } } } impl Default for TextTagTable { fn default() -> Self { Self::new() } } pub const NONE_TEXT_TAG_TABLE: Option<&TextTagTable> = None; pub trait TextTagTableExt: 'static { fn add<P: IsA<TextTag>>(&self, tag: &P) -> bool; fn foreach<P: FnMut(&TextTag)>(&self, func: P); fn get_size(&self) -> i32; fn lookup(&self, name: &str) -> Option<TextTag>; fn remove<P: IsA<TextTag>>(&self, tag: &P); fn connect_tag_added<F: Fn(&Self, &TextTag) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_tag_changed<F: Fn(&Self, &TextTag, bool) + 'static>(&self, f: F) -> SignalHandlerId; fn connect_tag_removed<F: Fn(&Self, &TextTag) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<TextTagTable>> TextTagTableExt for O { fn add<P: IsA<TextTag>>(&self, tag: &P) -> bool { unsafe { from_glib(ffi::gtk_text_tag_table_add( self.as_ref().to_glib_none().0, tag.as_ref().to_glib_none().0, )) } } fn foreach<P: FnMut(&TextTag)>(&self, func: P) { let func_data: P = func; unsafe extern "C" fn func_func<P: FnMut(&TextTag)>( tag: *mut ffi::GtkTextTag, data: glib::ffi::gpointer, ) { let tag = from_glib_borrow(tag); let callback: *mut P = data as *const _ as usize as *mut P; (*callback)(&tag); } let func = Some(func_func::<P> as _); let super_callback0: &P = &func_data; unsafe { ffi::gtk_text_tag_table_foreach( self.as_ref().to_glib_none().0, func, super_callback0 as *const _ as usize as *mut _, ); } } fn get_size(&self) -> i32 { 
unsafe { ffi::gtk_text_tag_table_get_size(self.as_ref().to_glib_none().0) } } fn lookup(&self, name: &str) -> Option<TextTag> { unsafe { from_glib_none(ffi::gtk_text_tag_table_lookup( self.as_ref().to_glib_none().0, name.to_glib_none().0, )) } } fn remove<P: IsA<TextTag>>(&self, tag: &P) { unsafe { ffi::gtk_text_tag_table_remove( self.as_ref().to_glib_none().0, tag.as_ref().to_glib_none().0, ); } } fn connect_tag_added<F: Fn(&Self, &TextTag) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn tag_added_trampoline<P, F: Fn(&P, &TextTag) + 'static>( this: *mut ffi::GtkTextTagTable, tag: *mut ffi::GtkTextTag, f: glib::ffi::gpointer, ) where P: IsA<TextTagTable>, { let f: &F = &*(f as *const F); f( &TextTagTable::from_glib_borrow(this).unsafe_cast_ref(), &from_glib_borrow(tag), ) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"tag-added\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( tag_added_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } fn connect_tag_changed<F: Fn(&Self, &TextTag, bool) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn tag_changed_trampoline<P, F: Fn(&P, &TextTag, bool) + 'static>( this: *mut ffi::GtkTextTagTable, tag: *mut ffi::GtkTextTag, size_changed: glib::ffi::gboolean, f: glib::ffi::gpointer, ) where P: IsA<TextTagTable>, {
fn connect_tag_removed<F: Fn(&Self, &TextTag) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn tag_removed_trampoline<P, F: Fn(&P, &TextTag) + 'static>( this: *mut ffi::GtkTextTagTable, tag: *mut ffi::GtkTextTag, f: glib::ffi::gpointer, ) where P: IsA<TextTagTable>, { let f: &F = &*(f as *const F); f( &TextTagTable::from_glib_borrow(this).unsafe_cast_ref(), &from_glib_borrow(tag), ) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"tag-removed\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( tag_removed_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } } } impl fmt::Display for TextTagTable { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str("TextTagTable") } }
let f: &F = &*(f as *const F); f( &TextTagTable::from_glib_borrow(this).unsafe_cast_ref(), &from_glib_borrow(tag), from_glib(size_changed), ) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw( self.as_ptr() as *mut _, b"tag-changed\0".as_ptr() as *const _, Some(transmute::<_, unsafe extern "C" fn()>( tag_changed_trampoline::<Self, F> as *const (), )), Box_::into_raw(f), ) } }
function_block-function_prefix_line
[ { "content": "/// Adds a closure to be called by the main loop the return `Source` is attached to when it's idle.\n\n///\n\n/// `func` will be called repeatedly until it returns `Continue(false)`.\n\npub fn idle_source_new<F>(name: Option<&str>, priority: Priority, func: F) -> Source\n\nwhere\n\n F: FnMut() -> Continue + Send + 'static,\n\n{\n\n unsafe {\n\n let source = ffi::g_idle_source_new();\n\n ffi::g_source_set_callback(\n\n source,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n );\n\n ffi::g_source_set_priority(source, priority.to_glib());\n\n\n\n if let Some(name) = name {\n\n ffi::g_source_set_name(source, name.to_glib_none().0);\n\n }\n\n\n\n from_glib_full(source)\n\n }\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 0, "score": 467387.20000380033 }, { "content": "/// To set back the default print handler, use the [`unset_printerr_handler`] function.\n\npub fn set_printerr_handler<P: Fn(&str) + Send + Sync + 'static>(func: P) {\n\n unsafe extern \"C\" fn func_func(string: *const libc::c_char) {\n\n if let Some(callback) = match *PRINTERR_HANDLER\n\n .lock()\n\n .expect(\"Failed to lock PRINTERR_HANDLER\")\n\n {\n\n Some(ref handler) => Some(Arc::clone(handler)),\n\n None => None,\n\n } {\n\n let string: Borrowed<GString> = from_glib_borrow(string);\n\n (*callback)(string.as_str())\n\n }\n\n }\n\n *PRINTERR_HANDLER\n\n .lock()\n\n .expect(\"Failed to lock PRINTERR_HANDLER to change callback\") = Some(Arc::new(func));\n\n unsafe { ffi::g_set_printerr_handler(Some(func_func as _)) };\n\n}\n\n\n", "file_path": "glib/src/log.rs", "rank": 1, "score": 464308.56255107734 }, { "content": "/// To set back the default print handler, use the [`unset_print_handler`] function.\n\npub fn set_print_handler<P: Fn(&str) + Send + Sync + 'static>(func: P) {\n\n unsafe extern \"C\" fn func_func(string: *const libc::c_char) {\n\n if let Some(callback) = match *PRINT_HANDLER.lock().expect(\"Failed to lock PRINT_HANDLER\") {\n\n 
Some(ref handler) => Some(Arc::clone(handler)),\n\n None => None,\n\n } {\n\n let string: Borrowed<GString> = from_glib_borrow(string);\n\n (*callback)(string.as_str())\n\n }\n\n }\n\n *PRINT_HANDLER\n\n .lock()\n\n .expect(\"Failed to lock PRINT_HANDLER to change callback\") = Some(Arc::new(func));\n\n unsafe { ffi::g_set_print_handler(Some(func_func as _)) };\n\n}\n\n\n", "file_path": "glib/src/log.rs", "rank": 2, "score": 464308.5625510774 }, { "content": "fn into_raw_child_watch<F: FnMut(Pid, i32) + 'static>(func: F) -> gpointer {\n\n let func: Box<RefCell<F>> = Box::new(RefCell::new(func));\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n\n#[cfg(any(unix, feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(unix)))]\n\nunsafe extern \"C\" fn trampoline_unix_fd<F: FnMut(RawFd, IOCondition) -> Continue + 'static>(\n\n fd: i32,\n\n condition: ffi::GIOCondition,\n\n func: gpointer,\n\n) -> gboolean {\n\n let func: &RefCell<F> = &*(func as *const RefCell<F>);\n\n (&mut *func.borrow_mut())(fd, from_glib(condition)).to_glib()\n\n}\n\n\n\n#[cfg(any(unix, feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(unix)))]\n\nunsafe extern \"C\" fn destroy_closure_unix_fd<F: FnMut(RawFd, IOCondition) -> Continue + 'static>(\n\n ptr: gpointer,\n\n) {\n\n Box::<RefCell<F>>::from_raw(ptr as *mut _);\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 3, "score": 438675.6042279372 }, { "content": "pub fn is_canonical_pspec_name(name: &str) -> bool {\n\n name.as_bytes().iter().enumerate().all(|(i, c)| {\n\n i != 0 && (*c >= b'0' && *c <= b'9' || *c == b'-')\n\n || (*c >= b'A' && *c <= b'Z')\n\n || (*c >= b'a' && *c <= b'z')\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::env;\n\n use std::sync::Mutex;\n\n\n\n //Mutex to prevent run environment tests parallel\n\n static LOCK: once_cell::sync::Lazy<Mutex<()>> = once_cell::sync::Lazy::new(|| Mutex::new(()));\n\n\n\n const VAR_NAME: &str = \"function_environment_test\";\n\n\n\n fn check_getenv(val: 
&str) {\n\n let _data = LOCK.lock().unwrap();\n", "file_path": "glib/src/utils.rs", "rank": 4, "score": 429251.1223997088 }, { "content": "/// To set back the default print handler, use the [`log_unset_default_handler`] function.\n\npub fn log_set_default_handler<P: Fn(Option<&str>, LogLevel, &str) + Send + Sync + 'static>(\n\n log_func: P,\n\n) {\n\n unsafe extern \"C\" fn func_func(\n\n log_domain: *const libc::c_char,\n\n log_levels: ffi::GLogLevelFlags,\n\n message: *const libc::c_char,\n\n _user_data: ffi::gpointer,\n\n ) {\n\n if let Some(callback) = match *DEFAULT_HANDLER\n\n .lock()\n\n .expect(\"Failed to lock DEFAULT_HANDLER\")\n\n {\n\n Some(ref handler) => Some(Arc::clone(handler)),\n\n None => None,\n\n } {\n\n let log_domain: Borrowed<Option<GString>> = from_glib_borrow(log_domain);\n\n let message: Borrowed<GString> = from_glib_borrow(message);\n\n (*callback)(\n\n (*log_domain).as_deref(),\n", "file_path": "glib/src/log.rs", "rank": 5, "score": 417865.09301895904 }, { "content": "fn into_raw<F: FnMut() -> Continue + 'static>(func: F) -> gpointer {\n\n let func: Box<RefCell<F>> = Box::new(RefCell::new(func));\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n\nunsafe extern \"C\" fn trampoline_child_watch<F: FnMut(Pid, i32) + 'static>(\n\n pid: ffi::GPid,\n\n status: i32,\n\n func: gpointer,\n\n) {\n\n let func: &RefCell<F> = &*(func as *const RefCell<F>);\n\n (&mut *func.borrow_mut())(Pid(pid), status)\n\n}\n\n\n\nunsafe extern \"C\" fn destroy_closure_child_watch<F: FnMut(Pid, i32) + 'static>(ptr: gpointer) {\n\n Box::<RefCell<F>>::from_raw(ptr as *mut _);\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 6, "score": 392077.0130692315 }, { "content": "/// Adds a closure to be called by the default main loop whenever a UNIX signal is raised.\n\n///\n\n/// `func` will be called repeatedly every time `signum` is raised until it\n\n/// returns `Continue(false)`.\n\n///\n\n/// The default main loop almost always is the main loop of the main 
thread.\n\n/// Thus the closure is called on the main thread.\n\n///\n\n/// In comparison to `unix_signal_add()`, this only requires `func` to be\n\n/// `FnOnce`, and will automatically return `Continue(false)`.\n\npub fn unix_signal_add_once<F>(signum: i32, func: F) -> SourceId\n\nwhere\n\n F: FnOnce() + Send + 'static,\n\n{\n\n unix_signal_add(signum, fnmut_callback_wrapper(func))\n\n}\n\n\n\n#[cfg(any(unix, feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(unix)))]\n", "file_path": "glib/src/source.rs", "rank": 7, "score": 388204.7674042063 }, { "content": "/// Adds a closure to be called by the default main loop whenever a UNIX signal is raised.\n\n///\n\n/// `func` will be called repeatedly every time `signum` is raised until it\n\n/// returns `Continue(false)`.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\npub fn unix_signal_add<F>(signum: i32, func: F) -> SourceId\n\nwhere\n\n F: FnMut() -> Continue + Send + 'static,\n\n{\n\n unsafe {\n\n from_glib(ffi::g_unix_signal_add_full(\n\n ffi::G_PRIORITY_DEFAULT,\n\n signum,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n ))\n\n }\n\n}\n\n\n\n#[cfg(any(unix, feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(unix)))]\n", "file_path": "glib/src/source.rs", "rank": 8, "score": 388198.8069245155 }, { "content": "/// Adds a closure to be called by the default main loop whenever a UNIX signal is raised.\n\n///\n\n/// `func` will be called repeatedly every time `signum` is raised until it\n\n/// returns `Continue(false)`.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\n///\n\n/// Different to `unix_signal_add()`, this does not require `func` to be\n\n/// `Send` but can only be called from the thread that owns the main context.\n\n///\n\n/// This function panics if called 
from a different thread than the one that\n\n/// owns the main context.\n\n///\n\n/// In comparison to `unix_signal_add_local()`, this only requires `func` to be\n\n/// `FnOnce`, and will automatically return `Continue(false)`.\n\npub fn unix_signal_add_local_once<F>(signum: i32, func: F) -> SourceId\n\nwhere\n\n F: FnOnce() + 'static,\n\n{\n\n unix_signal_add_local(signum, fnmut_callback_wrapper_local(func))\n\n}\n\n\n\n#[cfg(any(unix, feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(unix)))]\n", "file_path": "glib/src/source.rs", "rank": 9, "score": 382811.73654869944 }, { "content": "/// Adds a closure to be called by the default main loop whenever a UNIX signal is raised.\n\n///\n\n/// `func` will be called repeatedly every time `signum` is raised until it\n\n/// returns `Continue(false)`.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\n///\n\n/// Different to `unix_signal_add()`, this does not require `func` to be\n\n/// `Send` but can only be called from the thread that owns the main context.\n\n///\n\n/// This function panics if called from a different thread than the one that\n\n/// owns the main context.\n\npub fn unix_signal_add_local<F>(signum: i32, func: F) -> SourceId\n\nwhere\n\n F: FnMut() -> Continue + 'static,\n\n{\n\n unsafe {\n\n assert!(MainContext::default().is_owner());\n\n from_glib(ffi::g_unix_signal_add_full(\n\n ffi::G_PRIORITY_DEFAULT,\n\n signum,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n ))\n\n }\n\n}\n\n\n\n#[cfg(any(unix, feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(unix)))]\n", "file_path": "glib/src/source.rs", "rank": 10, "score": 382807.02683624777 }, { "content": "pub fn dbus_is_name(string: &str) -> bool {\n\n unsafe { from_glib(ffi::g_dbus_is_name(string.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 11, "score": 
381805.65369735914 }, { "content": "#[cfg(any(feature = \"v2_46\", feature = \"dox\"))]\n\npub fn log_set_handler<P: Fn(Option<&str>, LogLevel, &str) + Send + Sync + 'static>(\n\n log_domain: Option<&str>,\n\n log_levels: LogLevels,\n\n fatal: bool,\n\n recursion: bool,\n\n log_func: P,\n\n) -> LogHandlerId {\n\n let log_func_data: Box_<P> = Box_::new(log_func);\n\n unsafe extern \"C\" fn log_func_func<\n\n P: Fn(Option<&str>, LogLevel, &str) + Send + Sync + 'static,\n\n >(\n\n log_domain: *const libc::c_char,\n\n log_level: ffi::GLogLevelFlags,\n\n message: *const libc::c_char,\n\n user_data: ffi::gpointer,\n\n ) {\n\n let log_domain: Borrowed<Option<GString>> = from_glib_borrow(log_domain);\n\n let message: Borrowed<GString> = from_glib_borrow(message);\n\n let callback: &P = &*(user_data as *mut _);\n\n (*callback)(\n", "file_path": "glib/src/log.rs", "rank": 12, "score": 379568.72017880017 }, { "content": "pub fn pattern_match_simple(pattern: &str, string: &str) -> bool {\n\n unsafe {\n\n from_glib(ffi::g_pattern_match_simple(\n\n pattern.to_glib_none().0,\n\n string.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n\n//pub fn pattern_match_string(pspec: /*Ignored*/&mut PatternSpec, string: &str) -> bool {\n\n// unsafe { TODO: call ffi:g_pattern_match_string() }\n\n//}\n\n\n\n//pub fn pointer_bit_lock(address: /*Unimplemented*/Fundamental: Pointer, lock_bit: i32) {\n\n// unsafe { TODO: call ffi:g_pointer_bit_lock() }\n\n//}\n\n\n\n//pub fn pointer_bit_trylock(address: /*Unimplemented*/Fundamental: Pointer, lock_bit: i32) -> bool {\n\n// unsafe { TODO: call ffi:g_pointer_bit_trylock() }\n\n//}\n", "file_path": "glib/src/auto/functions.rs", "rank": 13, "score": 375560.47326837864 }, { "content": "pub fn dbus_is_unique_name(string: &str) -> bool {\n\n unsafe { from_glib(ffi::g_dbus_is_unique_name(string.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 14, "score": 375134.01131869236 }, { "content": "pub fn dbus_is_member_name(string: 
&str) -> bool {\n\n unsafe { from_glib(ffi::g_dbus_is_member_name(string.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 15, "score": 375134.0113186924 }, { "content": "pub fn dbus_is_interface_name(string: &str) -> bool {\n\n unsafe { from_glib(ffi::g_dbus_is_interface_name(string.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 16, "score": 375134.0113186924 }, { "content": "pub fn assertion_message(domain: &str, file: &str, line: i32, func: &str, message: &str) {\n\n unsafe {\n\n ffi::g_assertion_message(\n\n domain.to_glib_none().0,\n\n file.to_glib_none().0,\n\n line,\n\n func.to_glib_none().0,\n\n message.to_glib_none().0,\n\n );\n\n }\n\n}\n\n\n\n//pub fn assertion_message_cmpnum(domain: &str, file: &str, line: i32, func: &str, expr: &str, arg1: /*Unknown conversion*//*Unimplemented*/Unsupported, cmp: &str, arg2: /*Unknown conversion*//*Unimplemented*/Unsupported, numtype: crate::Char) {\n\n// unsafe { TODO: call ffi:g_assertion_message_cmpnum() }\n\n//}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 17, "score": 373897.040744136 }, { "content": "#[cfg(any(feature = \"v2_52\", feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(feature = \"v2_52\")))]\n\npub fn uuid_string_is_valid(str: &str) -> bool {\n\n unsafe { from_glib(ffi::g_uuid_string_is_valid(str.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 19, "score": 354305.4319138087 }, { "content": "pub fn version() -> &'static str {\n\nif cfg!(feature = \"v3_24\") {\n\n \"3.24\"\n\n } else if cfg!(feature = \"v3_22\") {\n\n \"3.22\"\n\n } else if cfg!(feature = \"v3_20\") {\n\n \"3.20\"\n\n } else if cfg!(feature = \"v3_18\") {\n\n \"3.18\"\n\n } else if cfg!(feature = \"v3_16\") {\n\n \"3.16\"\n\n } else {\n\n \"3.14\"\n\n }\n\n}\n", "file_path": "gdk/sys/build_version.rs", "rank": 20, "score": 351571.80204725463 }, { "content": "#[cfg(any(unix, feature = 
\"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(unix)))]\n\nfn into_raw_unix_fd<F: FnMut(RawFd, IOCondition) -> Continue + 'static>(func: F) -> gpointer {\n\n let func: Box<RefCell<F>> = Box::new(RefCell::new(func));\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n\n/// Transform a generic FnOnce into a closure that can be used as callback in various glib methods\n\n///\n\n/// The resulting function can only be called once and will panic otherwise. It will return `Continue(false)`\n\n/// in order to prevent being called twice.\n", "file_path": "glib/src/source.rs", "rank": 21, "score": 348722.8504318694 }, { "content": "/// Same as [`set_prgname()`].\n\n///\n\n/// [`set_prgname()`]: fn.set_prgname.html\n\npub fn set_program_name(name: Option<&str>) {\n\n set_prgname(name)\n\n}\n\n\n", "file_path": "glib/src/utils.rs", "rank": 22, "score": 345986.785777921 }, { "content": "pub fn quantize_line_geometry(thickness: &mut i32, position: &mut i32) {\n\n unsafe {\n\n ffi::pango_quantize_line_geometry(thickness, position);\n\n }\n\n}\n\n\n\n//#[cfg_attr(feature = \"v1_38\", deprecated)]\n\n//pub fn read_line(stream: /*Unimplemented*/Option<Fundamental: Pointer>, str: /*Ignored*/&mut glib::String) -> i32 {\n\n// unsafe { TODO: call ffi:pango_read_line() }\n\n//}\n\n\n\n//#[cfg_attr(feature = \"v1_38\", deprecated)]\n\n//pub fn scan_int(pos: /*Unimplemented*/glib::GString) -> Option<i32> {\n\n// unsafe { TODO: call ffi:pango_scan_int() }\n\n//}\n\n\n\n//#[cfg_attr(feature = \"v1_38\", deprecated)]\n\n//pub fn scan_string(pos: /*Unimplemented*/glib::GString, out: /*Ignored*/&mut glib::String) -> bool {\n\n// unsafe { TODO: call ffi:pango_scan_string() }\n\n//}\n\n\n\n//#[cfg_attr(feature = \"v1_38\", deprecated)]\n\n//pub fn scan_word(pos: /*Unimplemented*/glib::GString, out: /*Ignored*/&mut glib::String) -> bool {\n\n// unsafe { TODO: call ffi:pango_scan_word() }\n\n//}\n\n\n", "file_path": "pango/src/auto/functions.rs", "rank": 23, "score": 345741.81377748196 }, { 
"content": "pub fn get_version_string() -> &'static str {\n\n unsafe {\n\n let ptr = ffi::cairo_version_string();\n\n CStr::from_ptr(ptr)\n\n .to_str()\n\n .expect(\"invalid version string\")\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy)]\n\npub struct Version {\n\n pub major: u8,\n\n pub minor: u8,\n\n pub micro: u8,\n\n}\n\n\n\nimpl Version {\n\n pub fn get_version() -> Version {\n\n let version = unsafe { ffi::cairo_version() };\n\n Version {\n", "file_path": "cairo/src/utils.rs", "rank": 24, "score": 345637.1078253882 }, { "content": "/// Adds a closure to be called by the default main loop when it's idle.\n\n///\n\n/// `func` will be called repeatedly until it returns `Continue(false)`.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\n///\n\n/// In comparison to `idle_add()`, this only requires `func` to be\n\n/// `FnOnce`, and will automatically return `Continue(false)`.\n\npub fn idle_add_once<F>(func: F) -> SourceId\n\nwhere\n\n F: FnOnce() + Send + 'static,\n\n{\n\n idle_add(fnmut_callback_wrapper(func))\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 25, "score": 345288.44971861003 }, { "content": "/// Adds a closure to be called by the default main loop when it's idle.\n\n///\n\n/// `func` will be called repeatedly until it returns `Continue(false)`.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\npub fn idle_add<F>(func: F) -> SourceId\n\nwhere\n\n F: FnMut() -> Continue + Send + 'static,\n\n{\n\n unsafe {\n\n from_glib(ffi::g_idle_add_full(\n\n ffi::G_PRIORITY_DEFAULT_IDLE,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 26, "score": 345282.252964029 }, { "content": "pub trait BuildableExt: 'static {\n\n fn add_child<P: 
IsA<Builder>, Q: IsA<glib::Object>>(\n\n &self,\n\n builder: &P,\n\n child: &Q,\n\n type_: Option<&str>,\n\n );\n\n\n\n fn construct_child<P: IsA<Builder>>(&self, builder: &P, name: &str) -> Option<glib::Object>;\n\n\n\n //fn custom_finished<P: IsA<Builder>, Q: IsA<glib::Object>>(&self, builder: &P, child: Option<&Q>, tagname: &str, data: /*Unimplemented*/Option<Fundamental: Pointer>);\n\n\n\n //fn custom_tag_end<P: IsA<Builder>, Q: IsA<glib::Object>>(&self, builder: &P, child: Option<&Q>, tagname: &str, data: /*Unimplemented*/Option<Fundamental: Pointer>);\n\n\n\n //fn custom_tag_start<P: IsA<Builder>, Q: IsA<glib::Object>>(&self, builder: &P, child: Option<&Q>, tagname: &str, parser: /*Ignored*/glib::MarkupParser, data: /*Unimplemented*/&mut Option<Fundamental: Pointer>) -> bool;\n\n\n\n fn get_internal_child<P: IsA<Builder>>(\n\n &self,\n\n builder: &P,\n\n childname: &str,\n", "file_path": "gtk/src/auto/buildable.rs", "rank": 27, "score": 343822.3408189542 }, { "content": "pub trait BuildableExtManual: 'static {\n\n fn get_buildable_name(&self) -> Option<String>;\n\n\n\n fn set_buildable_name(&self, name: &str);\n\n}\n\n\n\nimpl<O: IsA<Buildable>> BuildableExtManual for O {\n\n fn get_buildable_name(&self) -> Option<String> {\n\n unsafe { from_glib_none(ffi::gtk_buildable_get_name(self.as_ref().to_glib_none().0)) }\n\n }\n\n\n\n fn set_buildable_name(&self, name: &str) {\n\n unsafe {\n\n ffi::gtk_buildable_set_name(self.as_ref().to_glib_none().0, name.to_glib_none().0);\n\n }\n\n }\n\n}\n", "file_path": "gtk/src/buildable.rs", "rank": 28, "score": 343822.3408189543 }, { "content": "pub fn path_is_absolute<P: AsRef<std::path::Path>>(file_name: P) -> bool {\n\n unsafe { from_glib(ffi::g_path_is_absolute(file_name.as_ref().to_glib_none().0)) }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 29, "score": 340420.5389563604 }, { "content": "/// Adds a closure to be called by the default main loop when it's idle.\n\n///\n\n/// `func` will be called 
repeatedly until it returns `Continue(false)`.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\n///\n\n/// Different to `idle_add()`, this does not require `func` to be\n\n/// `Send` but can only be called from the thread that owns the main context.\n\n///\n\n/// This function panics if called from a different thread than the one that\n\n/// owns the main context.\n\n///\n\n/// In comparison to `idle_add_local()`, this only requires `func` to be\n\n/// `FnOnce`, and will automatically return `Continue(false)`.\n\npub fn idle_add_local_once<F>(func: F) -> SourceId\n\nwhere\n\n F: FnOnce() + 'static,\n\n{\n\n idle_add_local(fnmut_callback_wrapper_local(func))\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 30, "score": 340368.66409136856 }, { "content": "/// Adds a closure to be called by the default main loop when it's idle.\n\n///\n\n/// `func` will be called repeatedly until it returns `Continue(false)`.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\n///\n\n/// Different to `idle_add()`, this does not require `func` to be\n\n/// `Send` but can only be called from the thread that owns the main context.\n\n///\n\n/// This function panics if called from a different thread than the one that\n\n/// owns the main context.\n\npub fn idle_add_local<F>(func: F) -> SourceId\n\nwhere\n\n F: FnMut() -> Continue + 'static,\n\n{\n\n unsafe {\n\n assert!(MainContext::default().is_owner());\n\n from_glib(ffi::g_idle_add_full(\n\n ffi::G_PRIORITY_DEFAULT_IDLE,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 31, "score": 340363.73723249964 }, { "content": "pub fn find_paragraph_boundary(text: &str) -> (i32, i32) {\n\n let length = text.len() as i32;\n\n unsafe {\n\n let mut 
paragraph_delimiter_index = mem::MaybeUninit::uninit();\n\n let mut next_paragraph_start = mem::MaybeUninit::uninit();\n\n ffi::pango_find_paragraph_boundary(\n\n text.to_glib_none().0,\n\n length,\n\n paragraph_delimiter_index.as_mut_ptr(),\n\n next_paragraph_start.as_mut_ptr(),\n\n );\n\n let paragraph_delimiter_index = paragraph_delimiter_index.assume_init();\n\n let next_paragraph_start = next_paragraph_start.assume_init();\n\n (paragraph_delimiter_index, next_paragraph_start)\n\n }\n\n}\n\n\n\n//pub fn get_log_attrs(text: &str, level: i32, language: &mut Language, log_attrs: /*Ignored*/&[&LogAttr]) {\n\n// unsafe { TODO: call ffi:pango_get_log_attrs() }\n\n//}\n\n\n", "file_path": "pango/src/auto/functions.rs", "rank": 32, "score": 340194.2611830549 }, { "content": "pub fn set_prgname(name: Option<&str>) {\n\n unsafe { ffi::g_set_prgname(name.to_glib_none().0) }\n\n}\n\n\n", "file_path": "glib/src/utils.rs", "rank": 33, "score": 335434.1823915996 }, { "content": "pub trait HyperlinkImplExt: 'static {\n\n fn get_hyperlink(&self) -> Option<Hyperlink>;\n\n}\n\n\n\nimpl<O: IsA<HyperlinkImpl>> HyperlinkImplExt for O {\n\n fn get_hyperlink(&self) -> Option<Hyperlink> {\n\n unsafe {\n\n from_glib_full(ffi::atk_hyperlink_impl_get_hyperlink(\n\n self.as_ref().to_glib_none().0,\n\n ))\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for HyperlinkImpl {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.write_str(\"HyperlinkImpl\")\n\n }\n\n}\n", "file_path": "atk/src/auto/hyperlink_impl.rs", "rank": 34, "score": 332690.6528661475 }, { "content": "pub trait TextTagExt: 'static {\n\n #[cfg(any(feature = \"v3_20\", feature = \"dox\"))]\n\n #[cfg_attr(feature = \"dox\", doc(cfg(feature = \"v3_20\")))]\n\n fn changed(&self, size_changed: bool);\n\n\n\n fn event<P: IsA<glib::Object>>(\n\n &self,\n\n event_object: &P,\n\n event: &gdk::Event,\n\n iter: &TextIter,\n\n ) -> bool;\n\n\n\n fn get_priority(&self) -> i32;\n\n\n\n fn set_priority(&self, priority: i32);\n\n\n\n 
fn get_property_accumulative_margin(&self) -> bool;\n\n\n\n fn set_property_accumulative_margin(&self, accumulative_margin: bool);\n\n\n", "file_path": "gtk/src/auto/text_tag.rs", "rank": 35, "score": 332627.82926307706 }, { "content": "pub fn content_type_is_a(type_: &str, supertype: &str) -> bool {\n\n unsafe {\n\n from_glib(ffi::g_content_type_is_a(\n\n type_.to_glib_none().0,\n\n supertype.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 36, "score": 332011.60265022924 }, { "content": "pub fn dbus_is_address(string: &str) -> bool {\n\n unsafe { from_glib(ffi::g_dbus_is_address(string.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 37, "score": 329764.75824010366 }, { "content": "pub fn dbus_is_guid(string: &str) -> bool {\n\n unsafe { from_glib(ffi::g_dbus_is_guid(string.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 38, "score": 329764.75824010366 }, { "content": "pub fn content_type_guess(filename: Option<&str>, data: &[u8]) -> (glib::GString, bool) {\n\n let data_size = data.len() as usize;\n\n unsafe {\n\n let mut result_uncertain = mem::MaybeUninit::uninit();\n\n let ret = from_glib_full(ffi::g_content_type_guess(\n\n filename.to_glib_none().0,\n\n data.to_glib_none().0,\n\n data_size,\n\n result_uncertain.as_mut_ptr(),\n\n ));\n\n let result_uncertain = result_uncertain.assume_init();\n\n (ret, from_glib(result_uncertain))\n\n }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 39, "score": 328328.6997512671 }, { "content": "/// Adds a closure to be called by the main loop the returned `Source` is attached to at regular\n\n/// intervals with millisecond granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` milliseconds until it\n\n/// returns `Continue(false)`. Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events. 
Prefer `timeout_add_seconds` when millisecond\n\n/// precision is not necessary.\n\npub fn timeout_source_new<F>(\n\n interval: Duration,\n\n name: Option<&str>,\n\n priority: Priority,\n\n func: F,\n\n) -> Source\n\nwhere\n\n F: FnMut() -> Continue + Send + 'static,\n\n{\n\n unsafe {\n\n let source = ffi::g_timeout_source_new(interval.as_millis() as _);\n\n ffi::g_source_set_callback(\n\n source,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n );\n\n ffi::g_source_set_priority(source, priority.to_glib());\n\n\n\n if let Some(name) = name {\n\n ffi::g_source_set_name(source, name.to_glib_none().0);\n\n }\n\n\n\n from_glib_full(source)\n\n }\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 40, "score": 327924.69767764397 }, { "content": "pub fn drawable<F>(application: &gtk::Application, width: i32, height: i32, draw_fn: F)\n\nwhere\n\n F: Fn(&DrawingArea, &Context) -> Inhibit + 'static,\n\n{\n\n let window = gtk::ApplicationWindow::new(application);\n\n let drawing_area = Box::new(DrawingArea::new)();\n\n\n\n drawing_area.connect_draw(draw_fn);\n\n\n\n window.set_default_size(width, height);\n\n\n\n window.add(&drawing_area);\n\n window.show_all();\n\n}\n", "file_path": "examples/cairo_test/main.rs", "rank": 41, "score": 327826.6940282848 }, { "content": "pub fn content_type_equals(type1: &str, type2: &str) -> bool {\n\n unsafe {\n\n from_glib(ffi::g_content_type_equals(\n\n type1.to_glib_none().0,\n\n type2.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 42, "score": 327316.0961552033 }, { "content": "pub fn content_type_is_unknown(type_: &str) -> bool {\n\n unsafe { from_glib(ffi::g_content_type_is_unknown(type_.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 43, "score": 324429.7572992379 }, { "content": "pub fn hostname_is_non_ascii(hostname: &str) -> bool {\n\n unsafe { from_glib(ffi::g_hostname_is_non_ascii(hostname.to_glib_none().0)) 
}\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 44, "score": 324429.7572992379 }, { "content": "pub fn hostname_is_ascii_encoded(hostname: &str) -> bool {\n\n unsafe { from_glib(ffi::g_hostname_is_ascii_encoded(hostname.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 45, "score": 324429.7572992379 }, { "content": "pub fn hostname_is_ip_address(hostname: &str) -> bool {\n\n unsafe { from_glib(ffi::g_hostname_is_ip_address(hostname.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 46, "score": 324429.7572992379 }, { "content": "pub trait DataOutputStreamExt: 'static {\n\n fn get_byte_order(&self) -> DataStreamByteOrder;\n\n\n\n fn put_byte<P: IsA<Cancellable>>(\n\n &self,\n\n data: u8,\n\n cancellable: Option<&P>,\n\n ) -> Result<(), glib::Error>;\n\n\n\n fn put_int16<P: IsA<Cancellable>>(\n\n &self,\n\n data: i16,\n\n cancellable: Option<&P>,\n\n ) -> Result<(), glib::Error>;\n\n\n\n fn put_int32<P: IsA<Cancellable>>(\n\n &self,\n\n data: i32,\n\n cancellable: Option<&P>,\n\n ) -> Result<(), glib::Error>;\n", "file_path": "gio/src/auto/data_output_stream.rs", "rank": 47, "score": 322607.45531662385 }, { "content": "pub trait DataInputStreamExtManual: 'static {\n\n fn read_line<P: IsA<Cancellable>>(\n\n &self,\n\n cancellable: Option<&P>,\n\n ) -> Result<Vec<u8>, glib::Error>;\n\n\n\n fn read_line_async<\n\n P: IsA<Cancellable>,\n\n Q: FnOnce(Result<Vec<u8>, glib::Error>) + Send + 'static,\n\n >(\n\n &self,\n\n io_priority: glib::Priority,\n\n cancellable: Option<&P>,\n\n callback: Q,\n\n );\n\n\n\n fn read_line_async_future(\n\n &self,\n\n io_priority: glib::Priority,\n\n ) -> Pin<Box_<dyn std::future::Future<Output = Result<Vec<u8>, glib::Error>> + 'static>>;\n", "file_path": "gio/src/data_input_stream.rs", "rank": 48, "score": 322607.45531662385 }, { "content": "pub trait DataInputStreamExt: 'static {\n\n fn get_byte_order(&self) -> DataStreamByteOrder;\n\n\n\n fn 
get_newline_type(&self) -> DataStreamNewlineType;\n\n\n\n fn read_byte<P: IsA<Cancellable>>(&self, cancellable: Option<&P>) -> Result<u8, glib::Error>;\n\n\n\n fn read_int16<P: IsA<Cancellable>>(&self, cancellable: Option<&P>) -> Result<i16, glib::Error>;\n\n\n\n fn read_int32<P: IsA<Cancellable>>(&self, cancellable: Option<&P>) -> Result<i32, glib::Error>;\n\n\n\n fn read_int64<P: IsA<Cancellable>>(&self, cancellable: Option<&P>) -> Result<i64, glib::Error>;\n\n\n\n fn read_uint16<P: IsA<Cancellable>>(&self, cancellable: Option<&P>)\n\n -> Result<u16, glib::Error>;\n\n\n\n fn read_uint32<P: IsA<Cancellable>>(&self, cancellable: Option<&P>)\n\n -> Result<u32, glib::Error>;\n\n\n\n fn read_uint64<P: IsA<Cancellable>>(&self, cancellable: Option<&P>)\n", "file_path": "gio/src/auto/data_input_stream.rs", "rank": 49, "score": 322607.45531662385 }, { "content": "/// Adds a closure to be called by the main loop the returned `Source` is attached to whenever a\n\n/// UNIX file descriptor reaches the given IO condition.\n\n///\n\n/// `func` will be called repeatedly while the file descriptor matches the given IO condition\n\n/// until it returns `Continue(false)`.\n\npub fn unix_fd_source_new<F>(\n\n fd: RawFd,\n\n condition: IOCondition,\n\n name: Option<&str>,\n\n priority: Priority,\n\n func: F,\n\n) -> Source\n\nwhere\n\n F: FnMut(RawFd, IOCondition) -> Continue + Send + 'static,\n\n{\n\n unsafe {\n\n let source = ffi::g_unix_fd_source_new(fd, condition.to_glib());\n\n ffi::g_source_set_callback(\n\n source,\n\n Some(transmute::<\n\n _,\n\n unsafe extern \"C\" fn(ffi::gpointer) -> ffi::gboolean,\n\n >(trampoline_unix_fd::<F> as *const ())),\n\n into_raw_unix_fd(func),\n\n Some(destroy_closure_unix_fd::<F>),\n", "file_path": "glib/src/source.rs", "rank": 50, "score": 321992.86421958974 }, { "content": "/// Adds a closure to be called by the main loop the returned `Source` is attached to when a child\n\n/// process exits.\n\n///\n\n/// `func` will be called when `pid` 
exits\n\npub fn child_watch_source_new<F>(\n\n pid: Pid,\n\n name: Option<&str>,\n\n priority: Priority,\n\n func: F,\n\n) -> Source\n\nwhere\n\n F: FnMut(Pid, i32) + Send + 'static,\n\n{\n\n unsafe {\n\n let source = ffi::g_child_watch_source_new(pid.0);\n\n ffi::g_source_set_callback(\n\n source,\n\n Some(transmute::<\n\n _,\n\n unsafe extern \"C\" fn(ffi::gpointer) -> ffi::gboolean,\n\n >(trampoline_child_watch::<F> as *const ())),\n\n into_raw_child_watch(func),\n\n Some(destroy_closure_child_watch::<F>),\n\n );\n", "file_path": "glib/src/source.rs", "rank": 51, "score": 321987.5841255577 }, { "content": "/// Adds a closure to be called by the main loop the returned `Source` is attached to whenever a\n\n/// UNIX signal is raised.\n\n///\n\n/// `func` will be called repeatedly every time `signum` is raised until it\n\n/// returns `Continue(false)`.\n\npub fn unix_signal_source_new<F>(\n\n signum: i32,\n\n name: Option<&str>,\n\n priority: Priority,\n\n func: F,\n\n) -> Source\n\nwhere\n\n F: FnMut() -> Continue + Send + 'static,\n\n{\n\n unsafe {\n\n let source = ffi::g_unix_signal_source_new(signum);\n\n ffi::g_source_set_callback(\n\n source,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n );\n\n ffi::g_source_set_priority(source, priority.to_glib());\n\n\n\n if let Some(name) = name {\n\n ffi::g_source_set_name(source, name.to_glib_none().0);\n\n }\n\n\n\n from_glib_full(source)\n\n }\n\n}\n\n\n\n#[cfg(any(unix, feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(unix)))]\n", "file_path": "glib/src/source.rs", "rank": 52, "score": 321987.22943737527 }, { "content": "/// Adds a closure to be called by the main loop the returned `Source` is attached to at regular\n\n/// intervals with second granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` seconds until it\n\n/// returns `Continue(false)`. 
Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events.\n\npub fn timeout_source_new_seconds<F>(\n\n interval: u32,\n\n name: Option<&str>,\n\n priority: Priority,\n\n func: F,\n\n) -> Source\n\nwhere\n\n F: FnMut() -> Continue + Send + 'static,\n\n{\n\n unsafe {\n\n let source = ffi::g_timeout_source_new_seconds(interval);\n\n ffi::g_source_set_callback(\n\n source,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n );\n\n ffi::g_source_set_priority(source, priority.to_glib());\n\n\n\n if let Some(name) = name {\n\n ffi::g_source_set_name(source, name.to_glib_none().0);\n\n }\n\n\n\n from_glib_full(source)\n\n }\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 53, "score": 321986.9545554023 }, { "content": "/// Adds a closure to be called by the default main loop at regular intervals\n\n/// with millisecond granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` milliseconds until it\n\n/// returns `Continue(false)`. Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events. Prefer `timeout_add_seconds` when millisecond\n\n/// precision is not necessary.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\n///\n\n/// In comparison to `timeout_add()`, this only requires `func` to be\n\n/// `FnOnce`, and will automatically return `Continue(false)`.\n\npub fn timeout_add_once<F>(interval: Duration, func: F) -> SourceId\n\nwhere\n\n F: FnOnce() + Send + 'static,\n\n{\n\n timeout_add(interval, fnmut_callback_wrapper(func))\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 54, "score": 320472.3461313201 }, { "content": "/// Adds a closure to be called by the default main loop at regular intervals\n\n/// with millisecond granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` milliseconds until it\n\n/// returns `Continue(false)`. 
Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events. Prefer `timeout_add_seconds` when millisecond\n\n/// precision is not necessary.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\npub fn timeout_add<F>(interval: Duration, func: F) -> SourceId\n\nwhere\n\n F: FnMut() -> Continue + Send + 'static,\n\n{\n\n unsafe {\n\n from_glib(ffi::g_timeout_add_full(\n\n ffi::G_PRIORITY_DEFAULT,\n\n interval.as_millis() as _,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 55, "score": 320466.5497094765 }, { "content": "pub fn parse_stretch(str: &str, warn: bool) -> Option<Stretch> {\n\n unsafe {\n\n let mut stretch = mem::MaybeUninit::uninit();\n\n let ret = from_glib(ffi::pango_parse_stretch(\n\n str.to_glib_none().0,\n\n stretch.as_mut_ptr(),\n\n warn.to_glib(),\n\n ));\n\n let stretch = stretch.assume_init();\n\n if ret {\n\n Some(from_glib(stretch))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "pango/src/auto/functions.rs", "rank": 56, "score": 320221.1678215979 }, { "content": "pub fn parse_style(str: &str, warn: bool) -> Option<Style> {\n\n unsafe {\n\n let mut style = mem::MaybeUninit::uninit();\n\n let ret = from_glib(ffi::pango_parse_style(\n\n str.to_glib_none().0,\n\n style.as_mut_ptr(),\n\n warn.to_glib(),\n\n ));\n\n let style = style.assume_init();\n\n if ret {\n\n Some(from_glib(style))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "pango/src/auto/functions.rs", "rank": 57, "score": 320221.1678215979 }, { "content": "pub fn parse_weight(str: &str, warn: bool) -> Option<Weight> {\n\n unsafe {\n\n let mut weight = mem::MaybeUninit::uninit();\n\n let ret = from_glib(ffi::pango_parse_weight(\n\n str.to_glib_none().0,\n\n weight.as_mut_ptr(),\n\n warn.to_glib(),\n\n ));\n\n let weight = weight.assume_init();\n\n if 
ret {\n\n Some(from_glib(weight))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "pango/src/auto/functions.rs", "rank": 58, "score": 320221.1678215979 }, { "content": "pub fn parse_variant(str: &str, warn: bool) -> Option<Variant> {\n\n unsafe {\n\n let mut variant = mem::MaybeUninit::uninit();\n\n let ret = from_glib(ffi::pango_parse_variant(\n\n str.to_glib_none().0,\n\n variant.as_mut_ptr(),\n\n warn.to_glib(),\n\n ));\n\n let variant = variant.assume_init();\n\n if ret {\n\n Some(from_glib(variant))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "pango/src/auto/functions.rs", "rank": 59, "score": 320221.1678215979 }, { "content": "// rustdoc-stripper-ignore-next\n\n/// Call from build script to run `glib-compile-resources` to generate compiled gresources to embed\n\n/// in binary with [resources_register_include]. `target` is relative to `OUT_DIR`.\n\n///\n\n/// ```no_run\n\n/// gio::compile_resources(\n\n/// \"resources\",\n\n/// \"resources/resources.gresource.xml\",\n\n/// \"compiled.gresource\",\n\n/// );\n\n/// ```\n\npub fn compile_resources<P: AsRef<Path>>(source_dir: P, gresource: &str, target: &str) {\n\n let out_dir = env::var(\"OUT_DIR\").unwrap();\n\n\n\n let status = Command::new(\"glib-compile-resources\")\n\n .arg(\"--sourcedir\")\n\n .arg(source_dir.as_ref())\n\n .arg(\"--target\")\n\n .arg(&format!(\"{}/{}\", out_dir, target))\n\n .arg(gresource)\n\n .status()\n\n .unwrap();\n\n\n\n if !status.success() {\n\n panic!(\"glib-compile-resources failed with exit status {}\", status);\n\n }\n\n\n\n println!(\"cargo:rerun-if-changed={}\", gresource);\n\n let output = Command::new(\"glib-compile-resources\")\n\n .arg(\"--sourcedir\")\n\n .arg(source_dir.as_ref())\n", "file_path": "gio/src/resource.rs", "rank": 60, "score": 319413.0404860616 }, { "content": "pub fn content_type_can_be_executable(type_: &str) -> bool {\n\n unsafe {\n\n from_glib(ffi::g_content_type_can_be_executable(\n\n type_.to_glib_none().0,\n\n ))\n\n 
}\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 61, "score": 319361.94678127556 }, { "content": "#[cfg(any(feature = \"v2_52\", feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(feature = \"v2_52\")))]\n\npub fn content_type_is_mime_type(type_: &str, mime_type: &str) -> bool {\n\n unsafe {\n\n from_glib(ffi::g_content_type_is_mime_type(\n\n type_.to_glib_none().0,\n\n mime_type.to_glib_none().0,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gio/src/auto/functions.rs", "rank": 62, "score": 318533.28041921224 }, { "content": "pub fn set_application_name(application_name: &str) {\n\n unsafe {\n\n ffi::g_set_application_name(application_name.to_glib_none().0);\n\n }\n\n}\n\n\n\n//pub fn set_error(domain: Quark, code: i32, format: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Error {\n\n// unsafe { TODO: call ffi:g_set_error() }\n\n//}\n\n\n\n//pub fn set_print_handler<P: Fn(&str) + Send + Sync + 'static>(func: P) -> Fn(&str) + 'static {\n\n// unsafe { TODO: call ffi:g_set_print_handler() }\n\n//}\n\n\n\n//pub fn set_printerr_handler<P: Fn(&str) + Send + Sync + 'static>(func: P) -> Fn(&str) + 'static {\n\n// unsafe { TODO: call ffi:g_set_printerr_handler() }\n\n//}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 63, "score": 317581.7280352667 }, { "content": "pub fn x11_get_default_screen() -> i32 {\n\n assert_initialized_main_thread!();\n\n unsafe { ffi::gdk_x11_get_default_screen() }\n\n}\n\n\n\n//#[cfg_attr(feature = \"v3_24\", deprecated)]\n\n//#[cfg(any(feature = \"v3_24_2\", feature = \"dox\"))]\n\n//#[cfg_attr(feature = \"dox\", doc(cfg(feature = \"v3_24_2\")))]\n\n//pub fn x11_get_parent_relative_pattern() -> /*Ignored*/Option<cairo::Pattern> {\n\n// unsafe { TODO: call ffi:gdk_x11_get_parent_relative_pattern() }\n\n//}\n\n\n", "file_path": "gdkx11/src/auto/functions.rs", "rank": 64, "score": 316350.0261160994 }, { "content": "/// Adds a closure to be called by the default main loop at regular 
intervals\n\n/// with second granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` seconds until it\n\n/// returns `Continue(false)`. Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\n///\n\n/// In comparison to `timeout_add_seconds()`, this only requires `func` to be\n\n/// `FnOnce`, and will automatically return `Continue(false)`.\n\npub fn timeout_add_seconds_once<F>(interval: u32, func: F) -> SourceId\n\nwhere\n\n F: FnOnce() + Send + 'static,\n\n{\n\n timeout_add_seconds(interval, fnmut_callback_wrapper(func))\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 65, "score": 315983.9113775549 }, { "content": "/// Adds a closure to be called by the default main loop at regular intervals\n\n/// with millisecond granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` milliseconds until it\n\n/// returns `Continue(false)`. Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events. 
Prefer `timeout_add_seconds` when millisecond\n\n/// precision is not necessary.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\n///\n\n/// Different to `timeout_add()`, this does not require `func` to be\n\n/// `Send` but can only be called from the thread that owns the main context.\n\n///\n\n/// This function panics if called from a different thread than the one that\n\n/// owns the main context.\n\n///\n\n/// In comparison to `timeout_add_local()`, this only requires `func` to be\n\n/// `FnOnce`, and will automatically return `Continue(false)`.\n\npub fn timeout_add_local_once<F>(interval: Duration, func: F) -> SourceId\n\nwhere\n\n F: FnOnce() + 'static,\n\n{\n\n timeout_add_local(interval, fnmut_callback_wrapper_local(func))\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 66, "score": 315982.68533643504 }, { "content": "/// Adds a closure to be called by the default main loop at regular intervals\n\n/// with millisecond granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` milliseconds until it\n\n/// returns `Continue(false)`. Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events. 
Prefer `timeout_add_seconds` when millisecond\n\n/// precision is not necessary.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\n///\n\n/// Different to `timeout_add()`, this does not require `func` to be\n\n/// `Send` but can only be called from the thread that owns the main context.\n\n///\n\n/// This function panics if called from a different thread than the one that\n\n/// owns the main context.\n\npub fn timeout_add_local<F>(interval: Duration, func: F) -> SourceId\n\nwhere\n\n F: FnMut() -> Continue + 'static,\n\n{\n\n unsafe {\n\n assert!(MainContext::default().is_owner());\n\n from_glib(ffi::g_timeout_add_full(\n\n ffi::G_PRIORITY_DEFAULT,\n\n interval.as_millis() as _,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 67, "score": 315978.1049487622 }, { "content": "/// Adds a closure to be called by the default main loop at regular intervals\n\n/// with second granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` seconds until it\n\n/// returns `Continue(false)`. 
Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\npub fn timeout_add_seconds<F>(interval: u32, func: F) -> SourceId\n\nwhere\n\n F: FnMut() -> Continue + Send + 'static,\n\n{\n\n unsafe {\n\n from_glib(ffi::g_timeout_add_seconds_full(\n\n ffi::G_PRIORITY_DEFAULT,\n\n interval,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 68, "score": 315978.0656839069 }, { "content": "/// Adds a closure to be called by the main loop the returned `Source` is attached to when a child\n\n/// process exits.\n\n///\n\n/// `func` will be called when `pid` exits\n\npub fn child_watch_add<F>(pid: Pid, func: F) -> SourceId\n\nwhere\n\n F: FnMut(Pid, i32) + Send + 'static,\n\n{\n\n unsafe {\n\n from_glib(ffi::g_child_watch_add_full(\n\n ffi::G_PRIORITY_DEFAULT,\n\n pid.0,\n\n Some(trampoline_child_watch::<F>),\n\n into_raw_child_watch(func),\n\n Some(destroy_closure_child_watch::<F>),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 69, "score": 315972.209570295 }, { "content": "#[repr(C)]\n\nstruct ChannelSource<T, F: FnMut(T) -> Continue + 'static> {\n\n source: ffi::GSource,\n\n source_funcs: Option<Box<ffi::GSourceFuncs>>,\n\n channel: Option<Channel<T>>,\n\n callback: Option<ThreadGuard<F>>,\n\n}\n\n\n\nunsafe extern \"C\" fn dispatch<T, F: FnMut(T) -> Continue + 'static>(\n\n source: *mut ffi::GSource,\n\n callback: ffi::GSourceFunc,\n\n _user_data: ffi::gpointer,\n\n) -> ffi::gboolean {\n\n let source = &mut *(source as *mut ChannelSource<T, F>);\n\n assert!(callback.is_none());\n\n\n\n // Set ready-time to -1 so that we won't get called again before a new item is added\n\n // to the channel queue.\n\n ffi::g_source_set_ready_time(&mut source.source, -1);\n\n\n\n // Get a reference to the 
callback. This will panic if we're called from a different\n", "file_path": "glib/src/main_context_channel.rs", "rank": 70, "score": 315423.7760669853 }, { "content": "pub fn access<P: AsRef<std::path::Path>>(filename: P, mode: i32) -> i32 {\n\n unsafe { ffi::g_access(filename.as_ref().to_glib_none().0, mode) }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 71, "score": 314749.74195529125 }, { "content": "#[cfg(any(unix, feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(unix)))]\n\n#[cfg(any(feature = \"v2_56\", feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(feature = \"v2_56\")))]\n\npub fn unix_is_system_fs_type(fs_type: &str) -> bool {\n\n unsafe { from_glib(ffi::g_unix_is_system_fs_type(fs_type.to_glib_none().0)) }\n\n}\n", "file_path": "gio/src/auto/functions.rs", "rank": 72, "score": 314541.7446801306 }, { "content": "/// Overrides pointer constness.\n\n///\n\n/// Use when the C API need const pointer, but function with `IsA<T>` constraint,\n\n/// that usaly don't have const pointer conversion.\n\npub fn const_override<T>(ptr: *mut T) -> *const T {\n\n ptr as *const T\n\n}\n\n\n", "file_path": "glib/src/translate.rs", "rank": 73, "score": 312184.53459002904 }, { "content": "/// Overrides pointer mutability.\n\n///\n\n/// Use when the C API should be specifying a const pointer but doesn't.\n\npub fn mut_override<T>(ptr: *const T) -> *mut T {\n\n ptr as *mut T\n\n}\n\n\n", "file_path": "glib/src/translate.rs", "rank": 74, "score": 312181.5293943038 }, { "content": "pub fn mkstemp_full<P: AsRef<std::path::Path>>(tmpl: P, flags: i32, mode: i32) -> i32 {\n\n unsafe { ffi::g_mkstemp_full(tmpl.as_ref().to_glib_none().0, flags, mode) }\n\n}\n\n\n\n//pub fn nullify_pointer(nullify_location: /*Unimplemented*/Fundamental: Pointer) {\n\n// unsafe { TODO: call ffi:g_nullify_pointer() }\n\n//}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 75, "score": 312030.2730618368 }, { "content": "/// Adds a closure to be called 
by the default main loop at regular intervals\n\n/// with second granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` seconds until it\n\n/// returns `Continue(false)`. Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\n///\n\n/// Different to `timeout_add_seconds()`, this does not require `func` to be\n\n/// `Send` but can only be called from the thread that owns the main context.\n\n///\n\n/// This function panics if called from a different thread than the one that\n\n/// owns the main context.\n\n///\n\n/// In comparison to `timeout_add_seconds_local()`, this only requires `func` to be\n\n/// `FnOnce`, and will automatically return `Continue(false)`.\n\npub fn timeout_add_seconds_local_once<F>(interval: u32, func: F) -> SourceId\n\nwhere\n\n F: FnOnce() + 'static,\n\n{\n\n timeout_add_seconds_local(interval, fnmut_callback_wrapper_local(func))\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 76, "score": 311687.69439694774 }, { "content": "/// Adds a closure to be called by the default main loop at regular intervals\n\n/// with second granularity.\n\n///\n\n/// `func` will be called repeatedly every `interval` seconds until it\n\n/// returns `Continue(false)`. 
Precise timing is not guaranteed, the timeout may\n\n/// be delayed by other events.\n\n///\n\n/// The default main loop almost always is the main loop of the main thread.\n\n/// Thus the closure is called on the main thread.\n\n///\n\n/// Different to `timeout_add_seconds()`, this does not require `func` to be\n\n/// `Send` but can only be called from the thread that owns the main context.\n\n///\n\n/// This function panics if called from a different thread than the one that\n\n/// owns the main context.\n\npub fn timeout_add_seconds_local<F>(interval: u32, func: F) -> SourceId\n\nwhere\n\n F: FnMut() -> Continue + 'static,\n\n{\n\n unsafe {\n\n assert!(MainContext::default().is_owner());\n\n from_glib(ffi::g_timeout_add_seconds_full(\n\n ffi::G_PRIORITY_DEFAULT,\n\n interval,\n\n Some(trampoline::<F>),\n\n into_raw(func),\n\n Some(destroy_closure::<F>),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "glib/src/source.rs", "rank": 77, "score": 311683.0839278938 }, { "content": "/// Adds a closure to be called by the main loop the returned `Source` is attached to when a child\n\n/// process exits.\n\n///\n\n/// `func` will be called when `pid` exits\n\n///\n\n/// Different to `child_watch_add()`, this does not require `func` to be\n\n/// `Send` but can only be called from the thread that owns the main context.\n\n///\n\n/// This function panics if called from a different thread than the one that\n\n/// owns the main context.\n\npub fn child_watch_add_local<F>(pid: Pid, func: F) -> SourceId\n\nwhere\n\n F: FnMut(Pid, i32) + 'static,\n\n{\n\n unsafe {\n\n assert!(MainContext::default().is_owner());\n\n from_glib(ffi::g_child_watch_add_full(\n\n ffi::G_PRIORITY_DEFAULT,\n\n pid.0,\n\n Some(trampoline_child_watch::<F>),\n\n into_raw_child_watch(func),\n\n Some(destroy_closure_child_watch::<F>),\n\n ))\n\n }\n\n}\n\n\n\n#[cfg(any(unix, feature = \"dox\"))]\n\n#[cfg_attr(feature = \"dox\", doc(cfg(unix)))]\n", "file_path": "glib/src/source.rs", "rank": 78, "score": 
311677.4307258952 }, { "content": "pub fn mkdir_with_parents<P: AsRef<std::path::Path>>(pathname: P, mode: i32) -> i32 {\n\n unsafe { ffi::g_mkdir_with_parents(pathname.as_ref().to_glib_none().0, mode) }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 79, "score": 310853.0340873386 }, { "content": "pub fn keyval_from_name(keyval_name: &str) -> u32 {\n\n assert_initialized_main_thread!();\n\n unsafe { ffi::gdk_keyval_from_name(keyval_name.to_glib_none().0) }\n\n}\n\n\n", "file_path": "gdk/src/auto/functions.rs", "rank": 80, "score": 309547.1801952714 }, { "content": "#[cfg_attr(feature = \"v3_20\", deprecated)]\n\npub fn test_text_set<P: IsA<Widget>>(widget: &P, string: &str) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gtk_test_text_set(widget.as_ref().to_glib_none().0, string.to_glib_none().0);\n\n }\n\n}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 81, "score": 309413.0540219762 }, { "content": "pub fn mkstemp<P: AsRef<std::path::Path>>(tmpl: P) -> i32 {\n\n #[cfg(not(windows))]\n\n use ffi::g_mkstemp;\n\n #[cfg(windows)]\n\n use ffi::g_mkstemp_utf8 as g_mkstemp;\n\n\n\n unsafe { g_mkstemp(tmpl.as_ref().to_glib_none().0) }\n\n}\n\n\n", "file_path": "glib/src/utils.rs", "rank": 82, "score": 308415.2643473271 }, { "content": "pub fn setting_get(name: &str) -> Option<glib::Value> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut value = glib::Value::uninitialized();\n\n let done: bool = from_glib(ffi::gdk_setting_get(\n\n name.to_glib_none().0,\n\n value.to_glib_none_mut().0,\n\n ));\n\n if done {\n\n Some(value)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "gdk/src/functions.rs", "rank": 83, "score": 307403.49840002967 }, { "content": "pub fn on_error_query(prg_name: &str) {\n\n unsafe {\n\n ffi::g_on_error_query(prg_name.to_glib_none().0);\n\n }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 84, "score": 306568.92846972926 }, { "content": "///\n\n/// Translate to a pointer with 
a mutable borrow.\n\npub trait ToGlibPtrMut<'a, P: Copy> {\n\n type Storage;\n\n\n\n /// Transfer: none.\n\n ///\n\n /// The pointer in the `Stash` is only valid for the lifetime of the `Stash`.\n\n #[allow(clippy::wrong_self_convention)]\n\n fn to_glib_none_mut(&'a mut self) -> StashMut<P, Self>;\n\n}\n\n\n\nimpl<'a, P: Ptr, T: ToGlibPtr<'a, P>> ToGlibPtr<'a, P> for Option<T> {\n\n type Storage = Option<<T as ToGlibPtr<'a, P>>::Storage>;\n\n\n\n #[inline]\n\n fn to_glib_none(&'a self) -> Stash<'a, P, Option<T>> {\n\n self.as_ref()\n\n .map_or(Stash(Ptr::from::<()>(ptr::null_mut()), None), |s| {\n\n let s = s.to_glib_none();\n\n Stash(s.0, Some(s.1))\n\n })\n", "file_path": "glib/src/translate.rs", "rank": 85, "score": 304918.5748876495 }, { "content": "pub fn rmdir<P: AsRef<std::path::Path>>(filename: P) -> i32 {\n\n unsafe { ffi::g_rmdir(filename.as_ref().to_glib_none().0) }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 86, "score": 303926.73684504314 }, { "content": "pub fn unlink<P: AsRef<std::path::Path>>(filename: P) -> i32 {\n\n unsafe { ffi::g_unlink(filename.as_ref().to_glib_none().0) }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 87, "score": 303926.73684504314 }, { "content": "pub fn chdir<P: AsRef<std::path::Path>>(path: P) -> i32 {\n\n unsafe { ffi::g_chdir(path.as_ref().to_glib_none().0) }\n\n}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 88, "score": 303926.73684504314 }, { "content": "#[cfg_attr(feature = \"v3_20\", deprecated)]\n\npub fn test_spin_button_click<P: IsA<SpinButton>>(spinner: &P, button: u32, upwards: bool) -> bool {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib(ffi::gtk_test_spin_button_click(\n\n spinner.as_ref().to_glib_none().0,\n\n button,\n\n upwards.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 89, "score": 303441.11536195804 }, { "content": "fn delimiter_to_string(delimiter: Delimiter, open: bool) -> &'static str {\n\n match 
delimiter {\n\n Delimiter::Parenthesis => {\n\n if open {\n\n \"(\"\n\n } else {\n\n \")\"\n\n }\n\n }\n\n Delimiter::Brace => {\n\n if open {\n\n \"{\"\n\n } else {\n\n \"}\"\n\n }\n\n }\n\n Delimiter::Bracket => {\n\n if open {\n\n \"[\"\n\n } else {\n\n \"]\"\n\n }\n\n }\n\n Delimiter::None => \"\",\n\n }\n\n}\n\n\n", "file_path": "glib-macros/src/clone.rs", "rank": 90, "score": 303404.7099224103 }, { "content": "pub fn on_error_stack_trace(prg_name: &str) {\n\n unsafe {\n\n ffi::g_on_error_stack_trace(prg_name.to_glib_none().0);\n\n }\n\n}\n\n\n\n//pub fn parse_debug_string(string: Option<&str>, keys: /*Ignored*/&[&DebugKey]) -> u32 {\n\n// unsafe { TODO: call ffi:g_parse_debug_string() }\n\n//}\n\n\n", "file_path": "glib/src/auto/functions.rs", "rank": 91, "score": 301233.9955831235 }, { "content": "/// Translate from a pointer type which is annotated with `transfer none`.\n\n/// The resulting value is referenced at least once, by the bindings.\n\n///\n\n/// This is suitable for floating references, which become strong references.\n\n/// It is also suitable for acquiring non-gobject values, like `gchar*`.\n\n///\n\n/// <a name=\"safety_points\"></a>\n\n/// # Safety\n\n///\n\n/// The implementation of this trait should acquire a reference to the value\n\n/// in a way appropriate to the type,\n\n/// e.g. 
by increasing the reference count or copying.\n\n/// Values obtained using this trait must be properly released on `drop()`\n\n/// by the implementing type.\n\n///\n\n/// For more information, refer to module level documentation.\n\npub trait FromGlibPtrNone<P: Ptr>: Sized {\n\n /// # Safety\n\n ///\n\n /// See trait level [notes on safety](#safety_points)\n\n unsafe fn from_glib_none(ptr: P) -> Self;\n\n}\n\n\n", "file_path": "glib/src/translate.rs", "rank": 92, "score": 299326.9626917484 }, { "content": "pub fn propagate_event<P: IsA<Widget>>(widget: &P, event: &mut gdk::Event) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n ffi::gtk_propagate_event(widget.as_ref().to_glib_none().0, event.to_glib_none_mut().0);\n\n }\n\n}\n\n\n", "file_path": "gtk/src/auto/functions.rs", "rank": 93, "score": 294617.45955583773 }, { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "glib/sys/tests/abi.rs", "rank": 94, "score": 291750.0316711452 }, { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "gtk/sys/tests/abi.rs", "rank": 95, "score": 291750.0316711452 }, { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "gio/sys/tests/abi.rs", "rank": 96, 
"score": 291750.0316711452 }, { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "atk/sys/tests/abi.rs", "rank": 97, "score": 291750.0316711451 }, { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "graphene/sys/tests/abi.rs", "rank": 98, "score": 291750.0316711451 }, { "content": "fn get_var(name: &str, default: &str) -> Result<Vec<String>, Box<dyn Error>> {\n\n match env::var(name) {\n\n Ok(value) => Ok(shell_words::split(&value)?),\n\n Err(env::VarError::NotPresent) => Ok(shell_words::split(default)?),\n\n Err(err) => Err(format!(\"{} {}\", name, err).into()),\n\n }\n\n}\n\n\n", "file_path": "gdk/sys/tests/abi.rs", "rank": 99, "score": 291750.0316711452 } ]
Rust
python/src/expression.rs
boaz-codota/ballista
75f5f79bdcf18ac897d9ab9e11035040d932fc5e
/* A great deal of this files source code was pulled more or less verbatim from https://github.com/jorgecarleitao/datafusion-python/commit/688f0d23504704cfc2be3fca33e2707e964ea5bc which is dual liscensed as MIT or Apache-2.0. */ /* MIT License Copyright (c) 2020 Jorge Leitao Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ use pyo3::{ basic::CompareOp, exceptions::PyException, prelude::*, types::PyTuple, PyNumberProtocol, PyObjectProtocol, }; use crate::util; use datafusion::logical_plan; use datafusion::logical_plan::Expr; #[pyclass(name = "Expression", module = "ballista")] #[derive(Debug, Clone)] pub struct BPyExpr { pub(crate) expr: Expr, } pub fn from_tuple(value: &PyTuple) -> PyResult<Vec<BPyExpr>> { value .iter() .map(|e| e.extract::<BPyExpr>()) .collect::<PyResult<_>>() } pub fn any_to_expression(any: &PyAny) -> PyResult<Expr> { if let Ok(expr) = any.extract::<BPyExpr>() { Ok(expr.expr) } else if let Ok(scalar_value) = any.extract::<crate::scalar::Scalar>() { Ok(Expr::Literal(scalar_value.scalar)) } else { let type_name = util::object_class_name(any) .unwrap_or_else(|err| format!("<Could not get class name:{}>", err)); Err(PyException::new_err(format!( "The rhs type {} could not be converted to an expression.", &type_name ))) } } #[pyproto] impl PyNumberProtocol for BPyExpr { fn __add__(lhs: BPyExpr, rhs: &PyAny) -> PyResult<BPyExpr> { let rhs_expr = any_to_expression(rhs)?; Ok(BPyExpr { expr: lhs.expr + rhs_expr, }) } fn __sub__(lhs: BPyExpr, rhs: &PyAny) -> PyResult<BPyExpr> { let rhs_expr = any_to_expression(rhs)?; Ok(BPyExpr { expr: lhs.expr - rhs_expr, }) } fn __truediv__(lhs: BPyExpr, rhs: &PyAny) -> PyResult<BPyExpr> { let rhs_expr = any_to_expression(rhs)?; Ok(BPyExpr { expr: lhs.expr / rhs_expr, }) } fn __mul__(lhs: BPyExpr, rhs: &PyAny) -> PyResult<BPyExpr> { let rhs_expr = any_to_expression(rhs)?; Ok(BPyExpr { expr: lhs.expr * rhs_expr, }) } fn __and__(lhs: BPyExpr, rhs: &PyAny) -> PyResult<BPyExpr> { let rhs_expr = any_to_expression(rhs)?; Ok(BPyExpr { expr: lhs.expr.and(rhs_expr), }) } fn __or__(lhs: BPyExpr, rhs: &PyAny) -> PyResult<BPyExpr> { let rhs_expr = any_to_expression(rhs)?; Ok(BPyExpr { expr: lhs.expr.or(rhs_expr), }) } fn __invert__(&self) -> PyResult<BPyExpr> { Ok(BPyExpr { expr: self.expr.not(), }) } } #[pyproto] impl PyObjectProtocol for BPyExpr 
{ fn __str__(&self) -> String { format!("{:?}", self.expr) } fn __richcmp__(&self, other: &PyAny, op: CompareOp) -> PyResult<BPyExpr> { let other_expr = any_to_expression(other)?; Ok(match op { CompareOp::Lt => BPyExpr { expr: self.expr.lt(other_expr), }, CompareOp::Le => BPyExpr { expr: self.expr.lt_eq(other_expr), }, CompareOp::Eq => BPyExpr { expr: self.expr.eq(other_expr), }, CompareOp::Ne => BPyExpr { expr: self.expr.not_eq(other_expr), }, CompareOp::Gt => BPyExpr { expr: self.expr.gt(other_expr), }, CompareOp::Ge => BPyExpr { expr: self.expr.gt_eq(other_expr), }, }) } } #[pymethods] impl BPyExpr { #[new] fn new(expr: &PyAny) -> PyResult<BPyExpr> { let converted_expr = any_to_expression(expr)?; Ok(BPyExpr { expr: converted_expr, }) } pub fn alias(&self, name: &str) -> PyResult<BPyExpr> { Ok(BPyExpr { expr: self.expr.alias(name), }) } #[args(negated = "false")] pub fn between(&self, low: &PyAny, high: &PyAny, negated: bool) -> PyResult<BPyExpr> { let low_expr = any_to_expression(low)?; let high_expr = any_to_expression(high)?; Ok(BPyExpr { expr: Expr::Between { expr: Box::new(self.expr.clone()), low: Box::new(low_expr), high: Box::new(high_expr), negated, }, }) } } use pyo3::PyCell; #[pyclass(module = "ballista", module = "ballista")] #[derive(Clone)] pub struct CaseBuilder { case_expr: Option<logical_plan::Expr>, when_expr: Vec<logical_plan::Expr>, then_expr: Vec<logical_plan::Expr>, else_expr: Option<logical_plan::Expr>, built_expr: Option<BPyExpr>, } #[pymethods] impl CaseBuilder { #[new] #[args(case_expr = "None")] pub fn new(case_expr: Option<BPyExpr>) -> Self { Self { case_expr: case_expr.map(|e| e.expr), when_expr: vec![], then_expr: vec![], else_expr: None, built_expr: None, } } pub fn when<'a>( slf: &'a PyCell<Self>, when: &PyAny, then: &PyAny, ) -> PyResult<&'a PyCell<Self>> { { let mut __self = slf.try_borrow_mut()?; let when_expr = any_to_expression(when)?; let then_expr = any_to_expression(then)?; __self.when_impl(when_expr, then_expr)?; } Ok(slf) 
} pub fn otherwise(&mut self, else_expr: &PyAny) -> PyResult<BPyExpr> { let other_size_expr = any_to_expression(else_expr)?; self.is_built_error()?; self.else_expr = Some(other_size_expr); self.private_build() } pub fn build(&mut self) -> PyResult<BPyExpr> { if self.is_built() { return Ok(self.built_expr.as_ref().unwrap().clone()); } self.private_build() } #[getter] pub fn get_expr(&self) -> Option<BPyExpr> { self.built_expr.clone() } } impl CaseBuilder { pub fn case(case_expr: Expr) -> Self { Self::new(Some(BPyExpr { expr: case_expr })) } pub fn when_impl(&mut self, when: Expr, then: Expr) -> PyResult<()> { self.is_built_error()?; self.when_expr.push(when); self.then_expr.push(then); Ok(()) } fn is_built(&self) -> bool { self.built_expr.is_some() } fn is_built_error(&self) -> PyResult<()> { if self.is_built() { return Err(PyException::new_err("This case builder has already been used, use 'expr' attribute to access expression or create a new builder using case function")); } Ok(()) } fn private_build(&mut self) -> PyResult<BPyExpr> { let mut temp_case = None; if self.when_expr.is_empty() { return Err(PyException::new_err( "The builder must have at least one when then clause added before building", )); } std::mem::swap(&mut temp_case, &mut self.case_expr); let mut builder = match temp_case { Some(expr) => { logical_plan::case(expr).when(self.when_expr.remove(0), self.then_expr.remove(0)) } None => logical_plan::when(self.when_expr.remove(0), self.then_expr.remove(0)), }; let mut temp_when = vec![]; let mut temp_then = vec![]; std::mem::swap(&mut temp_when, &mut self.when_expr); std::mem::swap(&mut temp_then, &mut self.then_expr); for (when, then) in temp_when.into_iter().zip(temp_then.into_iter()) { builder = builder.when(when, then); } let mut temp_else = None; std::mem::swap(&mut temp_else, &mut self.else_expr); let build_result = match temp_else { Some(else_expr) => builder.otherwise(else_expr), None => builder.end(), } .map_err(util::wrap_err)?; self.built_expr 
= Some(BPyExpr { expr: build_result }); return Ok(self.built_expr.as_ref().unwrap().clone()); } }
/* A great deal of this files source code was pulled more or less verbatim from https://github.com/jorgecarleitao/datafusion-python/commit/688f0d23504704cfc2be3fca33e2707e964ea5bc which is dual liscensed as MIT or Apache-2.0. */ /* MIT License Copyright (c) 2020 Jorge Leitao Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ use pyo3::{ basic::CompareOp, exceptions::PyException, prelude::*, types::PyTuple, PyNumberProtocol, PyObjectProtocol, }; use crate::util; use datafusion::logical_plan; use datafusion::logical_plan::Expr; #[pyclass(name = "Expression", module = "ballista")] #[derive(Debug, Clone)] pub struct BPyExpr { pub(crate) expr: Expr, } pub fn from_tuple(value: &PyTuple) -> PyResult<Vec<BPyExpr>> { value .iter() .map(|e| e.extract::<BPyExpr>()) .collect::<PyResult<_>>() } pub fn any_to_expression(any: &PyAny) -> PyResult<Expr> {
}

#[pyproto]
impl PyNumberProtocol for BPyExpr {
    /// `self + other`; `rhs` may be a `BPyExpr` or any value convertible to a literal.
    fn __add__(lhs: BPyExpr, rhs: &PyAny) -> PyResult<BPyExpr> {
        let rhs_expr = any_to_expression(rhs)?;
        Ok(BPyExpr {
            expr: lhs.expr + rhs_expr,
        })
    }

    /// `self - other`.
    fn __sub__(lhs: BPyExpr, rhs: &PyAny) -> PyResult<BPyExpr> {
        let rhs_expr = any_to_expression(rhs)?;
        Ok(BPyExpr {
            expr: lhs.expr - rhs_expr,
        })
    }

    /// `self / other`.
    fn __truediv__(lhs: BPyExpr, rhs: &PyAny) -> PyResult<BPyExpr> {
        let rhs_expr = any_to_expression(rhs)?;
        Ok(BPyExpr {
            expr: lhs.expr / rhs_expr,
        })
    }

    /// `self * other`.
    fn __mul__(lhs: BPyExpr, rhs: &PyAny) -> PyResult<BPyExpr> {
        let rhs_expr = any_to_expression(rhs)?;
        Ok(BPyExpr {
            expr: lhs.expr * rhs_expr,
        })
    }

    /// `self & other` — logical AND on boolean expressions.
    fn __and__(lhs: BPyExpr, rhs: &PyAny) -> PyResult<BPyExpr> {
        let rhs_expr = any_to_expression(rhs)?;
        Ok(BPyExpr {
            expr: lhs.expr.and(rhs_expr),
        })
    }

    /// `self | other` — logical OR on boolean expressions.
    fn __or__(lhs: BPyExpr, rhs: &PyAny) -> PyResult<BPyExpr> {
        let rhs_expr = any_to_expression(rhs)?;
        Ok(BPyExpr {
            expr: lhs.expr.or(rhs_expr),
        })
    }

    /// `~self` — logical NOT on a boolean expression.
    fn __invert__(&self) -> PyResult<BPyExpr> {
        Ok(BPyExpr {
            expr: self.expr.not(),
        })
    }
}

#[pyproto]
impl PyObjectProtocol for BPyExpr {
    /// Render the wrapped logical expression via its Debug formatting.
    fn __str__(&self) -> String {
        format!("{:?}", self.expr)
    }

    /// Comparison operators (`<`, `<=`, `==`, `!=`, `>`, `>=`) build new
    /// comparison expressions rather than evaluating to a bool.
    fn __richcmp__(&self, other: &PyAny, op: CompareOp) -> PyResult<BPyExpr> {
        let other_expr = any_to_expression(other)?;
        Ok(match op {
            CompareOp::Lt => BPyExpr {
                expr: self.expr.lt(other_expr),
            },
            CompareOp::Le => BPyExpr {
                expr: self.expr.lt_eq(other_expr),
            },
            CompareOp::Eq => BPyExpr {
                expr: self.expr.eq(other_expr),
            },
            CompareOp::Ne => BPyExpr {
                expr: self.expr.not_eq(other_expr),
            },
            CompareOp::Gt => BPyExpr {
                expr: self.expr.gt(other_expr),
            },
            CompareOp::Ge => BPyExpr {
                expr: self.expr.gt_eq(other_expr),
            },
        })
    }
}

#[pymethods]
impl BPyExpr {
    /// Construct an expression from a `BPyExpr` or any value convertible to a literal.
    #[new]
    fn new(expr: &PyAny) -> PyResult<BPyExpr> {
        let converted_expr = any_to_expression(expr)?;
        Ok(BPyExpr {
            expr: converted_expr,
        })
    }

    /// Give this expression an output column name (SQL `AS`).
    pub fn alias(&self, name: &str) -> PyResult<BPyExpr> {
        Ok(BPyExpr {
            expr: self.expr.alias(name),
        })
    }

    /// SQL `BETWEEN low AND high` test on this expression; `negated`
    /// produces `NOT BETWEEN`.
    #[args(negated = "false")]
    pub fn between(&self, low: &PyAny, high: &PyAny, negated: bool) -> PyResult<BPyExpr> {
        let low_expr = any_to_expression(low)?;
        let high_expr = any_to_expression(high)?;
        Ok(BPyExpr {
            expr: Expr::Between {
                expr: Box::new(self.expr.clone()),
                low: Box::new(low_expr),
                high: Box::new(high_expr),
                negated,
            },
        })
    }
}

use pyo3::PyCell;

/// Incrementally builds a SQL CASE expression. Once built, the builder is
/// consumed: the result is cached in `built_expr` and further mutation is
/// rejected.
// NOTE: the original attribute repeated `module = "ballista"` twice; the
// duplicate key has been removed.
#[pyclass(module = "ballista")]
#[derive(Clone)]
pub struct CaseBuilder {
    // Optional operand of `CASE <expr> WHEN ...`; None means a searched CASE.
    case_expr: Option<logical_plan::Expr>,
    // Parallel vectors of WHEN conditions and their THEN results.
    when_expr: Vec<logical_plan::Expr>,
    then_expr: Vec<logical_plan::Expr>,
    // Optional ELSE result.
    else_expr: Option<logical_plan::Expr>,
    // Set once the expression has been built; marks the builder as consumed.
    built_expr: Option<BPyExpr>,
}

#[pymethods]
impl CaseBuilder {
    /// Create a builder, optionally anchored on a CASE operand expression.
    #[new]
    #[args(case_expr = "None")]
    pub fn new(case_expr: Option<BPyExpr>) -> Self {
        Self {
            case_expr: case_expr.map(|e| e.expr),
            when_expr: vec![],
            then_expr: vec![],
            else_expr: None,
            built_expr: None,
        }
    }

    /// Append a WHEN/THEN pair. Returns the same builder cell so calls can be
    /// chained from Python.
    pub fn when<'a>(
        slf: &'a PyCell<Self>,
        when: &PyAny,
        then: &PyAny,
    ) -> PyResult<&'a PyCell<Self>> {
        {
            // Scope the mutable borrow so it is released before `slf` is returned.
            let mut builder = slf.try_borrow_mut()?;
            let when_expr = any_to_expression(when)?;
            let then_expr = any_to_expression(then)?;
            builder.when_impl(when_expr, then_expr)?;
        }
        Ok(slf)
    }

    /// Set the ELSE expression and build the final CASE expression.
    pub fn otherwise(&mut self, else_expr: &PyAny) -> PyResult<BPyExpr> {
        // Reject reuse before doing any conversion work, so a consumed
        // builder reports the reuse error rather than a conversion error.
        self.is_built_error()?;
        let otherwise_expr = any_to_expression(else_expr)?;
        self.else_expr = Some(otherwise_expr);
        self.private_build()
    }

    /// Build the CASE expression without an ELSE clause, or return the cached
    /// result if this builder was already consumed.
    pub fn build(&mut self) -> PyResult<BPyExpr> {
        match self.built_expr.as_ref() {
            Some(expr) => Ok(expr.clone()),
            None => self.private_build(),
        }
    }

    /// The built expression, or None if the builder has not been consumed yet.
    #[getter]
    pub fn get_expr(&self) -> Option<BPyExpr> {
        self.built_expr.clone()
    }
}

impl CaseBuilder {
    /// Rust-side constructor for `CASE <expr>`-style builders.
    pub fn case(case_expr: Expr) -> Self {
        Self::new(Some(BPyExpr { expr: case_expr }))
    }

    /// Append an already-converted WHEN/THEN pair; fails if already built.
    pub fn when_impl(&mut self, when: Expr, then: Expr) -> PyResult<()> {
        self.is_built_error()?;
        self.when_expr.push(when);
        self.then_expr.push(then);
        Ok(())
    }

    /// Whether this builder has already produced an expression.
    fn is_built(&self) -> bool {
        self.built_expr.is_some()
    }

    /// Error if this builder has already produced an expression.
    fn is_built_error(&self) -> PyResult<()> {
        if self.is_built() {
            return Err(PyException::new_err("This case builder has already been used, use 'expr' attribute to access expression or create a new builder using case function"));
        }
        Ok(())
    }

    /// Consume the accumulated clauses and produce the final CASE expression.
    /// On success the builder fields are drained and `built_expr` is set, so
    /// the builder cannot be reused.
    fn private_build(&mut self) -> PyResult<BPyExpr> {
        if self.when_expr.is_empty() {
            return Err(PyException::new_err(
                "The builder must have at least one when then clause added before building",
            ));
        }
        // The DataFusion builder entry point differs for `CASE <expr>` vs a
        // searched CASE, and both require the first WHEN/THEN up front.
        let first_when = self.when_expr.remove(0);
        let first_then = self.then_expr.remove(0);
        let mut builder = match self.case_expr.take() {
            Some(expr) => logical_plan::case(expr).when(first_when, first_then),
            None => logical_plan::when(first_when, first_then),
        };
        // Drain the remaining clauses out of self (leaves empty vectors behind).
        let remaining_when = std::mem::take(&mut self.when_expr);
        let remaining_then = std::mem::take(&mut self.then_expr);
        for (when, then) in remaining_when.into_iter().zip(remaining_then) {
            builder = builder.when(when, then);
        }
        let build_result = match self.else_expr.take() {
            Some(else_expr) => builder.otherwise(else_expr),
            None => builder.end(),
        }
        .map_err(util::wrap_err)?;
        let built = BPyExpr { expr: build_result };
        self.built_expr = Some(built.clone());
        Ok(built)
    }
}
if let Ok(expr) = any.extract::<BPyExpr>() { Ok(expr.expr) } else if let Ok(scalar_value) = any.extract::<crate::scalar::Scalar>() { Ok(Expr::Literal(scalar_value.scalar)) } else { let type_name = util::object_class_name(any) .unwrap_or_else(|err| format!("<Could not get class name:{}>", err)); Err(PyException::new_err(format!( "The rhs type {} could not be converted to an expression.", &type_name ))) }
if_condition
[ { "content": "#[pyfunction]\n\npub fn count(value: BPyExpr) -> BPyExpr {\n\n BPyExpr {\n\n expr: logical_plan::count(value.expr),\n\n }\n\n}\n\n\n", "file_path": "python/src/functions.rs", "rank": 0, "score": 210292.79264307546 }, { "content": "#[pyfunction]\n\npub fn min(value: BPyExpr) -> BPyExpr {\n\n BPyExpr {\n\n expr: logical_plan::min(value.expr),\n\n }\n\n}\n\n\n", "file_path": "python/src/functions.rs", "rank": 1, "score": 210292.7926430755 }, { "content": "#[pyfunction]\n\npub fn avg(value: BPyExpr) -> BPyExpr {\n\n BPyExpr {\n\n expr: logical_plan::avg(value.expr),\n\n }\n\n}\n\n\n", "file_path": "python/src/functions.rs", "rank": 2, "score": 210292.79264307546 }, { "content": "#[pyfunction]\n\npub fn sum(value: BPyExpr) -> BPyExpr {\n\n BPyExpr {\n\n expr: logical_plan::sum(value.expr),\n\n }\n\n}\n\n\n", "file_path": "python/src/functions.rs", "rank": 3, "score": 210292.7926430755 }, { "content": "#[pyfunction]\n\npub fn max(value: BPyExpr) -> BPyExpr {\n\n BPyExpr {\n\n expr: logical_plan::max(value.expr),\n\n }\n\n}\n\n\n", "file_path": "python/src/functions.rs", "rank": 4, "score": 210292.7926430755 }, { "content": "#[pyfunction]\n\npub fn count_distinct(expr: BPyExpr) -> BPyExpr {\n\n BPyExpr {\n\n expr: logical_plan::count_distinct(expr.expr),\n\n }\n\n}\n\n\n", "file_path": "python/src/functions.rs", "rank": 7, "score": 194675.7719643329 }, { "content": "#[pyfunction(expr = \"*\")]\n\npub fn concat(expr: &PyTuple) -> PyResult<BPyExpr> {\n\n let expressions: Vec<Expr> = util::transform_tuple_to_uniform_type(expr, |e: BPyExpr| e.expr)?;\n\n Ok(BPyExpr {\n\n expr: logical_plan::concat(expressions),\n\n })\n\n}\n\n\n", "file_path": "python/src/functions.rs", "rank": 8, "score": 172475.52111632918 }, { "content": "pub fn format_expr(expr: &dyn PhysicalExpr) -> String {\n\n if let Some(e) = expr.as_any().downcast_ref::<Column>() {\n\n e.name().to_string()\n\n } else if let Some(e) = expr.as_any().downcast_ref::<Literal>() {\n\n e.to_string()\n\n } 
else if let Some(e) = expr.as_any().downcast_ref::<BinaryExpr>() {\n\n format!(\"{} {} {}\", e.left(), e.op(), e.right())\n\n } else {\n\n format!(\"{}\", expr)\n\n }\n\n}\n\n\n", "file_path": "rust/lib/src/utils.rs", "rank": 9, "score": 166327.74964252257 }, { "content": "#[pyfunction]\n\npub fn col(name: &str) -> BPyExpr {\n\n BPyExpr {\n\n expr: logical_plan::col(name),\n\n }\n\n}\n\n\n", "file_path": "python/src/functions.rs", "rank": 10, "score": 159057.54567988866 }, { "content": "pub fn format_agg_expr(expr: &dyn AggregateExpr) -> Result<String> {\n\n Ok(format!(\n\n \"{} {:?}\",\n\n expr.field()?.name(),\n\n expr.expressions()\n\n .iter()\n\n .map(|e| format_expr(e.as_ref()))\n\n .collect::<Vec<String>>()\n\n ))\n\n}\n\n\n", "file_path": "rust/lib/src/utils.rs", "rank": 11, "score": 157714.52200473487 }, { "content": "pub fn init(module: &PyModule) -> PyResult<()> {\n\n module.add_function(wrap_pyfunction!(col, module)?)?;\n\n module.add_function(wrap_pyfunction!(lit, module)?)?;\n\n module.add_function(wrap_pyfunction!(sum, module)?)?;\n\n module.add_function(wrap_pyfunction!(avg, module)?)?;\n\n module.add_function(wrap_pyfunction!(min, module)?)?;\n\n module.add_function(wrap_pyfunction!(max, module)?)?;\n\n module.add_function(wrap_pyfunction!(count, module)?)?;\n\n module.add_function(wrap_pyfunction!(count_distinct, module)?)?;\n\n module.add_function(wrap_pyfunction!(concat, module)?)?;\n\n module.add_function(wrap_pyfunction!(when, module)?)?;\n\n module.add_function(wrap_pyfunction!(case, module)?)?;\n\n module.add_function(wrap_pyfunction!(array, module)?)?;\n\n\n\n //All macro generated scalar functions\n\n module.add_function(wrap_pyfunction!(sqrt, module)?)?;\n\n module.add_function(wrap_pyfunction!(sin, module)?)?;\n\n module.add_function(wrap_pyfunction!(cos, module)?)?;\n\n module.add_function(wrap_pyfunction!(tan, module)?)?;\n\n module.add_function(wrap_pyfunction!(asin, module)?)?;\n", "file_path": "python/src/functions.rs", "rank": 12, 
"score": 154525.74426696653 }, { "content": "#[pyfunction(module = \"ballista\")]\n\npub fn lit(literal: &PyAny) -> PyResult<BPyExpr> {\n\n let literal_value = literal.extract::<Scalar>()?;\n\n Ok(BPyExpr {\n\n expr: Expr::Literal(literal_value.scalar),\n\n })\n\n}\n\n\n", "file_path": "python/src/functions.rs", "rank": 13, "score": 148643.1686469689 }, { "content": "pub fn ballista_error(message: &str) -> BallistaError {\n\n BallistaError::General(message.to_owned())\n\n}\n\n\n\nimpl From<String> for BallistaError {\n\n fn from(e: String) -> Self {\n\n BallistaError::General(e)\n\n }\n\n}\n\n\n\nimpl From<ArrowError> for BallistaError {\n\n fn from(e: ArrowError) -> Self {\n\n BallistaError::ArrowError(e)\n\n }\n\n}\n\n\n\nimpl From<parser::ParserError> for BallistaError {\n\n fn from(e: parser::ParserError) -> Self {\n\n BallistaError::SqlError(e)\n\n }\n", "file_path": "rust/lib/src/error.rs", "rank": 14, "score": 147914.23310843625 }, { "content": "#[pyfunction(args = \"*\")]\n\npub fn array(args: &PyTuple) -> PyResult<BPyExpr> {\n\n let args: Vec<Expr> = util::transform_tuple_to_uniform_type(args, |e: BPyExpr| e.expr)?;\n\n Ok(BPyExpr {\n\n expr: logical_plan::array(args),\n\n })\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
unary_scalar_expr_pyfunction {\n\n ($FUNC:ident) => {\n\n #[pyfunction]\n\n pub fn $FUNC(e: BPyExpr) -> BPyExpr {\n\n use datafusion::logical_plan;\n\n BPyExpr {\n\n expr: logical_plan::$FUNC(e.expr),\n\n }\n\n }\n\n };\n\n}\n\n\n", "file_path": "python/src/functions.rs", "rank": 15, "score": 146055.55337642715 }, { "content": "pub fn init(m: &PyModule) -> PyResult<()> {\n\n m.add_function(pyo3::wrap_pyfunction!(Date, m)?)?;\n\n m.add_function(pyo3::wrap_pyfunction!(Decimal, m)?)?;\n\n m.add_function(pyo3::wrap_pyfunction!(Duration, m)?)?;\n\n m.add_function(pyo3::wrap_pyfunction!(Dictionary, m)?)?;\n\n m.add_function(pyo3::wrap_pyfunction!(FixedSizeBinary, m)?)?;\n\n m.add_function(pyo3::wrap_pyfunction!(FixedSizeList, m)?)?;\n\n m.add_function(pyo3::wrap_pyfunction!(Interval, m)?)?;\n\n m.add_function(pyo3::wrap_pyfunction!(LargeList, m)?)?;\n\n m.add_function(pyo3::wrap_pyfunction!(List, m)?)?;\n\n m.add_function(pyo3::wrap_pyfunction!(Struct, m)?)?;\n\n m.add_function(pyo3::wrap_pyfunction!(Time, m)?)?;\n\n m.add_function(pyo3::wrap_pyfunction!(Timestamp, m)?)?;\n\n m.add_function(pyo3::wrap_pyfunction!(Union, m)?)?;\n\n\n\n m.add_class::<BPyDataType>()?;\n\n m.add_class::<BPyTimeUnit>()?;\n\n m.add_class::<BPyIntervalUnit>()?;\n\n\n\n m.add(\n", "file_path": "python/src/datatypes.rs", "rank": 16, "score": 145968.975908459 }, { "content": "pub fn wrap_df(df: ballista::context::BallistaDataFrame) -> BPyDataFrame {\n\n BPyDataFrame { df }\n\n}\n\n\n\n#[pymethods]\n\nimpl BPyBallistaContext {\n\n #[new]\n\n #[args(host = \"\\\"localhost\\\"\", port = \"50051\", kwds = \"**\")]\n\n pub fn new(host: &str, port: u16, kwds: Option<&pyo3::types::PyDict>) -> PyResult<Self> {\n\n let settings = match kwds {\n\n Some(kwargs) => kwargs\n\n .iter()\n\n .map(|(py_key_obj, py_val_obj)| {\n\n let py_key = py_key_obj.str().map_err(|err| {\n\n pyo3::exceptions::PyTypeError::new_err(format!(\n\n \"kwargs keys must be convertible to a string using __str__: {}\",\n\n 
err.to_string()\n\n ))\n\n })?;\n\n let py_val = py_val_obj.str().map_err(|err| {\n", "file_path": "python/src/context.rs", "rank": 17, "score": 143607.47644401365 }, { "content": "#[pymodule]\n\nfn ballista(_py: Python, m: &PyModule) -> PyResult<()> {\n\n m.add_class::<context::BPyBallistaContext>()?;\n\n m.add_class::<expression::BPyExpr>()?;\n\n m.add_class::<dataframe::BPyDataFrame>()?;\n\n m.add_class::<expression::CaseBuilder>()?;\n\n m.add_class::<schema::BPySchema>()?;\n\n m.add_class::<field::BPyField>()?;\n\n m.add_class::<partition::BPyPartitioning>()?;\n\n m.add_class::<dfschema::BPyDFSchema>()?;\n\n\n\n crate::functions::init(m)?;\n\n datatypes::init(m)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "python/src/lib.rs", "rank": 18, "score": 136989.47893222852 }, { "content": "#[pyfunction]\n\npub fn case(case: &PyAny) -> PyResult<expression::CaseBuilder> {\n\n let case_expr = expression::any_to_expression(case)?;\n\n Ok(expression::CaseBuilder::case(case_expr))\n\n}\n\n\n", "file_path": "python/src/functions.rs", "rank": 19, "score": 131478.42607338773 }, { "content": "#[pyfunction]\n\npub fn when(when: &PyAny, then: &PyAny) -> PyResult<expression::CaseBuilder> {\n\n let mut builder = expression::CaseBuilder::new(None);\n\n //when method only fails when CaseBuilder has already built the expression so this is safe to unwrap\n\n let when_expr = expression::any_to_expression(when)?;\n\n let then_expr = expression::any_to_expression(then)?;\n\n builder.when_impl(when_expr, then_expr).unwrap();\n\n Ok(builder)\n\n}\n\n\n", "file_path": "python/src/functions.rs", "rank": 20, "score": 130150.0796322027 }, { "content": "pub fn print_version() {\n\n println!(\"Ballista version: {}\", BALLISTA_VERSION)\n\n}\n\n\n\npub mod client;\n\npub mod columnar_batch;\n\npub mod context;\n\npub mod error;\n\npub mod executor;\n\npub mod memory_stream;\n\npub mod prelude;\n\npub mod scheduler;\n\npub mod utils;\n\n\n\n#[cfg(test)]\n\npub mod 
test_utils;\n\n\n\n#[macro_use]\n\n\n\npub mod serde;\n", "file_path": "rust/lib/src/lib.rs", "rank": 21, "score": 128350.17354828728 }, { "content": "fn parse_required_expr(p: &Option<Box<protobuf::LogicalExprNode>>) -> Result<Expr, BallistaError> {\n\n match p {\n\n Some(expr) => expr.as_ref().try_into(),\n\n None => Err(proto_error(\"Missing required expression\")),\n\n }\n\n}\n\n\n", "file_path": "rust/lib/src/serde/logical_plan/from_proto.rs", "rank": 22, "score": 126397.3651367623 }, { "content": "struct BallistaError(ballista::error::BallistaError);\n\n\n\nimpl From<ballista::error::BallistaError> for BallistaError {\n\n fn from(err: ballista::error::BallistaError) -> Self {\n\n BallistaError(err)\n\n }\n\n}\n\n\n\nimpl From<BallistaError> for PyErr {\n\n fn from(err: BallistaError) -> PyErr {\n\n exceptions::PyException::new_err(err.0.to_string())\n\n }\n\n}\n\n\n\nimpl std::error::Error for BallistaError {}\n\nimpl std::fmt::Display for BallistaError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n <ballista::error::BallistaError as std::fmt::Display>::fmt(&self.0, f)\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for BallistaError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n <ballista::error::BallistaError as std::fmt::Debug>::fmt(&self.0, f)\n\n }\n\n}\n\n\n", "file_path": "python/src/error.rs", "rank": 23, "score": 125298.94820484435 }, { "content": "pub fn remove_unresolved_shuffles(\n\n stage: &dyn ExecutionPlan,\n\n partition_locations: &HashMap<usize, Vec<PartitionLocation>>,\n\n) -> Result<Arc<dyn ExecutionPlan>> {\n\n let mut new_children: Vec<Arc<dyn ExecutionPlan>> = vec![];\n\n for child in stage.children() {\n\n if let Some(unresolved_shuffle) = child.as_any().downcast_ref::<UnresolvedShuffleExec>() {\n\n let mut relevant_locations = vec![];\n\n for id in &unresolved_shuffle.query_stage_ids {\n\n relevant_locations.append(\n\n &mut partition_locations\n\n .get(id)\n\n .ok_or_else(|| {\n\n 
BallistaError::General(\n\n \"Missing partition location. Could not remove unresolved shuffles\"\n\n .to_owned(),\n\n )\n\n })?\n\n .clone(),\n\n );\n", "file_path": "rust/lib/src/scheduler/planner.rs", "rank": 24, "score": 124001.97000289432 }, { "content": "struct MergedRecordBatchStream {\n\n schema: SchemaRef,\n\n select_all: Pin<Box<SelectAll<SendableRecordBatchStream>>>,\n\n}\n\n\n\nimpl Stream for MergedRecordBatchStream {\n\n type Item = ArrowResult<RecordBatch>;\n\n\n\n fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n self.select_all.as_mut().poll_next(cx)\n\n }\n\n}\n\n\n\nimpl RecordBatchStream for MergedRecordBatchStream {\n\n fn schema(&self) -> SchemaRef {\n\n self.schema.clone()\n\n }\n\n}\n", "file_path": "rust/lib/src/executor/collect.rs", "rank": 25, "score": 105842.38107999698 }, { "content": "/// Convert a single RecordBatch into an iterator of FlightData (containing\n\n/// dictionaries and batches)\n\nfn create_flight_iter(\n\n batch: &RecordBatch,\n\n options: &IpcWriteOptions,\n\n) -> Box<dyn Iterator<Item = Result<FlightData, Status>>> {\n\n let (flight_dictionaries, flight_batch) =\n\n arrow_flight::utils::flight_data_from_arrow_batch(batch, &options);\n\n Box::new(\n\n flight_dictionaries\n\n .into_iter()\n\n .chain(std::iter::once(flight_batch))\n\n .map(Ok),\n\n )\n\n}\n\n\n\nasync fn stream_flight_data<T>(reader: FileReader<T>, tx: FlightDataSender) -> Result<(), Status>\n\nwhere\n\n T: Read + Seek,\n\n{\n\n let options = arrow::ipc::writer::IpcWriteOptions::default();\n\n let schema_flight_data =\n", "file_path": "rust/lib/src/executor/flight_service.rs", "rank": 26, "score": 104858.68086057172 }, { "content": "fn compile_expr(\n\n expr: &protobuf::LogicalExprNode,\n\n schema: &Schema,\n\n) -> Result<Arc<dyn PhysicalExpr>, BallistaError> {\n\n let df_planner = DefaultPhysicalPlanner::default();\n\n let state = ExecutionContextState {\n\n datasources: HashMap::new(),\n\n scalar_functions: 
HashMap::new(),\n\n var_provider: HashMap::new(),\n\n aggregate_functions: HashMap::new(),\n\n config: ExecutionConfig::new(),\n\n };\n\n let expr: Expr = expr.try_into()?;\n\n df_planner\n\n .create_physical_expr(&expr, schema, &state)\n\n .map_err(|e| BallistaError::General(format!(\"{:?}\", e)))\n\n}\n", "file_path": "rust/lib/src/serde/physical_plan/from_proto.rs", "rank": 27, "score": 104770.62150018888 }, { "content": "fn parse_optional_expr(\n\n p: &Option<Box<protobuf::LogicalExprNode>>,\n\n) -> Result<Option<Expr>, BallistaError> {\n\n match p {\n\n Some(expr) => expr.as_ref().try_into().map(Some),\n\n None => Ok(None),\n\n }\n\n}\n", "file_path": "rust/lib/src/serde/logical_plan/from_proto.rs", "rank": 28, "score": 102766.047347216 }, { "content": "fn try_parse_when_then_expr(\n\n when_expr: &Arc<dyn PhysicalExpr>,\n\n then_expr: &Arc<dyn PhysicalExpr>,\n\n) -> Result<protobuf::WhenThen, BallistaError> {\n\n Ok(protobuf::WhenThen {\n\n when_expr: Some(when_expr.clone().try_into()?),\n\n then_expr: Some(then_expr.clone().try_into()?),\n\n })\n\n}\n", "file_path": "rust/lib/src/serde/physical_plan/to_proto.rs", "rank": 29, "score": 102766.047347216 }, { "content": "pub fn any_to_datatype(any: &PyAny) -> PyResult<DataType> {\n\n if let Ok(datatype) = any.extract::<BPyDataType>() {\n\n Ok(datatype.datatype)\n\n } else if let Ok(datatype_str) = any.extract::<&str>() {\n\n Ok(match datatype_str {\n\n \"int8\" | \"i8\" => DataType::Int8,\n\n \"int16\" | \"i16\" => DataType::Int16,\n\n \"int32\" | \"int\" | \"i32\" => DataType::Int32,\n\n \"int64\" | \"i64\" => DataType::Int64,\n\n \"uint8\" | \"u8\" => DataType::UInt8,\n\n \"uint16\" | \"u16\" => DataType::UInt16,\n\n \"uint32\" | \"uint\" | \"u32\" => DataType::UInt32,\n\n \"uint64\" | \"u64\" => DataType::UInt64,\n\n \"str\" | \"utf8\" => DataType::Utf8,\n\n \"float32\" | \"f32\" => DataType::Float32,\n\n \"float\" | \"f64\" => DataType::Float64,\n\n \"bool\" => DataType::Boolean,\n\n \"date32\" => 
DataType::Date32,\n\n \"date64\" => DataType::Date64,\n\n _ => {\n", "file_path": "python/src/datatypes.rs", "rank": 30, "score": 102530.484368543 }, { "content": "pub fn get_tpch_schema(table: &str) -> Schema {\n\n // note that the schema intentionally uses signed integers so that any generated Parquet\n\n // files can also be used to benchmark tools that only support signed integers, such as\n\n // Apache Spark\n\n\n\n match table {\n\n \"part\" => Schema::new(vec![\n\n Field::new(\"p_partkey\", DataType::Int32, false),\n\n Field::new(\"p_name\", DataType::Utf8, false),\n\n Field::new(\"p_mfgr\", DataType::Utf8, false),\n\n Field::new(\"p_brand\", DataType::Utf8, false),\n\n Field::new(\"p_type\", DataType::Utf8, false),\n\n Field::new(\"p_size\", DataType::Int32, false),\n\n Field::new(\"p_container\", DataType::Utf8, false),\n\n Field::new(\"p_retailprice\", DataType::Float64, false),\n\n Field::new(\"p_comment\", DataType::Utf8, false),\n\n ]),\n\n\n\n \"supplier\" => Schema::new(vec![\n\n Field::new(\"s_suppkey\", DataType::Int32, false),\n", "file_path": "rust/lib/src/test_utils.rs", "rank": 31, "score": 102052.20630442858 }, { "content": "pub fn transform_tuple_to_uniform_type<'a, T: FromPyObject<'a>, U, F: Fn(T) -> U>(\n\n tuple: &'a PyTuple,\n\n transform: F,\n\n) -> PyResult<Vec<U>> {\n\n tuple\n\n .iter()\n\n .map(|tuple_item| Ok(transform(tuple_item.extract::<T>()?)))\n\n .collect::<Result<Vec<U>, PyErr>>()\n\n}\n\n\n", "file_path": "python/src/util.rs", "rank": 32, "score": 101752.54218540441 }, { "content": " @Override\n\n public Struct createStruct(String s, Object[] objects) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 33, "score": 101482.06380207253 }, { "content": "//Does not typecheck lists\n\nfn typechecked_scalar_value_conversion(\n\n tested_type: &protobuf::scalar_value::Value,\n\n required_type: 
protobuf::PrimitiveScalarType,\n\n) -> Result<datafusion::scalar::ScalarValue, BallistaError> {\n\n use protobuf::scalar_value::Value;\n\n use protobuf::PrimitiveScalarType;\n\n Ok(match (tested_type, &required_type) {\n\n (Value::BoolValue(v), PrimitiveScalarType::Bool) => ScalarValue::Boolean(Some(*v)),\n\n (Value::Int8Value(v), PrimitiveScalarType::Int8) => ScalarValue::Int8(Some(*v as i8)),\n\n (Value::Int16Value(v), PrimitiveScalarType::Int16) => ScalarValue::Int16(Some(*v as i16)),\n\n (Value::Int32Value(v), PrimitiveScalarType::Int32) => ScalarValue::Int32(Some(*v)),\n\n (Value::Int64Value(v), PrimitiveScalarType::Int64) => ScalarValue::Int64(Some(*v)),\n\n (Value::Uint8Value(v), PrimitiveScalarType::Uint8) => ScalarValue::UInt8(Some(*v as u8)),\n\n (Value::Uint16Value(v), PrimitiveScalarType::Uint16) => {\n\n ScalarValue::UInt16(Some(*v as u16))\n\n }\n\n (Value::Uint32Value(v), PrimitiveScalarType::Uint32) => ScalarValue::UInt32(Some(*v)),\n\n (Value::Uint64Value(v), PrimitiveScalarType::Uint64) => ScalarValue::UInt64(Some(*v)),\n\n (Value::Float32Value(v), PrimitiveScalarType::Float32) => ScalarValue::Float32(Some(*v)),\n\n (Value::Float64Value(v), PrimitiveScalarType::Float64) => ScalarValue::Float64(Some(*v)),\n", "file_path": "rust/lib/src/serde/logical_plan/from_proto.rs", "rank": 34, "score": 100908.21043080458 }, { "content": "pub fn any_to_bpyfield(any: &PyAny) -> PyResult<BPyField> {\n\n let field: BPyField = if let Ok(native_field) = any.extract::<BPyField>() {\n\n native_field\n\n } else if let Ok(py_dict) = any.extract::<&PyDict>() {\n\n //Begin with extracting all required keys from dictionary\n\n let name = py_dict\n\n .get_item(\"name\")\n\n .ok_or_else(|| util::missing_key(\"name\", \"field dictionary\"))?;\n\n let datatype = py_dict\n\n .get_item(\"datatype\")\n\n .ok_or_else(|| util::missing_key(\"datatype\", \"field dictionary\"))?;\n\n //If nullable is not present then set nullable to false\n\n let nullable = match 
py_dict.get_item(\"nullable\") {\n\n Some(pyobj) => pyobj.extract::<bool>()?,\n\n None => false,\n\n };\n\n\n\n let dict_id = match py_dict.get_item(\"dict_id\") {\n\n Some(pyobj) => Some(pyobj.extract::<i64>()?),\n\n None => None,\n", "file_path": "python/src/field.rs", "rank": 35, "score": 100631.42883220284 }, { "content": "#[pyfunction(members = \"*\")]\n\nfn Struct(members: &PyTuple) -> PyResult<BPyDataType> {\n\n if members.is_empty() {\n\n return Err(PyException::new_err(\n\n \"To construct a Struct at least a single Field must be provided\",\n\n ));\n\n }\n\n let struct_members = members\n\n .iter()\n\n .map(|any| Ok(any.extract::<BPyField>()?.arrow_field))\n\n .collect::<PyResult<Vec<Field>>>()?;\n\n Ok(BPyDataType {\n\n datatype: DataType::Struct(struct_members),\n\n })\n\n}\n\n\n", "file_path": "python/src/datatypes.rs", "rank": 36, "score": 100601.73645680587 }, { "content": "pub fn wrap_kwarg_keytype_error(err: PyErr) -> PyErr {\n\n pyo3::exceptions::PyTypeError::new_err(format!(\n\n \"kwargs values must be convertible to a string using __str__: {}\",\n\n err.to_string()\n\n ))\n\n}\n", "file_path": "python/src/error.rs", "rank": 37, "score": 100336.56450382603 }, { "content": " @Override\n\n public Connection connect(String url, Properties properties) throws SQLException {\n\n logger.info(\"connect() url={}\", url);\n\n //TODO this needs much more work to parse full URLs but this is enough to get end to end tests running\n\n String c = url.substring(PREFIX.length());\n\n int i = c.indexOf(':');\n\n if (i == -1) {\n\n return new FlightConnection(c, 50051);\n\n } else {\n\n return new FlightConnection(c.substring(0,i), Integer.parseInt(c.substring(i + 1)));\n\n }\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/Driver.java", "rank": 38, "score": 99874.53156904224 }, { "content": "fn create_proto_scalar<I, T: FnOnce(&I) -> protobuf::scalar_value::Value>(\n\n v: &Option<I>,\n\n null_arrow_type: protobuf::PrimitiveScalarType,\n\n 
constructor: T,\n\n) -> protobuf::ScalarValue {\n\n protobuf::ScalarValue {\n\n value: Some(v.as_ref().map(constructor).unwrap_or(\n\n protobuf::scalar_value::Value::NullValue(null_arrow_type as i32),\n\n )),\n\n }\n\n}\n\n\n\nimpl TryInto<protobuf::LogicalExprNode> for &Expr {\n\n type Error = BallistaError;\n\n\n\n fn try_into(self) -> Result<protobuf::LogicalExprNode, Self::Error> {\n\n use datafusion::scalar::ScalarValue;\n\n use protobuf::scalar_value::Value;\n\n match self {\n\n Expr::Column(name) => {\n", "file_path": "rust/lib/src/serde/logical_plan/to_proto.rs", "rank": 39, "score": 99740.03424731476 }, { "content": "pub fn object_class_name(ob: &PyAny) -> PyResult<String> {\n\n let qual_name = ob\n\n .getattr(\"__class__\")?\n\n .getattr(\"__qualname__\")?\n\n .extract::<&str>()?;\n\n let module_name = ob\n\n .getattr(\"__class__\")?\n\n .getattr(\"__module__\")?\n\n .extract::<&str>()?;\n\n Ok(format!(\"{}.{}\", module_name, qual_name))\n\n}\n", "file_path": "python/src/util.rs", "rank": 40, "score": 98827.57183097636 }, { "content": "pub fn to_py(batches: &[RecordBatch]) -> PyResult<PyObject> {\n\n let gil = pyo3::Python::acquire_gil();\n\n let py = gil.python();\n\n let pyarrow = PyModule::import(py, \"pyarrow\")?;\n\n let builtins = PyModule::import(py, \"builtins\")?;\n\n\n\n let mut py_batches = vec![];\n\n for batch in batches {\n\n py_batches.push(to_py_batch(batch, py, pyarrow)?);\n\n }\n\n let result = builtins.call1(\"list\", (py_batches,))?;\n\n Ok(PyObject::from(result))\n\n}\n\n\n", "file_path": "python/src/dataframe.rs", "rank": 41, "score": 98827.57183097636 }, { "content": "pub fn datafusion_test_context(path: &str) -> Result<ExecutionContext> {\n\n let mut ctx = ExecutionContext::new();\n\n for table in TPCH_TABLES {\n\n let schema = get_tpch_schema(table);\n\n let options = CsvReadOptions::new()\n\n .schema(&schema)\n\n .delimiter(b'|')\n\n .has_header(false)\n\n .file_extension(\".tbl\");\n\n let dir = format!(\"{}/{}\", path, 
table);\n\n ctx.register_csv(table, &dir, options)?;\n\n }\n\n Ok(ctx)\n\n}\n\n\n", "file_path": "rust/lib/src/test_utils.rs", "rank": 42, "score": 95478.18681457799 }, { "content": "pub fn missing_key(key_name: &str, from_name: &str) -> PyErr {\n\n PyException::new_err(format!(\n\n \"The required key {} was not present in {}.\",\n\n key_name, from_name\n\n ))\n\n}\n\n\n", "file_path": "python/src/util.rs", "rank": 43, "score": 94479.57535816792 }, { "content": "fn from_ballista_err(e: &crate::error::BallistaError) -> Status {\n\n Status::internal(format!(\"Ballista Error: {:?}\", e))\n\n}\n\n\n", "file_path": "rust/lib/src/executor/flight_service.rs", "rank": 44, "score": 90969.78631110275 }, { "content": "pub fn produce_diagram(filename: &str, stages: &[Arc<QueryStageExec>]) -> Result<()> {\n\n let write_file = File::create(filename)?;\n\n let mut w = BufWriter::new(&write_file);\n\n writeln!(w, \"digraph G {{\")?;\n\n\n\n // draw stages and entities\n\n for stage in stages {\n\n writeln!(w, \"\\tsubgraph cluster{} {{\", stage.stage_id)?;\n\n writeln!(w, \"\\t\\tlabel = \\\"Stage {}\\\";\", stage.stage_id)?;\n\n let mut id = AtomicUsize::new(0);\n\n build_exec_plan_diagram(&mut w, stage.child.as_ref(), stage.stage_id, &mut id, true)?;\n\n writeln!(w, \"\\t}}\")?;\n\n }\n\n\n\n // draw relationships\n\n for stage in stages {\n\n let mut id = AtomicUsize::new(0);\n\n build_exec_plan_diagram(&mut w, stage.child.as_ref(), stage.stage_id, &mut id, false)?;\n\n }\n\n\n\n write!(w, \"}}\")?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rust/lib/src/utils.rs", "rank": 45, "score": 89098.731249609 }, { "content": "pub fn to_py_array(array: &ArrayRef, py: Python) -> PyResult<PyObject> {\n\n let (array_pointer, schema_pointer) = array.to_raw().map_err(crate::util::wrap_err)?;\n\n\n\n let pa = py.import(\"pyarrow\")?;\n\n\n\n let array = pa.getattr(\"Array\")?.call_method1(\n\n \"_import_from_c\",\n\n (\n\n array_pointer as Py_uintptr_t,\n\n schema_pointer as Py_uintptr_t,\n\n 
),\n\n )?;\n\n Ok(array.to_object(py))\n\n}\n\n\n", "file_path": "python/src/dataframe.rs", "rank": 46, "score": 89098.731249609 }, { "content": "pub fn wrap_err<E: std::error::Error>(err: E) -> PyErr {\n\n PyException::new_err(err.to_string())\n\n}\n\n\n", "file_path": "python/src/util.rs", "rank": 47, "score": 88806.52928329752 }, { "content": "pub fn wrap_err<E: std::error::Error>(err: E) -> PyErr {\n\n PyException::new_err(err.to_string())\n\n}\n", "file_path": "python/src/dataframe.rs", "rank": 48, "score": 88806.52928329752 }, { "content": "pub fn format_plan(plan: &dyn ExecutionPlan, indent: usize) -> Result<String> {\n\n let operator_str = if let Some(exec) = plan.as_any().downcast_ref::<HashAggregateExec>() {\n\n format!(\n\n \"HashAggregateExec: groupBy={:?}, aggrExpr={:?}\",\n\n exec.group_expr()\n\n .iter()\n\n .map(|e| format_expr(e.0.as_ref()))\n\n .collect::<Vec<String>>(),\n\n exec.aggr_expr()\n\n .iter()\n\n .map(|e| format_agg_expr(e.as_ref()))\n\n .collect::<Result<Vec<String>>>()?\n\n )\n\n } else if let Some(exec) = plan.as_any().downcast_ref::<HashJoinExec>() {\n\n format!(\n\n \"HashJoinExec: joinType={:?}, on={:?}\",\n\n exec.join_type(),\n\n exec.on()\n\n )\n\n } else if let Some(exec) = plan.as_any().downcast_ref::<ParquetExec>() {\n", "file_path": "rust/lib/src/utils.rs", "rank": 49, "score": 87248.95704311691 }, { "content": "pub fn kwargs_to_string_map(kwargs: Option<&PyDict>) -> PyResult<HashMap<String, String>> {\n\n Ok(match kwargs {\n\n Some(kwargs) => kwargs\n\n .iter()\n\n .map(|(py_key_obj, py_val_obj)| {\n\n let py_key = py_key_obj.str().map_err(|err| {\n\n pyo3::exceptions::PyTypeError::new_err(format!(\n\n \"kwargs keys must be convertible to a string using str(): {}\",\n\n err.to_string()\n\n ))\n\n })?;\n\n let py_val = py_val_obj.str().map_err(|err| {\n\n pyo3::exceptions::PyTypeError::new_err(format!(\n\n \"kwargs values must be convertible to a string using str(): {}\",\n\n err.to_string()\n\n ))\n\n })?;\n\n 
Ok((py_key.to_str()?.to_owned(), py_val.to_str()?.to_owned()))\n\n })\n\n .collect::<Result<HashMap<String, String>, PyErr>>()?,\n\n None => HashMap::new(),\n\n })\n\n}\n\n\n", "file_path": "python/src/util.rs", "rank": 50, "score": 84341.9665117463 }, { "content": "pub fn tuple_to_uniform_type<'a, T: FromPyObject<'a>>(tuple: &'a PyTuple) -> PyResult<Vec<T>> {\n\n tuple\n\n .iter()\n\n .map(|tuple_item| tuple_item.extract::<T>())\n\n .collect::<Result<Vec<T>, PyErr>>()\n\n}\n\nuse pyo3::PyAny;\n", "file_path": "python/src/util.rs", "rank": 51, "score": 80189.67616011022 }, { "content": "fn from_proto_binary_op(op: &str) -> Result<Operator, BallistaError> {\n\n match op {\n\n \"And\" => Ok(Operator::And),\n\n \"Or\" => Ok(Operator::Or),\n\n \"Eq\" => Ok(Operator::Eq),\n\n \"NotEq\" => Ok(Operator::NotEq),\n\n \"LtEq\" => Ok(Operator::LtEq),\n\n \"Lt\" => Ok(Operator::Lt),\n\n \"Gt\" => Ok(Operator::Gt),\n\n \"GtEq\" => Ok(Operator::GtEq),\n\n \"Plus\" => Ok(Operator::Plus),\n\n \"Minus\" => Ok(Operator::Minus),\n\n \"Multiply\" => Ok(Operator::Multiply),\n\n \"Divide\" => Ok(Operator::Divide),\n\n \"Like\" => Ok(Operator::Like),\n\n other => Err(proto_error(format!(\n\n \"Unsupported binary operator '{:?}'\",\n\n other\n\n ))),\n\n }\n", "file_path": "rust/lib/src/serde/logical_plan/from_proto.rs", "rank": 52, "score": 75545.42649527274 }, { "content": "public class FlightConnection implements java.sql.Connection {\n\n\n\n protected final String host;\n\n protected final int port;\n\n\n\n public FlightConnection(String host, int port) {\n\n this.host = host;\n\n this.port = port;\n\n }\n\n\n\n @Override\n\n public FlightStatement createStatement() throws SQLException {\n\n return new FlightStatement(this);\n\n }\n\n\n\n @Override\n\n public FlightPreparedStatement prepareStatement(String s) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public CallableStatement prepareCall(String s) throws SQLException {\n\n throw 
new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public String nativeSQL(String s) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public boolean getAutoCommit() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void setAutoCommit(boolean b) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void commit() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void rollback() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void close() throws SQLException {\n\n\n\n }\n\n\n\n @Override\n\n public boolean isClosed() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public DatabaseMetaData getMetaData() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public boolean isReadOnly() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void setReadOnly(boolean b) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public String getCatalog() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void setCatalog(String s) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public int getTransactionIsolation() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void setTransactionIsolation(int i) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public SQLWarning getWarnings() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void clearWarnings() 
throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public FlightStatement createStatement(int i, int i1) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public FlightPreparedStatement prepareStatement(String s, int i, int i1) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public CallableStatement prepareCall(String s, int i, int i1) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public Map<String, Class<?>> getTypeMap() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void setTypeMap(Map<String, Class<?>> map) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public int getHoldability() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void setHoldability(int i) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public Savepoint setSavepoint() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public Savepoint setSavepoint(String s) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void rollback(Savepoint savepoint) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void releaseSavepoint(Savepoint savepoint) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public FlightStatement createStatement(int i, int i1, int i2) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public FlightPreparedStatement prepareStatement(String s, int i, int i1, int i2) throws SQLException {\n\n throw new 
SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public CallableStatement prepareCall(String s, int i, int i1, int i2) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public FlightPreparedStatement prepareStatement(String s, int i) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public FlightPreparedStatement prepareStatement(String s, int[] ints) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public FlightPreparedStatement prepareStatement(String s, String[] strings) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public Clob createClob() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public Blob createBlob() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public NClob createNClob() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public SQLXML createSQLXML() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public boolean isValid(int i) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void setClientInfo(String s, String s1) throws SQLClientInfoException {\n\n throw new SQLClientInfoException();\n\n\n\n }\n\n\n\n @Override\n\n public String getClientInfo(String s) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public Properties getClientInfo() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void setClientInfo(Properties properties) throws SQLClientInfoException {\n\n throw new SQLClientInfoException();\n\n }\n\n\n\n @Override\n\n public Array createArrayOf(String s, Object[] objects) 
throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public Struct createStruct(String s, Object[] objects) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public String getSchema() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void setSchema(String s) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void abort(Executor executor) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public void setNetworkTimeout(Executor executor, int i) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public int getNetworkTimeout() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public <T> T unwrap(Class<T> aClass) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n\n\n\n @Override\n\n public boolean isWrapperFor(Class<?> aClass) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n\n }\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 53, "score": 73398.84482712671 }, { "content": "#[pyfunction]\n\nfn Dictionary(key_type: BPyDataType, value_type: BPyDataType) -> BPyDataType {\n\n BPyDataType {\n\n datatype: DataType::Dictionary(Box::new(key_type.datatype), Box::new(value_type.datatype)),\n\n }\n\n}\n\n\n", "file_path": "python/src/datatypes.rs", "rank": 54, "score": 72795.76768157659 }, { "content": " @Override\n\n public boolean isWrapperFor(Class<?> aClass) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 55, "score": 72059.52852754263 }, { "content": " @Override\n\n public void rollback(Savepoint savepoint) throws 
SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 56, "score": 72059.52852754263 }, { "content": " @Override\n\n public void abort(Executor executor) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 57, "score": 72059.52852754263 }, { "content": " @Override\n\n public <T> T unwrap(Class<T> aClass) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 58, "score": 72059.52852754263 }, { "content": " @Override\n\n public boolean isReadOnly() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 59, "score": 72059.52852754263 }, { "content": " @Override\n\n public boolean isValid(int i) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 60, "score": 72059.52852754263 }, { "content": " protected final String host;\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 61, "score": 72059.52852754263 }, { "content": " @Override\n\n public void commit() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 62, "score": 72059.52852754263 }, { "content": " protected final int port;\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 63, "score": 72059.52852754263 }, { "content": " @Override\n\n public boolean isClosed() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", 
"file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 64, "score": 72059.52852754263 }, { "content": " @Override\n\n public void close() throws SQLException {\n\n\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 65, "score": 72059.52852754263 }, { "content": "class Module:\n\n def __init__(self, module_name:str, obj:typing.Any, type_stub: bool = False)->None:\n\n self.name = module_name\n\n self.functions: typing.Set[str] = set()\n\n self.submodules: typing.Dict[str,'Module'] = {}\n\n self.classes: typing.Dict[str, Class]={}\n\n self.attributes: typing.Set[str] = set()\n\n for name, data in inspect.getmembers(obj, ballista_noskip):\n\n is_private = name.startswith(\"_\")\n\n if is_private:\n\n continue\n\n\n\n data_is_function = is_function(data, type_stub=type_stub)\n\n data_is_class = is_class(data, type_stub=type_stub)\n\n if data_is_function and not data_is_class:\n\n self.add_function(name)\n\n elif data_is_class and not data_is_function:\n\n self.add_class(Class(name, data, type_stub=type_stub))\n\n elif inspect.ismodule(data):\n\n self.add_submodule(Module(name, data, type_stub=type_stub))\n\n else:\n\n self.add_attributes(name)\n\n #print(f\"TypeStub[{type_stub}: {self.attributes}\")\n\n\n\n\n\n def add_function(self, func_name: str)->None:\n\n if func_name in self.functions:\n\n raise Exception(f\"Module {self.name} already had a function named {func_name}\")\n\n self.functions.add(func_name)\n\n \n\n def add_submodule(self, module: 'Module')->None:\n\n if self.submodules.get(module.name) is not None:\n\n raise Exception(f\"Module {self.name} already had a submodule {module.name}\")\n\n self.submodules[module.name] = module\n\n\n\n def add_class(self, cls: Class)->None:\n\n if self.classes.get(cls.class_name) is not None:\n\n raise Exception(f\"Module {self.name} already had a class named {cls.class_name}\")\n\n self.classes[cls.class_name] = cls\n\n\n\n def 
add_attributes(self, attribute_name: str)->None:\n\n if attribute_name in self.attributes:\n\n raise Exception(f\"Module {self.name} already had a attribute/property named {attribute_name}\")\n\n self.attributes.add(attribute_name)\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n \n\n def __str__(self, depth: int=0)->str:\n\n ntabs: typing.Callable[[int], str] = lambda n: \"\\t\"*n\n\n if len(self.classes) == 0:\n\n classes_str = \"{}classes: {{}}\\n\".format(ntabs(depth+1))\n\n else:\n\n classes_str = \"{}classes: {{\\n{} }}\\n\".format(ntabs(depth+1), \",\\n\".join(\"{}\".format(c.__str__(depth=depth+2)) for c in self.classes.values())) #type: ignore\n\n\n\n if len(self.attributes) == 0:\n\n attributes_str = \"{}attributes: []\\n\".format(ntabs(depth+1))\n\n else:\n\n attributes_str= \"{}attributes: [\\n{} ]\\n\".format(ntabs(depth+1), \",\\n\".join(\"{}{}\".format(ntabs(depth+2),a) for a in self.attributes) )\n\n if len(self.submodules) == 0:\n\n submodules_str = \"{}modules:[]\\n\".format(ntabs(depth+1))\n\n else:\n\n submodules_str =\"{}modules: [\\n{} ]\\n\".format(ntabs(depth+1), \",\\n\".join(\"{}\".format(m.__str__(depth=depth+2)) for m in self.submodules.values())) #type: ignore\n\n str_repr = f\"{ntabs(depth)}name:{self.name}\\n{classes_str}{attributes_str}{submodules_str}\"\n\n\n", "file_path": "python/ci/check_stub_exhaustiveness.py", "rank": 66, "score": 71393.5987562735 }, { "content": " def test_expr(self):\n\n positive_pressure: ballista.Expression = col(\"pressure\") > 100.0\n", "file_path": "python/tests/test_simple.py", "rank": 67, "score": 71330.83827240235 }, { "content": " @Override\n\n public String nativeSQL(String s) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 68, "score": 70774.37470086024 }, { "content": " @Override\n\n public void clearWarnings() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", 
"file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 69, "score": 70774.37470086024 }, { "content": " @Override\n\n public FlightPreparedStatement prepareStatement(String s, String[] strings) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 70, "score": 70774.37470086024 }, { "content": " @Override\n\n public void releaseSavepoint(Savepoint savepoint) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 71, "score": 70774.37470086024 }, { "content": " @Override\n\n public void setReadOnly(boolean b) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 72, "score": 70774.37470086024 }, { "content": " @Override\n\n public int getHoldability() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 73, "score": 70774.37470086024 }, { "content": " @Override\n\n public String getSchema() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 74, "score": 70774.37470086024 }, { "content": " @Override\n\n public SQLXML createSQLXML() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 75, "score": 70774.37470086024 }, { "content": " @Override\n\n public CallableStatement prepareCall(String s, int i, int i1, int i2) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": 
"jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 76, "score": 70774.37470086024 }, { "content": " @Override\n\n public SQLWarning getWarnings() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 77, "score": 70774.37470086024 }, { "content": " @Override\n\n public void setCatalog(String s) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 78, "score": 70774.37470086024 }, { "content": " @Override\n\n public FlightStatement createStatement(int i, int i1, int i2) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 79, "score": 70774.37470086024 }, { "content": " @Override\n\n public Blob createBlob() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 80, "score": 70774.37470086024 }, { "content": " @Override\n\n public Savepoint setSavepoint(String s) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 81, "score": 70774.37470086024 }, { "content": " @Override\n\n public void setHoldability(int i) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 82, "score": 70774.37470086024 }, { "content": " @Override\n\n public Array createArrayOf(String s, Object[] objects) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 83, "score": 
70774.37470086024 }, { "content": " @Override\n\n public void setSchema(String s) throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 84, "score": 70774.37470086024 }, { "content": " @Override\n\n public String getCatalog() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 85, "score": 70774.37470086024 }, { "content": " @Override\n\n public Clob createClob() throws SQLException {\n\n throw new SQLFeatureNotSupportedException();\n", "file_path": "jvm/jdbc/src/main/java/org/ballistacompute/jdbc/FlightConnection.java", "rank": 86, "score": 70774.37470086024 }, { "content": "fn execute(\n\n stages: Vec<Arc<QueryStageExec>>,\n\n executors: Vec<ExecutorMeta>,\n\n) -> SendableExecutionPlan {\n\n Box::pin(async move {\n\n let mut partition_locations: HashMap<usize, Vec<PartitionLocation>> = HashMap::new();\n\n let mut result_partition_locations = vec![];\n\n for stage in &stages {\n\n debug!(\"execute() {}\", &format!(\"{:?}\", stage)[0..60]);\n\n let stage = remove_unresolved_shuffles(stage.as_ref(), &partition_locations)?;\n\n let stage = stage.as_any().downcast_ref::<QueryStageExec>().unwrap();\n\n result_partition_locations = execute_query_stage(\n\n &stage.job_id.clone(),\n\n stage.stage_id,\n\n stage.children()[0].clone(),\n\n executors.clone(),\n\n )\n\n .await?;\n\n partition_locations.insert(stage.stage_id, result_partition_locations.clone());\n\n }\n\n\n\n let shuffle_reader: Arc<dyn ExecutionPlan> = Arc::new(ShuffleReaderExec::try_new(\n\n result_partition_locations,\n\n stages.last().unwrap().schema(),\n\n )?);\n\n Ok(shuffle_reader)\n\n })\n\n}\n\n\n", "file_path": "rust/lib/src/scheduler/planner.rs", "rank": 87, "score": 69970.39385388917 }, { "content": "// Copyright 2020 Andy Grove\n\n//\n\n// Licensed under the Apache 
License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! Ballista Prelude (common imports)\n\n\n\npub use crate::{\n\n client::BallistaClient,\n\n context::BallistaContext,\n\n error::{BallistaError, Result},\n\n};\n\n\n\npub use futures::StreamExt;\n", "file_path": "rust/lib/src/prelude.rs", "rank": 90, "score": 47.57668948305457 }, { "content": "// Copyright 2020 Andy Grove\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! This crate contains code generated from the Ballista Protocol Buffer Definition as well\n\n//! 
as convenience code for interacting with the generated code.\n\n\n\nuse std::{convert::TryInto, io::Cursor};\n\n\n\nuse crate::{error::BallistaError, serde::scheduler::Action as BallistaAction};\n", "file_path": "rust/lib/src/serde/mod.rs", "rank": 91, "score": 46.77850162172378 }, { "content": "// Copyright 2020 Andy Grove\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! Ballista Distributed Compute\n\n#![allow(unused_imports)]\n\npub const BALLISTA_VERSION: &str = env!(\"CARGO_PKG_VERSION\");\n\n\n", "file_path": "rust/lib/src/lib.rs", "rank": 92, "score": 44.30913591322717 }, { "content": "// Copyright 2020 Andy Grove\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::convert::TryInto;\n\n\n\nuse crate::error::BallistaError;\n\nuse crate::scheduler::planner::PartitionLocation;\n\nuse crate::serde::protobuf;\n\nuse crate::serde::protobuf::action::ActionType;\n", 
"file_path": "rust/lib/src/serde/scheduler/to_proto.rs", "rank": 93, "score": 42.94755273988739 }, { "content": "// Copyright 2020 Andy Grove\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! Support for distributed schedulers, such as Kubernetes\n\n\n\npub mod execution_plans;\n\npub mod planner;\n\npub mod state;\n\n\n", "file_path": "rust/lib/src/scheduler/mod.rs", "rank": 94, "score": 42.48549259827602 }, { "content": "// Copyright 2020 Andy Grove\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! Distributed query execution\n\n//!\n\n//! 
This code is EXPERIMENTAL and still under development\n\n\n\nuse std::pin::Pin;\n\nuse std::sync::Arc;\n", "file_path": "rust/lib/src/scheduler/planner.rs", "rank": 95, "score": 42.21272540902669 }, { "content": "// Copyright 2020 Andy Grove\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse std::{collections::HashMap, convert::TryInto};\n\n\n\nuse crate::error::BallistaError;\n\nuse crate::scheduler::planner::PartitionLocation;\n\nuse crate::serde::protobuf;\n\nuse crate::serde::protobuf::action::ActionType;\n", "file_path": "rust/lib/src/serde/scheduler/from_proto.rs", "rank": 96, "score": 42.187730933601166 }, { "content": "// Copyright 2020 Andy Grove\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\npub mod from_proto;\n\npub mod to_proto;\n\n\n\n#[cfg(test)]\n\n\n\nmod roundtrip_tests {\n", "file_path": "rust/lib/src/serde/logical_plan/mod.rs", "rank": 97, "score": 
42.15851664056607 }, { "content": "// Copyright 2020 Andy Grove\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\n//! Ballista error types\n\n\n\nuse std::{\n\n error::Error,\n\n fmt::{Display, Formatter},\n\n io, result,\n", "file_path": "rust/lib/src/error.rs", "rank": 98, "score": 42.027413757396765 }, { "content": "// Copyright 2020 Andy Grove\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n", "file_path": "rust/lib/build.rs", "rank": 99, "score": 41.819243889294995 } ]
Rust
zebra-chain/src/serialization/read_zcash.rs
ebfull/zebra
e61b5e50a2f00ebc6e16ac1de10031897acd71cc
use std::io; use std::net::{IpAddr, SocketAddr}; use byteorder::{BigEndian, LittleEndian, ReadBytesExt}; use super::SerializationError; pub trait ReadZcashExt: io::Read { #[inline] fn read_compactsize(&mut self) -> Result<u64, SerializationError> { use SerializationError::Parse; let flag_byte = self.read_u8()?; match flag_byte { n @ 0x00..=0xfc => Ok(n as u64), 0xfd => match self.read_u16::<LittleEndian>()? { n @ 0x0000_00fd..=0x0000_ffff => Ok(n as u64), _ => Err(Parse("non-canonical compactsize")), }, 0xfe => match self.read_u32::<LittleEndian>()? { n @ 0x0001_0000..=0xffff_ffff => Ok(n as u64), _ => Err(Parse("non-canonical compactsize")), }, 0xff => match self.read_u64::<LittleEndian>()? { n @ 0x1_0000_0000..=0xffff_ffff_ffff_ffff => Ok(n), _ => Err(Parse("non-canonical compactsize")), }, } } #[inline] fn read_ip_addr(&mut self) -> io::Result<IpAddr> { use std::net::{IpAddr::*, Ipv6Addr}; let mut octets = [0u8; 16]; self.read_exact(&mut octets)?; let v6_addr = Ipv6Addr::from(octets); match v6_addr.to_ipv4() { Some(v4_addr) => Ok(V4(v4_addr)), None => Ok(V6(v6_addr)), } } #[inline] fn read_socket_addr(&mut self) -> io::Result<SocketAddr> { let ip_addr = self.read_ip_addr()?; let port = self.read_u16::<BigEndian>()?; Ok(SocketAddr::new(ip_addr, port)) } #[inline] fn read_string(&mut self) -> Result<String, SerializationError> { let len = self.read_compactsize()?; let mut buf = vec![0; len as usize]; self.read_exact(&mut buf)?; String::from_utf8(buf).map_err(|_| SerializationError::Parse("invalid utf-8")) } #[inline] fn read_4_bytes(&mut self) -> io::Result<[u8; 4]> { let mut bytes = [0; 4]; self.read_exact(&mut bytes)?; Ok(bytes) } #[inline] fn read_12_bytes(&mut self) -> io::Result<[u8; 12]> { let mut bytes = [0; 12]; self.read_exact(&mut bytes)?; Ok(bytes) } #[inline] fn read_32_bytes(&mut self) -> io::Result<[u8; 32]> { let mut bytes = [0; 32]; self.read_exact(&mut bytes)?; Ok(bytes) } #[inline] fn read_64_bytes(&mut self) -> io::Result<[u8; 64]> { let mut 
bytes = [0; 64]; self.read_exact(&mut bytes)?; Ok(bytes) } } impl<R: io::Read + ?Sized> ReadZcashExt for R {}
use std::io; use std::net::{IpAddr, SocketAddr}; use byteorder::{BigEndian, LittleEndian, ReadBytesExt}; use super::SerializationError; pub trait ReadZcashExt: io::Read { #[inline] fn read_compactsize(&mut self) -> Result<u64, SerializationError> { use SerializationError::Parse; let flag_byte = self.read_u8()?; match flag_byte { n @ 0x00..=0xfc => Ok(n as u64), 0xfd => match self.read_u16::<LittleEndian>()? { n @ 0x0000_00fd..=0x0000_ffff => Ok(n as u64), _ => Err(Parse("non-canonical compactsize")), }, 0xfe => match self.read_u32::<LittleEndian>()? { n @ 0x0001_0000..=0xffff_ffff => Ok(n as u64), _ => Err(Parse("non-canonical compactsize")), }, 0xff => match self.read_u64::<LittleEndian>()? { n @ 0x1_0000_0000..=0xffff_ffff_ffff_ffff => Ok(n), _ => Err(Parse("non-canonical compactsize")), }, } } #[inline] fn read_ip_addr(&mut self) -> io::Result<IpAddr> { use std::net::{IpAddr::*, Ipv6Addr}; let mut octets = [0u8; 16]; self.read_exact(&mut octets)?; let v6_addr = Ipv6Addr::from(octets); match v6_addr.to_ipv4() { Some(v4_addr) => Ok(V4(v4_addr)), None => Ok(V6(v6_addr)), } } #[inline] fn read_socket_addr(&mut self) -> io::Result<SocketAddr> { let ip_addr = self.read_ip_addr()?; let port = self.read_u16::<BigEndian>()?; Ok(SocketAddr::new(ip_addr, port)) } #[inline] fn read_st
#[inline] fn read_4_bytes(&mut self) -> io::Result<[u8; 4]> { let mut bytes = [0; 4]; self.read_exact(&mut bytes)?; Ok(bytes) } #[inline] fn read_12_bytes(&mut self) -> io::Result<[u8; 12]> { let mut bytes = [0; 12]; self.read_exact(&mut bytes)?; Ok(bytes) } #[inline] fn read_32_bytes(&mut self) -> io::Result<[u8; 32]> { let mut bytes = [0; 32]; self.read_exact(&mut bytes)?; Ok(bytes) } #[inline] fn read_64_bytes(&mut self) -> io::Result<[u8; 64]> { let mut bytes = [0; 64]; self.read_exact(&mut bytes)?; Ok(bytes) } } impl<R: io::Read + ?Sized> ReadZcashExt for R {}
ring(&mut self) -> Result<String, SerializationError> { let len = self.read_compactsize()?; let mut buf = vec![0; len as usize]; self.read_exact(&mut buf)?; String::from_utf8(buf).map_err(|_| SerializationError::Parse("invalid utf-8")) }
function_block-function_prefixed
[ { "content": "/// Check that if there are no Spends or Outputs, that valueBalance is also 0.\n\n///\n\n/// https://zips.z.cash/protocol/canopy.pdf#consensusfrombitcoin\n\npub fn shielded_balances_match(\n\n shielded_data: &ShieldedData,\n\n value_balance: Amount,\n\n) -> Result<(), TransactionError> {\n\n if (shielded_data.spends().count() + shielded_data.outputs().count() != 0)\n\n || i64::from(value_balance) == 0\n\n {\n\n Ok(())\n\n } else {\n\n Err(TransactionError::BadBalance)\n\n }\n\n}\n\n\n", "file_path": "zebra-consensus/src/transaction/check.rs", "rank": 0, "score": 179679.0377715845 }, { "content": "/// The divisor used for halvings.\n\n///\n\n/// `1 << Halving(height)`, as described in [protocol specification §7.7][7.7]\n\n///\n\n/// [7.7]: https://zips.z.cash/protocol/protocol.pdf#subsidies\n\npub fn halving_divisor(height: Height, network: Network) -> u64 {\n\n let blossom_height = Blossom\n\n .activation_height(network)\n\n .expect(\"blossom activation height should be available\");\n\n\n\n if height < SLOW_START_SHIFT {\n\n unreachable!(\n\n \"unsupported block height: callers should handle blocks below {:?}\",\n\n SLOW_START_SHIFT\n\n )\n\n } else if height < blossom_height {\n\n let scaled_pre_blossom_height = (height - SLOW_START_SHIFT) as u64;\n\n let halving_shift = scaled_pre_blossom_height / (PRE_BLOSSOM_HALVING_INTERVAL.0 as u64);\n\n 1 << halving_shift\n\n } else {\n\n let scaled_pre_blossom_height =\n\n (blossom_height - SLOW_START_SHIFT) as u64 * BLOSSOM_POW_TARGET_SPACING_RATIO;\n\n let post_blossom_height = (height - blossom_height) as u64;\n\n let halving_shift = (scaled_pre_blossom_height + post_blossom_height)\n\n / (POST_BLOSSOM_HALVING_INTERVAL.0 as u64);\n\n 1 << halving_shift\n\n }\n\n}\n\n\n", "file_path": "zebra-consensus/src/block/subsidy/general.rs", "rank": 1, "score": 167477.41815405432 }, { "content": "/// Generates a random scalar from the scalar field 𝔽_{r_𝕁}.\n\n///\n\n/// The prime order subgroup 𝕁^(r) is the 
order-r_𝕁 subgroup of 𝕁 that consists\n\n/// of the points whose order divides r. This function is useful when generating\n\n/// the uniform distribution on 𝔽_{r_𝕁} needed for Sapling commitment schemes'\n\n/// trapdoor generators.\n\n///\n\n/// https://zips.z.cash/protocol/protocol.pdf#jubjub\n\npub fn generate_trapdoor<T>(csprng: &mut T) -> jubjub::Fr\n\nwhere\n\n T: RngCore + CryptoRng,\n\n{\n\n let mut bytes = [0u8; 64];\n\n csprng.fill_bytes(&mut bytes);\n\n // Fr::from_bytes_wide() reduces the input modulo r via Fr::from_u512()\n\n jubjub::Fr::from_bytes_wide(&bytes)\n\n}\n", "file_path": "zebra-chain/src/sapling/commitment/pedersen_hashes.rs", "rank": 2, "score": 158961.34905929468 }, { "content": "/// A trait for defining constraints on `Amount`\n\npub trait Constraint {\n\n /// Returns the range of values that are valid under this constraint\n\n fn valid_range() -> RangeInclusive<i64>;\n\n\n\n /// Check if an input value is within the valid range\n\n fn validate(value: i64) -> Result<i64, Error> {\n\n let range = Self::valid_range();\n\n\n\n if !range.contains(&value) {\n\n Err(Error::Contains { range, value })\n\n } else {\n\n Ok(value)\n\n }\n\n }\n\n}\n\n\n\nimpl ZcashSerialize for Amount<NegativeAllowed> {\n\n fn zcash_serialize<W: std::io::Write>(&self, mut writer: W) -> Result<(), std::io::Error> {\n\n writer.write_i64::<LittleEndian>(self.0)\n\n }\n", "file_path": "zebra-chain/src/amount.rs", "rank": 4, "score": 150997.21627181285 }, { "content": "/// Mocks computation done during semantic validation\n\npub trait Prepare {\n\n fn prepare(self) -> PreparedBlock;\n\n}\n\n\n\nimpl Prepare for Arc<Block> {\n\n fn prepare(self) -> PreparedBlock {\n\n let block = self;\n\n let hash = block.hash();\n\n let height = block.coinbase_height().unwrap();\n\n let transaction_hashes = block.transactions.iter().map(|tx| tx.hash()).collect();\n\n let new_outputs = crate::utxo::new_outputs(&block);\n\n\n\n PreparedBlock {\n\n block,\n\n hash,\n\n height,\n\n 
new_outputs,\n\n transaction_hashes,\n\n }\n\n }\n\n}\n\n\n", "file_path": "zebra-state/src/tests.rs", "rank": 5, "score": 150991.62484954382 }, { "content": " pub trait Sealed {}\n\n impl Sealed for Bctv14Proof {}\n\n impl Sealed for Groth16Proof {}\n\n}\n", "file_path": "zebra-chain/src/primitives/proofs.rs", "rank": 6, "score": 148472.09560570936 }, { "content": "pub trait CommandExt {\n\n /// wrapper for `status` fn on `Command` that constructs informative error\n\n /// reports\n\n fn status2(&mut self) -> Result<TestStatus, Report>;\n\n\n\n /// wrapper for `output` fn on `Command` that constructs informative error\n\n /// reports\n\n fn output2(&mut self) -> Result<TestOutput<NoDir>, Report>;\n\n\n\n /// wrapper for `spawn` fn on `Command` that constructs informative error\n\n /// reports\n\n fn spawn2<T>(&mut self, dir: T) -> Result<TestChild<T>, Report>;\n\n}\n\n\n\nimpl CommandExt for Command {\n\n /// wrapper for `status` fn on `Command` that constructs informative error\n\n /// reports\n\n fn status2(&mut self) -> Result<TestStatus, Report> {\n\n let cmd = format!(\"{:?}\", self);\n\n let status = self.status();\n", "file_path": "zebra-test/src/command.rs", "rank": 7, "score": 148472.09560570936 }, { "content": "/// Extension trait for methods on `tempdir::TempDir` for using it as a test\n\n/// directory with an arbitrary command.\n\n///\n\n/// This trait is separate from `ZebradTestDirExt`, so that we can test\n\n/// `zebra_test::command` without running `zebrad`.\n\npub trait TestDirExt\n\nwhere\n\n Self: AsRef<Path> + Sized,\n\n{\n\n /// Spawn `cmd` with `args` as a child process in this test directory,\n\n /// potentially taking ownership of the tempdir for the duration of the\n\n /// child process.\n\n fn spawn_child_with_command(self, cmd: &str, args: &[&str]) -> Result<TestChild<Self>>;\n\n}\n\n\n\nimpl<T> TestDirExt for T\n\nwhere\n\n Self: AsRef<Path> + Sized,\n\n{\n\n fn spawn_child_with_command(self, cmd: &str, args: &[&str]) -> 
Result<TestChild<Self>> {\n\n let mut cmd = test_cmd(cmd, self.as_ref())?;\n\n\n\n Ok(cmd\n\n .args(args)\n\n .stdout(Stdio::piped())\n", "file_path": "zebra-test/src/command.rs", "rank": 8, "score": 146099.584949553 }, { "content": "/// Helper trait for constructing \"valid\" looking chains of blocks\n\npub trait FakeChainHelper {\n\n fn make_fake_child(&self) -> Arc<Block>;\n\n\n\n fn set_work(self, work: u128) -> Arc<Block>;\n\n}\n\n\n\nimpl FakeChainHelper for Arc<Block> {\n\n fn make_fake_child(&self) -> Arc<Block> {\n\n let parent_hash = self.hash();\n\n let mut child = Block::clone(self);\n\n let mut transactions = mem::take(&mut child.transactions);\n\n let mut tx = transactions.remove(0);\n\n\n\n let input = match Arc::make_mut(&mut tx) {\n\n Transaction::V1 { inputs, .. } => &mut inputs[0],\n\n Transaction::V2 { inputs, .. } => &mut inputs[0],\n\n Transaction::V3 { inputs, .. } => &mut inputs[0],\n\n Transaction::V4 { inputs, .. } => &mut inputs[0],\n\n };\n\n\n", "file_path": "zebra-state/src/tests.rs", "rank": 9, "score": 146094.93732292362 }, { "content": "/// Initialize globals for tests such as the tracing subscriber and panic / error\n\n/// reporting hooks\n\npub fn init() {\n\n INIT.call_once(|| {\n\n let fmt_layer = fmt::layer().with_target(false);\n\n // Use the RUST_LOG env var, or by default:\n\n // - warn for most tests, and\n\n // - for some modules, hide expected warn logs\n\n let filter_layer = EnvFilter::try_from_default_env().unwrap_or_else(|_| {\n\n EnvFilter::try_new(\"warn\")\n\n .unwrap()\n\n .add_directive(\"zebra_consensus=error\".parse().unwrap())\n\n });\n\n\n\n tracing_subscriber::registry()\n\n .with(filter_layer)\n\n .with(fmt_layer)\n\n .with(ErrorLayer::default())\n\n .init();\n\n\n\n color_eyre::config::HookBuilder::default()\n\n .add_frame_filter(Box::new(|frames| {\n", "file_path": "zebra-test/src/lib.rs", "rank": 10, "score": 145176.4983604153 }, { "content": "/// A marker trait used to abstract over BCTV14 or Groth16 
proofs.\n\npub trait ZkSnarkProof:\n\n Copy\n\n + Clone\n\n + Debug\n\n + PartialEq\n\n + Eq\n\n + Serialize\n\n + DeserializeOwned\n\n + ZcashSerialize\n\n + ZcashDeserialize\n\n + private::Sealed\n\n{\n\n}\n\nimpl ZkSnarkProof for Bctv14Proof {}\n\nimpl ZkSnarkProof for Groth16Proof {}\n\n\n\nmod private {\n\n use super::*;\n\n\n", "file_path": "zebra-chain/src/primitives/proofs.rs", "rank": 11, "score": 143843.59272669425 }, { "content": "/// Helper for deserializing more succinctly via type inference\n\npub trait ZcashDeserializeInto {\n\n /// Deserialize based on type inference\n\n fn zcash_deserialize_into<T>(self) -> Result<T, SerializationError>\n\n where\n\n T: ZcashDeserialize;\n\n}\n\n\n\nimpl<R: io::Read> ZcashDeserializeInto for R {\n\n fn zcash_deserialize_into<T>(self) -> Result<T, SerializationError>\n\n where\n\n T: ZcashDeserialize,\n\n {\n\n T::zcash_deserialize(self)\n\n }\n\n}\n", "file_path": "zebra-chain/src/serialization/zcash_deserialize.rs", "rank": 12, "score": 143833.01784375135 }, { "content": "/// Add context to an error report\n\npub trait ContextFrom<S> {\n\n type Return;\n\n\n\n fn context_from(self, source: S) -> Self::Return;\n\n}\n\n\n\nimpl<C, T, E> ContextFrom<C> for Result<T, E>\n\nwhere\n\n E: Into<Report>,\n\n Report: ContextFrom<C, Return = Report>,\n\n{\n\n type Return = Result<T, Report>;\n\n\n\n fn context_from(self, source: C) -> Self::Return {\n\n self.map_err(|e| e.into())\n\n .map_err(|report| report.context_from(source))\n\n }\n\n}\n\n\n\nimpl ContextFrom<&TestStatus> for Report {\n", "file_path": "zebra-test/src/command.rs", "rank": 13, "score": 142964.8837292677 }, { "content": "/// Use the provided TCP connection to create a Zcash connection completely\n\n/// isolated from all other node state.\n\n///\n\n/// The connection pool returned by `init` should be used for all requests that\n\n/// don't require isolated state or use of an existing TCP connection. 
However,\n\n/// this low-level API is useful for custom network crawlers or Tor connections.\n\n///\n\n/// In addition to being completely isolated from all other node state, this\n\n/// method also aims to be minimally distinguishable from other clients.\n\n///\n\n/// Note that this method does not implement any timeout behavior, so callers may\n\n/// want to layer it with a timeout as appropriate for their application.\n\n///\n\n/// # Inputs\n\n///\n\n/// - `conn`: an existing TCP connection to use. Passing an existing TCP\n\n/// connection allows this method to be used with clearnet or Tor transports.\n\n///\n\n/// - `user_agent`: a valid BIP14 user-agent, e.g., the empty string.\n\n///\n\n/// # Bug\n\n///\n\n/// `connect_isolated` only works on `Mainnet`, see #1687.\n\npub fn connect_isolated(\n\n conn: TcpStream,\n\n user_agent: String,\n\n) -> impl Future<\n\n Output = Result<\n\n BoxService<Request, Response, Box<dyn std::error::Error + Send + Sync + 'static>>,\n\n Box<dyn std::error::Error + Send + Sync + 'static>,\n\n >,\n\n> {\n\n let handshake = peer::Handshake::builder()\n\n .with_config(Config::default())\n\n .with_inbound_service(tower::service_fn(|_req| async move {\n\n Ok::<Response, Box<dyn std::error::Error + Send + Sync + 'static>>(Response::Nil)\n\n }))\n\n .with_user_agent(user_agent)\n\n .finish()\n\n .expect(\"provided mandatory builder parameters\");\n\n\n\n // We can't get the remote addr from conn, because it might be a tcp\n\n // connection through a socks proxy, not directly to the remote. 
But it\n\n // doesn't seem like zcashd cares if we give a bogus one, and Zebra doesn't\n\n // touch it at all.\n\n let remote_addr = \"0.0.0.0:8233\".parse().unwrap();\n\n\n\n Oneshot::new(handshake, (conn, remote_addr)).map_ok(|client| BoxService::new(Wrapper(client)))\n\n}\n\n\n", "file_path": "zebra-network/src/isolated.rs", "rank": 14, "score": 142678.51441247523 }, { "content": "/// Sets the Zebra shutdown flag to `true`.\n\npub fn set_shutting_down() {\n\n IS_SHUTTING_DOWN.store(true, Ordering::SeqCst);\n\n}\n", "file_path": "zebra-chain/src/shutdown.rs", "rank": 15, "score": 142672.7842686008 }, { "content": "// Helper trait for defining the exact format used to interact with disk per\n\n// type.\n\npub trait IntoDisk {\n\n // The type used to compare a value as a key to other keys stored in a\n\n // database\n\n type Bytes: AsRef<[u8]>;\n\n\n\n // function to convert the current type to its disk format in `zs_get()`\n\n // without necessarily allocating a new IVec\n\n fn as_bytes(&self) -> Self::Bytes;\n\n}\n\n\n\nimpl<'a, T> IntoDisk for &'a T\n\nwhere\n\n T: IntoDisk,\n\n{\n\n type Bytes = T::Bytes;\n\n\n\n fn as_bytes(&self) -> Self::Bytes {\n\n T::as_bytes(*self)\n\n }\n\n}\n", "file_path": "zebra-state/src/service/finalized_state/disk_format.rs", "rank": 16, "score": 141703.11022942467 }, { "content": "/// Returns `Ok(())` if `hash` passes:\n\n/// - the target difficulty limit for `network` (PoWLimit), and\n\n/// - the difficulty filter,\n\n/// based on the fields in `header`.\n\n///\n\n/// If the block is invalid, returns an error containing `height` and `hash`.\n\npub fn difficulty_is_valid(\n\n header: &Header,\n\n network: Network,\n\n height: &Height,\n\n hash: &Hash,\n\n) -> Result<(), BlockError> {\n\n let difficulty_threshold = header\n\n .difficulty_threshold\n\n .to_expanded()\n\n .ok_or(BlockError::InvalidDifficulty(*height, *hash))?;\n\n\n\n // Note: the comparisons in this function are u256 integer comparisons, like\n\n // zcashd and 
bitcoin. Greater values represent *less* work.\n\n\n\n // The PowLimit check is part of `Threshold()` in the spec, but it doesn't\n\n // actually depend on any previous blocks.\n\n if difficulty_threshold > ExpandedDifficulty::target_difficulty_limit(network) {\n\n Err(BlockError::TargetDifficultyLimit(\n\n *height,\n\n *hash,\n", "file_path": "zebra-consensus/src/block/check.rs", "rank": 17, "score": 140305.1609736117 }, { "content": "/// Returns `Ok(())` if `header.time` is less than or equal to\n\n/// 2 hours in the future, according to the node's local clock (`now`).\n\n///\n\n/// This is a non-deterministic rule, as clocks vary over time, and\n\n/// between different nodes.\n\n///\n\n/// \"In addition, a full validator MUST NOT accept blocks with nTime\n\n/// more than two hours in the future according to its clock. This\n\n/// is not strictly a consensus rule because it is nondeterministic,\n\n/// and clock time varies between nodes. Also note that a block that\n\n/// is rejected by this rule at a given point in time may later be\n\n/// accepted.\" [§7.5][7.5]\n\n///\n\n/// [7.5]: https://zips.z.cash/protocol/protocol.pdf#blockheader\n\n///\n\n/// If the header time is invalid, returns an error containing `height` and `hash`.\n\npub fn time_is_valid_at(\n\n header: &Header,\n\n now: DateTime<Utc>,\n\n height: &Height,\n\n hash: &Hash,\n\n) -> Result<(), zebra_chain::block::BlockTimeError> {\n\n header.time_is_valid_at(now, height, hash)\n\n}\n\n\n", "file_path": "zebra-consensus/src/block/check.rs", "rank": 18, "score": 140305.1609736117 }, { "content": "/// Helper trait for retrieving values from rocksdb column familys with a consistently\n\n/// defined format\n\npub trait DiskDeserialize {\n\n /// Serialize the given key and use that to get and deserialize the\n\n /// corresponding value from a rocksdb column family, if it is present.\n\n fn zs_get<K, V>(&self, cf: &rocksdb::ColumnFamily, key: &K) -> Option<V>\n\n where\n\n K: IntoDisk,\n\n V: 
FromDisk;\n\n}\n\n\n\nimpl DiskDeserialize for rocksdb::DB {\n\n fn zs_get<K, V>(&self, cf: &rocksdb::ColumnFamily, key: &K) -> Option<V>\n\n where\n\n K: IntoDisk,\n\n V: FromDisk,\n\n {\n\n let key_bytes = key.as_bytes();\n\n\n\n // We use `get_pinned_cf` to avoid taking ownership of the serialized\n\n // format because we're going to deserialize it anyways, which avoids an\n\n // extra copy\n", "file_path": "zebra-state/src/service/finalized_state/disk_format.rs", "rank": 19, "score": 139665.50020741395 }, { "content": "/// Helper trait for inserting (Key, Value) pairs into rocksdb with a consistently\n\n/// defined format\n\npub trait DiskSerialize {\n\n /// Serialize and insert the given key and value into a rocksdb column family.\n\n fn zs_insert<K, V>(&mut self, cf: &rocksdb::ColumnFamily, key: K, value: V)\n\n where\n\n K: IntoDisk + Debug,\n\n V: IntoDisk;\n\n}\n\n\n\nimpl DiskSerialize for rocksdb::WriteBatch {\n\n fn zs_insert<K, V>(&mut self, cf: &rocksdb::ColumnFamily, key: K, value: V)\n\n where\n\n K: IntoDisk + Debug,\n\n V: IntoDisk,\n\n {\n\n let key_bytes = key.as_bytes();\n\n let value_bytes = value.as_bytes();\n\n self.put_cf(cf, key_bytes, value_bytes);\n\n }\n\n}\n\n\n", "file_path": "zebra-state/src/service/finalized_state/disk_format.rs", "rank": 20, "score": 139665.50020741395 }, { "content": "#[allow(dead_code)]\n\npub fn miner_subsidy(\n\n height: Height,\n\n network: Network,\n\n non_miner_reward: Option<Amount<NonNegative>>,\n\n) -> Result<Amount<NonNegative>, Error> {\n\n if let Some(non_miner_reward) = non_miner_reward {\n\n block_subsidy(height, network)? 
- non_miner_reward\n\n } else {\n\n block_subsidy(height, network)\n\n }\n\n}\n\n\n", "file_path": "zebra-consensus/src/block/subsidy/general.rs", "rank": 21, "score": 138062.82612103797 }, { "content": "/// Validate the JoinSplit binding signature.\n\n///\n\n/// https://zips.z.cash/protocol/canopy.pdf#sproutnonmalleability\n\n/// https://zips.z.cash/protocol/canopy.pdf#txnencodingandconsensus\n\npub fn validate_joinsplit_sig(\n\n joinsplit_data: &JoinSplitData<Groth16Proof>,\n\n sighash: &[u8],\n\n) -> Result<(), TransactionError> {\n\n ed25519::VerificationKey::try_from(joinsplit_data.pub_key)\n\n .and_then(|vk| vk.verify(&joinsplit_data.sig, sighash))\n\n .map_err(TransactionError::Ed25519)\n\n}\n\n\n", "file_path": "zebra-consensus/src/transaction/check.rs", "rank": 22, "score": 138062.82612103797 }, { "content": "/// Check Merkle root validity.\n\n///\n\n/// `transaction_hashes` is a precomputed list of transaction hashes.\n\npub fn merkle_root_validity(\n\n block: &Block,\n\n transaction_hashes: &[transaction::Hash],\n\n) -> Result<(), BlockError> {\n\n let merkle_root = transaction_hashes.iter().cloned().collect();\n\n\n\n if block.header.merkle_root != merkle_root {\n\n return Err(BlockError::BadMerkleRoot {\n\n actual: merkle_root,\n\n expected: block.header.merkle_root,\n\n });\n\n }\n\n\n\n // Bitcoin's transaction Merkle trees are malleable, allowing blocks with\n\n // duplicate transactions to have the same Merkle root as blocks without\n\n // duplicate transactions. Duplicate transactions should cause a block to be\n\n // rejected, as duplicate transactions imply that the block contains a\n\n // double-spend. 
As a defense-in-depth, however, we also check that there\n\n // are no duplicate transaction hashes, by collecting into a HashSet.\n\n use std::collections::HashSet;\n\n if transaction_hashes.len() != transaction_hashes.iter().collect::<HashSet<_>>().len() {\n\n return Err(BlockError::DuplicateTransaction);\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "zebra-consensus/src/block/check.rs", "rank": 23, "score": 138062.82612103797 }, { "content": "/// Returns true if the application is shutting down.\n\n///\n\n/// Returns false otherwise.\n\npub fn is_shutting_down() -> bool {\n\n // ## Correctness:\n\n //\n\n // Since we're shutting down, and this is a one-time operation,\n\n // performance is not important. So we use the strongest memory\n\n // ordering.\n\n // https://doc.rust-lang.org/nomicon/atomics.html#sequentially-consistent\n\n IS_SHUTTING_DOWN.load(Ordering::SeqCst)\n\n}\n\n\n", "file_path": "zebra-chain/src/shutdown.rs", "rank": 24, "score": 137640.9803321608 }, { "content": "/// Consensus-critical serialization for Zcash.\n\n///\n\n/// This trait provides a generic serialization for consensus-critical\n\n/// formats, such as network messages, transactions, blocks, etc. 
It is intended\n\n/// for use only in consensus-critical contexts; in other contexts, such as\n\n/// internal storage, it would be preferable to use Serde.\n\npub trait ZcashSerialize: Sized {\n\n /// Write `self` to the given `writer` using the canonical format.\n\n ///\n\n /// This function has a `zcash_` prefix to alert the reader that the\n\n /// serialization in use is consensus-critical serialization, rather than\n\n /// some other kind of serialization.\n\n ///\n\n /// Notice that the error type is [`std::io::Error`]; this indicates that\n\n /// serialization MUST be infallible up to errors in the underlying writer.\n\n /// In other words, any type implementing `ZcashSerialize` must make illegal\n\n /// states unrepresentable.\n\n fn zcash_serialize<W: io::Write>(&self, writer: W) -> Result<(), io::Error>;\n\n\n\n /// Helper function to construct a vec to serialize the current struct into\n\n fn zcash_serialize_to_vec(&self) -> Result<Vec<u8>, io::Error> {\n\n let mut data = Vec::new();\n\n self.zcash_serialize(&mut data)?;\n\n Ok(data)\n\n }\n\n}\n", "file_path": "zebra-chain/src/serialization/zcash_serialize.rs", "rank": 25, "score": 136195.51442400322 }, { "content": "/// Consensus-critical serialization for Zcash.\n\n///\n\n/// This trait provides a generic deserialization for consensus-critical\n\n/// formats, such as network messages, transactions, blocks, etc. 
It is intended\n\n/// for use only in consensus-critical contexts; in other contexts, such as\n\n/// internal storage, it would be preferable to use Serde.\n\npub trait ZcashDeserialize: Sized {\n\n /// Try to read `self` from the given `reader`.\n\n ///\n\n /// This function has a `zcash_` prefix to alert the reader that the\n\n /// serialization in use is consensus-critical serialization, rather than\n\n /// some other kind of serialization.\n\n fn zcash_deserialize<R: io::Read>(reader: R) -> Result<Self, SerializationError>;\n\n}\n\n\n\nimpl<T: ZcashDeserialize> ZcashDeserialize for Vec<T> {\n\n fn zcash_deserialize<R: io::Read>(mut reader: R) -> Result<Self, SerializationError> {\n\n let len = reader.read_compactsize()?;\n\n // We're given len, so we could preallocate. But blindly preallocating\n\n // without a size bound can allow DOS attacks, and there's no way to\n\n // pass a size bound in a ZcashDeserialize impl, so instead we allocate\n\n // as we read from the reader. (The maximum block and transaction sizes\n\n // limit the eventual size of these allocations.)\n\n let mut vec = Vec::new();\n\n for _ in 0..len {\n\n vec.push(T::zcash_deserialize(&mut reader)?);\n\n }\n\n Ok(vec)\n\n }\n\n}\n\n\n", "file_path": "zebra-chain/src/serialization/zcash_deserialize.rs", "rank": 26, "score": 136195.51442400322 }, { "content": "/// Returns a list of outputs in `Transaction`, which have a value equal to `Amount`.\n\npub fn find_output_with_amount(\n\n transaction: &Transaction,\n\n amount: Amount<NonNegative>,\n\n) -> Vec<transparent::Output> {\n\n // TODO: shielded coinbase - Heartwood\n\n transaction\n\n .outputs()\n\n .iter()\n\n .filter(|o| o.value == amount)\n\n .cloned()\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use color_eyre::Report;\n\n\n\n #[test]\n\n fn halving_test() -> Result<(), Report> {\n", "file_path": "zebra-consensus/src/block/subsidy/general.rs", "rank": 27, "score": 135936.09115167888 }, { "content": "/// 
Helper type for retrieving types from the disk with the correct format.\n\n///\n\n/// The ivec should be correctly encoded by IntoDisk.\n\npub trait FromDisk: Sized {\n\n /// Function to convert the disk bytes back into the deserialized type.\n\n ///\n\n /// # Panics\n\n ///\n\n /// - if the input data doesn't deserialize correctly\n\n fn from_bytes(bytes: impl AsRef<[u8]>) -> Self;\n\n}\n\n\n\nimpl<T> FromDisk for Arc<T>\n\nwhere\n\n T: FromDisk,\n\n{\n\n fn from_bytes(bytes: impl AsRef<[u8]>) -> Self {\n\n Arc::new(T::from_bytes(bytes))\n\n }\n\n}\n\n\n\nimpl IntoDisk for Block {\n\n type Bytes = Vec<u8>;\n", "file_path": "zebra-state/src/service/finalized_state/disk_format.rs", "rank": 28, "score": 134153.02857075864 }, { "content": "/// Generate a block with no transactions (not even a coinbase transaction).\n\n///\n\n/// The generated block should fail validation.\n\npub fn block_no_transactions() -> Block {\n\n Block {\n\n header: block::Header::zcash_deserialize(&zebra_test::vectors::DUMMY_HEADER[..]).unwrap(),\n\n transactions: Vec::new(),\n\n }\n\n}\n\n\n\n/// Return a new `(chain_verifier, state_service)` using the hard-coded\n\n/// checkpoint list for `network`.\n\nasync fn verifiers_from_network(\n\n network: Network,\n\n) -> (\n\n impl Service<\n\n Arc<Block>,\n\n Response = block::Hash,\n\n Error = BoxError,\n\n Future = impl Future<Output = Result<block::Hash, BoxError>>,\n\n > + Send\n\n + Clone\n\n + 'static,\n", "file_path": "zebra-consensus/src/chain/tests.rs", "rank": 29, "score": 133031.02218459797 }, { "content": "/// Generate a block header\n\npub fn block_header() -> Header {\n\n Header::zcash_deserialize(&zebra_test::vectors::DUMMY_HEADER[..]).unwrap()\n\n}\n\n\n", "file_path": "zebra-chain/src/block/tests/generate.rs", "rank": 30, "score": 130904.28721523889 }, { "content": "/// Extends [`Write`] with methods for writing Zcash/Bitcoin types.\n\n///\n\n/// [`Write`]: https://doc.rust-lang.org/std/io/trait.Write.html\n\npub trait 
WriteZcashExt: io::Write {\n\n /// Writes a `u64` using the Bitcoin `CompactSize` encoding.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```rust\n\n /// use zebra_chain::serialization::WriteZcashExt;\n\n ///\n\n /// let mut buf = Vec::new();\n\n /// buf.write_compactsize(0x12).unwrap();\n\n /// assert_eq!(buf, b\"\\x12\");\n\n ///\n\n /// let mut buf = Vec::new();\n\n /// buf.write_compactsize(0xfd).unwrap();\n\n /// assert_eq!(buf, b\"\\xfd\\xfd\\x00\");\n\n ///\n\n /// let mut buf = Vec::new();\n\n /// buf.write_compactsize(0xaafd).unwrap();\n\n /// assert_eq!(buf, b\"\\xfd\\xfd\\xaa\");\n\n ///\n", "file_path": "zebra-chain/src/serialization/write_zcash.rs", "rank": 31, "score": 127632.46852485277 }, { "content": "/// Generate a block with multiple transactions just above limit\n\npub fn oversized_multi_transaction_block() -> Block {\n\n multi_transaction_block(true)\n\n}\n\n\n", "file_path": "zebra-chain/src/block/tests/generate.rs", "rank": 32, "score": 126963.6125931445 }, { "content": "/// Generate a block with one transaction and multiple inputs just below limit\n\npub fn large_single_transaction_block() -> Block {\n\n single_transaction_block(false)\n\n}\n\n\n", "file_path": "zebra-chain/src/block/tests/generate.rs", "rank": 33, "score": 126963.6125931445 }, { "content": "/// Generate a block with one transaction and multiple inputs just above limit\n\npub fn oversized_single_transaction_block() -> Block {\n\n single_transaction_block(true)\n\n}\n\n\n", "file_path": "zebra-chain/src/block/tests/generate.rs", "rank": 34, "score": 126963.6125931445 }, { "content": "/// Generate a block with multiple transactions just below limit\n\npub fn large_multi_transaction_block() -> Block {\n\n multi_transaction_block(false)\n\n}\n\n\n", "file_path": "zebra-chain/src/block/tests/generate.rs", "rank": 35, "score": 126963.6125931445 }, { "content": "/// Obtain a read-only (multi-reader) lock on the application configuration.\n\n///\n\n/// Panics if the application 
configuration has not been loaded.\n\npub fn app_config() -> config::Reader<ZebradApp> {\n\n config::Reader::new(&APPLICATION)\n\n}\n\n\n\n/// Zebrad Application\n\n#[derive(Debug)]\n\npub struct ZebradApp {\n\n /// Application configuration.\n\n config: Option<ZebradConfig>,\n\n\n\n /// Application state.\n\n state: application::State<Self>,\n\n}\n\n\n\nimpl ZebradApp {\n\n /// Are standard output and standard error both connected to ttys?\n\n fn outputs_are_ttys() -> bool {\n\n atty::is(atty::Stream::Stdout) && atty::is(atty::Stream::Stderr)\n\n }\n\n\n", "file_path": "zebrad/src/application.rs", "rank": 36, "score": 123155.73114677425 }, { "content": "/// Obtain a read-only (multi-reader) lock on the application state.\n\n///\n\n/// Panics if the application state has not been initialized.\n\npub fn app_reader() -> application::lock::Reader<ZebradApp> {\n\n APPLICATION.read()\n\n}\n\n\n", "file_path": "zebrad/src/application.rs", "rank": 37, "score": 118089.37561378883 }, { "content": "/// Obtain an exclusive mutable lock on the application state.\n\npub fn app_writer() -> application::lock::Writer<ZebradApp> {\n\n APPLICATION.write()\n\n}\n\n\n", "file_path": "zebrad/src/application.rs", "rank": 38, "score": 118089.37561378883 }, { "content": "/// Returns the hash for the genesis block in `network`.\n\npub fn genesis_hash(network: Network) -> block::Hash {\n\n match network {\n\n // zcash-cli getblockhash 0\n\n Network::Mainnet => \"00040fe8ec8471911baa1db1266ea15dd06b4a8a5c453883c000b031973dce08\",\n\n // zcash-cli -testnet getblockhash 0\n\n Network::Testnet => \"05a60a92d99d85997cce3b87616c089f6124d7342af37106edc76126334a2c38\",\n\n }\n\n .parse()\n\n .expect(\"hard-coded hash parses\")\n\n}\n", "file_path": "zebra-chain/src/parameters/genesis.rs", "rank": 39, "score": 116168.55078427677 }, { "content": "pub fn panic_event_from<T>(msg: T) -> Event<'static>\n\nwhere\n\n T: ToString,\n\n{\n\n let exception = Exception {\n\n ty: \"panic\".into(),\n\n mechanism: 
Some(Mechanism {\n\n ty: \"panic\".into(),\n\n handled: Some(false),\n\n ..Default::default()\n\n }),\n\n value: Some(msg.to_string()),\n\n // Sentry does not handle panic = abort well yet, and when gibven this\n\n // stacktrace, it consists only of this line, making Sentry dedupe\n\n // events together by their stacetrace fingerprint incorrectly.\n\n //\n\n // stacktrace: current_stacktrace(),\n\n ..Default::default()\n\n };\n\n\n\n Event {\n\n exception: vec![exception].into(),\n\n level: sentry::Level::Fatal,\n\n ..Default::default()\n\n }\n\n}\n", "file_path": "zebrad/src/sentry.rs", "rank": 40, "score": 115447.96067211195 }, { "content": "/// Returns `Ok(())` if there is exactly one coinbase transaction in `Block`,\n\n/// and that coinbase transaction is the first transaction in the block.\n\n///\n\n/// \"The first (and only the first) transaction in a block is a coinbase\n\n/// transaction, which collects and spends any miner subsidy and transaction\n\n/// fees paid by transactions included in this block.\" [§3.10][3.10]\n\n///\n\n/// [3.10]: https://zips.z.cash/protocol/protocol.pdf#coinbasetransactions\n\npub fn coinbase_is_first(block: &Block) -> Result<(), BlockError> {\n\n let first = block\n\n .transactions\n\n .get(0)\n\n .ok_or(BlockError::NoTransactions)?;\n\n let mut rest = block.transactions.iter().skip(1);\n\n if !first.is_coinbase() {\n\n return Err(TransactionError::CoinbasePosition)?;\n\n }\n\n if rest.any(|tx| tx.contains_coinbase_input()) {\n\n return Err(TransactionError::CoinbaseInputFound)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "zebra-consensus/src/block/check.rs", "rank": 41, "score": 114339.64297199558 }, { "content": "/// Checks that the transaction has inputs and outputs.\n\n///\n\n/// More specifically:\n\n///\n\n/// * at least one of tx_in_count, nShieldedSpend, and nJoinSplit MUST be non-zero.\n\n/// * at least one of tx_out_count, nShieldedOutput, and nJoinSplit MUST be non-zero.\n\n///\n\n/// 
https://zips.z.cash/protocol/canopy.pdf#txnencodingandconsensus\n\npub fn has_inputs_and_outputs(tx: &Transaction) -> Result<(), TransactionError> {\n\n // The consensus rule is written in terms of numbers, but our transactions\n\n // hold enum'd data. Mixing pattern matching and numerical checks is risky,\n\n // so convert everything to counts and sum up.\n\n match tx {\n\n Transaction::V4 {\n\n inputs,\n\n outputs,\n\n joinsplit_data,\n\n shielded_data,\n\n ..\n\n } => {\n\n let tx_in_count = inputs.len();\n\n let tx_out_count = outputs.len();\n\n let n_joinsplit = joinsplit_data\n\n .as_ref()\n\n .map(|d| d.joinsplits().count())\n\n .unwrap_or(0);\n\n let n_shielded_spend = shielded_data\n\n .as_ref()\n", "file_path": "zebra-consensus/src/transaction/check.rs", "rank": 42, "score": 114339.64297199558 }, { "content": "/// Returns a random port number from the ephemeral port range.\n\n///\n\n/// Does not check if the port is already in use. It's impossible to do this\n\n/// check in a reliable, cross-platform way.\n\n///\n\n/// ## Usage\n\n///\n\n/// If you want a once-off random unallocated port, use\n\n/// `random_unallocated_port`. Don't use this function if you don't need\n\n/// to - it has a small risk of port conflcits.\n\n///\n\n/// Use this function when you need to use the same random port multiple\n\n/// times. 
For example: setting up both ends of a connection, or re-using\n\n/// the same port multiple times.\n\nfn random_known_port() -> u16 {\n\n // Use the intersection of the IANA ephemeral port range, and the Linux\n\n // ephemeral port range:\n\n // https://en.wikipedia.org/wiki/Ephemeral_port#Range\n\n rand::thread_rng().gen_range(49152, 60999)\n\n}\n\n\n\n/// Returns the \"magic\" port number that tells the operating system to\n\n/// choose a random unallocated port.\n\n///\n\n/// The OS chooses a different port each time it opens a connection or\n\n/// listener with this magic port number.\n\n///\n\n/// ## Usage\n\n///\n\n/// See the usage note for `random_known_port`.\n", "file_path": "zebrad/tests/acceptance.rs", "rank": 43, "score": 112785.29935429417 }, { "content": "#[allow(dead_code)]\n\nfn random_unallocated_port() -> u16 {\n\n 0\n\n}\n\n\n\n#[tokio::test]\n\nasync fn metrics_endpoint() -> Result<()> {\n\n use hyper::Client;\n\n\n\n zebra_test::init();\n\n\n\n // [Note on port conflict](#Note on port conflict)\n\n let port = random_known_port();\n\n let endpoint = format!(\"127.0.0.1:{}\", port);\n\n let url = format!(\"http://{}\", endpoint);\n\n\n\n // Write a configuration that has metrics endpoint_addr set\n\n let mut config = default_test_config()?;\n\n config.metrics.endpoint_addr = Some(endpoint.parse().unwrap());\n\n\n\n let dir = TempDir::new(\"zebrad_tests\")?.with_config(&mut config)?;\n", "file_path": "zebrad/tests/acceptance.rs", "rank": 44, "score": 112772.42746269662 }, { "content": "#[test]\n\nfn compact_zero() {\n\n zebra_test::init();\n\n\n\n let natural_zero = CompactDifficulty(0);\n\n assert_eq!(natural_zero.to_expanded(), None);\n\n assert_eq!(natural_zero.to_work(), None);\n\n\n\n // Small value zeroes\n\n let small_zero_1 = CompactDifficulty(1);\n\n assert_eq!(small_zero_1.to_expanded(), None);\n\n assert_eq!(small_zero_1.to_work(), None);\n\n let small_zero_max = CompactDifficulty(UNSIGNED_MANTISSA_MASK);\n\n 
assert_eq!(small_zero_max.to_expanded(), None);\n\n assert_eq!(small_zero_max.to_work(), None);\n\n\n\n // Special-cased zeroes, negative in the floating-point representation\n\n let sc_zero = CompactDifficulty(SIGN_BIT);\n\n assert_eq!(sc_zero.to_expanded(), None);\n\n assert_eq!(sc_zero.to_work(), None);\n\n let sc_zero_next = CompactDifficulty(SIGN_BIT + 1);\n", "file_path": "zebra-chain/src/work/difficulty/tests/vectors.rs", "rank": 45, "score": 110971.84184019027 }, { "content": "#[test]\n\nfn compact_extremes() {\n\n zebra_test::init();\n\n\n\n // Values equal to one\n\n let expanded_one = Some(ExpandedDifficulty(U256::one()));\n\n let work_one = None;\n\n\n\n let canonical_one = CompactDifficulty((1 << PRECISION) + (1 << 16));\n\n assert_eq!(canonical_one.to_expanded(), expanded_one);\n\n assert_eq!(\n\n canonical_one.to_expanded().unwrap().to_compact(),\n\n canonical_one\n\n );\n\n assert_eq!(canonical_one.to_work(), work_one);\n\n\n\n let another_one = CompactDifficulty(OFFSET as u32 * (1 << PRECISION) + 1);\n\n assert_eq!(another_one.to_expanded(), expanded_one);\n\n assert_eq!(\n\n another_one.to_expanded().unwrap().to_compact(),\n\n canonical_one\n", "file_path": "zebra-chain/src/work/difficulty/tests/vectors.rs", "rank": 46, "score": 110971.84184019027 }, { "content": "/// Check that a coinbase tx does not have any JoinSplit or Spend descriptions.\n\n///\n\n/// https://zips.z.cash/protocol/canopy.pdf#txnencodingandconsensus\n\npub fn coinbase_tx_no_joinsplit_or_spend(tx: &Transaction) -> Result<(), TransactionError> {\n\n if tx.is_coinbase() {\n\n match tx {\n\n // Check if there is any JoinSplitData.\n\n Transaction::V4 {\n\n joinsplit_data: Some(_),\n\n ..\n\n } => Err(TransactionError::CoinbaseHasJoinSplit),\n\n\n\n // The ShieldedData contains both Spends and Outputs, and Outputs\n\n // are allowed post-Heartwood, so we have to count Spends.\n\n Transaction::V4 {\n\n shielded_data: Some(shielded_data),\n\n ..\n\n } if shielded_data.spends().count() 
> 0 => Err(TransactionError::CoinbaseHasSpend),\n\n\n\n Transaction::V4 { .. } => Ok(()),\n\n\n\n Transaction::V1 { .. } | Transaction::V2 { .. } | Transaction::V3 { .. } => {\n\n unreachable!(\"tx version is checked first\")\n\n }\n\n }\n\n } else {\n\n Ok(())\n\n }\n\n}\n", "file_path": "zebra-consensus/src/transaction/check.rs", "rank": 47, "score": 110932.39264148785 }, { "content": "/// Returns `Ok(())` if the `EquihashSolution` is valid for `header`\n\npub fn equihash_solution_is_valid(header: &Header) -> Result<(), equihash::Error> {\n\n header.solution.check(&header)\n\n}\n\n\n", "file_path": "zebra-consensus/src/block/check.rs", "rank": 48, "score": 109954.79366477471 }, { "content": "/// Runs a command\n\npub fn test_cmd(command_path: &str, tempdir: &Path) -> Result<Command> {\n\n let mut cmd = Command::new(command_path);\n\n cmd.current_dir(tempdir);\n\n\n\n Ok(cmd)\n\n}\n\n\n", "file_path": "zebra-test/src/command.rs", "rank": 49, "score": 107642.70960847387 }, { "content": "#[test]\n\n#[spandoc::spandoc]\n\nfn compact_bitcoin_test_vectors() {\n\n zebra_test::init();\n\n\n\n // We use two spans, so we can diagnose conversion panics, and mismatching results\n\n for (compact, expected_expanded, expected_work) in COMPACT_DIFFICULTY_CASES.iter().cloned() {\n\n /// SPANDOC: Convert compact to expanded and work {?compact, ?expected_expanded, ?expected_work}\n\n {\n\n let expected_expanded = expected_expanded.map(U256::from).map(ExpandedDifficulty);\n\n let expected_work = expected_work.map(Work);\n\n\n\n let compact = CompactDifficulty(compact);\n\n let actual_expanded = compact.to_expanded();\n\n let actual_work = compact.to_work();\n\n let canonical_compact = actual_expanded.map(|e| e.to_compact());\n\n let round_trip_expanded = canonical_compact.map(|c| c.to_expanded());\n\n\n\n /// SPANDOC: Test that compact produces the expected expanded and work {?compact, ?expected_expanded, ?actual_expanded, ?expected_work, ?actual_work, ?canonical_compact, 
?round_trip_expanded}\n\n {\n\n assert_eq!(actual_expanded, expected_expanded);\n\n if expected_expanded.is_some() {\n\n assert_eq!(round_trip_expanded.unwrap(), actual_expanded);\n\n }\n\n assert_eq!(actual_work, expected_work);\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Test blocks using CompactDifficulty.\n", "file_path": "zebra-chain/src/work/difficulty/tests/vectors.rs", "rank": 50, "score": 106817.95517228471 }, { "content": "/// Get the heights of the blocks for constructing a block_locator list\n\npub fn block_locator_heights(tip_height: block::Height) -> Vec<block::Height> {\n\n // Stop at the reorg limit, or the genesis block.\n\n let min_locator_height = tip_height\n\n .0\n\n .saturating_sub(constants::MAX_BLOCK_REORG_HEIGHT);\n\n let locators = iter::successors(Some(1u32), |h| h.checked_mul(2))\n\n .flat_map(move |step| tip_height.0.checked_sub(step));\n\n let locators = iter::once(tip_height.0)\n\n .chain(locators)\n\n .take_while(move |&height| height > min_locator_height)\n\n .chain(iter::once(min_locator_height))\n\n .map(block::Height);\n\n\n\n let locators = locators.collect();\n\n tracing::info!(\n\n ?tip_height,\n\n ?min_locator_height,\n\n ?locators,\n\n \"created block locator\"\n\n );\n\n locators\n\n}\n", "file_path": "zebra-state/src/util.rs", "rank": 51, "score": 105978.89364351014 }, { "content": "/// Returns `Ok(())` if the block subsidy and miner fees in `block` are valid for `network`\n\n///\n\n/// [3.9]: https://zips.z.cash/protocol/protocol.pdf#subsidyconcepts\n\npub fn subsidy_is_valid(block: &Block, network: Network) -> Result<(), BlockError> {\n\n let height = block.coinbase_height().ok_or(SubsidyError::NoCoinbase)?;\n\n let coinbase = block.transactions.get(0).ok_or(SubsidyError::NoCoinbase)?;\n\n\n\n let halving_div = subsidy::general::halving_divisor(height, network);\n\n let canopy_activation_height = NetworkUpgrade::Canopy\n\n .activation_height(network)\n\n .expect(\"Canopy activation height is known\");\n\n\n\n // TODO: the sum of 
the coinbase transaction outputs must be less than or equal to the block subsidy plus transaction fees\n\n\n\n // Check founders reward and funding streams\n\n if height < SLOW_START_INTERVAL {\n\n unreachable!(\n\n \"unsupported block height: callers should handle blocks below {:?}\",\n\n SLOW_START_INTERVAL\n\n )\n\n } else if halving_div.count_ones() != 1 {\n\n unreachable!(\"invalid halving divisor: the halving divisor must be a non-zero power of two\")\n\n } else if height < canopy_activation_height {\n", "file_path": "zebra-consensus/src/block/check.rs", "rank": 52, "score": 105978.89364351014 }, { "content": "pub fn layer<S>(path_root: &Path) -> (impl Layer<S>, Grapher)\n\nwhere\n\n S: Subscriber + for<'span> LookupSpan<'span>,\n\n{\n\n let path = path_root.with_extension(\"folded\");\n\n let (layer, guard) = tracing_flame::FlameLayer::with_file(&path).unwrap();\n\n let layer = layer.with_empty_samples(false).with_threads_collapsed(true);\n\n let flamegrapher = Grapher { guard, path };\n\n (layer, flamegrapher)\n\n}\n\n\n\nimpl Grapher {\n\n pub fn write_flamegraph(&self) -> Result<(), Report> {\n\n self.guard.flush()?;\n\n let out_path = self.path.with_extension(\"svg\");\n\n let inf = File::open(&self.path)?;\n\n let reader = BufReader::new(inf);\n\n\n\n let out = File::create(out_path)?;\n\n let writer = BufWriter::new(out);\n\n\n\n let mut opts = inferno::flamegraph::Options::default();\n\n info!(\"writing flamegraph to disk...\");\n\n inferno::flamegraph::from_reader(&mut opts, reader, writer)?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "zebrad/src/components/tracing/flame.rs", "rank": 53, "score": 103938.16342780777 }, { "content": "/// Initialize a state service from the provided [`Config`].\n\n///\n\n/// Each `network` has its own separate on-disk database.\n\n///\n\n/// To share access to the state, wrap the returned service in a `Buffer`. 
It's\n\n/// possible to construct multiple state services in the same application (as\n\n/// long as they, e.g., use different storage locations), but doing so is\n\n/// probably not what you want.\n\npub fn init(config: Config, network: Network) -> BoxService<Request, Response, BoxError> {\n\n BoxService::new(StateService::new(config, network))\n\n}\n", "file_path": "zebra-state/src/service.rs", "rank": 54, "score": 100538.34526246827 }, { "content": "/// `BlockSubsidy(height)` as described in [protocol specification §7.7][7.7]\n\n///\n\n/// [7.7]: https://zips.z.cash/protocol/protocol.pdf#subsidies\n\npub fn block_subsidy(height: Height, network: Network) -> Result<Amount<NonNegative>, Error> {\n\n let blossom_height = Blossom\n\n .activation_height(network)\n\n .expect(\"blossom activation height should be available\");\n\n let halving_div = halving_divisor(height, network);\n\n\n\n if height < SLOW_START_INTERVAL {\n\n unreachable!(\n\n \"unsupported block height: callers should handle blocks below {:?}\",\n\n SLOW_START_INTERVAL\n\n )\n\n } else if height < blossom_height {\n\n // this calculation is exact, because the halving divisor is 1 here\n\n Amount::try_from(MAX_BLOCK_SUBSIDY / halving_div)\n\n } else {\n\n let scaled_max_block_subsidy = MAX_BLOCK_SUBSIDY / BLOSSOM_POW_TARGET_SPACING_RATIO;\n\n // in future halvings, this calculation might not be exact\n\n // Amount division is implemented using integer division,\n\n // which truncates (rounds down) the result, as specified\n\n Amount::try_from(scaled_max_block_subsidy / halving_div)\n\n }\n\n}\n\n\n\n/// `MinerSubsidy(height)` as described in [protocol specification §7.7][7.7]\n\n///\n\n/// [7.7]: https://zips.z.cash/protocol/protocol.pdf#subsidies\n\n///\n\n/// `non_miner_reward` is the founders reward or funding stream value.\n\n/// If all the rewards for a block go to the miner, use `None`.\n", "file_path": "zebra-consensus/src/block/subsidy/general.rs", "rank": 55, "score": 97558.88380126149 }, { 
"content": "#[cfg(test)]\n\npub fn new_outputs(block: &block::Block) -> std::collections::HashMap<transparent::OutPoint, Utxo> {\n\n use std::collections::HashMap;\n\n\n\n let height = block.coinbase_height().expect(\"block has coinbase height\");\n\n\n\n let mut new_outputs = HashMap::default();\n\n for transaction in &block.transactions {\n\n let hash = transaction.hash();\n\n let from_coinbase = transaction.is_coinbase();\n\n for (index, output) in transaction.outputs().iter().cloned().enumerate() {\n\n let index = index as u32;\n\n new_outputs.insert(\n\n transparent::OutPoint { hash, index },\n\n Utxo {\n\n output,\n\n height,\n\n from_coinbase,\n\n },\n\n );\n\n }\n\n }\n\n\n\n new_outputs\n\n}\n", "file_path": "zebra-state/src/utxo.rs", "rank": 56, "score": 97390.11324436357 }, { "content": "/// `FoundersReward(height)` as described in [protocol specification §7.7][7.7]\n\n///\n\n/// [7.7]: https://zips.z.cash/protocol/protocol.pdf#subsidies\n\npub fn founders_reward(height: Height, network: Network) -> Result<Amount<NonNegative>, Error> {\n\n if halving_divisor(height, network) == 1 {\n\n // this calculation is exact, because the block subsidy is divisible by\n\n // the FOUNDERS_FRACTION_DIVISOR until long after the first halving\n\n block_subsidy(height, network)? 
/ FOUNDERS_FRACTION_DIVISOR\n\n } else {\n\n Amount::try_from(0)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use color_eyre::Report;\n\n use zebra_chain::parameters::NetworkUpgrade::*;\n\n #[test]\n\n fn test_founders_reward() -> Result<(), Report> {\n\n zebra_test::init();\n\n\n\n let network = Network::Mainnet;\n", "file_path": "zebra-consensus/src/block/subsidy/founders_reward.rs", "rank": 57, "score": 96164.63421794423 }, { "content": "#[allow(non_snake_case)]\n\npub fn mixing_pedersen_hash(P: jubjub::ExtendedPoint, x: jubjub::Fr) -> jubjub::ExtendedPoint {\n\n const J: [u8; 8] = *b\"Zcash_J_\";\n\n\n\n P + find_group_hash(J, b\"\") * x\n\n}\n\n\n", "file_path": "zebra-chain/src/sapling/commitment/pedersen_hashes.rs", "rank": 58, "score": 94827.53214752828 }, { "content": "#[allow(non_snake_case)]\n\npub fn pedersen_hash(domain: [u8; 8], M: &BitVec<Lsb0, u8>) -> jubjub::Fq {\n\n jubjub::AffinePoint::from(pedersen_hash_to_point(domain, M)).get_u()\n\n}\n\n\n\n/// Mixing Pedersen Hash Function\n\n///\n\n/// Used to compute ρ from a note commitment and its position in the note\n\n/// commitment tree. 
It takes as input a Pedersen commitment P, and hashes it\n\n/// with another input x.\n\n///\n\n/// MixingPedersenHash(P, x) := P + [x]FindGroupHash^J^(r)(“Zcash_J_”, “”)\n\n///\n\n/// https://zips.z.cash/protocol/protocol.pdf#concretemixinghash\n", "file_path": "zebra-chain/src/sapling/commitment/pedersen_hashes.rs", "rank": 59, "score": 94540.72284962196 }, { "content": "/// Construct a 'windowed' Pedersen commitment by reusing a Pederson hash\n\n/// construction, and adding a randomized point on the Jubjub curve.\n\n///\n\n/// WindowedPedersenCommit_r (s) := \\\n\n/// PedersenHashToPoint(“Zcash_PH”, s) + [r]FindGroupHash^J^(r)(“Zcash_PH”, “r”)\n\n///\n\n/// https://zips.z.cash/protocol/protocol.pdf#concretewindowedcommit\n\npub fn windowed_pedersen_commitment(r: jubjub::Fr, s: &BitVec<Lsb0, u8>) -> jubjub::ExtendedPoint {\n\n const D: [u8; 8] = *b\"Zcash_PH\";\n\n\n\n pedersen_hash_to_point(D, &s) + find_group_hash(D, b\"r\") * r\n\n}\n\n\n", "file_path": "zebra-chain/src/sapling/commitment/pedersen_hashes.rs", "rank": 60, "score": 93203.62077920602 }, { "content": "fn write_coinbase_height<W: io::Write>(height: block::Height, mut w: W) -> Result<(), io::Error> {\n\n // We can't write this as a match statement on stable until exclusive range\n\n // guards are stabilized.\n\n // The Bitcoin encoding requires that the most significant byte is below 0x80,\n\n // so the ranges run up to 2^{n-1} rather than 2^n.\n\n if let 0 = height.0 {\n\n // Genesis block does not include height.\n\n } else if let h @ 1..=16 = height.0 {\n\n w.write_u8(0x50 + (h as u8))?;\n\n } else if let h @ 17..=127 = height.0 {\n\n w.write_u8(0x01)?;\n\n w.write_u8(h as u8)?;\n\n } else if let h @ 128..=32767 = height.0 {\n\n w.write_u8(0x02)?;\n\n w.write_u16::<LittleEndian>(h as u16)?;\n\n } else if let h @ 32768..=8_388_607 = height.0 {\n\n w.write_u8(0x03)?;\n\n w.write_u8(h as u8)?;\n\n w.write_u8((h >> 8) as u8)?;\n\n w.write_u8((h >> 16) as u8)?;\n", "file_path": 
"zebra-chain/src/transparent/serialize.rs", "rank": 61, "score": 92363.42406093364 }, { "content": "#[allow(non_snake_case)]\n\npub fn pedersen_hash_to_point(domain: [u8; 8], M: &BitVec<Lsb0, u8>) -> jubjub::ExtendedPoint {\n\n let mut result = jubjub::ExtendedPoint::identity();\n\n\n\n // Split M into n segments of 3 * c bits, where c = 63, padding the last\n\n // segment with zeros.\n\n //\n\n // This loop is 1-indexed per the math definitions in the spec.\n\n //\n\n // https://zips.z.cash/protocol/protocol.pdf#concretepedersenhash\n\n for (i, segment) in M\n\n .chunks(189)\n\n .enumerate()\n\n .map(|(i, segment)| (i + 1, segment))\n\n {\n\n result += I_i(domain, i as u32) * M_i(&segment);\n\n }\n\n\n\n result\n\n}\n\n\n\n/// Pedersen Hash Function\n\n///\n\n/// This is technically returning 255 (l_MerkleSapling) bits, not 256.\n\n///\n\n/// https://zips.z.cash/protocol/protocol.pdf#concretepedersenhash\n", "file_path": "zebra-chain/src/sapling/commitment/pedersen_hashes.rs", "rank": 62, "score": 91920.22323814643 }, { "content": "/// Extension trait for methods on `tempdir::TempDir` for using it as a test\n\n/// directory for `zebrad`.\n\ntrait ZebradTestDirExt\n\nwhere\n\n Self: AsRef<Path> + Sized,\n\n{\n\n /// Spawn `zebrad` with `args` as a child process in this test directory,\n\n /// potentially taking ownership of the tempdir for the duration of the\n\n /// child process.\n\n ///\n\n /// If there is a config in the test directory, pass it to `zebrad`.\n\n fn spawn_child(self, args: &[&str]) -> Result<TestChild<Self>>;\n\n\n\n /// Create a config file and use it for all subsequently spawned processes.\n\n /// Returns an error if the config already exists.\n\n ///\n\n /// If needed:\n\n /// - recursively create directories for the config and state\n\n /// - set `config.cache_dir` based on `self`\n\n fn with_config(self, config: &mut ZebradConfig) -> Result<Self>;\n\n\n\n /// Create a config file with the exact contents of `config`, and use it for\n", 
"file_path": "zebrad/tests/acceptance.rs", "rank": 63, "score": 78044.70651738113 }, { "content": "trait ToAddressWithNetwork {\n\n /// Convert `self` to an `Address`, given the current `network`.\n\n fn to_address(&self, network: Network) -> Address;\n\n}\n\n\n\nimpl ToAddressWithNetwork for Script {\n\n fn to_address(&self, network: Network) -> Address {\n\n Address::PayToScriptHash {\n\n network,\n\n script_hash: Address::hash_payload(&self.0[..]),\n\n }\n\n }\n\n}\n\n\n\nimpl ToAddressWithNetwork for PublicKey {\n\n fn to_address(&self, network: Network) -> Address {\n\n Address::PayToPublicKeyHash {\n\n network,\n\n pub_key_hash: Address::hash_payload(&self.serialize()[..]),\n\n }\n", "file_path": "zebra-chain/src/transparent/address.rs", "rank": 64, "score": 78034.4452655006 }, { "content": "fn main() {\n\n let mut flags = ConstantsFlags::empty();\n\n flags.toggle(ConstantsFlags::SHA_SHORT);\n\n\n\n // We want to use REBUILD_ON_HEAD_CHANGE here, but vergen assumes that the\n\n // git directory is in the crate directory, and Zebra uses a workspace.\n\n // See rustyhorde/vergen#15 and rustyhorde/vergen#21 for details.\n\n let result = generate_rebuild_key();\n\n if let Err(err) = result {\n\n eprintln!(\"Error generating 'cargo:rerun-if-changed': {:?}\", err);\n\n }\n\n\n\n // Generate the 'cargo:' key output\n\n generate_cargo_keys(flags).expect(\"Unable to generate the cargo keys!\");\n\n}\n\n\n", "file_path": "zebrad/build.rs", "rank": 65, "score": 77106.2985816294 }, { "content": "fn transaction_hashes(items: &'_ [InventoryHash]) -> impl Iterator<Item = transaction::Hash> + '_ {\n\n items.iter().filter_map(|item| {\n\n if let InventoryHash::Tx(hash) = item {\n\n Some(*hash)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "zebra-network/src/peer/connection.rs", "rank": 66, "score": 76222.49256634797 }, { "content": "fn block_hashes(items: &'_ [InventoryHash]) -> impl Iterator<Item = block::Hash> + '_ {\n\n items.iter().filter_map(|item| {\n\n if 
let InventoryHash::Block(hash) = item {\n\n Some(*hash)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n", "file_path": "zebra-network/src/peer/connection.rs", "rank": 67, "score": 76222.49256634797 }, { "content": "/// Sync `network` until `zebrad` reaches `height`, and ensure that\n\n/// the output contains `stop_regex`. If `reuse_tempdir` is supplied,\n\n/// use it as the test's temporary directory.\n\n///\n\n/// If `stop_regex` is encountered before the process exits, kills the\n\n/// process, and mark the test as successful, even if `height` has not\n\n/// been reached.\n\n///\n\n/// On success, returns the associated `TempDir`. Returns an error if\n\n/// the child exits or `timeout` elapses before `regex` is found.\n\n///\n\n/// If your test environment does not have network access, skip\n\n/// this test by setting the `ZEBRA_SKIP_NETWORK_TESTS` env var.\n\nfn sync_until(\n\n height: Height,\n\n network: Network,\n\n stop_regex: &str,\n\n timeout: Duration,\n\n reuse_tempdir: Option<TempDir>,\n\n) -> Result<TempDir> {\n\n zebra_test::init();\n\n\n\n if env::var_os(\"ZEBRA_SKIP_NETWORK_TESTS\").is_some() {\n\n // This message is captured by the test runner, use\n\n // `cargo test -- --nocapture` to see it.\n\n eprintln!(\"Skipping network test because '$ZEBRA_SKIP_NETWORK_TESTS' is set.\");\n\n return testdir();\n\n }\n\n\n\n // Use a persistent state, so we can handle large syncs\n\n let mut config = persistent_test_config()?;\n\n // TODO: add convenience methods?\n\n config.network.network = network;\n", "file_path": "zebrad/tests/acceptance.rs", "rank": 68, "score": 75622.12339690303 }, { "content": " // Quick helper trait for making queued blocks with throw away channels\n\n trait IntoQueued {\n\n fn into_queued(self) -> QueuedBlock;\n\n }\n\n\n\n impl IntoQueued for Arc<Block> {\n\n fn into_queued(self) -> QueuedBlock {\n\n let (rsp_tx, _) = oneshot::channel();\n\n (self.prepare(), rsp_tx)\n\n }\n\n }\n\n\n\n #[test]\n\n fn dequeue_gives_right_children() -> 
Result<()> {\n\n zebra_test::init();\n\n\n\n let block1: Arc<Block> =\n\n zebra_test::vectors::BLOCK_MAINNET_419200_BYTES.zcash_deserialize_into()?;\n\n let child1: Arc<Block> =\n\n zebra_test::vectors::BLOCK_MAINNET_419201_BYTES.zcash_deserialize_into()?;\n\n let child2 = block1.make_fake_child();\n", "file_path": "zebra-state/src/service/non_finalized_state/queued_blocks.rs", "rank": 69, "score": 74646.17057690752 }, { "content": "#[cfg_attr(feature = \"test_sync_to_sapling_mainnet\", test)]\n\nfn sync_to_sapling_mainnet() {\n\n zebra_test::init();\n\n let network = Mainnet;\n\n create_cached_database(network).unwrap();\n\n}\n\n\n\n// Sync to the sapling activation height testnet and stop.\n", "file_path": "zebrad/tests/acceptance.rs", "rank": 70, "score": 72899.98261924185 }, { "content": "/// Compute an index of newly created transparent outputs, given a block and a\n\n/// list of precomputed transaction hashes.\n\nfn new_outputs(\n\n block: &Block,\n\n transaction_hashes: &[transaction::Hash],\n\n) -> Arc<HashMap<transparent::OutPoint, zs::Utxo>> {\n\n let mut new_outputs = HashMap::default();\n\n let height = block.coinbase_height().expect(\"block has coinbase height\");\n\n for (transaction, hash) in block\n\n .transactions\n\n .iter()\n\n .zip(transaction_hashes.iter().cloned())\n\n {\n\n let from_coinbase = transaction.is_coinbase();\n\n for (index, output) in transaction.outputs().iter().cloned().enumerate() {\n\n let index = index as u32;\n\n new_outputs.insert(\n\n transparent::OutPoint { hash, index },\n\n zs::Utxo {\n\n output,\n\n height,\n\n from_coinbase,\n\n },\n\n );\n\n }\n\n }\n\n\n\n Arc::new(new_outputs)\n\n}\n", "file_path": "zebra-consensus/src/block.rs", "rank": 71, "score": 72899.98261924185 }, { "content": "#[cfg_attr(feature = \"test_sync_to_sapling_testnet\", test)]\n\nfn sync_to_sapling_testnet() {\n\n zebra_test::init();\n\n let network = Testnet;\n\n create_cached_database(network).unwrap();\n\n}\n\n\n\n/// Test syncing 1200 blocks 
(3 checkpoints) past the last checkpoint on mainnet.\n\n///\n\n/// This assumes that the config'd state is already synced at or near Sapling\n\n/// activation on mainnet. If the state has already synced past Sapling\n\n/// activation by 1200 blocks, it will fail.\n", "file_path": "zebrad/tests/acceptance.rs", "rank": 72, "score": 72899.98261924185 }, { "content": "#[cfg_attr(feature = \"test_sync_past_sapling_mainnet\", test)]\n\nfn sync_past_sapling_mainnet() {\n\n zebra_test::init();\n\n let network = Mainnet;\n\n sync_past_sapling(network).unwrap();\n\n}\n\n\n\n/// Test syncing 1200 blocks (3 checkpoints) past the last checkpoint on testnet.\n\n///\n\n/// This assumes that the config'd state is already synced at or near Sapling\n\n/// activation on testnet. If the state has already synced past Sapling\n\n/// activation by 1200 blocks, it will fail.\n", "file_path": "zebrad/tests/acceptance.rs", "rank": 73, "score": 71654.5600450422 }, { "content": "#[cfg_attr(feature = \"test_sync_past_sapling_testnet\", test)]\n\nfn sync_past_sapling_testnet() {\n\n zebra_test::init();\n\n let network = Testnet;\n\n sync_past_sapling(network).unwrap();\n\n}\n\n\n", "file_path": "zebrad/tests/acceptance.rs", "rank": 74, "score": 71654.5600450422 }, { "content": "#[test]\n\nfn memo_fmt() {\n\n zebra_test::init();\n\n\n\n let memo = Memo(Box::new(\n\n *b\"thiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiis \\\n\n iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiis \\\n\n aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa \\\n\n veeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeryyyyyyyyyyyyyyyyyyyyyyyyyy \\\n\n looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong \\\n\n meeeeeeeeeeeeeeeeeeemooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo \\\n\n but it's just short enough\",\n\n ));\n\n\n\n assert_eq!(format!(\"{:?}\", memo),\n\n 
\"Memo(\\\"thiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiis iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiis aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa veeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeryyyyyyyyyyyyyyyyyyyyyyyyyy looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong meeeeeeeeeeeeeeeeeeemooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo but it\\\\\\'s just short enough\\\")\"\n\n );\n\n\n\n let mut some_bytes = [0u8; 512];\n\n some_bytes[0] = 0xF6;\n\n\n\n assert_eq!(format!(\"{:?}\", Memo(Box::new(some_bytes))),\n\n \"Memo(\\\"f600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000\\\")\"\n\n );\n\n}\n\n\n", "file_path": "zebra-chain/src/transaction/memo.rs", "rank": 75, "score": 71654.5600450422 }, { "content": "#[test]\n\nfn memo_from_string() {\n\n zebra_test::init();\n\n\n\n let memo = Memo::try_from(\"foo bar baz\".as_ref()).unwrap();\n\n\n\n let mut 
bytes = [0; 512];\n\n bytes[0..11].copy_from_slice(&[102, 111, 111, 32, 98, 97, 114, 32, 98, 97, 122]);\n\n\n\n assert!(memo.0.iter().eq(bytes.iter()));\n\n}\n", "file_path": "zebra-chain/src/transaction/memo.rs", "rank": 76, "score": 71654.5600450422 }, { "content": "#[test]\n\nfn operator_tests() {\n\n zebra_test::init();\n\n\n\n assert_eq!(Some(Height(2)), Height(1) + Height(1));\n\n assert_eq!(None, Height::MAX + Height(1));\n\n // Bad heights aren't caught at compile-time or runtime, until we add or subtract\n\n assert_eq!(None, Height(Height::MAX_AS_U32 + 1) + Height(0));\n\n assert_eq!(None, Height(i32::MAX as u32) + Height(0));\n\n assert_eq!(None, Height(u32::MAX) + Height(0));\n\n\n\n assert_eq!(Some(Height(2)), Height(1) + 1);\n\n assert_eq!(None, Height::MAX + 1);\n\n // Adding negative numbers\n\n assert_eq!(Some(Height(1)), Height(2) + -1);\n\n assert_eq!(Some(Height(0)), Height(1) + -1);\n\n assert_eq!(None, Height(0) + -1);\n\n assert_eq!(Some(Height(Height::MAX_AS_U32 - 1)), Height::MAX + -1);\n\n // Bad heights aren't caught at compile-time or runtime, until we add or subtract\n\n // `+ 0` would also cause an error here, but it triggers a spurious clippy lint\n\n assert_eq!(None, Height(Height::MAX_AS_U32 + 1) + 1);\n", "file_path": "zebra-chain/src/block/height.rs", "rank": 77, "score": 71654.5600450422 }, { "content": "/// Returns `ValidateContextError::OrphanedBlock` if the height of the given\n\n/// block is less than or equal to the finalized tip height.\n\nfn block_is_not_orphaned(\n\n finalized_tip_height: block::Height,\n\n candidate_height: block::Height,\n\n) -> Result<(), ValidateContextError> {\n\n if candidate_height <= finalized_tip_height {\n\n Err(ValidateContextError::OrphanedBlock {\n\n candidate_height,\n\n finalized_tip_height,\n\n })\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "zebra-state/src/service/check.rs", "rank": 78, "score": 71654.5600450422 }, { "content": "#[test]\n\nfn activation_bijective() {\n\n 
zebra_test::init();\n\n\n\n let mainnet_activations = NetworkUpgrade::activation_list(Mainnet);\n\n let mainnet_heights: HashSet<&block::Height> = mainnet_activations.keys().collect();\n\n assert_eq!(MAINNET_ACTIVATION_HEIGHTS.len(), mainnet_heights.len());\n\n\n\n let mainnet_nus: HashSet<&NetworkUpgrade> = mainnet_activations.values().collect();\n\n assert_eq!(MAINNET_ACTIVATION_HEIGHTS.len(), mainnet_nus.len());\n\n\n\n let testnet_activations = NetworkUpgrade::activation_list(Testnet);\n\n let testnet_heights: HashSet<&block::Height> = testnet_activations.keys().collect();\n\n assert_eq!(TESTNET_ACTIVATION_HEIGHTS.len(), testnet_heights.len());\n\n\n\n let testnet_nus: HashSet<&NetworkUpgrade> = testnet_activations.values().collect();\n\n assert_eq!(TESTNET_ACTIVATION_HEIGHTS.len(), testnet_nus.len());\n\n}\n\n\n", "file_path": "zebra-chain/src/parameters/tests.rs", "rank": 79, "score": 71654.5600450422 }, { "content": "/// Helper trait to organize inverse operations done on the `Chain` type. 
Used to\n\n/// overload the `update_chain_state_with` and `revert_chain_state_with` methods\n\n/// based on the type of the argument.\n\n///\n\n/// This trait was motivated by the length of the `push` and `pop_root` functions\n\n/// and fear that it would be easy to introduce bugs when updating them unless\n\n/// the code was reorganized to keep related operations adjacent to eachother.\n\ntrait UpdateWith<T> {\n\n /// Update `Chain` cumulative data members to add data that are derived from\n\n /// `T`\n\n fn update_chain_state_with(&mut self, _: &T);\n\n\n\n /// Update `Chain` cumulative data members to remove data that are derived\n\n /// from `T`\n\n fn revert_chain_state_with(&mut self, _: &T);\n\n}\n\n\n\nimpl UpdateWith<PreparedBlock> for Chain {\n\n fn update_chain_state_with(&mut self, prepared: &PreparedBlock) {\n\n let (block, hash, height, transaction_hashes) = (\n\n prepared.block.as_ref(),\n\n prepared.hash,\n\n prepared.height,\n\n &prepared.transaction_hashes,\n\n );\n\n\n\n // add hash to height_by_hash\n", "file_path": "zebra-state/src/service/non_finalized_state/chain.rs", "rank": 80, "score": 71248.98282590837 }, { "content": "/// Validate the time and `difficulty_threshold` from a candidate block's\n\n/// header.\n\n///\n\n/// Uses the `difficulty_adjustment` context for the block to:\n\n/// * check that the candidate block's time is within the valid range,\n\n/// based on the network and candidate height, and\n\n/// * check that the expected difficulty is equal to the block's\n\n/// `difficulty_threshold`.\n\n///\n\n/// These checks are performed together, because the time field is used to\n\n/// calculate the expected difficulty adjustment.\n\nfn difficulty_threshold_is_valid(\n\n difficulty_threshold: CompactDifficulty,\n\n difficulty_adjustment: AdjustedDifficulty,\n\n) -> Result<(), ValidateContextError> {\n\n // Check the block header time consensus rules from the Zcash specification\n\n let candidate_height = 
difficulty_adjustment.candidate_height();\n\n let candidate_time = difficulty_adjustment.candidate_time();\n\n let network = difficulty_adjustment.network();\n\n let median_time_past = difficulty_adjustment.median_time_past();\n\n let block_time_max =\n\n median_time_past + Duration::seconds(difficulty::BLOCK_MAX_TIME_SINCE_MEDIAN);\n\n if candidate_time <= median_time_past {\n\n Err(ValidateContextError::TimeTooEarly {\n\n candidate_time,\n\n median_time_past,\n\n })?\n\n }\n\n\n\n // The maximum time rule is only active on Testnet from a specific height\n\n if NetworkUpgrade::is_max_block_time_enforced(network, candidate_height)\n", "file_path": "zebra-state/src/service/check.rs", "rank": 81, "score": 70481.86033722859 }, { "content": "#[test]\n\nfn deserialize_blockheader() {\n\n zebra_test::init();\n\n\n\n // Includes the 32-byte nonce and 3-byte equihash length field.\n\n const BLOCK_HEADER_LENGTH: usize = crate::work::equihash::Solution::INPUT_LENGTH\n\n + 32\n\n + 3\n\n + crate::work::equihash::SOLUTION_SIZE;\n\n\n\n for block in zebra_test::vectors::BLOCKS.iter() {\n\n let header_bytes = &block[..BLOCK_HEADER_LENGTH];\n\n\n\n let _header = header_bytes\n\n .zcash_deserialize_into::<Header>()\n\n .expect(\"blockheader test vector should deserialize\");\n\n }\n\n}\n\n\n", "file_path": "zebra-chain/src/block/tests/vectors.rs", "rank": 82, "score": 70476.83311990302 }, { "content": "#[test]\n\nfn blockheaderhash_from_blockheader() {\n\n zebra_test::init();\n\n\n\n let blockheader = generate::block_header();\n\n\n\n let hash = Hash::from(&blockheader);\n\n\n\n assert_eq!(\n\n format!(\"{:?}\", hash),\n\n \"block::Hash(\\\"d1d6974bbe1d4d127c889119b2fc05724c67588dc72708839727586b8c2bc939\\\")\"\n\n );\n\n\n\n let mut bytes = Cursor::new(Vec::new());\n\n\n\n blockheader\n\n .zcash_serialize(&mut bytes)\n\n .expect(\"these bytes to serialize from a blockheader without issue\");\n\n\n\n bytes.set_position(0);\n\n let other_header = bytes\n\n 
.zcash_deserialize_into()\n\n .expect(\"these bytes to deserialize into a blockheader without issue\");\n\n\n\n assert_eq!(blockheader, other_header);\n\n}\n\n\n", "file_path": "zebra-chain/src/block/tests/vectors.rs", "rank": 83, "score": 70476.83311990302 }, { "content": "#[test]\n\nfn blockheaderhash_debug() {\n\n zebra_test::init();\n\n\n\n let preimage = b\"foo bar baz\";\n\n let mut sha_writer = sha256d::Writer::default();\n\n let _ = sha_writer.write_all(preimage);\n\n\n\n let hash = Hash(sha_writer.finish());\n\n\n\n assert_eq!(\n\n format!(\"{:?}\", hash),\n\n \"block::Hash(\\\"3166411bd5343e0b284a108f39a929fbbb62619784f8c6dafe520703b5b446bf\\\")\"\n\n );\n\n}\n\n\n", "file_path": "zebra-chain/src/block/tests/vectors.rs", "rank": 84, "score": 70476.83311990302 }, { "content": "#[test]\n\nfn deserialize_block() {\n\n zebra_test::init();\n\n\n\n // this one has a bad version field\n\n zebra_test::vectors::BLOCK_MAINNET_434873_BYTES\n\n .zcash_deserialize_into::<Block>()\n\n .expect(\"block test vector should deserialize\");\n\n\n\n for block_bytes in zebra_test::vectors::BLOCKS.iter() {\n\n let block = block_bytes\n\n .zcash_deserialize_into::<Block>()\n\n .expect(\"block is structurally valid\");\n\n\n\n let round_trip_bytes = block\n\n .zcash_serialize_to_vec()\n\n .expect(\"vec serialization is infallible\");\n\n\n\n assert_eq!(&round_trip_bytes[..], *block_bytes);\n\n }\n\n}\n\n\n", "file_path": "zebra-chain/src/block/tests/vectors.rs", "rank": 85, "score": 70476.83311990302 }, { "content": "#[test]\n\nfn branch_id_bijective() {\n\n zebra_test::init();\n\n\n\n let branch_id_list = NetworkUpgrade::branch_id_list();\n\n let nus: HashSet<&NetworkUpgrade> = branch_id_list.keys().collect();\n\n assert_eq!(CONSENSUS_BRANCH_IDS.len(), nus.len());\n\n\n\n let branch_ids: HashSet<&ConsensusBranchId> = branch_id_list.values().collect();\n\n assert_eq!(CONSENSUS_BRANCH_IDS.len(), branch_ids.len());\n\n}\n\n\n", "file_path": "zebra-chain/src/parameters/tests.rs", 
"rank": 86, "score": 70476.83311990302 }, { "content": "#[test]\n\nfn activation_extremes_mainnet() {\n\n zebra_test::init();\n\n activation_extremes(Mainnet)\n\n}\n\n\n", "file_path": "zebra-chain/src/parameters/tests.rs", "rank": 87, "score": 70476.83311990302 }, { "content": "#[test]\n\nfn activation_consistent_testnet() {\n\n zebra_test::init();\n\n activation_consistent(Testnet)\n\n}\n\n\n", "file_path": "zebra-chain/src/parameters/tests.rs", "rank": 88, "score": 70476.83311990302 }, { "content": "#[test]\n\nfn activation_extremes_testnet() {\n\n zebra_test::init();\n\n activation_extremes(Testnet)\n\n}\n\n\n", "file_path": "zebra-chain/src/parameters/tests.rs", "rank": 89, "score": 70476.83311990302 }, { "content": "#[test]\n\nfn round_trip_work_expanded() {\n\n zebra_test::init();\n\n\n\n proptest!(|(work_before in any::<Work>())| {\n\n let work: U256 = work_before.as_u128().into();\n\n let expanded = work_to_expanded(work);\n\n let work_after = Work::try_from(expanded).unwrap();\n\n prop_assert_eq!(work_before, work_after);\n\n });\n\n}\n\n\n\n/// Check that the block locator heights are sensible.\n", "file_path": "zebra-state/src/tests.rs", "rank": 90, "score": 70476.83311990302 }, { "content": "#[test]\n\nfn activation_consistent_mainnet() {\n\n zebra_test::init();\n\n activation_consistent(Mainnet)\n\n}\n\n\n", "file_path": "zebra-chain/src/parameters/tests.rs", "rank": 91, "score": 70476.83311990302 }, { "content": "#[test]\n\nfn minimum_difficulty_mainnet() {\n\n minimum_difficulty(Mainnet)\n\n}\n\n\n", "file_path": "zebra-consensus/src/parameters/tests.rs", "rank": 92, "score": 70476.83311990302 }, { "content": "#[test]\n\nfn test_block_locator_heights() {\n\n zebra_test::init();\n\n\n\n for (height, min_height) in BLOCK_LOCATOR_CASES.iter().cloned() {\n\n let locator = util::block_locator_heights(block::Height(height));\n\n\n\n assert!(!locator.is_empty(), \"locators must not be empty\");\n\n if (height - min_height) > 1 {\n\n assert!(\n\n 
locator.len() > 2,\n\n \"non-trivial locators must have some intermediate heights\"\n\n );\n\n }\n\n\n\n assert_eq!(\n\n locator[0],\n\n block::Height(height),\n\n \"locators must start with the tip height\"\n\n );\n\n\n", "file_path": "zebra-state/src/tests.rs", "rank": 93, "score": 70476.83311990302 }, { "content": "#[test]\n\nfn minimum_difficulty_testnet() {\n\n minimum_difficulty(Testnet)\n\n}\n\n\n", "file_path": "zebra-consensus/src/parameters/tests.rs", "rank": 94, "score": 70476.83311990302 }, { "content": "fn parse_coinbase_height(\n\n mut data: Vec<u8>,\n\n) -> Result<(block::Height, CoinbaseData), SerializationError> {\n\n use block::Height;\n\n match (data.get(0), data.len()) {\n\n // Blocks 1 through 16 inclusive encode block height with OP_N opcodes.\n\n (Some(op_n @ 0x51..=0x60), len) if len >= 1 => Ok((\n\n Height((op_n - 0x50) as u32),\n\n CoinbaseData(data.split_off(1)),\n\n )),\n\n // Blocks 17 through 128 exclusive encode block height with the `0x01` opcode.\n\n // The Bitcoin encoding requires that the most significant byte is below 0x80.\n\n (Some(0x01), len) if len >= 2 && data[1] < 0x80 => {\n\n Ok((Height(data[1] as u32), CoinbaseData(data.split_off(2))))\n\n }\n\n // Blocks 128 through 32768 exclusive encode block height with the `0x02` opcode.\n\n // The Bitcoin encoding requires that the most significant byte is below 0x80.\n\n (Some(0x02), len) if len >= 3 && data[2] < 0x80 => Ok((\n\n Height(data[1] as u32 + ((data[2] as u32) << 8)),\n\n CoinbaseData(data.split_off(3)),\n", "file_path": "zebra-chain/src/transparent/serialize.rs", "rank": 95, "score": 70476.83311990302 }, { "content": "#[test]\n\nfn help_no_args() -> Result<()> {\n\n zebra_test::init();\n\n\n\n let testdir = testdir()?.with_config(&mut default_test_config()?)?;\n\n\n\n let child = testdir.spawn_child(&[\"help\"])?;\n\n let output = child.wait_with_output()?;\n\n let output = output.assert_success()?;\n\n\n\n // First line haves the version\n\n 
output.stdout_contains(r\"zebrad [0-9].[0-9].[0-9]\")?;\n\n\n\n // Make sure we are in help by looking usage string\n\n output.stdout_contains(r\"USAGE:\")?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "zebrad/tests/acceptance.rs", "rank": 96, "score": 70079.53676886884 }, { "content": "#[test]\n\nfn generate_no_args() -> Result<()> {\n\n zebra_test::init();\n\n\n\n let child = testdir()?\n\n .with_config(&mut default_test_config()?)?\n\n .spawn_child(&[\"generate\"])?;\n\n\n\n let output = child.wait_with_output()?;\n\n let output = output.assert_success()?;\n\n\n\n // First line\n\n output.stdout_contains(r\"# Default configuration for zebrad.\")?;\n\n\n\n Ok(())\n\n}\n\n\n\n/// Panics if `$pred` is false, with an error report containing:\n\n/// * context from `$source`, and\n\n/// * an optional wrapper error, using `$fmt_arg`+ as a format string and\n\n/// arguments.\n", "file_path": "zebrad/tests/acceptance.rs", "rank": 97, "score": 70079.53676886884 }, { "content": "#[test]\n\nfn help_args() -> Result<()> {\n\n zebra_test::init();\n\n\n\n let testdir = testdir()?;\n\n let testdir = &testdir;\n\n\n\n // The subcommand \"argument\" wasn't recognized.\n\n let child = testdir.spawn_child(&[\"help\", \"argument\"])?;\n\n let output = child.wait_with_output()?;\n\n output.assert_failure()?;\n\n\n\n // option `-f` does not accept an argument\n\n let child = testdir.spawn_child(&[\"help\", \"-f\"])?;\n\n let output = child.wait_with_output()?;\n\n output.assert_failure()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "zebrad/tests/acceptance.rs", "rank": 98, "score": 70079.53676886884 }, { "content": "#[test]\n\nfn generate_args() -> Result<()> {\n\n zebra_test::init();\n\n\n\n let testdir = testdir()?;\n\n let testdir = &testdir;\n\n\n\n // unexpected free argument `argument`\n\n let child = testdir.spawn_child(&[\"generate\", \"argument\"])?;\n\n let output = child.wait_with_output()?;\n\n output.assert_failure()?;\n\n\n\n // unrecognized option `-f`\n\n let child = 
testdir.spawn_child(&[\"generate\", \"-f\"])?;\n\n let output = child.wait_with_output()?;\n\n output.assert_failure()?;\n\n\n\n // missing argument to option `-o`\n\n let child = testdir.spawn_child(&[\"generate\", \"-o\"])?;\n\n let output = child.wait_with_output()?;\n\n output.assert_failure()?;\n", "file_path": "zebrad/tests/acceptance.rs", "rank": 99, "score": 70079.53676886884 } ]
Rust
src/de/mod.rs
RoccoDev/bson-rust
9781781212056c619464be8444a27587c7dc5de9
mod error; mod raw; mod serde; pub use self::{ error::{Error, Result}, serde::Deserializer, }; use std::io::Read; use crate::{ bson::{Array, Binary, Bson, DbPointer, Document, JavaScriptCodeWithScope, Regex, Timestamp}, oid::{self, ObjectId}, ser::write_i32, spec::{self, BinarySubtype}, Decimal128, }; use ::serde::{ de::{DeserializeOwned, Error as _, Unexpected}, Deserialize, }; pub(crate) use self::serde::BsonVisitor; pub(crate) const MAX_BSON_SIZE: i32 = 16 * 1024 * 1024; pub(crate) const MIN_BSON_DOCUMENT_SIZE: i32 = 4 + 1; pub(crate) const MIN_BSON_STRING_SIZE: i32 = 4 + 1; pub(crate) const MIN_CODE_WITH_SCOPE_SIZE: i32 = 4 + MIN_BSON_STRING_SIZE + MIN_BSON_DOCUMENT_SIZE; pub(crate) fn ensure_read_exactly<F, R>( reader: &mut R, length: usize, error_message: &str, func: F, ) -> Result<()> where F: FnOnce(&mut std::io::Cursor<Vec<u8>>) -> Result<()>, R: Read + ?Sized, { let mut buf = vec![0u8; length]; reader.read_exact(&mut buf)?; let mut cursor = std::io::Cursor::new(buf); func(&mut cursor)?; if cursor.position() != length as u64 { return Err(Error::invalid_length(length, &error_message)); } Ok(()) } pub(crate) fn read_string<R: Read + ?Sized>(reader: &mut R, utf8_lossy: bool) -> Result<String> { let len = read_i32(reader)?; if len < 1 { return Err(Error::invalid_length( len as usize, &"UTF-8 string must have at least 1 byte", )); } let s = if utf8_lossy { let mut buf = Vec::with_capacity(len as usize - 1); reader.take(len as u64 - 1).read_to_end(&mut buf)?; String::from_utf8_lossy(&buf).to_string() } else { let mut s = String::with_capacity(len as usize - 1); reader.take(len as u64 - 1).read_to_string(&mut s)?; s }; if read_u8(reader)? 
!= 0 { return Err(Error::invalid_length( len as usize, &"contents of string longer than provided length", )); } Ok(s) } pub(crate) fn read_bool<R: Read>(mut reader: R) -> Result<bool> { let val = read_u8(&mut reader)?; if val > 1 { return Err(Error::invalid_value( Unexpected::Unsigned(val as u64), &"boolean must be stored as 0 or 1", )); } Ok(val != 0) } fn read_cstring<R: Read + ?Sized>(reader: &mut R) -> Result<String> { let mut v = Vec::new(); loop { let c = read_u8(reader)?; if c == 0 { break; } v.push(c); } Ok(String::from_utf8(v)?) } #[inline] pub(crate) fn read_u8<R: Read + ?Sized>(reader: &mut R) -> Result<u8> { let mut buf = [0; 1]; reader.read_exact(&mut buf)?; Ok(u8::from_le_bytes(buf)) } #[inline] pub(crate) fn read_i32<R: Read + ?Sized>(reader: &mut R) -> Result<i32> { let mut buf = [0; 4]; reader.read_exact(&mut buf)?; Ok(i32::from_le_bytes(buf)) } #[inline] pub(crate) fn read_i64<R: Read + ?Sized>(reader: &mut R) -> Result<i64> { let mut buf = [0; 8]; reader.read_exact(&mut buf)?; Ok(i64::from_le_bytes(buf)) } #[inline] fn read_f64<R: Read + ?Sized>(reader: &mut R) -> Result<f64> { let mut buf = [0; 8]; reader.read_exact(&mut buf)?; Ok(f64::from_le_bytes(buf)) } #[inline] fn read_f128<R: Read + ?Sized>(reader: &mut R) -> Result<Decimal128> { let mut buf = [0u8; 128 / 8]; reader.read_exact(&mut buf)?; Ok(Decimal128 { bytes: buf }) } fn deserialize_array<R: Read + ?Sized>(reader: &mut R, utf8_lossy: bool) -> Result<Array> { let mut arr = Array::new(); let length = read_i32(reader)?; if !(MIN_BSON_DOCUMENT_SIZE..=MAX_BSON_SIZE).contains(&length) { return Err(Error::invalid_length( length as usize, &format!( "array length must be between {} and {}", MIN_BSON_DOCUMENT_SIZE, MAX_BSON_SIZE ) .as_str(), )); } ensure_read_exactly( reader, (length as usize) - 4, "array length longer than contents", |cursor| { loop { let tag = read_u8(cursor)?; if tag == 0 { break; } let (_, val) = deserialize_bson_kvp(cursor, tag, utf8_lossy)?; arr.push(val) } Ok(()) }, )?; 
Ok(arr) } pub(crate) fn deserialize_bson_kvp<R: Read + ?Sized>( reader: &mut R, tag: u8, utf8_lossy: bool, ) -> Result<(String, Bson)> { use spec::ElementType; let key = read_cstring(reader)?; let val = match ElementType::from(tag) { Some(ElementType::Double) => Bson::Double(read_f64(reader)?), Some(ElementType::String) => read_string(reader, utf8_lossy).map(Bson::String)?, Some(ElementType::EmbeddedDocument) => Document::from_reader(reader).map(Bson::Document)?, Some(ElementType::Array) => deserialize_array(reader, utf8_lossy).map(Bson::Array)?, Some(ElementType::Binary) => Bson::Binary(Binary::from_reader(reader)?), Some(ElementType::ObjectId) => { let mut objid = [0; 12]; for x in &mut objid { *x = read_u8(reader)?; } Bson::ObjectId(oid::ObjectId::from_bytes(objid)) } Some(ElementType::Boolean) => Bson::Boolean(read_bool(reader)?), Some(ElementType::Null) => Bson::Null, Some(ElementType::RegularExpression) => { Bson::RegularExpression(Regex::from_reader(reader)?) } Some(ElementType::JavaScriptCode) => { read_string(reader, utf8_lossy).map(Bson::JavaScriptCode)? } Some(ElementType::JavaScriptCodeWithScope) => { Bson::JavaScriptCodeWithScope(JavaScriptCodeWithScope::from_reader(reader, utf8_lossy)?) 
} Some(ElementType::Int32) => read_i32(reader).map(Bson::Int32)?, Some(ElementType::Int64) => read_i64(reader).map(Bson::Int64)?, Some(ElementType::Timestamp) => Bson::Timestamp(Timestamp::from_reader(reader)?), Some(ElementType::DateTime) => { let time = read_i64(reader)?; Bson::DateTime(crate::DateTime::from_millis(time)) } Some(ElementType::Symbol) => read_string(reader, utf8_lossy).map(Bson::Symbol)?, Some(ElementType::Decimal128) => read_f128(reader).map(Bson::Decimal128)?, Some(ElementType::Undefined) => Bson::Undefined, Some(ElementType::DbPointer) => Bson::DbPointer(DbPointer::from_reader(reader)?), Some(ElementType::MaxKey) => Bson::MaxKey, Some(ElementType::MinKey) => Bson::MinKey, None => { return Err(Error::UnrecognizedDocumentElementType { key, element_type: tag, }) } }; Ok((key, val)) } impl Binary { pub(crate) fn from_reader<R: Read>(mut reader: R) -> Result<Self> { let len = read_i32(&mut reader)?; if !(0..=MAX_BSON_SIZE).contains(&len) { return Err(Error::invalid_length( len as usize, &format!("binary length must be between 0 and {}", MAX_BSON_SIZE).as_str(), )); } let subtype = BinarySubtype::from(read_u8(&mut reader)?); Self::from_reader_with_len_and_payload(reader, len, subtype) } pub(crate) fn from_reader_with_len_and_payload<R: Read>( mut reader: R, mut len: i32, subtype: BinarySubtype, ) -> Result<Self> { if !(0..=MAX_BSON_SIZE).contains(&len) { return Err(Error::invalid_length( len as usize, &format!("binary length must be between 0 and {}", MAX_BSON_SIZE).as_str(), )); } if let BinarySubtype::BinaryOld = subtype { let data_len = read_i32(&mut reader)?; if !(0..=(MAX_BSON_SIZE - 4)).contains(&data_len) { return Err(Error::invalid_length( data_len as usize, &format!("0x02 length must be between 0 and {}", MAX_BSON_SIZE - 4).as_str(), )); } if data_len + 4 != len { return Err(Error::invalid_length( data_len as usize, &"0x02 length did not match top level binary length", )); } len -= 4; } let mut bytes = Vec::with_capacity(len as usize); 
reader.take(len as u64).read_to_end(&mut bytes)?; Ok(Binary { subtype, bytes }) } } impl DbPointer { pub(crate) fn from_reader<R: Read>(mut reader: R) -> Result<Self> { let ns = read_string(&mut reader, false)?; let oid = ObjectId::from_reader(&mut reader)?; Ok(DbPointer { namespace: ns, id: oid, }) } } impl Regex { pub(crate) fn from_reader<R: Read>(mut reader: R) -> Result<Self> { let pattern = read_cstring(&mut reader)?; let options = read_cstring(&mut reader)?; Ok(Regex { pattern, options }) } } impl Timestamp { pub(crate) fn from_reader<R: Read>(mut reader: R) -> Result<Self> { read_i64(&mut reader).map(Timestamp::from_le_i64) } } impl ObjectId { pub(crate) fn from_reader<R: Read>(mut reader: R) -> Result<Self> { let mut buf = [0u8; 12]; reader.read_exact(&mut buf)?; Ok(Self::from_bytes(buf)) } } impl JavaScriptCodeWithScope { pub(crate) fn from_reader<R: Read>(mut reader: R, utf8_lossy: bool) -> Result<Self> { let length = read_i32(&mut reader)?; if length < MIN_CODE_WITH_SCOPE_SIZE { return Err(Error::invalid_length( length as usize, &format!( "code with scope length must be at least {}", MIN_CODE_WITH_SCOPE_SIZE ) .as_str(), )); } else if length > MAX_BSON_SIZE { return Err(Error::invalid_length( length as usize, &"code with scope length too large", )); } let mut buf = vec![0u8; (length - 4) as usize]; reader.read_exact(&mut buf)?; let mut slice = buf.as_slice(); let code = read_string(&mut slice, utf8_lossy)?; let scope = Document::from_reader(&mut slice)?; Ok(JavaScriptCodeWithScope { code, scope }) } } pub fn from_bson<T>(bson: Bson) -> Result<T> where T: DeserializeOwned, { let de = Deserializer::new(bson); Deserialize::deserialize(de) } pub fn from_document<T>(doc: Document) -> Result<T> where T: DeserializeOwned, { from_bson(Bson::Document(doc)) } fn reader_to_vec<R: Read>(mut reader: R) -> Result<Vec<u8>> { let length = read_i32(&mut reader)?; if length < MIN_BSON_DOCUMENT_SIZE { return Err(Error::custom("document size too small")); } let mut bytes = 
Vec::with_capacity(length as usize); write_i32(&mut bytes, length).map_err(Error::custom)?; reader.take(length as u64 - 4).read_to_end(&mut bytes)?; Ok(bytes) } pub fn from_reader<R, T>(reader: R) -> Result<T> where T: DeserializeOwned, R: Read, { let bytes = reader_to_vec(reader)?; from_slice(bytes.as_slice()) } pub fn from_reader_utf8_lossy<R, T>(reader: R) -> Result<T> where T: DeserializeOwned, R: Read, { let bytes = reader_to_vec(reader)?; from_slice_utf8_lossy(bytes.as_slice()) } pub fn from_slice<'de, T>(bytes: &'de [u8]) -> Result<T> where T: Deserialize<'de>, { let mut deserializer = raw::Deserializer::new(bytes, false); T::deserialize(&mut deserializer) } pub fn from_slice_utf8_lossy<'de, T>(bytes: &'de [u8]) -> Result<T> where T: Deserialize<'de>, { let mut deserializer = raw::Deserializer::new(bytes, true); T::deserialize(&mut deserializer) }
mod error; mod raw; mod serde; pub use self::{ error::{Error, Result}, serde::Deserializer, }; use std::io::Read; use crate::{ bson::{Array, Binary, Bson, DbPointer, Document, JavaScriptCodeWithScope, Regex, Timestamp}, oid::{self, ObjectId}, ser::write_i32, spec::{self, BinarySubtype}, Decimal128, }; use ::serde::{ de::{DeserializeOwned, Error as _, Unexpected}, Deserialize, }; pub(crate) use self::serde::BsonVisitor; pub(crate) const MAX_BSON_SIZE: i32 = 16 * 1024 * 1024; pub(crate) const MIN_BSON_DOCUMENT_SIZE: i32 = 4 + 1; pub(crate) const MIN_BSON_STRING_SIZE: i32 = 4 + 1; pub(crate) const MIN_CODE_WITH_SCOPE_SIZE: i32 = 4 + MIN_BSON_STRING_SIZE + MIN_BSON_DOCUMENT_SIZE; pub(crate) fn ensure_read_exactly<F, R>( reader: &mut R, length: usize, error_message: &str, func: F, ) -> Result<()> where F: FnOnce(&mut std::io::Cursor<Vec<u8>>) -> Result<()>, R: Read + ?Sized, { let mut buf = vec![0u8; length]; reader.read_exact(&mut buf)?; let mut cursor = std::io::Cursor::new(buf); func(&mut cursor)?; if cursor.position() != length as u64 { return Err(Error::invalid_length(length, &error_message)); } Ok(()) } pub(crate) fn read_string<R: Read + ?Sized>(reader: &mut R, utf8_lossy: bool) -> Result<String> { let len = read_i32(reader)?; if len < 1 { return Err(Error::invalid_length( len as usize, &"UTF-8 string must have at least 1 byte", )); } let s = if utf8_lossy { let mut buf = Vec::with_capacity(len as usize - 1); reader.take(len as u64 - 1).read_to_end(&mut buf)?; String::from_utf8_lossy(&buf).to_string() } else { let mut s = String::with_capacity(len as usize - 1); reader.take(len as u64 - 1).read_to_string(&mut s)?; s }; if read_u8(reader)? 
!= 0 { return Err(Error::invalid_length( len as usize, &"contents of string longer than provided length", )); } Ok(s) } pub(crate) fn read_bool<R: Read>(mut reader: R) -> Result<bool> { let val = read_u8(&mut reader)?; if val > 1 { return Err(Error::invalid_value( Unexpected::Unsigned(val as u64), &"boolean must be stored as 0 or 1", )); } Ok(val != 0) } fn read_cstring<R: Read + ?Sized>(reader: &mut R) -> Result<String> { let mut v = Vec::new(); loop { let c = read_u8(reader)?; if c == 0 { break; } v.push(c); } Ok(String::from_utf8(v)?) } #[inline] pub(crate) fn read_u8<R: Read + ?Sized>(reader: &mut R) -> Result<u8> { let mut buf = [0; 1]; reader.read_exact(&mut buf)?; Ok(u8::from_le_bytes(buf)) } #[inline] pub(crate) fn read_i32<R: Read + ?Sized>(reader: &mut R) -> Result<i32> { let mut buf = [0; 4]; reader.read_exact(&mut buf)?; Ok(i32::from_le_bytes(buf)) } #[inline] pub(crate) fn read_i64<R: Read + ?Sized>(reader: &mut R) -> Result<i64> { let mut buf = [0; 8]; reader.read_exact(&mut buf)?; Ok(i64::from_le_bytes(buf)) } #[inline] fn read_f64<R: Read + ?Sized>(reader: &mut R) -> Result<f64> { let mut buf = [0; 8]; reader.read_exact(&mut buf)?; Ok(f64::from_le_bytes(buf)) } #[inline] fn read_f128<R: Read + ?Sized>(reader: &mut R) -> Result<Decimal128> { let mut buf = [0u8; 128 / 8]; reader.read_exact(&mut buf)?; Ok(Decimal128 { bytes: buf }) } fn deserialize_array<R: Read + ?Sized>(reader: &mut R, utf8_lossy: bool) -> Result<Array> { let mut arr = Array::new(); let length = read_i32(reader)?; if !(MIN_BSON_DOCUMENT_SIZE..=MAX_BSON_SIZE).contains(&length) { return Err(Error::invalid_length( length as usize, &format!( "array length must be between {} and {}", MIN_BSON_DOCUMENT_SIZE, MAX_BSON_SIZE ) .as_str(), )); } ensure_read_exactly( reader, (length as usize) - 4, "array length longer than contents", |cursor| { loop { let tag = read_u8(cursor)?; if tag == 0 {
pub(crate) fn deserialize_bson_kvp<R: Read + ?Sized>( reader: &mut R, tag: u8, utf8_lossy: bool, ) -> Result<(String, Bson)> { use spec::ElementType; let key = read_cstring(reader)?; let val = match ElementType::from(tag) { Some(ElementType::Double) => Bson::Double(read_f64(reader)?), Some(ElementType::String) => read_string(reader, utf8_lossy).map(Bson::String)?, Some(ElementType::EmbeddedDocument) => Document::from_reader(reader).map(Bson::Document)?, Some(ElementType::Array) => deserialize_array(reader, utf8_lossy).map(Bson::Array)?, Some(ElementType::Binary) => Bson::Binary(Binary::from_reader(reader)?), Some(ElementType::ObjectId) => { let mut objid = [0; 12]; for x in &mut objid { *x = read_u8(reader)?; } Bson::ObjectId(oid::ObjectId::from_bytes(objid)) } Some(ElementType::Boolean) => Bson::Boolean(read_bool(reader)?), Some(ElementType::Null) => Bson::Null, Some(ElementType::RegularExpression) => { Bson::RegularExpression(Regex::from_reader(reader)?) } Some(ElementType::JavaScriptCode) => { read_string(reader, utf8_lossy).map(Bson::JavaScriptCode)? } Some(ElementType::JavaScriptCodeWithScope) => { Bson::JavaScriptCodeWithScope(JavaScriptCodeWithScope::from_reader(reader, utf8_lossy)?) 
} Some(ElementType::Int32) => read_i32(reader).map(Bson::Int32)?, Some(ElementType::Int64) => read_i64(reader).map(Bson::Int64)?, Some(ElementType::Timestamp) => Bson::Timestamp(Timestamp::from_reader(reader)?), Some(ElementType::DateTime) => { let time = read_i64(reader)?; Bson::DateTime(crate::DateTime::from_millis(time)) } Some(ElementType::Symbol) => read_string(reader, utf8_lossy).map(Bson::Symbol)?, Some(ElementType::Decimal128) => read_f128(reader).map(Bson::Decimal128)?, Some(ElementType::Undefined) => Bson::Undefined, Some(ElementType::DbPointer) => Bson::DbPointer(DbPointer::from_reader(reader)?), Some(ElementType::MaxKey) => Bson::MaxKey, Some(ElementType::MinKey) => Bson::MinKey, None => { return Err(Error::UnrecognizedDocumentElementType { key, element_type: tag, }) } }; Ok((key, val)) } impl Binary { pub(crate) fn from_reader<R: Read>(mut reader: R) -> Result<Self> { let len = read_i32(&mut reader)?; if !(0..=MAX_BSON_SIZE).contains(&len) { return Err(Error::invalid_length( len as usize, &format!("binary length must be between 0 and {}", MAX_BSON_SIZE).as_str(), )); } let subtype = BinarySubtype::from(read_u8(&mut reader)?); Self::from_reader_with_len_and_payload(reader, len, subtype) } pub(crate) fn from_reader_with_len_and_payload<R: Read>( mut reader: R, mut len: i32, subtype: BinarySubtype, ) -> Result<Self> { if !(0..=MAX_BSON_SIZE).contains(&len) { return Err(Error::invalid_length( len as usize, &format!("binary length must be between 0 and {}", MAX_BSON_SIZE).as_str(), )); } if let BinarySubtype::BinaryOld = subtype { let data_len = read_i32(&mut reader)?; if !(0..=(MAX_BSON_SIZE - 4)).contains(&data_len) { return Err(Error::invalid_length( data_len as usize, &format!("0x02 length must be between 0 and {}", MAX_BSON_SIZE - 4).as_str(), )); } if data_len + 4 != len { return Err(Error::invalid_length( data_len as usize, &"0x02 length did not match top level binary length", )); } len -= 4; } let mut bytes = Vec::with_capacity(len as usize); 
reader.take(len as u64).read_to_end(&mut bytes)?; Ok(Binary { subtype, bytes }) } } impl DbPointer { pub(crate) fn from_reader<R: Read>(mut reader: R) -> Result<Self> { let ns = read_string(&mut reader, false)?; let oid = ObjectId::from_reader(&mut reader)?; Ok(DbPointer { namespace: ns, id: oid, }) } } impl Regex { pub(crate) fn from_reader<R: Read>(mut reader: R) -> Result<Self> { let pattern = read_cstring(&mut reader)?; let options = read_cstring(&mut reader)?; Ok(Regex { pattern, options }) } } impl Timestamp { pub(crate) fn from_reader<R: Read>(mut reader: R) -> Result<Self> { read_i64(&mut reader).map(Timestamp::from_le_i64) } } impl ObjectId { pub(crate) fn from_reader<R: Read>(mut reader: R) -> Result<Self> { let mut buf = [0u8; 12]; reader.read_exact(&mut buf)?; Ok(Self::from_bytes(buf)) } } impl JavaScriptCodeWithScope { pub(crate) fn from_reader<R: Read>(mut reader: R, utf8_lossy: bool) -> Result<Self> { let length = read_i32(&mut reader)?; if length < MIN_CODE_WITH_SCOPE_SIZE { return Err(Error::invalid_length( length as usize, &format!( "code with scope length must be at least {}", MIN_CODE_WITH_SCOPE_SIZE ) .as_str(), )); } else if length > MAX_BSON_SIZE { return Err(Error::invalid_length( length as usize, &"code with scope length too large", )); } let mut buf = vec![0u8; (length - 4) as usize]; reader.read_exact(&mut buf)?; let mut slice = buf.as_slice(); let code = read_string(&mut slice, utf8_lossy)?; let scope = Document::from_reader(&mut slice)?; Ok(JavaScriptCodeWithScope { code, scope }) } } pub fn from_bson<T>(bson: Bson) -> Result<T> where T: DeserializeOwned, { let de = Deserializer::new(bson); Deserialize::deserialize(de) } pub fn from_document<T>(doc: Document) -> Result<T> where T: DeserializeOwned, { from_bson(Bson::Document(doc)) } fn reader_to_vec<R: Read>(mut reader: R) -> Result<Vec<u8>> { let length = read_i32(&mut reader)?; if length < MIN_BSON_DOCUMENT_SIZE { return Err(Error::custom("document size too small")); } let mut bytes = 
Vec::with_capacity(length as usize); write_i32(&mut bytes, length).map_err(Error::custom)?; reader.take(length as u64 - 4).read_to_end(&mut bytes)?; Ok(bytes) } pub fn from_reader<R, T>(reader: R) -> Result<T> where T: DeserializeOwned, R: Read, { let bytes = reader_to_vec(reader)?; from_slice(bytes.as_slice()) } pub fn from_reader_utf8_lossy<R, T>(reader: R) -> Result<T> where T: DeserializeOwned, R: Read, { let bytes = reader_to_vec(reader)?; from_slice_utf8_lossy(bytes.as_slice()) } pub fn from_slice<'de, T>(bytes: &'de [u8]) -> Result<T> where T: Deserialize<'de>, { let mut deserializer = raw::Deserializer::new(bytes, false); T::deserialize(&mut deserializer) } pub fn from_slice_utf8_lossy<'de, T>(bytes: &'de [u8]) -> Result<T> where T: Deserialize<'de>, { let mut deserializer = raw::Deserializer::new(bytes, true); T::deserialize(&mut deserializer) }
break; } let (_, val) = deserialize_bson_kvp(cursor, tag, utf8_lossy)?; arr.push(val) } Ok(()) }, )?; Ok(arr) }
function_block-function_prefix_line
[ { "content": "/// Attempts to serialize a u64 as an i32. Errors if an exact conversion is not possible.\n\npub fn serialize_u64_as_i32<S: Serializer>(val: &u64, serializer: S) -> Result<S::Ok, S::Error> {\n\n match i32::try_from(*val) {\n\n Ok(val) => serializer.serialize_i32(val),\n\n Err(_) => Err(ser::Error::custom(format!(\"cannot convert {} to i32\", val))),\n\n }\n\n}\n\n\n", "file_path": "src/serde_helpers.rs", "rank": 5, "score": 317124.7941050074 }, { "content": "fn read_lenencoded(buf: &[u8]) -> Result<&str> {\n\n let length = i32_from_slice(&buf[..4])?;\n\n let end = checked_add(usize_try_from_i32(length)?, 4)?;\n\n\n\n if end < MIN_BSON_STRING_SIZE as usize {\n\n return Err(Error::new_without_key(ErrorKind::MalformedValue {\n\n message: format!(\n\n \"BSON length encoded string needs to be at least {} bytes, instead got {}\",\n\n MIN_BSON_STRING_SIZE, end\n\n ),\n\n }));\n\n }\n\n\n\n if buf.len() < end {\n\n return Err(Error::new_without_key(ErrorKind::MalformedValue {\n\n message: format!(\n\n \"expected buffer to contain at least {} bytes, but it only has {}\",\n\n end,\n\n buf.len()\n\n ),\n", "file_path": "src/raw/mod.rs", "rank": 7, "score": 299642.04692874756 }, { "content": "fn read_nullterminated(buf: &[u8]) -> Result<&str> {\n\n let mut splits = buf.splitn(2, |x| *x == 0);\n\n let value = splits.next().ok_or_else(|| {\n\n Error::new_without_key(ErrorKind::MalformedValue {\n\n message: \"no value\".into(),\n\n })\n\n })?;\n\n if splits.next().is_some() {\n\n Ok(try_to_str(value)?)\n\n } else {\n\n Err(Error::new_without_key(ErrorKind::MalformedValue {\n\n message: \"expected null terminator\".into(),\n\n }))\n\n }\n\n}\n\n\n", "file_path": "src/raw/mod.rs", "rank": 8, "score": 299642.04692874756 }, { "content": "/// Attempts to serialize a u64 as an i64. 
Errors if an exact conversion is not possible.\n\npub fn serialize_u64_as_i64<S: Serializer>(val: &u64, serializer: S) -> Result<S::Ok, S::Error> {\n\n match i64::try_from(*val) {\n\n Ok(val) => serializer.serialize_i64(val),\n\n Err(_) => Err(ser::Error::custom(format!(\"cannot convert {} to i64\", val))),\n\n }\n\n}\n\n\n", "file_path": "src/serde_helpers.rs", "rank": 9, "score": 292733.50286923087 }, { "content": "fn serialize_array<W: Write + ?Sized>(writer: &mut W, arr: &[Bson]) -> Result<()> {\n\n let mut buf = Vec::new();\n\n for (key, val) in arr.iter().enumerate() {\n\n serialize_bson(&mut buf, &key.to_string(), val)?;\n\n }\n\n\n\n write_i32(\n\n writer,\n\n (buf.len() + mem::size_of::<i32>() + mem::size_of::<u8>()) as i32,\n\n )?;\n\n writer.write_all(&buf)?;\n\n writer.write_all(b\"\\0\")?;\n\n Ok(())\n\n}\n\n\n\npub(crate) fn serialize_bson<W: Write + ?Sized>(\n\n writer: &mut W,\n\n key: &str,\n\n val: &Bson,\n\n) -> Result<()> {\n", "file_path": "src/ser/mod.rs", "rank": 11, "score": 287960.2111828432 }, { "content": "/// Given a u8 slice, return an i32 calculated from the first four bytes in\n\n/// little endian order.\n\nfn i32_from_slice(val: &[u8]) -> Result<i32> {\n\n let arr = val\n\n .get(0..4)\n\n .and_then(|s| s.try_into().ok())\n\n .ok_or_else(|| {\n\n Error::new_without_key(ErrorKind::MalformedValue {\n\n message: format!(\"expected 4 bytes to read i32, instead got {}\", val.len()),\n\n })\n\n })?;\n\n Ok(i32::from_le_bytes(arr))\n\n}\n\n\n", "file_path": "src/raw/mod.rs", "rank": 12, "score": 279075.11421344883 }, { "content": "fn usize_try_from_i32(i: i32) -> Result<usize> {\n\n usize::try_from(i).map_err(|e| {\n\n Error::new_without_key(ErrorKind::MalformedValue {\n\n message: e.to_string(),\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/raw/mod.rs", "rank": 13, "score": 268757.71319332195 }, { "content": "/// Attempts to serialize a u32 as an i32. 
Errors if an exact conversion is not possible.\n\npub fn serialize_u32_as_i32<S: Serializer>(val: &u32, serializer: S) -> Result<S::Ok, S::Error> {\n\n match i32::try_from(*val) {\n\n Ok(val) => serializer.serialize_i32(val),\n\n Err(_) => Err(ser::Error::custom(format!(\"cannot convert {} to i32\", val))),\n\n }\n\n}\n\n\n", "file_path": "src/serde_helpers.rs", "rank": 14, "score": 267813.79246200505 }, { "content": "/// Encode a `T` Serializable into a BSON `Value`.\n\npub fn to_bson<T: ?Sized>(value: &T) -> Result<Bson>\n\nwhere\n\n T: Serialize,\n\n{\n\n let ser = Serializer::new();\n\n value.serialize(ser)\n\n}\n\n\n", "file_path": "src/ser/mod.rs", "rank": 15, "score": 252190.06062011834 }, { "content": "/// Encode a `T` Serializable into a BSON `Document`.\n\npub fn to_document<T: ?Sized>(value: &T) -> Result<Document>\n\nwhere\n\n T: Serialize,\n\n{\n\n match to_bson(value)? {\n\n Bson::Document(doc) => Ok(doc),\n\n bson => Err(Error::SerializationError {\n\n message: format!(\n\n \"Could not be serialized to Document, got {:?} instead\",\n\n bson.element_type()\n\n ),\n\n }),\n\n }\n\n}\n\n\n\n/// Serialize the given `T` as a BSON byte vector.\n", "file_path": "src/ser/mod.rs", "rank": 16, "score": 252119.62373865576 }, { "content": "fn write_string<W: Write + ?Sized>(writer: &mut W, s: &str) -> Result<()> {\n\n writer.write_all(&(s.len() as i32 + 1).to_le_bytes())?;\n\n writer.write_all(s.as_bytes())?;\n\n writer.write_all(b\"\\0\")?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/ser/mod.rs", "rank": 17, "score": 251257.55377019825 }, { "content": "/// Serializes a u32 as an i64.\n\npub fn serialize_u32_as_i64<S: Serializer>(val: &u32, serializer: S) -> Result<S::Ok, S::Error> {\n\n serializer.serialize_i64(*val as i64)\n\n}\n\n\n", "file_path": "src/serde_helpers.rs", "rank": 19, "score": 243418.1144903599 }, { "content": "fn to_bytes(doc: &crate::Document) -> Vec<u8> {\n\n let mut docbytes = Vec::new();\n\n doc.to_writer(&mut docbytes).unwrap();\n\n 
docbytes\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 20, "score": 235160.5928211086 }, { "content": "fn try_to_str(data: &[u8]) -> Result<&str> {\n\n std::str::from_utf8(data).map_err(|e| Error::new_without_key(ErrorKind::Utf8EncodingError(e)))\n\n}\n\n\n", "file_path": "src/raw/mod.rs", "rank": 22, "score": 225052.5051557566 }, { "content": "fn write_cstring<W: Write + ?Sized>(writer: &mut W, s: &str) -> Result<()> {\n\n if s.contains('\\0') {\n\n return Err(Error::InvalidCString(s.into()));\n\n }\n\n writer.write_all(s.as_bytes())?;\n\n writer.write_all(b\"\\0\")?;\n\n Ok(())\n\n}\n\n\n\n#[inline]\n\npub(crate) fn write_i32<W: Write + ?Sized>(writer: &mut W, val: i32) -> Result<()> {\n\n writer\n\n .write_all(&val.to_le_bytes())\n\n .map(|_| ())\n\n .map_err(From::from)\n\n}\n\n\n", "file_path": "src/ser/mod.rs", "rank": 23, "score": 223156.53833628318 }, { "content": "#[inline]\n\nfn write_i64<W: Write + ?Sized>(writer: &mut W, val: i64) -> Result<()> {\n\n writer\n\n .write_all(&val.to_le_bytes())\n\n .map(|_| ())\n\n .map_err(From::from)\n\n}\n\n\n", "file_path": "src/ser/mod.rs", "rank": 24, "score": 218146.89534711005 }, { "content": "#[inline]\n\nfn write_f64<W: Write + ?Sized>(writer: &mut W, val: f64) -> Result<()> {\n\n writer\n\n .write_all(&val.to_le_bytes())\n\n .map(|_| ())\n\n .map_err(From::from)\n\n}\n\n\n", "file_path": "src/ser/mod.rs", "rank": 25, "score": 218146.89534711005 }, { "content": "/// Given a u8 slice, return an i32 calculated from the first four bytes in\n\n/// little endian order.\n\nfn f64_from_slice(val: &[u8]) -> Result<f64> {\n\n let arr = val\n\n .get(0..8)\n\n .and_then(|s| s.try_into().ok())\n\n .ok_or_else(|| {\n\n Error::new_without_key(ErrorKind::MalformedValue {\n\n message: format!(\"expected 8 bytes to read double, instead got {}\", val.len()),\n\n })\n\n })?;\n\n Ok(f64::from_le_bytes(arr))\n\n}\n\n\n", "file_path": "src/raw/mod.rs", "rank": 26, "score": 213523.62128841132 }, { "content": "/// Given an u8 
slice, return an i64 calculated from the first 8 bytes in\n\n/// little endian order.\n\nfn i64_from_slice(val: &[u8]) -> Result<i64> {\n\n let arr = val\n\n .get(0..8)\n\n .and_then(|s| s.try_into().ok())\n\n .ok_or_else(|| {\n\n Error::new_without_key(ErrorKind::MalformedValue {\n\n message: format!(\"expected 8 bytes to read i64, instead got {}\", val.len()),\n\n })\n\n })?;\n\n Ok(i64::from_le_bytes(arr))\n\n}\n\n\n", "file_path": "src/raw/mod.rs", "rank": 27, "score": 213519.36755820632 }, { "content": "fn convert_unsigned_to_signed<E>(value: u64) -> Result<Bson, E>\n\nwhere\n\n E: Error,\n\n{\n\n if let Ok(int32) = i32::try_from(value) {\n\n Ok(Bson::Int32(int32))\n\n } else if let Ok(int64) = i64::try_from(value) {\n\n Ok(Bson::Int64(int64))\n\n } else {\n\n Err(Error::custom(format!(\n\n \"cannot represent {} as a signed number\",\n\n value\n\n )))\n\n }\n\n}\n\n\n\n/// Serde Deserializer\n\npub struct Deserializer {\n\n value: Option<Bson>,\n\n}\n", "file_path": "src/de/serde.rs", "rank": 28, "score": 210361.1919644024 }, { "content": "#[inline]\n\nfn write_binary<W: Write>(mut writer: W, bytes: &[u8], subtype: BinarySubtype) -> Result<()> {\n\n let len = if let BinarySubtype::BinaryOld = subtype {\n\n bytes.len() + 4\n\n } else {\n\n bytes.len()\n\n };\n\n\n\n if len > MAX_BSON_SIZE as usize {\n\n return Err(Error::custom(format!(\n\n \"binary length {} exceeded maximum size\",\n\n bytes.len()\n\n )));\n\n }\n\n\n\n write_i32(&mut writer, len as i32)?;\n\n writer.write_all(&[subtype.into()])?;\n\n\n\n if let BinarySubtype::BinaryOld = subtype {\n\n write_i32(&mut writer, len as i32 - 4)?;\n\n };\n\n\n\n writer.write_all(bytes).map_err(From::from)\n\n}\n\n\n", "file_path": "src/ser/mod.rs", "rank": 30, "score": 202746.50092619093 }, { "content": "#[test]\n\nfn regex() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! 
{\n\n \"regex\": Bson::RegularExpression(Regex { pattern: String::from(r\"end\\s*$\"), options: String::from(\"i\")}),\n\n }).unwrap();\n\n let regex = rawdoc\n\n .get(\"regex\")\n\n .expect(\"error finding key regex\")\n\n .expect(\"no key regex\")\n\n .as_regex()\n\n .expect(\"was not regex\");\n\n assert_eq!(regex.pattern, r\"end\\s*$\");\n\n assert_eq!(regex.options, \"i\");\n\n}\n", "file_path": "src/raw/test/mod.rs", "rank": 31, "score": 197371.05057857506 }, { "content": "#[test]\n\nfn boolean() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! {\n\n \"boolean\": true,\n\n })\n\n .unwrap();\n\n\n\n let boolean = rawdoc\n\n .get(\"boolean\")\n\n .expect(\"error finding key boolean\")\n\n .expect(\"no key boolean\")\n\n .as_bool()\n\n .expect(\"result was not boolean\");\n\n\n\n assert!(boolean);\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 32, "score": 197349.30041926814 }, { "content": "#[test]\n\nfn timestamp() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! {\n\n \"timestamp\": Bson::Timestamp(Timestamp { time: 3542578, increment: 7 }),\n\n })\n\n .unwrap();\n\n let ts = rawdoc\n\n .get(\"timestamp\")\n\n .expect(\"error finding key timestamp\")\n\n .expect(\"no key timestamp\")\n\n .as_timestamp()\n\n .expect(\"was not a timestamp\");\n\n\n\n assert_eq!(ts.increment, 7);\n\n assert_eq!(ts.time, 3542578);\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 33, "score": 197333.62056577043 }, { "content": "#[test]\n\nfn string() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! {\"string\": \"hello\"}).unwrap();\n\n\n\n assert_eq!(\n\n rawdoc\n\n .get(\"string\")\n\n .expect(\"error finding key string\")\n\n .expect(\"no key string\")\n\n .as_str()\n\n .expect(\"result was not a string\"),\n\n \"hello\",\n\n );\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 34, "score": 197260.5207764741 }, { "content": "#[test]\n\nfn array() {\n\n let rawdoc = RawDocumentBuf::from_document(\n\n &doc! 
{ \"array\": [\"binary\", \"serialized\", \"object\", \"notation\"]},\n\n )\n\n .unwrap();\n\n\n\n let array = rawdoc\n\n .get(\"array\")\n\n .expect(\"error finding key array\")\n\n .expect(\"no key array\")\n\n .as_array()\n\n .expect(\"result was not an array\");\n\n assert_eq!(array.get_str(0), Ok(\"binary\"));\n\n assert_eq!(array.get_str(3), Ok(\"notation\"));\n\n assert_eq!(\n\n array.get_str(4).unwrap_err().kind,\n\n ValueAccessErrorKind::NotPresent\n\n );\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 35, "score": 197204.2044454901 }, { "content": "#[test]\n\nfn document() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! {\"document\": {}}).unwrap();\n\n\n\n let doc = rawdoc\n\n .get(\"document\")\n\n .expect(\"error finding key document\")\n\n .expect(\"no key document\")\n\n .as_document()\n\n .expect(\"result was not a document\");\n\n assert_eq!(doc.as_bytes(), [5u8, 0, 0, 0, 0].as_ref()); // Empty document\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 36, "score": 196502.63379949162 }, { "content": "#[test]\n\nfn string_from_document() {\n\n let docbytes = to_bytes(&doc! 
{\n\n \"this\": \"first\",\n\n \"that\": \"second\",\n\n \"something\": \"else\",\n\n });\n\n let rawdoc = RawDocument::new(&docbytes).unwrap();\n\n assert_eq!(\n\n rawdoc.get(\"that\").unwrap().unwrap().as_str().unwrap(),\n\n \"second\",\n\n );\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 37, "score": 194527.9890561763 }, { "content": "fn checked_add(lhs: usize, rhs: usize) -> Result<usize> {\n\n lhs.checked_add(rhs).ok_or_else(|| {\n\n Error::new_without_key(ErrorKind::MalformedValue {\n\n message: \"attempted to add with overflow\".to_string(),\n\n })\n\n })\n\n}\n", "file_path": "src/raw/mod.rs", "rank": 38, "score": 194412.71206478684 }, { "content": "/// Verifies the following:\n\n/// - deserializing a `T` from `expected_doc` produces `expected_value`\n\n/// - deserializing a `T` from the raw BSON version of `expected_doc` produces `expected_value`\n\n/// - deserializing a `Document` from the raw BSON version of `expected_doc` produces `expected_doc`\n\nfn run_deserialize_test<T>(expected_value: &T, expected_doc: &Document, description: &str)\n\nwhere\n\n T: DeserializeOwned + PartialEq + std::fmt::Debug,\n\n{\n\n let mut expected_bytes = Vec::new();\n\n expected_doc\n\n .to_writer(&mut expected_bytes)\n\n .expect(description);\n\n\n\n assert_eq!(\n\n &bson::from_document::<T>(expected_doc.clone()).expect(description),\n\n expected_value,\n\n \"{}\",\n\n description\n\n );\n\n assert_eq!(\n\n &bson::from_reader::<_, T>(expected_bytes.as_slice()).expect(description),\n\n expected_value,\n\n \"{}\",\n\n description\n\n );\n\n assert_eq!(\n\n &bson::from_reader::<_, Document>(expected_bytes.as_slice()).expect(description),\n\n expected_doc,\n\n \"{}\",\n\n description\n\n );\n\n}\n\n\n", "file_path": "serde-tests/test.rs", "rank": 40, "score": 176139.87736129016 }, { "content": "#[test]\n\nfn array() {\n\n #[derive(Serialize, Deserialize, PartialEq, Debug)]\n\n struct Foo {\n\n a: Vec<i32>,\n\n }\n\n\n\n let v = Foo {\n\n a: vec![1, 2, 3, 4],\n\n 
};\n\n let doc = doc! {\n\n \"a\": [1, 2, 3, 4],\n\n };\n\n run_test(&v, &doc, \"array\");\n\n}\n\n\n", "file_path": "serde-tests/test.rs", "rank": 41, "score": 165912.94781494976 }, { "content": "#[test]\n\nfn binary() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! {\n\n \"binary\": Binary { subtype: BinarySubtype::Generic, bytes: vec![1u8, 2, 3] }\n\n })\n\n .unwrap();\n\n let binary: bson::RawBinary<'_> = rawdoc\n\n .get(\"binary\")\n\n .expect(\"error finding key binary\")\n\n .expect(\"no key binary\")\n\n .as_binary()\n\n .expect(\"result was not a binary object\");\n\n assert_eq!(binary.subtype, BinarySubtype::Generic);\n\n assert_eq!(binary.bytes, &[1, 2, 3]);\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 42, "score": 165003.58901780075 }, { "content": "#[inline]\n\npub fn to_vec<T>(value: &T) -> Result<Vec<u8>>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut serializer = raw::Serializer::new();\n\n value.serialize(&mut serializer)?;\n\n Ok(serializer.into_vec())\n\n}\n", "file_path": "src/ser/mod.rs", "rank": 43, "score": 163705.8986459294 }, { "content": "#[test]\n\nfn application_deserialize_error() {\n\n #[derive(PartialEq, Debug)]\n\n struct Range10(usize);\n\n impl<'de> Deserialize<'de> for Range10 {\n\n fn deserialize<D: serde::Deserializer<'de>>(d: D) -> Result<Range10, D::Error> {\n\n let x: usize = Deserialize::deserialize(d)?;\n\n if x > 10 {\n\n Err(serde::de::Error::invalid_value(\n\n Unexpected::Unsigned(x as u64),\n\n &\"more than 10\",\n\n ))\n\n } else {\n\n Ok(Range10(x))\n\n }\n\n }\n\n }\n\n let d_good = Deserializer::new(Bson::Int64(5));\n\n let d_bad1 = Deserializer::new(Bson::String(\"not an isize\".to_string()));\n\n let d_bad2 = Deserializer::new(Bson::Int64(11));\n\n\n\n assert_eq!(\n\n Range10(5),\n\n Deserialize::deserialize(d_good).expect(\"deserialization should succeed\")\n\n );\n\n\n\n Range10::deserialize(d_bad1).expect_err(\"deserialization from string should fail\");\n\n 
Range10::deserialize(d_bad2).expect_err(\"deserialization from 11 should fail\");\n\n}\n\n\n", "file_path": "serde-tests/test.rs", "rank": 44, "score": 160344.03236501318 }, { "content": "#[test]\n\nfn into_bson_conversion() {\n\n let docbytes = to_bytes(&doc! {\n\n \"f64\": 2.5,\n\n \"string\": \"hello\",\n\n \"document\": {},\n\n \"array\": [\"binary\", \"serialized\", \"object\", \"notation\"],\n\n \"object_id\": ObjectId::from_bytes([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]),\n\n \"binary\": Binary { subtype: BinarySubtype::Generic, bytes: vec![1u8, 2, 3] },\n\n \"boolean\": false,\n\n });\n\n let rawbson = RawBson::Document(RawDocument::new(docbytes.as_slice()).unwrap());\n\n let b: Bson = rawbson.try_into().expect(\"invalid bson\");\n\n let doc = b.as_document().expect(\"not a document\");\n\n assert_eq!(*doc.get(\"f64\").expect(\"f64 not found\"), Bson::Double(2.5));\n\n assert_eq!(\n\n *doc.get(\"string\").expect(\"string not found\"),\n\n Bson::String(String::from(\"hello\"))\n\n );\n\n assert_eq!(\n\n *doc.get(\"document\").expect(\"document not found\"),\n", "file_path": "src/raw/test/mod.rs", "rank": 45, "score": 158753.18367854963 }, { "content": "#[test]\n\nfn document_iteration() {\n\n let doc = doc! 
{\n\n \"f64\": 2.5,\n\n \"string\": \"hello\",\n\n \"document\": {},\n\n \"array\": [\"binary\", \"serialized\", \"object\", \"notation\"],\n\n \"binary\": Binary { subtype: BinarySubtype::Generic, bytes: vec![1u8, 2, 3] },\n\n \"object_id\": ObjectId::from_bytes([1, 2, 3, 4, 5,6,7,8,9,10, 11,12]),\n\n \"boolean\": true,\n\n \"datetime\": DateTime::now(),\n\n \"null\": Bson::Null,\n\n \"regex\": Bson::RegularExpression(Regex { pattern: String::from(r\"end\\s*$\"), options: String::from(\"i\")}),\n\n \"javascript\": Bson::JavaScriptCode(String::from(\"console.log(console);\")),\n\n \"symbol\": Bson::Symbol(String::from(\"artist-formerly-known-as\")),\n\n \"javascript_with_scope\": Bson::JavaScriptCodeWithScope(JavaScriptCodeWithScope{ code: String::from(\"console.log(msg);\"), scope: doc!{\"ok\": true}}),\n\n \"int32\": 23i32,\n\n \"timestamp\": Bson::Timestamp(Timestamp { time: 3542578, increment: 0 }),\n\n \"int64\": 46i64,\n\n \"end\": \"END\",\n\n };\n", "file_path": "src/raw/test/mod.rs", "rank": 46, "score": 158712.78181275353 }, { "content": "#[test]\n\nfn nested_document() {\n\n let docbytes = to_bytes(&doc! 
{\n\n \"outer\": {\n\n \"inner\": \"surprise\",\n\n \"i64\": 6_i64,\n\n },\n\n });\n\n let rawdoc = RawDocument::new(&docbytes).unwrap();\n\n let subdoc = rawdoc\n\n .get(\"outer\")\n\n .expect(\"get doc result\")\n\n .expect(\"get doc option\")\n\n .as_document()\n\n .expect(\"as doc\");\n\n assert_eq!(\n\n subdoc\n\n .get(\"inner\")\n\n .expect(\"get str result\")\n\n .expect(\"get str option\")\n\n .as_str()\n", "file_path": "src/raw/test/mod.rs", "rank": 47, "score": 158712.78181275353 }, { "content": "/// Serializes an [`ObjectId`] as a hex string.\n\npub fn serialize_object_id_as_hex_string<S: Serializer>(\n\n val: &ObjectId,\n\n serializer: S,\n\n) -> Result<S::Ok, S::Error> {\n\n val.to_hex().serialize(serializer)\n\n}\n\n\n\n/// Contains functions to serialize a u32 as an f64 (BSON double) and deserialize a\n\n/// u32 from an f64 (BSON double).\n\n///\n\n/// ```rust\n\n/// # use serde::{Serialize, Deserialize};\n\n/// # use bson::serde_helpers::u32_as_f64;\n\n/// #[derive(Serialize, Deserialize)]\n\n/// struct FileInfo {\n\n/// #[serde(with = \"u32_as_f64\")]\n\n/// pub size_bytes: u32,\n\n/// }\n\n/// ```\n\npub mod u32_as_f64 {\n", "file_path": "src/serde_helpers.rs", "rank": 48, "score": 156038.08718922042 }, { "content": "#[test]\n\nfn test_serialize_deserialize_utf8_string() {\n\n let _guard = LOCK.run_concurrently();\n\n let src = \"test你好吗\".to_owned();\n\n let dst = vec![\n\n 28, 0, 0, 0, 2, 107, 101, 121, 0, 14, 0, 0, 0, 116, 101, 115, 116, 228, 189, 160, 229, 165,\n\n 189, 229, 144, 151, 0, 0,\n\n ];\n\n\n\n let doc = doc! 
{ \"key\": src };\n\n\n\n let mut buf = Vec::new();\n\n doc.to_writer(&mut buf).unwrap();\n\n\n\n assert_eq!(buf, dst);\n\n\n\n let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap();\n\n assert_eq!(deserialized, doc);\n\n}\n\n\n", "file_path": "src/tests/modules/serializer_deserializer.rs", "rank": 49, "score": 154049.04746385562 }, { "content": "#[test]\n\nfn test_deserialize_invalid_array_length() {\n\n let _guard = LOCK.run_concurrently();\n\n let buffer = b\"\\n\\x00\\x00\\x00\\x04\\x00\\x00\\x00\\x00\\x00\";\n\n Document::from_reader(&mut std::io::Cursor::new(buffer))\n\n .expect_err(\"expected deserialization to fail\");\n\n}\n\n\n\n/// [RUST-713](https://jira.mongodb.org/browse/RUST-713)\n", "file_path": "src/tests/modules/serializer_deserializer.rs", "rank": 50, "score": 154023.34188052823 }, { "content": "/// Verifies the following:\n\n/// - round trip `expected_value` through `Document`:\n\n/// - serializing the `expected_value` to a `Document` matches the `expected_doc`\n\n/// - deserializing from the serialized document produces `expected_value`\n\n/// - round trip through raw BSON:\n\n/// - deserializing a `T` from the raw BSON version of `expected_doc` produces `expected_value`\n\n/// - deserializing a `Document` from the raw BSON version of `expected_doc` produces\n\n/// `expected_doc`\n\n/// - `bson::to_writer` and `Document::to_writer` produce the same result given the same input\n\nfn run_test<T>(expected_value: &T, expected_doc: &Document, description: &str)\n\nwhere\n\n T: Serialize + DeserializeOwned + PartialEq + std::fmt::Debug,\n\n{\n\n let mut expected_bytes = Vec::new();\n\n expected_doc\n\n .to_writer(&mut expected_bytes)\n\n .expect(description);\n\n\n\n let expected_bytes_serde = bson::to_vec(&expected_value).expect(description);\n\n assert_eq!(expected_bytes_serde, expected_bytes, \"{}\", description);\n\n\n\n let expected_bytes_from_doc_serde = bson::to_vec(&expected_doc).expect(description);\n\n assert_eq!(\n\n 
expected_bytes_from_doc_serde, expected_bytes,\n\n \"{}\",\n\n description\n\n );\n\n\n\n let serialized_doc = bson::to_document(&expected_value).expect(description);\n", "file_path": "serde-tests/test.rs", "rank": 51, "score": 152937.20079662482 }, { "content": "#[test]\n\nfn test_deserialize_invalid_utf8_string_issue64() {\n\n let _guard = LOCK.run_concurrently();\n\n let buffer = b\"\\x13\\x00\\x00\\x00\\x02\\x01\\x00\\x00\\x00\\x00\\x00\\x00\\x00foo\\x00\\x13\\x05\\x00\\x00\\x00\";\n\n\n\n assert!(Document::from_reader(&mut Cursor::new(buffer)).is_err());\n\n}\n\n\n", "file_path": "src/tests/modules/serializer_deserializer.rs", "rank": 52, "score": 150079.46084869784 }, { "content": "#[test]\n\nfn test_deserialize_invalid_old_binary_length() {\n\n let _guard = LOCK.run_concurrently();\n\n let buffer = b\"\\x0F\\x00\\x00\\x00\\x05\\x00\\x00\\x00\\x00\\x00\\x02\\xFC\\xFF\\xFF\\xFF\";\n\n Document::from_reader(&mut std::io::Cursor::new(buffer))\n\n .expect_err(\"expected deserialization to fail\");\n\n\n\n let buffer = b\".\\x00\\x00\\x00\\x05\\x01\\x00\\x00\\x00\\x00\\x00\\x02\\xfc\\xff\\xff\\xff\\xff\\xff\\xff\\xff\\x00\\x00*\\x00h\\x0e\\x10++\\x00h\\x0e++\\x00\\x00\\t\\x00\\x00\\x00\\x00\\x00*\\x0e\\x10++\";\n\n Document::from_reader(&mut std::io::Cursor::new(buffer))\n\n .expect_err(\"expected deserialization to fail\");\n\n}\n", "file_path": "src/tests/modules/serializer_deserializer.rs", "rank": 53, "score": 150059.64434786295 }, { "content": "struct Decimal128Deserializer(Decimal128);\n\n\n\nimpl<'de> serde::de::Deserializer<'de> for Decimal128Deserializer {\n\n type Error = Error;\n\n\n\n fn deserialize_any<V>(self, visitor: V) -> Result<V::Value>\n\n where\n\n V: serde::de::Visitor<'de>,\n\n {\n\n visitor.visit_bytes(&self.0.bytes)\n\n }\n\n\n\n serde::forward_to_deserialize_any! 
{\n\n bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq\n\n bytes byte_buf map struct option unit newtype_struct\n\n ignored_any unit_struct tuple_struct tuple enum identifier\n\n }\n\n}\n\n\n", "file_path": "src/de/raw.rs", "rank": 54, "score": 145432.7539427409 }, { "content": "#[test]\n\nfn test_encode_decode_utf8_string_invalid() {\n\n let bytes = b\"\\x80\\xae\".to_vec();\n\n let src = unsafe { String::from_utf8_unchecked(bytes) };\n\n\n\n let doc = doc! { \"key\": src };\n\n\n\n let mut buf = Vec::new();\n\n doc.to_writer(&mut buf).unwrap();\n\n\n\n let expected = doc! { \"key\": \"��\" };\n\n let decoded = Document::from_reader_utf8_lossy(&mut Cursor::new(buf)).unwrap();\n\n assert_eq!(decoded, expected);\n\n}\n\n\n", "file_path": "src/tests/modules/serializer_deserializer.rs", "rank": 55, "score": 138551.01244173554 }, { "content": "#[test]\n\nfn test_serde_bytes() {\n\n let _guard = LOCK.run_concurrently();\n\n #[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]\n\n pub struct Foo {\n\n #[serde(with = \"serde_bytes\")]\n\n data: Vec<u8>,\n\n }\n\n\n\n let x = Foo {\n\n data: b\"12345abcde\".to_vec(),\n\n };\n\n\n\n let b = to_bson(&x).unwrap();\n\n assert_eq!(\n\n b.as_document().unwrap(),\n\n &doc! {\"data\": Bson::Binary(Binary { subtype: BinarySubtype::Generic, bytes: b\"12345abcde\".to_vec() })}\n\n );\n\n\n\n let f = from_bson::<Foo>(b).unwrap();\n\n assert_eq!(x, f);\n\n}\n\n\n", "file_path": "src/tests/serde.rs", "rank": 56, "score": 135532.93425735508 }, { "content": "#[test]\n\nfn table_array() {\n\n #[derive(Serialize, Deserialize, PartialEq, Debug)]\n\n struct Foo {\n\n a: Vec<Bar>,\n\n }\n\n #[derive(Serialize, Deserialize, PartialEq, Debug)]\n\n struct Bar {\n\n a: i32,\n\n }\n\n\n\n let v = Foo {\n\n a: vec![Bar { a: 1 }, Bar { a: 2 }],\n\n };\n\n let doc = doc! 
{\n\n \"a\": [{ \"a\": 1 }, { \"a\": 2 }]\n\n };\n\n run_test(&v, &doc, \"table_array\");\n\n}\n\n\n", "file_path": "serde-tests/test.rs", "rank": 57, "score": 129066.78056574118 }, { "content": "#[test]\n\nfn default_array() {\n\n #[derive(Serialize, Deserialize, PartialEq, Debug)]\n\n struct Foo {\n\n #[serde(default)]\n\n a: Vec<Bar>,\n\n }\n\n #[derive(Serialize, Deserialize, PartialEq, Debug)]\n\n struct Bar;\n\n\n\n let v = Foo { a: vec![] };\n\n let doc = doc! {};\n\n run_deserialize_test(&v, &doc, \"default_array\");\n\n}\n\n\n", "file_path": "serde-tests/test.rs", "rank": 58, "score": 129066.78056574118 }, { "content": "#[test]\n\nfn null_array() {\n\n #[derive(Serialize, Deserialize, PartialEq, Debug)]\n\n struct Foo {\n\n a: Option<Vec<Bar>>,\n\n }\n\n #[derive(Serialize, Deserialize, PartialEq, Debug)]\n\n struct Bar;\n\n\n\n let v = Foo { a: None };\n\n let doc = doc! {};\n\n run_deserialize_test(&v, &doc, \"null_array\");\n\n}\n\n\n", "file_path": "serde-tests/test.rs", "rank": 59, "score": 129066.78056574118 }, { "content": "#[test]\n\nfn empty_array() {\n\n #[derive(Serialize, Deserialize, PartialEq, Debug)]\n\n struct Foo {\n\n a: Option<Vec<Bar>>,\n\n }\n\n #[derive(Serialize, Deserialize, PartialEq, Debug)]\n\n struct Bar;\n\n\n\n let v = Foo { a: Some(vec![]) };\n\n let doc = doc! {\n\n \"a\": []\n\n };\n\n run_deserialize_test(&v, &doc, \"empty_array\");\n\n}\n\n\n", "file_path": "serde-tests/test.rs", "rank": 60, "score": 129066.78056574118 }, { "content": "#[test]\n\nfn missing_errors() {\n\n #[derive(Serialize, Deserialize, PartialEq, Debug)]\n\n struct Foo {\n\n bar: i32,\n\n }\n\n\n\n let doc = doc! 
{};\n\n\n\n bson::from_document::<Foo>(doc.clone()).unwrap_err();\n\n\n\n let mut bytes = Vec::new();\n\n doc.to_writer(&mut bytes).unwrap();\n\n\n\n bson::from_reader::<_, Foo>(bytes.as_slice()).unwrap_err();\n\n}\n\n\n", "file_path": "serde-tests/test.rs", "rank": 61, "score": 129041.58497535517 }, { "content": "#[test]\n\nfn symbol() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! {\n\n \"symbol\": Bson::Symbol(String::from(\"artist-formerly-known-as\")),\n\n })\n\n .unwrap();\n\n\n\n let symbol = rawdoc\n\n .get(\"symbol\")\n\n .expect(\"error finding key symbol\")\n\n .expect(\"no key symbol\")\n\n .as_symbol()\n\n .expect(\"was not symbol\");\n\n assert_eq!(symbol, \"artist-formerly-known-as\");\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 62, "score": 127776.5338055752 }, { "content": "#[test]\n\nfn int64() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! {\n\n \"int64\": 46i64,\n\n })\n\n .unwrap();\n\n let int64 = rawdoc\n\n .get(\"int64\")\n\n .expect(\"error finding key int64\")\n\n .expect(\"no key int64\")\n\n .as_i64()\n\n .expect(\"was not int64\");\n\n assert_eq!(int64, 46i64);\n\n}\n", "file_path": "src/raw/test/mod.rs", "rank": 63, "score": 127776.5338055752 }, { "content": "#[test]\n\nfn int32() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! {\n\n \"int32\": 23i32,\n\n })\n\n .unwrap();\n\n let int32 = rawdoc\n\n .get(\"int32\")\n\n .expect(\"error finding key int32\")\n\n .expect(\"no key int32\")\n\n .as_i32()\n\n .expect(\"was not int32\");\n\n assert_eq!(int32, 23i32);\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 64, "score": 127776.5338055752 }, { "content": "#[test]\n\nfn javascript() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! 
{\n\n \"javascript\": Bson::JavaScriptCode(String::from(\"console.log(console);\")),\n\n })\n\n .unwrap();\n\n let js = rawdoc\n\n .get(\"javascript\")\n\n .expect(\"error finding key javascript\")\n\n .expect(\"no key javascript\")\n\n .as_javascript()\n\n .expect(\"was not javascript\");\n\n assert_eq!(js, \"console.log(console);\");\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 65, "score": 127776.5338055752 }, { "content": "#[test]\n\nfn f64() {\n\n #![allow(clippy::float_cmp)]\n\n\n\n let rawdoc = RawDocumentBuf::from_document(&doc! { \"f64\": 2.5 }).unwrap();\n\n assert_eq!(\n\n rawdoc\n\n .get(\"f64\")\n\n .expect(\"error finding key f64\")\n\n .expect(\"no key f64\")\n\n .as_f64()\n\n .expect(\"result was not a f64\"),\n\n 2.5,\n\n );\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 66, "score": 127776.5338055752 }, { "content": "#[test]\n\nfn null() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! {\n\n \"null\": null,\n\n })\n\n .unwrap();\n\n let () = rawdoc\n\n .get(\"null\")\n\n .expect(\"error finding key null\")\n\n .expect(\"no key null\")\n\n .as_null()\n\n .expect(\"was not null\");\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 67, "score": 127776.5338055752 }, { "content": "#[test]\n\nfn iterate() {\n\n let docbytes = to_bytes(&doc! 
{\n\n \"apples\": \"oranges\",\n\n \"peanut butter\": \"chocolate\",\n\n \"easy as\": {\"do\": 1, \"re\": 2, \"mi\": 3},\n\n });\n\n let rawdoc = RawDocument::new(&docbytes).expect(\"malformed bson document\");\n\n let mut dociter = rawdoc.into_iter();\n\n let next = dociter.next().expect(\"no result\").expect(\"invalid bson\");\n\n assert_eq!(next.0, \"apples\");\n\n assert_eq!(next.1.as_str().expect(\"result was not a str\"), \"oranges\");\n\n let next = dociter.next().expect(\"no result\").expect(\"invalid bson\");\n\n assert_eq!(next.0, \"peanut butter\");\n\n assert_eq!(next.1.as_str().expect(\"result was not a str\"), \"chocolate\");\n\n let next = dociter.next().expect(\"no result\").expect(\"invalid bson\");\n\n assert_eq!(next.0, \"easy as\");\n\n let _doc = next.1.as_document().expect(\"result was a not a document\");\n\n let next = dociter.next();\n\n assert!(next.is_none());\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 68, "score": 127776.5338055752 }, { "content": "#[test]\n\nfn datetime() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! {\n\n \"boolean\": true,\n\n \"datetime\": DateTime::from_chrono(Utc.ymd(2000,10,31).and_hms(12, 30, 45)),\n\n })\n\n .unwrap();\n\n let datetime = rawdoc\n\n .get(\"datetime\")\n\n .expect(\"error finding key datetime\")\n\n .expect(\"no key datetime\")\n\n .as_datetime()\n\n .expect(\"result was not datetime\");\n\n assert_eq!(datetime.to_rfc3339(), \"2000-10-31T12:30:45Z\");\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 69, "score": 127776.5338055752 }, { "content": "#[test]\n\nfn test_serialize_deserialize_boolean() {\n\n let _guard = LOCK.run_concurrently();\n\n let src = true;\n\n let dst = vec![11, 0, 0, 0, 8, 107, 101, 121, 0, 1, 0];\n\n\n\n let doc = doc! 
{ \"key\": src };\n\n\n\n let mut buf = Vec::new();\n\n doc.to_writer(&mut buf).unwrap();\n\n\n\n assert_eq!(buf, dst);\n\n\n\n let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap();\n\n assert_eq!(deserialized, doc);\n\n}\n\n\n", "file_path": "src/tests/modules/serializer_deserializer.rs", "rank": 70, "score": 126246.69905635175 }, { "content": "#[test]\n\nfn test_serialize_deserialize_i32() {\n\n let _guard = LOCK.run_concurrently();\n\n let src = 100i32;\n\n let dst = vec![14, 0, 0, 0, 16, 107, 101, 121, 0, 100, 0, 0, 0, 0];\n\n\n\n let doc = doc! { \"key\": src };\n\n\n\n let mut buf = Vec::new();\n\n doc.to_writer(&mut buf).unwrap();\n\n\n\n assert_eq!(buf, dst);\n\n\n\n let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap();\n\n assert_eq!(deserialized, doc);\n\n}\n\n\n", "file_path": "src/tests/modules/serializer_deserializer.rs", "rank": 71, "score": 126235.84724711187 }, { "content": "#[test]\n\nfn test_serialize_deserialize_decimal128() {\n\n let _guard = LOCK.run_concurrently();\n\n let val = Bson::Decimal128(Decimal128 {\n\n bytes: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 34],\n\n });\n\n let dst = vec![\n\n 26, 0, 0, 0, 19, 107, 101, 121, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 34, 0,\n\n ];\n\n\n\n let doc = doc! { \"key\": val };\n\n\n\n let mut buf = Vec::new();\n\n doc.to_writer(&mut buf).unwrap();\n\n\n\n assert_eq!(buf, dst);\n\n\n\n let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap();\n\n assert_eq!(deserialized, doc);\n\n}\n\n\n", "file_path": "src/tests/modules/serializer_deserializer.rs", "rank": 72, "score": 126235.84724711187 }, { "content": "#[test]\n\nfn test_serialize_deserialize_timestamp() {\n\n let _guard = LOCK.run_concurrently();\n\n let src = Bson::Timestamp(Timestamp {\n\n time: 0,\n\n increment: 100,\n\n });\n\n let dst = vec![\n\n 18, 0, 0, 0, 17, 107, 101, 121, 0, 100, 0, 0, 0, 0, 0, 0, 0, 0,\n\n ];\n\n\n\n let doc = doc! 
{ \"key\": src };\n\n\n\n let mut buf = Vec::new();\n\n doc.to_writer(&mut buf).unwrap();\n\n\n\n assert_eq!(buf, dst);\n\n\n\n let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap();\n\n assert_eq!(deserialized, doc);\n\n}\n\n\n", "file_path": "src/tests/modules/serializer_deserializer.rs", "rank": 73, "score": 126209.06917930511 }, { "content": "#[test]\n\nfn test_serialize_deserialize_array() {\n\n let _guard = LOCK.run_concurrently();\n\n let src = vec![Bson::Double(1.01), Bson::String(\"xyz\".to_owned())];\n\n let dst = vec![\n\n 37, 0, 0, 0, 4, 107, 101, 121, 0, 27, 0, 0, 0, 1, 48, 0, 41, 92, 143, 194, 245, 40, 240,\n\n 63, 2, 49, 0, 4, 0, 0, 0, 120, 121, 122, 0, 0, 0,\n\n ];\n\n\n\n let doc = doc! { \"key\": src };\n\n\n\n let mut buf = Vec::new();\n\n doc.to_writer(&mut buf).unwrap();\n\n\n\n assert_eq!(buf, dst);\n\n\n\n let deserialized = Document::from_reader(&mut Cursor::new(buf)).unwrap();\n\n assert_eq!(deserialized, doc);\n\n}\n\n\n", "file_path": "src/tests/modules/serializer_deserializer.rs", "rank": 74, "score": 126144.59928861225 }, { "content": "#[test]\n\nfn test_serialize_deserialize_document() {\n\n let _guard = LOCK.run_concurrently();\n\n\n\n #[derive(Debug, Deserialize, Serialize, PartialEq)]\n\n struct Point {\n\n x: i32,\n\n y: i32,\n\n }\n\n let src = Point { x: 1, y: 2 };\n\n\n\n let doc = to_document(&src).unwrap();\n\n assert_eq!(doc, doc! 
{ \"x\": 1, \"y\": 2 });\n\n\n\n let point: Point = from_document(doc).unwrap();\n\n assert_eq!(src, point);\n\n\n\n #[derive(Debug, Deserialize, Serialize, PartialEq)]\n\n struct Line {\n\n p1: Point,\n\n p2: Point,\n", "file_path": "src/tests/modules/serializer_deserializer.rs", "rank": 75, "score": 125607.7102313983 }, { "content": "#[test]\n\nfn timestamp_ordering() {\n\n let _guard = LOCK.run_concurrently();\n\n let ts1 = Timestamp {\n\n time: 0,\n\n increment: 1,\n\n };\n\n let ts2 = Timestamp {\n\n time: 0,\n\n increment: 2,\n\n };\n\n let ts3 = Timestamp {\n\n time: 1,\n\n increment: 0,\n\n };\n\n assert!(ts1 < ts2);\n\n assert!(ts1 < ts3);\n\n assert!(ts2 < ts3);\n\n}\n\n\n", "file_path": "src/tests/modules/bson.rs", "rank": 76, "score": 125573.15322231976 }, { "content": "#[test]\n\nfn arr() {\n\n let _guard = LOCK.run_concurrently();\n\n let obj = Bson::Array(vec![\n\n Bson::Int32(0),\n\n Bson::Int32(1),\n\n Bson::Int32(2),\n\n Bson::Int32(3),\n\n ]);\n\n let arr: Vec<i32> = from_bson(obj.clone()).unwrap();\n\n assert_eq!(arr, vec![0i32, 1i32, 2i32, 3i32]);\n\n\n\n let deser: Bson = to_bson(&arr).unwrap();\n\n assert_eq!(deser, obj);\n\n}\n\n\n", "file_path": "src/tests/modules/ser.rs", "rank": 77, "score": 125525.67201150142 }, { "content": "#[test]\n\nfn boolean() {\n\n let _guard = LOCK.run_concurrently();\n\n let obj = Bson::Boolean(true);\n\n let b: bool = from_bson(obj.clone()).unwrap();\n\n assert!(b);\n\n\n\n let deser: Bson = to_bson(&b).unwrap();\n\n assert_eq!(deser, obj);\n\n}\n\n\n", "file_path": "src/tests/modules/ser.rs", "rank": 78, "score": 125519.1797743224 }, { "content": "#[test]\n\nfn string() {\n\n let _guard = LOCK.run_concurrently();\n\n let obj = Bson::String(\"avocado\".to_owned());\n\n let s: String = from_bson(obj.clone()).unwrap();\n\n assert_eq!(s, \"avocado\");\n\n\n\n let deser: Bson = to_bson(&s).unwrap();\n\n assert_eq!(obj, deser);\n\n}\n\n\n", "file_path": "src/tests/modules/ser.rs", "rank": 79, "score": 
125430.4001315284 }, { "content": "#[test]\n\nfn document_default() {\n\n let _guard = LOCK.run_concurrently();\n\n let doc1 = Document::default();\n\n assert_eq!(doc1.keys().count(), 0);\n\n assert_eq!(doc1, Document::new());\n\n}\n\n\n", "file_path": "src/tests/modules/bson.rs", "rank": 80, "score": 124901.04444259244 }, { "content": "#[test]\n\nfn test_de_regex() {\n\n let _guard = LOCK.run_concurrently();\n\n use bson::Regex;\n\n\n\n #[derive(Deserialize, PartialEq, Debug)]\n\n struct Foo {\n\n regex: Regex,\n\n }\n\n\n\n let regex = Regex {\n\n pattern: \"12\".into(),\n\n options: \"01\".into(),\n\n };\n\n\n\n let foo: Foo = from_bson(Bson::Document(doc! {\n\n \"regex\": Bson::RegularExpression(regex.clone()),\n\n }))\n\n .unwrap();\n\n\n\n assert_eq!(foo.regex, regex);\n\n}\n\n\n", "file_path": "src/tests/serde.rs", "rank": 81, "score": 124899.40089391646 }, { "content": "#[test]\n\nfn test_ser_regex() {\n\n let _guard = LOCK.run_concurrently();\n\n use bson::Regex;\n\n\n\n #[derive(Serialize, Deserialize, PartialEq, Debug)]\n\n struct Foo {\n\n regex: Regex,\n\n }\n\n\n\n let regex = Regex {\n\n pattern: \"12\".into(),\n\n options: \"01\".into(),\n\n };\n\n\n\n let foo = Foo {\n\n regex: regex.clone(),\n\n };\n\n\n\n let x = to_bson(&foo).unwrap();\n\n assert_eq!(\n\n x.as_document().unwrap(),\n\n &doc! { \"regex\": Bson::RegularExpression(regex) }\n\n );\n\n\n\n let xfoo: Foo = from_bson(x).unwrap();\n\n assert_eq!(xfoo, foo);\n\n}\n\n\n", "file_path": "src/tests/serde.rs", "rank": 82, "score": 124899.40089391646 }, { "content": "#[test]\n\nfn test_byte_vec() {\n\n let _guard = LOCK.run_concurrently();\n\n #[derive(Serialize, Debug, Eq, PartialEq)]\n\n pub struct AuthChallenge<'a> {\n\n #[serde(with = \"serde_bytes\")]\n\n pub challenge: &'a [u8],\n\n }\n\n\n\n let x = AuthChallenge {\n\n challenge: b\"18762b98b7c34c25bf9dc3154e4a5ca3\",\n\n };\n\n\n\n let b = to_bson(&x).unwrap();\n\n assert_eq!(\n\n b,\n\n Bson::Document(\n\n doc! 
{ \"challenge\": (Bson::Binary(Binary { subtype: BinarySubtype::Generic, bytes: x.challenge.to_vec() }))}\n\n )\n\n );\n\n\n\n // let mut buf = Vec::new();\n\n // b.as_document().unwrap().to_writer(&mut buf).unwrap();\n\n\n\n // let xb = Document::from_reader(&mut Cursor::new(buf)).unwrap();\n\n // assert_eq!(b.as_document().unwrap(), &xb);\n\n}\n\n\n", "file_path": "src/tests/serde.rs", "rank": 83, "score": 124875.39424294567 }, { "content": "#[test]\n\nfn test_timestamp_helpers() {\n\n let _guard = LOCK.run_concurrently();\n\n\n\n #[derive(Deserialize, Serialize)]\n\n struct A {\n\n #[serde(with = \"u32_as_timestamp\")]\n\n pub time: u32,\n\n }\n\n\n\n let time = 12345;\n\n let a = A { time };\n\n let doc = to_document(&a).unwrap();\n\n let timestamp = doc.get_timestamp(\"time\").unwrap();\n\n assert_eq!(timestamp.time, time);\n\n assert_eq!(timestamp.increment, 0);\n\n let a: A = from_document(doc).unwrap();\n\n assert_eq!(a.time, time);\n\n\n\n #[derive(Deserialize, Serialize)]\n\n struct B {\n", "file_path": "src/tests/serde.rs", "rank": 84, "score": 124863.43876044312 }, { "content": "#[test]\n\nfn test_ser_timestamp() {\n\n let _guard = LOCK.run_concurrently();\n\n use bson::Timestamp;\n\n\n\n #[derive(Serialize, Deserialize, Eq, PartialEq, Debug)]\n\n struct Foo {\n\n ts: Timestamp,\n\n }\n\n\n\n let foo = Foo {\n\n ts: Timestamp {\n\n time: 12,\n\n increment: 10,\n\n },\n\n };\n\n\n\n let x = to_bson(&foo).unwrap();\n\n assert_eq!(\n\n x.as_document().unwrap(),\n\n &doc! 
{ \"ts\": Bson::Timestamp(Timestamp { time: 0x0000_000C, increment: 0x0000_000A }) }\n\n );\n\n\n\n let xfoo: Foo = from_bson(x).unwrap();\n\n assert_eq!(xfoo, foo);\n\n}\n\n\n", "file_path": "src/tests/serde.rs", "rank": 85, "score": 124863.43876044312 }, { "content": "#[test]\n\nfn test_de_timestamp() {\n\n let _guard = LOCK.run_concurrently();\n\n use bson::Timestamp;\n\n\n\n #[derive(Deserialize, Eq, PartialEq, Debug)]\n\n struct Foo {\n\n ts: Timestamp,\n\n }\n\n\n\n let foo: Foo = from_bson(Bson::Document(doc! {\n\n \"ts\": Bson::Timestamp(Timestamp { time: 0x0000_000C, increment: 0x0000_000A }),\n\n }))\n\n .unwrap();\n\n\n\n assert_eq!(\n\n foo.ts,\n\n Timestamp {\n\n time: 12,\n\n increment: 10\n\n }\n\n );\n\n}\n\n\n", "file_path": "src/tests/serde.rs", "rank": 86, "score": 124863.43876044312 }, { "content": "#[test]\n\nfn oid_as_hex_string() {\n\n let _guard = LOCK.run_concurrently();\n\n\n\n #[derive(Serialize)]\n\n struct Foo {\n\n #[serde(serialize_with = \"serialize_object_id_as_hex_string\")]\n\n oid: ObjectId,\n\n }\n\n\n\n let oid = ObjectId::new();\n\n let foo = Foo { oid };\n\n let doc = to_document(&foo).unwrap();\n\n assert_eq!(doc.get_str(\"oid\").unwrap(), oid.to_hex());\n\n}\n", "file_path": "src/tests/serde.rs", "rank": 87, "score": 124845.49178601397 }, { "content": "#[test]\n\nfn rawdoc_to_doc() {\n\n let docbytes = to_bytes(&doc! 
{\n\n \"f64\": 2.5,\n\n \"string\": \"hello\",\n\n \"document\": {},\n\n \"array\": [\"binary\", \"serialized\", \"object\", \"notation\"],\n\n \"binary\": Binary { subtype: BinarySubtype::Generic, bytes: vec![1, 2, 3] },\n\n \"object_id\": ObjectId::from_bytes([1, 2, 3, 4, 5,6,7,8,9,10, 11,12]),\n\n \"boolean\": true,\n\n \"datetime\": DateTime::now(),\n\n \"null\": Bson::Null,\n\n \"regex\": Bson::RegularExpression(Regex { pattern: String::from(r\"end\\s*$\"), options: String::from(\"i\")}),\n\n \"javascript\": Bson::JavaScriptCode(String::from(\"console.log(console);\")),\n\n \"symbol\": Bson::Symbol(String::from(\"artist-formerly-known-as\")),\n\n \"javascript_with_scope\": Bson::JavaScriptCodeWithScope(JavaScriptCodeWithScope{ code: String::from(\"console.log(msg);\"), scope: doc!{\"ok\": true}}),\n\n \"int32\": 23i32,\n\n \"timestamp\": Bson::Timestamp(Timestamp { time: 3542578, increment: 0 }),\n\n \"int64\": 46i64,\n\n \"end\": \"END\",\n\n });\n", "file_path": "src/raw/test/mod.rs", "rank": 88, "score": 123551.73637462893 }, { "content": "#[test]\n\nfn object_id() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! {\n\n \"object_id\": ObjectId::from_bytes([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]),\n\n })\n\n .unwrap();\n\n let oid = rawdoc\n\n .get(\"object_id\")\n\n .expect(\"error finding key object_id\")\n\n .expect(\"no key object_id\")\n\n .as_object_id()\n\n .expect(\"result was not an object id\");\n\n assert_eq!(oid.to_hex(), \"0102030405060708090a0b0c\");\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 89, "score": 123551.73637462893 }, { "content": "#[test]\n\nfn javascript_with_scope() {\n\n let rawdoc = RawDocumentBuf::from_document(&doc! {\n\n \"javascript_with_scope\": Bson::JavaScriptCodeWithScope(JavaScriptCodeWithScope {\n\n code: String::from(\"console.log(msg);\"),\n\n scope: doc! 
{ \"ok\": true }\n\n }),\n\n })\n\n .unwrap();\n\n let js_with_scope = rawdoc\n\n .get(\"javascript_with_scope\")\n\n .expect(\"error finding key javascript_with_scope\")\n\n .expect(\"no key javascript_with_scope\")\n\n .as_javascript_with_scope()\n\n .expect(\"was not javascript with scope\");\n\n assert_eq!(js_with_scope.code(), \"console.log(msg);\");\n\n let (scope_key, scope_value_bson) = js_with_scope\n\n .scope()\n\n .into_iter()\n\n .next()\n\n .expect(\"no next value in scope\")\n\n .expect(\"invalid element\");\n\n assert_eq!(scope_key, \"ok\");\n\n let scope_value = scope_value_bson.as_bool().expect(\"not a boolean\");\n\n assert!(scope_value);\n\n}\n\n\n", "file_path": "src/raw/test/mod.rs", "rank": 90, "score": 123551.73637462893 }, { "content": "#[test]\n\nfn byte_string_oid() {\n\n let _guard = LOCK.run_concurrently();\n\n let s = \"541b1a00e8a23afa832b218e\";\n\n let oid_res = ObjectId::parse_str(s);\n\n assert!(oid_res.is_ok());\n\n let oid = oid_res.unwrap();\n\n let bytes: [u8; 12] = [\n\n 0x54u8, 0x1Bu8, 0x1Au8, 0x00u8, 0xE8u8, 0xA2u8, 0x3Au8, 0xFAu8, 0x83u8, 0x2Bu8, 0x21u8,\n\n 0x8Eu8,\n\n ];\n\n\n\n assert_eq!(bytes, oid.bytes());\n\n assert_eq!(s, oid.to_string());\n\n}\n\n\n", "file_path": "src/tests/modules/oid.rs", "rank": 91, "score": 122193.03718921932 }, { "content": "#[test]\n\nfn test_de_oid_string() {\n\n let _guard = LOCK.run_concurrently();\n\n\n\n #[derive(Debug, Deserialize)]\n\n struct Foo {\n\n pub oid: ObjectId,\n\n }\n\n\n\n let foo: Foo = serde_json::from_str(\"{ \\\"oid\\\": \\\"507f1f77bcf86cd799439011\\\" }\").unwrap();\n\n let oid = ObjectId::parse_str(\"507f1f77bcf86cd799439011\").unwrap();\n\n assert_eq!(foo.oid, oid);\n\n}\n\n\n", "file_path": "src/tests/serde.rs", "rank": 92, "score": 120890.73235071982 }, { "content": "#[test]\n\nfn test_binary_generic_roundtrip() {\n\n let _guard = LOCK.run_concurrently();\n\n #[derive(Serialize, Deserialize, Debug, PartialEq)]\n\n pub struct Foo {\n\n data: Bson,\n\n }\n\n\n\n 
let x = Foo {\n\n data: Bson::Binary(Binary {\n\n subtype: BinarySubtype::Generic,\n\n bytes: b\"12345abcde\".to_vec(),\n\n }),\n\n };\n\n\n\n let b = to_bson(&x).unwrap();\n\n assert_eq!(\n\n b.as_document().unwrap(),\n\n &doc! {\"data\": Bson::Binary(Binary { subtype: BinarySubtype::Generic, bytes: b\"12345abcde\".to_vec() })}\n\n );\n\n\n\n let f = from_bson::<Foo>(b).unwrap();\n\n assert_eq!(x, f);\n\n}\n\n\n", "file_path": "src/tests/serde.rs", "rank": 93, "score": 120844.47438501054 }, { "content": "#[test]\n\nfn cstring_null_bytes_error() {\n\n let _guard = LOCK.run_concurrently();\n\n\n\n let doc = doc! { \"\\0\": \"a\" };\n\n verify_doc(doc);\n\n\n\n let doc = doc! { \"a\": { \"\\0\": \"b\" } };\n\n verify_doc(doc);\n\n\n\n let regex = doc! { \"regex\": Regex { pattern: \"\\0\".into(), options: \"a\".into() } };\n\n verify_doc(regex);\n\n\n\n let regex = doc! { \"regex\": Regex { pattern: \"a\".into(), options: \"\\0\".into() } };\n\n verify_doc(regex);\n\n\n\n fn verify_doc(doc: Document) {\n\n let mut vec = Vec::new();\n\n assert!(matches!(\n\n doc.to_writer(&mut vec).unwrap_err(),\n\n ser::Error::InvalidCString(_)\n\n ));\n\n assert!(matches!(\n\n to_vec(&doc).unwrap_err(),\n\n ser::Error::InvalidCString(_)\n\n ));\n\n }\n\n}\n", "file_path": "src/tests/modules/ser.rs", "rank": 94, "score": 118405.92598838717 }, { "content": "#[test]\n\nfn test_illegal_size() {\n\n let _guard = LOCK.run_concurrently();\n\n let buffer = [\n\n 0x06, 0xcc, 0xf9, 0x0a, 0x05, 0x00, 0x00, 0x03, 0x00, 0xff, 0xff,\n\n ];\n\n assert!(Document::from_reader(&mut Cursor::new(&buffer[..])).is_err());\n\n}\n\n\n", "file_path": "src/tests/modules/serializer_deserializer.rs", "rank": 95, "score": 118235.35188894108 }, { "content": "#[test]\n\nfn test_display_regex_type() {\n\n let x = Regex {\n\n pattern: String::from(\"pattern\"),\n\n options: String::from(\"options\"),\n\n };\n\n let output = \"/pattern/options\";\n\n assert_eq!(format!(\"{}\", x), output);\n\n 
assert_eq!(format!(\"{}\", Bson::from(x)), output);\n\n}\n\n\n", "file_path": "src/tests/modules/bson.rs", "rank": 96, "score": 117931.68432646662 }, { "content": "#[test]\n\nfn test_display_timestamp_type() {\n\n let x = Timestamp {\n\n time: 100,\n\n increment: 200,\n\n };\n\n let output = \"Timestamp(100, 200)\";\n\n assert_eq!(format!(\"{}\", x), output);\n\n assert_eq!(format!(\"{}\", Bson::from(x)), output);\n\n}\n\n\n", "file_path": "src/tests/modules/bson.rs", "rank": 97, "score": 117898.33767985336 }, { "content": "#[test]\n\nfn test_display_binary_type() {\n\n let encoded_bytes = \"aGVsbG8gd29ybGQ=\";\n\n let bytes = base64::decode(encoded_bytes).unwrap();\n\n let x = Binary {\n\n subtype: BinarySubtype::Generic,\n\n bytes,\n\n };\n\n let output = format!(\"Binary(0x0, {})\", encoded_bytes);\n\n assert_eq!(format!(\"{}\", x), output);\n\n assert_eq!(format!(\"{}\", Bson::from(x)), output);\n\n}\n\n\n", "file_path": "src/tests/modules/bson.rs", "rank": 98, "score": 117837.12014941969 }, { "content": "#[test]\n\nfn test_binary_non_generic_roundtrip() {\n\n let _guard = LOCK.run_concurrently();\n\n #[derive(Serialize, Deserialize, Debug, PartialEq)]\n\n pub struct Foo {\n\n data: Bson,\n\n }\n\n\n\n let x = Foo {\n\n data: Bson::Binary(Binary {\n\n subtype: BinarySubtype::BinaryOld,\n\n bytes: b\"12345abcde\".to_vec(),\n\n }),\n\n };\n\n\n\n let b = to_bson(&x).unwrap();\n\n assert_eq!(\n\n b.as_document().unwrap(),\n\n &doc! {\"data\": Bson::Binary(Binary { subtype: BinarySubtype::BinaryOld, bytes: b\"12345abcde\".to_vec() })}\n\n );\n\n\n\n let f = from_bson::<Foo>(b).unwrap();\n\n assert_eq!(x, f);\n\n}\n\n\n", "file_path": "src/tests/serde.rs", "rank": 99, "score": 117179.02244856826 } ]
Rust
src/shared/crossterm.rs
jojolepro/crossterm
f4d2ab4feb520e687540e8917793f4fb32f0fe34
use super::super::cursor; use super::super::style; use super::super::terminal::terminal; use Context; use std::fmt::Display; use std::mem; use std::rc::Rc; use std::sync::Arc; use std::convert::From; pub struct Crossterm { context: Rc<Context> } impl From<Rc<Context>> for Crossterm { fn from(context: Rc<Context>) -> Self { return Crossterm { context: context } } } impl Crossterm { pub fn new() -> Crossterm { return Crossterm { context: Context::new() }; } pub fn terminal(&self) -> terminal::Terminal { return terminal::Terminal::new(self.context.clone()); } pub fn cursor(&self) -> cursor::TerminalCursor { return cursor::TerminalCursor::new(self.context.clone()) } pub fn color(&self) -> style::TerminalColor { return style::TerminalColor::new(self.context.clone()); } pub fn paint<'a, D: Display>(&'a self, value: D) -> style::StyledObject<D> { self.terminal().paint(value) } pub fn write<D: Display>(&self, value: D) { self.terminal().write(value) } pub fn context(&self) -> Rc<Context> { self.context.clone() } }
use super::super::cursor; use super::super::style; use super::super::terminal::terminal; use Context; use std::fmt::Display; use std::mem; use std::rc::Rc; use std::sync::Arc; use std::convert::From; pub struct Crossterm { context: Rc<Context> } impl From<Rc<Context>> for Crossterm { fn from(context: Rc<Context>) -> Self { return Crossterm { context: context } } } impl Crossterm { pub fn new() -> Crossterm { return Crossterm { context: Context::new() }; } pub fn terminal(&self) -> terminal::Terminal { return terminal::Terminal::new(self.context.clone()); } pub fn cursor(&self) -> cursor::TerminalCursor { return cursor::TerminalCursor::new(self.context.clone()) } pub fn color(&self) -> style::TerminalColor { return style::TerminalColor::new(self.context.clone());
paint<'a, D: Display>(&'a self, value: D) -> style::StyledObject<D> { self.terminal().paint(value) } pub fn write<D: Display>(&self, value: D) { self.terminal().write(value) } pub fn context(&self) -> Rc<Context> { self.context.clone() } }
} pub fn
random
[ { "content": "/// print wait screen on alternate screen, then swich back.\n\npub fn print_wait_screen_on_alternate_window(context: Rc<Context>) {\n\n // create scope. If this scope ends the screen will be switched back to mainscreen.\n\n // because `AlternateScreen` switches back to main screen when switching back.\n\n {\n\n // create new alternate screen instance and switch to the alternate screen.\n\n let mut screen = AlternateScreen::from(context.clone());\n\n\n\n write!(screen, \"test\");\n\n println!();\n\n // Print the wait screen.\n\n print_wait_screen(context.clone());\n\n }\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/terminal/alternate_screen.rs", "rank": 0, "score": 193786.54031265096 }, { "content": "/// paint text with colors using `Crossterm` | demonstration.\n\npub fn use_crossterm_paint()\n\n{\n\n let crossterm = Crossterm::new();\n\n crossterm.paint(\"Black on BLUE\").with(Color::Black).on(Color::Blue);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/crossterm_type/mod.rs", "rank": 1, "score": 179534.8036702217 }, { "content": "/// write text to terminal using `Crossterm` | demonstration.\n\npub fn use_crossterm_write()\n\n{\n\n let crossterm = Crossterm::new();\n\n crossterm.write(\"some text \\nsome text on new line\");\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/crossterm_type/mod.rs", "rank": 2, "score": 179534.80367022177 }, { "content": "/// use the `Crossterm` to get an instance to the terminal module | demonstration.\n\npub fn use_crossterm_terminal()\n\n{\n\n let crossterm = Crossterm::new();\n\n let mut terminal = crossterm.terminal();\n\n terminal.clear(ClearType::All);\n\n terminal.set_size(40,40);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/crossterm_type/mod.rs", "rank": 3, "score": 179534.70097085967 }, { "content": "/// use the `Crossterm` to get an instance to the color module | demonstration.\n\npub fn use_crossterm_color()\n\n{\n\n let crossterm = Crossterm::new();\n\n let mut color = 
crossterm.color();\n\n color.set_bg(Color::Red);\n\n color.set_fg(Color::Green);\n\n}\n\n\n\nuse crossterm::terminal::ClearType;\n\n\n", "file_path": "examples/Crossterm 0.3.0/crossterm_type/mod.rs", "rank": 4, "score": 179534.70097085967 }, { "content": "/// use the `Crossterm` to get an instance to the cursor module | demonstration.\n\npub fn use_crossterm_cursor()\n\n{\n\n let crossterm = Crossterm::new();\n\n let mut cursor = crossterm.cursor();\n\n cursor.goto(5,5).print(\"test\");\n\n}\n\n\n\nuse crossterm::style::Color;\n\n\n", "file_path": "examples/Crossterm 0.3.0/crossterm_type/mod.rs", "rank": 5, "score": 179534.70097085962 }, { "content": "/// Clear all lines from cursor position X:4, Y:7 up | demonstration\n\npub fn clear_until_new_line()\n\n{\n\n let term = Terminal::new();\n\n\n\n // Get terminal\n\n let mut terminal = terminal(&term);\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor(&term).goto(4,20);\n\n\n\n // Clear all the cells until next line.\n\n terminal.clear(ClearType::UntilNewLine);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/terminal/terminal.rs", "rank": 6, "score": 170100.841729499 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_until_new_line()\n\n{\n\n // Get terminal\n\n let mut terminal = get();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n crossterm_cursor::get().goto(4,4);\n\n\n\n // Clear all the cells until next line.\n\n terminal.clear(ClearType::UntilNewLine);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/terminal/mod.rs", "rank": 7, "score": 170100.841729499 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_until_new_line()\n\n{\n\n // Get terminal\n\n let mut terminal = get();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n 
crossterm_cursor::get().goto(4,7);\n\n\n\n // Clear all the cells until next line.\n\n terminal.clear(ClearType::UntilNewLine);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/terminal/mod.rs", "rank": 8, "score": 170100.841729499 }, { "content": "/// Clear all lines from cursor position X:4, Y:7 up | demonstration\n\npub fn clear_until_new_line() {\n\n let context = Context::new();\n\n\n\n // Get terminal\n\n let mut terminal = terminal(&context);\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor(&context).goto(4, 20);\n\n\n\n // Clear all the cells until next line.\n\n terminal.clear(ClearType::UntilNewLine);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/terminal/terminal.rs", "rank": 9, "score": 170100.841729499 }, { "content": "/// Get an Terminal implementation whereon terminal related actions can be performed.\n\n///\n\n/// Check `/examples/version/terminal` in the libary for more spesific examples.\n\n///\n\n/// #Example\n\n///\n\n/// ```rust\n\n///\n\n/// extern crate crossterm;\n\n/// use crossterm::terminal;\n\n/// use crossterm::Context;\n\n///\n\n/// let context = Context::new();\n\n///\n\n/// let mut term = terminal::terminal(&context);\n\n///\n\n/// // scroll down by 5 lines\n\n/// let size = term.scroll_down(5);\n\n///\n\n/// ```\n\n///\n\npub fn terminal(context: &Rc<Context>) -> Box<Terminal> {\n\n Box::from(Terminal::new(context.clone()))\n\n}\n", "file_path": "src/terminal/terminal.rs", "rank": 10, "score": 169013.39993971726 }, { "content": "/// Get an TerminalCursor implementation whereon cursor related actions can be performed.\n\n///\n\n/// Check `/examples/version/cursor` in the libary for more spesific examples.\n\n///\n\n/// #Example\n\n///\n\n/// ```rust\n\n///\n\n/// extern crate crossterm;\n\n/// use self::crossterm::Context;\n\n/// use self::crossterm::cursor;\n\n///\n\n/// let context = Context::new();\n\n///\n\n/// // Get cursor and goto pos X: 5, Y: 10\n\n/// 
let mut cursor = cursor::cursor(&context);\n\n/// cursor.goto(5,10);\n\n///\n\n/// cursor.show();\n\n/// cursor.hide();\n\n/// cursor.blink();\n\n/// cursor.move_left(2);\n\n///\n\n/// //Or you can do it in one line.\n\n/// cursor::cursor(&context).goto(5,10);\n\n///\n\n/// ```\n\npub fn cursor(context: &Rc<Context>) -> Box<TerminalCursor> {\n\n Box::from(TerminalCursor::new(context.clone()))\n\n}", "file_path": "src/cursor/cursor.rs", "rank": 11, "score": 166953.56663885867 }, { "content": "/// Get an Color implementation whereon color related actions can be performed.\n\n///\n\n/// Check `/examples/version/color` in the library for more specific examples.\n\n///\n\npub fn color(context: &Rc<Context>) -> Box<TerminalColor> {\n\n Box::from(TerminalColor::new(context.clone()))\n\n}\n", "file_path": "src/style/color/color.rs", "rank": 12, "score": 164957.33622092637 }, { "content": "/// Get the current cursor position.\n\npub fn pos(context: Rc<Context>) -> (u16, u16) {\n\n use std::io::{Read, Write};\n\n\n\n let mut command_id = NoncanonicalModeCommand::new(&context.state_manager);\n\n\n\n CommandManager::execute(context.clone(), command_id);\n\n\n\n // This code is original written by term_cursor credits to them.\n\n use std::io;\n\n let mut std = io::stdout();\n\n // Write command\n\n std.write(b\"\\x1B[6n\");\n\n std.flush();\n\n\n\n // Read back result\n\n let mut buf = [0u8; 2];\n\n // Expect `ESC[`\n\n io::stdin().read_exact(&mut buf);\n\n if buf[0] != 0x1B || buf[1] as char != '[' {\n\n return (0, 0);\n", "file_path": "src/kernel/unix_kernel/terminal.rs", "rank": 13, "score": 164957.33622092637 }, { "content": "/// Get the cursor position based on the current platform.\n\npub fn get_cursor_position(context: Rc<Context>) -> (u16, u16) {\n\n #[cfg(unix)]\n\n return pos(context.clone());\n\n\n\n #[cfg(windows)]\n\n return pos(&context.screen_manager);\n\n}\n\n\n", "file_path": "src/shared/functions.rs", "rank": 14, "score": 164957.33622092637 }, { "content": 
"pub fn clear_entire_screen(csbi: CONSOLE_SCREEN_BUFFER_INFO, context: &Rc<Context>) {\n\n // position x at start\n\n let x = 0;\n\n // position y at start\n\n let y = 0;\n\n\n\n // location where to start clearing\n\n let start_location = COORD {\n\n X: x as i16,\n\n Y: y as i16,\n\n };\n\n // get sum cells before cursor\n\n\n\n let cells_to_write = csbi.dwSize.X as u32 * csbi.dwSize.Y as u32;\n\n\n\n clear(start_location, cells_to_write, &context.screen_manager);\n\n\n\n // put the cursor back at (0, 0)\n\n cursor(&context).goto(0, 0);\n\n}\n\n\n", "file_path": "src/terminal/winapi_terminal.rs", "rank": 15, "score": 157775.24047435226 }, { "content": "fn print_wait_screen(context: Rc<Context>) {\n\n let mut terminal = terminal::terminal(&context);\n\n terminal.clear(ClearType::All);\n\n\n\n let mut cursor = cursor(&context);\n\n cursor.goto(0, 0);\n\n cursor.hide();\n\n\n\n terminal.write(\n\n \"Welcome to the wait screen.\\n\\\n\n Please wait a few seconds until we arrive back at the main screen.\\n\\\n\n Progress: \",\n\n );\n\n\n\n // print some progress example.\n\n for i in 1..5 {\n\n // print the current counter at the line of `Seconds to Go: {counter}`\n\n cursor\n\n .goto(10, 2)\n\n .print(terminal.paint(format!(\"{} of the 5 items processed\", i)).with(Color::Red).on(Color::Blue));\n\n\n\n // 1 second delay\n\n thread::sleep(time::Duration::from_secs(1));\n\n }\n\n\n\n stdout().flush();\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/terminal/alternate_screen.rs", "rank": 16, "score": 154654.17910188844 }, { "content": "// raw screen is not working correctly currently\n\nfn print_wait_screen(context: Rc<Context>) {\n\n terminal::terminal(&context).clear(ClearType::All);\n\n\n\n let mut cursor = cursor(&context);\n\n cursor.goto(0, 0).print(\"Welcome to the wait screen.\");\n\n cursor\n\n .goto(0, 1)\n\n .print(\"Please wait a few seconds until we arrive back at the main screen.\");\n\n cursor.goto(0, 2).print(\"Progress: \");\n\n\n\n // print some 
progress example.\n\n for i in 1..5 {\n\n // print the current counter at the line of `Seconds to Go: {counter}`\n\n cursor\n\n .goto(10, 2)\n\n .print(format!(\"{} of the 5 items processed\", i));\n\n\n\n // 1 second delay\n\n thread::sleep(time::Duration::from_secs(1));\n\n }\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/terminal/raw_mode.rs", "rank": 17, "score": 154654.17910188844 }, { "content": "pub fn clear_until_line(pos: (u16, u16), csbi: CONSOLE_SCREEN_BUFFER_INFO, context: &Rc<Context>) {\n\n let (x, y) = pos;\n\n\n\n // location where to start clearing\n\n let start_location = COORD {\n\n X: x as i16,\n\n Y: y as i16,\n\n };\n\n // get sum cells before cursor\n\n let cells_to_write = (csbi.dwSize.X - x as i16) as u32;\n\n\n\n clear(start_location, cells_to_write, &context.screen_manager);\n\n\n\n // put the cursor back at original cursor position\n\n cursor(&context).goto(x, y);\n\n}\n\n\n", "file_path": "src/terminal/winapi_terminal.rs", "rank": 18, "score": 148312.54421712674 }, { "content": "/// Switch to alternate screen using the `Context` of `Crossterm` | demonstration.\n\npub fn create_alternate_screen_from_crossterm()\n\n{\n\n use crossterm::screen::*;\n\n use std::convert::From;\n\n\n\n let crossterm = Crossterm::new();\n\n\n\n {\n\n // move into alternate screen\n\n let alternate_screen = AlternateScreen::from(crossterm.context());\n\n\n\n // this will move the cursor and print `some text` on the alternate screen.\n\n crossterm.cursor().goto(10, 10).print(\"Some text\");\n\n } // <- alternate screen ends here an will be switched back to main screen.\n\n\n\n // print \"Some other text\" on the mainscreen at x: 0, y: 10\n\n crossterm.cursor().goto(0,10).print(\"Some other text\");\n\n}", "file_path": "examples/Crossterm 0.3.0/crossterm_type/mod.rs", "rank": 19, "score": 143152.625381494 }, { "content": "/// Move the cursor 3 up | demonstration.\n\npub fn move_up() {\n\n let context = Context::new();\n\n\n\n // Get the cursor\n\n let mut 
cursor = cursor(&context);\n\n\n\n // Move the cursor to position 3 times to the up in the terminal\n\n cursor.move_up(10);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/cursor/mod.rs", "rank": 20, "score": 139874.44869152125 }, { "content": "/// Scroll down 10 lines | demonstration.\n\npub fn scroll_up()\n\n{\n\n let term = Terminal::new();\n\n\n\n print_test_data();\n\n\n\n // Get terminal\n\n let mut terminal = terminal(&term);\n\n // Scroll up 10 lines.\n\n terminal.scroll_up(10);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/terminal/terminal.rs", "rank": 21, "score": 139874.44869152122 }, { "content": "// scroll down 10 lines\n\npub fn scroll_up()\n\n{\n\n print_test_data();\n\n \n\n // Get terminal \n\n let mut terminal = get();\n\n // Scroll up 10 lines.\n\n terminal.scroll_up(10);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/terminal/mod.rs", "rank": 22, "score": 139874.44869152125 }, { "content": "/// Move the cursor 3 down | demonstration.\n\npub fn move_down()\n\n{\n\n // Get the cursor\n\n let mut cursor = get();\n\n // Move the cursor to position 3 times to the down in the terminal\n\n cursor.move_down(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/cursor/mod.rs", "rank": 23, "score": 139874.44869152122 }, { "content": "/// Scroll down 10 lines | demonstration.\n\npub fn scroll_down() {\n\n let context = Context::new();\n\n\n\n print_test_data();\n\n\n\n\n\n\n\n // Get terminal\n\n let mut terminal = terminal(&context);\n\n // Scroll down 10 lines.\n\n terminal.scroll_down(10);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/terminal/terminal.rs", "rank": 24, "score": 139874.44869152122 }, { "content": "/// Move the cursor 3 up | demonstration.\n\npub fn move_up()\n\n{\n\n // Get the cursor\n\n let mut cursor = get();\n\n // Move the cursor to position 3 times to the up in the terminal\n\n cursor.move_up(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/cursor/mod.rs", "rank": 25, "score": 139874.44869152122 }, { 
"content": "/// Set the cursor to position X: 10, Y: 5 in the terminal.\n\npub fn goto() {\n\n let context = Context::new();\n\n\n\n // Get the cursor\n\n let mut cursor = cursor(&context);\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10, 5);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/cursor/mod.rs", "rank": 26, "score": 139874.44869152125 }, { "content": "/// Print character at X: 10 Y: 5 | demonstration.\n\npub fn print()\n\n{\n\n // To print an some displayable content on an certain position. \n\n \n\n // Get the cursor\n\n let mut cursor = get();\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10,5);\n\n // Print the @ symbol at position X: 10, Y: 5 in the terminal\n\n print!(\"@\");\n\n // Rust is line buffered inorder to print at an certain position we need to clear the buffer first. \n\n use std;\n\n use std::io::Write;\n\n std::io::stdout().flush();\n\n \n\n /* Because the above method is a little to mutch code, \n\n you can use the `print()` method for printing an value at an certain position in the terminal.\n\n \n\n Crossterm provides method chaining so that the above points can be inlined.\n\n */\n", "file_path": "examples/Crossterm 0.1.0/cursor/mod.rs", "rank": 27, "score": 139874.44869152122 }, { "content": "/// Move the cursor 3 up | demonstration.\n\npub fn move_up()\n\n{\n\n // Get the cursor\n\n let mut cursor = get();\n\n // Move the cursor to position 3 times to the up in the terminal\n\n cursor.move_up(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/cursor/mod.rs", "rank": 28, "score": 139874.44869152122 }, { "content": "// scroll down 10 lines\n\npub fn scroll_down()\n\n{\n\n print_test_data();\n\n // Get terminal \n\n let mut terminal = get();\n\n // Scroll down 10 lines.\n\n let terminal_size = terminal.scroll_down(10);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/terminal/mod.rs", "rank": 29, "score": 139874.44869152125 }, { "content": "/// Set the cursor 
to position X: 10, Y: 5 in the terminal.\n\npub fn goto()\n\n{\n\n // Get the cursor\n\n let mut cursor = get();\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10,5); \n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/cursor/mod.rs", "rank": 30, "score": 139874.44869152125 }, { "content": "/// Scroll down 10 lines | demonstration.\n\npub fn scroll_up() {\n\n let context = Context::new();\n\n\n\n print_test_data();\n\n\n\n // Get terminal\n\n let mut terminal = terminal(&context);\n\n // Scroll up 10 lines.\n\n terminal.scroll_up(5);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/terminal/terminal.rs", "rank": 31, "score": 139874.44869152122 }, { "content": "/// exit the current proccess.\n\npub fn exit() {\n\n let context = Context::new();\n\n\n\n // Get terminal\n\n let mut terminal = terminal(&context);\n\n // Get terminal size\n\n terminal.exit();\n\n}\n", "file_path": "examples/Crossterm 0.3.0/terminal/terminal.rs", "rank": 32, "score": 139874.44869152122 }, { "content": "/// get the cursor position\n\npub fn pos() {\n\n let context = Context::new();\n\n\n\n // Get the cursor\n\n let mut cursor = cursor(&context);\n\n // get the cursor position.\n\n let (x, y) = cursor.pos();\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/cursor/mod.rs", "rank": 33, "score": 139874.44869152125 }, { "content": "/// Scroll down 10 lines | demonstration.\n\npub fn scroll_down()\n\n{\n\n let term = Terminal::new();\n\n\n\n print_test_data();\n\n // Get terminal\n\n let mut terminal = terminal(&term);\n\n // Scroll down 10 lines.\n\n terminal.scroll_down(10);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/terminal/terminal.rs", "rank": 34, "score": 139874.44869152122 }, { "content": "/// Set the cursor to position X: 10, Y: 5 in the terminal.\n\npub fn goto()\n\n{\n\n // Get the cursor\n\n let mut cursor = get();\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10,5); \n\n}\n\n\n", "file_path": 
"examples/Crossterm 0.1.0/cursor/mod.rs", "rank": 35, "score": 139874.44869152125 }, { "content": "/// Move the cursor 3 down | demonstration.\n\npub fn move_down()\n\n{\n\n let context = Terminal::new();\n\n // Get the cursor\n\n let mut cursor = cursor(&context);\n\n // Move the cursor to position 3 times to the down in the terminal\n\n cursor.move_down(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/cursor/mod.rs", "rank": 36, "score": 139874.44869152122 }, { "content": "/// Print character at X: 10 Y: 5 | demonstration.\n\npub fn print()\n\n{\n\n // To print an some displayable content on an certain position. \n\n \n\n // Get the cursor\n\n let mut cursor = get();\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10,5);\n\n // Print the @ symbol at position X: 10, Y: 5 in the terminal\n\n print!(\"@\");\n\n // Rust is line buffered inorder to print at an certain position we need to clear the buffer first. \n\n use std;\n\n use std::io::Write;\n\n std::io::stdout().flush();\n\n \n\n /* Because the above method is a little to mutch code, \n\n you can use the `print()` method for printing an value at an certain position in the terminal.\n\n \n\n Crossterm provides method chaining so that the above points can be inlined.\n\n */\n\n \n\n get().goto(10,5).print(\"@\"); \n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/cursor/mod.rs", "rank": 37, "score": 139874.44869152122 }, { "content": "/// Print character at X: 10 Y: 5 | demonstration.\n\npub fn print() {\n\n let context = Context::new();\n\n\n\n // To print an some displayable content on an certain position.\n\n\n\n // Get the cursor\n\n let mut cursor = cursor(&context);\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10, 5);\n\n // Print the @ symbol at position X: 10, Y: 5 in the terminal\n\n print!(\"@\");\n\n // Rust is line buffered inorder to print at an certain position we need to clear the buffer first.\n\n use std;\n\n use 
std::io::Write;\n\n std::io::stdout().flush();\n\n\n\n /* Because the above method is a little to much code,\n\n you can use the `print()` method for printing an value at an certain position in the terminal.\n\n \n\n Crossterm provides method chaining so that the above points can be inlined.\n\n */\n\n\n\n cursor.goto(10, 5).print(\"@\");\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/cursor/mod.rs", "rank": 38, "score": 139874.44869152125 }, { "content": "/// Print character at X: 10 Y: 5 | demonstration.\n\npub fn print()\n\n{\n\n let context = Terminal::new();\n\n // To print an some displayable content on an certain position. \n\n \n\n // Get the cursor\n\n let mut cursor = cursor(&context);\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10,5);\n\n // Print the @ symbol at position X: 10, Y: 5 in the terminal\n\n print!(\"@\");\n\n // Rust is line buffered inorder to print at an certain position we need to clear the buffer first. \n\n use std;\n\n use std::io::Write;\n\n std::io::stdout().flush();\n\n \n\n /* Because the above method is a little to much code,\n\n you can use the `print()` method for printing an value at an certain position in the terminal.\n\n \n\n Crossterm provides method chaining so that the above points can be inlined.\n\n */\n\n\n\n cursor.goto(10,5).print(\"@\");\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/cursor/mod.rs", "rank": 39, "score": 139874.44869152122 }, { "content": "/// Set the cursor to position X: 10, Y: 5 in the terminal.\n\npub fn goto()\n\n{\n\n let context = Terminal::new();\n\n\n\n // Get the cursor\n\n let mut cursor = cursor(&context);\n\n // Set the cursor to position X: 10, Y: 5 in the terminal\n\n cursor.goto(10,5); \n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/cursor/mod.rs", "rank": 40, "score": 139874.44869152125 }, { "content": "/// Move the cursor 3 up | demonstration.\n\npub fn move_up()\n\n{\n\n let context = Terminal::new();\n\n // Get the cursor\n\n let 
mut cursor = cursor(&context);\n\n // Move the cursor to position 3 times to the up in the terminal\n\n cursor.move_up(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/cursor/mod.rs", "rank": 41, "score": 139874.44869152122 }, { "content": "// scroll down 10 lines\n\npub fn scroll_up()\n\n{\n\n print_test_data();\n\n \n\n // Get terminal \n\n let mut terminal = get();\n\n // Scroll up 10 lines.\n\n let terminal_size = terminal.scroll_up(10);\n\n}\n", "file_path": "examples/Crossterm 0.1.0/terminal/mod.rs", "rank": 42, "score": 139874.44869152125 }, { "content": "/// Move the cursor 3 down | demonstration.\n\npub fn move_down() {\n\n let context = Context::new();\n\n\n\n // Get the cursor\n\n let mut cursor = cursor(&context);\n\n // Move the cursor to position 3 times to the down in the terminal\n\n cursor.move_down(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/cursor/mod.rs", "rank": 43, "score": 139874.44869152125 }, { "content": "/// Move the cursor 3 down | demonstration.\n\npub fn move_down()\n\n{\n\n // Get the cursor\n\n let mut cursor = get();\n\n // Move the cursor to position 3 times to the down in the terminal\n\n cursor.move_down(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/cursor/mod.rs", "rank": 44, "score": 139874.44869152122 }, { "content": "// scroll down 10 lines\n\npub fn scroll_down()\n\n{\n\n print_test_data();\n\n // Get terminal \n\n let mut terminal = get();\n\n // Scroll down 10 lines.\n\n terminal.scroll_down(10);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/terminal/mod.rs", "rank": 45, "score": 139874.44869152125 }, { "content": "/// Move the cursor 3 to the right | demonstration.\n\npub fn move_right()\n\n{\n\n // Get the cursor\n\n let mut cursor = get();\n\n // Move the cursor to position 3 times to the right in the terminal\n\n cursor.move_right(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/cursor/mod.rs", "rank": 46, "score": 137296.35814801208 }, { "content": "/// Clear all lines from 
cursor position X:4, Y:4 up | demonstration\n\npub fn clear_from_cursor_up()\n\n{\n\n let term = Terminal::new();\n\n // Get terminal\n\n let mut terminal = terminal(&term);\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor(&term).goto(4,4);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorUp);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/terminal/terminal.rs", "rank": 47, "score": 137296.35814801208 }, { "content": "/// Resize the terminal to X: 10, Y: 10 | demonstration.\n\npub fn resize_terminal() {\n\n let context = Context::new();\n\n\n\n // Get terminal\n\n let mut terminal = terminal(&context);\n\n\n\n // Get terminal size\n\n terminal.set_size(10, 10);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/terminal/terminal.rs", "rank": 48, "score": 137296.35814801208 }, { "content": "/// Clear all lines in terminal | demonstration\n\npub fn clear_all_lines() {\n\n let context = Context::new();\n\n\n\n // Get terminal\n\n let mut terminal = terminal(&context);\n\n\n\n print_test_data();\n\n\n\n // Clear all lines in terminal;\n\n terminal.clear(ClearType::All);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/terminal/terminal.rs", "rank": 49, "score": 137296.35814801208 }, { "content": "/// print some red font | demonstration.\n\npub fn paint_foreground()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. \n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red font\");\n\n // Call the method `with()` on the object given by `paint()` and pass in any Color from the Color enum.\n\n styledobject = styledobject.with(Color::Red);\n\n // Print the object to the console and see the result. 
\n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red font\").with(Color::Red));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/color/mod.rs", "rank": 50, "score": 137296.35814801208 }, { "content": "// Resize the terminal to X: 10, Y: 10\n\npub fn resize_terminal()\n\n{\n\n // Get terminal\n\n let mut terminal = get();\n\n // Get terminal size\n\n terminal.set_size(1,1);\n\n}\n", "file_path": "examples/Crossterm 0.2.0/terminal/mod.rs", "rank": 51, "score": 137296.35814801208 }, { "content": "/// Move the cursor 3 to the right | demonstration.\n\npub fn move_right()\n\n{\n\n // Get the cursor\n\n let mut cursor = get();\n\n // Move the cursor to position 3 times to the right in the terminal\n\n cursor.move_right(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/cursor/mod.rs", "rank": 52, "score": 137296.35814801208 }, { "content": "/// Move the cursor 3 to the right | demonstration.\n\npub fn move_right()\n\n{\n\n let context = Terminal::new();\n\n // Get the cursor\n\n let mut cursor = cursor(&context);\n\n // Move the cursor to position 3 times to the right in the terminal\n\n cursor.move_right(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/cursor/mod.rs", "rank": 53, "score": 137296.35814801208 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_from_cursor_up()\n\n{\n\n // Get terminal\n\n let mut terminal = get();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n crossterm_cursor::get().goto(4,8);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorUp);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/terminal/mod.rs", "rank": 54, "score": 137296.35814801208 }, { "content": "/// print some font on red background | demonstration.\n\npub fn paint_background() {\n\n let context = 
Context::new();\n\n let terminal = terminal::terminal(&context);\n\n\n\n // Pass an string to the `paint()` method with you want to paint.\n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = terminal.paint(\"Red background color\");\n\n // Call the method `on()` on the object given by `paint()` and pass in an Color from the Color enum.\n\n styledobject = styledobject.on(Color::Red);\n\n // Print the object to the console and check see the result\n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", terminal.paint(\"Red background color\").on(Color::Red));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/color/mod.rs", "rank": 55, "score": 137296.35814801208 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_from_cursor_up()\n\n{\n\n // Get terminal\n\n let mut terminal = get();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n crossterm_cursor::get().goto(4,8);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorUp);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/terminal/mod.rs", "rank": 56, "score": 137296.35814801208 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 down | demonstration\n\npub fn clear_from_cursor_down()\n\n{\n\n let term = Terminal::new();\n\n\n\n // Get terminal\n\n let mut terminal = terminal(&term);\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor(&term).goto(4,8);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorDown);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/terminal/terminal.rs", "rank": 57, "score": 137296.35814801208 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 down | 
demonstration\n\npub fn clear_from_cursor_down()\n\n{\n\n // Get terminal\n\n let mut terminal = get();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n crossterm_cursor::get().goto(4,8);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorDown);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/terminal/mod.rs", "rank": 58, "score": 137296.35814801208 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 down | demonstration\n\npub fn clear_from_cursor_down() {\n\n let context = Context::new();\n\n\n\n // Get terminal\n\n let mut terminal = terminal(&context);\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor(&context).goto(4, 8);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorDown);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/terminal/terminal.rs", "rank": 59, "score": 137296.35814801208 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 down | demonstration\n\npub fn clear_from_cursor_down()\n\n{\n\n // Get terminal\n\n let mut terminal = get();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n crossterm_cursor::get().goto(4,8);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorDown);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/terminal/mod.rs", "rank": 60, "score": 137296.35814801208 }, { "content": "/// print some font on red background | demonstration.\n\npub fn paint_background()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. 
\n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red background color\");\n\n // Call the method `on()` on the object given by `paint()` and pass in an Color from the Color enum.\n\n styledobject = styledobject.on(Color::Red);\n\n // Print the object to the console and check see the result \n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red background color\").on(Color::Red));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/color/mod.rs", "rank": 61, "score": 137296.35814801208 }, { "content": "/// Move the cursor 3 to the left | demonstration.\n\npub fn move_left() {\n\n let context = Context::new();\n\n\n\n // Get the cursor\n\n let mut cursor = cursor(&context);\n\n // Move the cursor to position 3 times to the left in the terminal\n\n cursor.move_left(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/cursor/mod.rs", "rank": 62, "score": 137296.35814801208 }, { "content": "/// Move the cursor 3 to the left | demonstration.\n\npub fn move_left()\n\n{\n\n let context = Terminal::new();\n\n // Get the cursor\n\n let mut cursor = cursor(&context);\n\n // Move the cursor to position 3 times to the left in the terminal\n\n cursor.move_left(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/cursor/mod.rs", "rank": 63, "score": 137296.35814801208 }, { "content": "/// Move the cursor 3 to the left | demonstration.\n\npub fn move_left()\n\n{\n\n // Get the cursor\n\n let mut cursor = get();\n\n // Move the cursor to position 3 times to the left in the terminal\n\n cursor.move_left(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/cursor/mod.rs", "rank": 64, "score": 137296.35814801208 }, { "content": "/// Move the cursor 3 to the left | demonstration.\n\npub fn move_left()\n\n{\n\n // Get the cursor\n\n let mut cursor = get();\n\n // Move the cursor to position 3 times to the left in the 
terminal\n\n cursor.move_left(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/cursor/mod.rs", "rank": 65, "score": 137296.35814801208 }, { "content": "/// Clear all lines in terminal | demonstration\n\npub fn clear_all_lines()\n\n{\n\n // Get terminal\n\n let mut terminal = get();\n\n \n\n print_test_data();\n\n \n\n // Clear all lines in terminal;\n\n terminal.clear(ClearType::All);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/terminal/mod.rs", "rank": 66, "score": 137296.35814801208 }, { "content": "/// Hide cursor display | demonstration.\n\npub fn hide_cursor() {\n\n let context = Context::new();\n\n\n\n let cursor = cursor(&context);\n\n cursor.hide();\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/cursor/mod.rs", "rank": 67, "score": 137296.35814801208 }, { "content": "/// Resize the terminal to X: 10, Y: 10 | demonstration.\n\npub fn resize_terminal()\n\n{\n\n let term = Terminal::new();\n\n\n\n // Get terminal\n\n let mut terminal = terminal(&term);\n\n // Get terminal size\n\n terminal.set_size(10,10);\n\n}\n", "file_path": "examples/Crossterm 0.2.1/terminal/terminal.rs", "rank": 68, "score": 137296.35814801208 }, { "content": "/// Move the cursor 3 to the right | demonstration.\n\npub fn move_right() {\n\n let context = Context::new();\n\n\n\n // Get the cursor\n\n let mut cursor = cursor(&context);\n\n // Move the cursor to position 3 times to the right in the terminal\n\n cursor.move_right(3);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/cursor/mod.rs", "rank": 69, "score": 137296.35814801208 }, { "content": "/// Show cursor display | demonstration.\n\npub fn show_cursor() {\n\n let context = Context::new();\n\n\n\n let cursor = cursor(&context);\n\n cursor.show();\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/cursor/mod.rs", "rank": 70, "score": 137296.35814801208 }, { "content": "/// print some red font | demonstration.\n\npub fn paint_foreground() {\n\n let context = Context::new();\n\n let terminal = 
terminal::terminal(&context);\n\n\n\n // Pass an string to the `paint()` method with you want to paint.\n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = terminal.paint(\"Red font\");\n\n // Call the method `with()` on the object given by `paint()` and pass in any Color from the Color enum.\n\n styledobject = styledobject.with(Color::Red);\n\n // Print the object to the console and see the result.\n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", terminal.paint(\"Red font\").with(Color::Red));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.3.0/color/mod.rs", "rank": 71, "score": 137296.35814801208 }, { "content": "/// print some red font | demonstration.\n\npub fn paint_foreground()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. \n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red font\");\n\n // Call the method `with()` on the object given by `paint()` and pass in any Color from the Color enum.\n\n styledobject = styledobject.with(Color::Red);\n\n // Print the object to the console and see the result. \n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red font\").with(Color::Red));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/color/mod.rs", "rank": 72, "score": 137296.35814801208 }, { "content": "/// print some font on red background | demonstration.\n\npub fn paint_background()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. 
\n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red background color\");\n\n // Call the method `on()` on the object given by `paint()` and pass in an Color from the Color enum.\n\n styledobject = styledobject.on(Color::Red);\n\n // Print the object to the console and check see the result \n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red background color\").on(Color::Red));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/color/mod.rs", "rank": 73, "score": 137296.35814801208 }, { "content": "/// print some font on red background | demonstration.\n\npub fn paint_background()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. \n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red background color\");\n\n // Call the method `on()` on the object given by `paint()` and pass in an Color from the Color enum.\n\n styledobject = styledobject.on(Color::Blue);\n\n // Print the object to the console and check see the result \n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red background color\").on(Color::Red));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/color/mod.rs", "rank": 74, "score": 137296.35814801208 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_from_cursor_up() {\n\n let context = Context::new();\n\n\n\n // Get terminal\n\n let mut terminal = terminal(&context);\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n cursor::cursor(&context).goto(4, 4);\n\n\n\n // Clear all cells from current cursor position down.\n\n terminal.clear(ClearType::FromCursorUp);\n\n}\n\n\n", "file_path": "examples/Crossterm 
0.3.0/terminal/terminal.rs", "rank": 75, "score": 137296.35814801208 }, { "content": "/// Show cursor display, only works on certain terminals.| demonstration\n\npub fn blink_cursor() {\n\n let context = Context::new();\n\n\n\n let cursor = cursor(&context);\n\n cursor.blink(false);\n\n cursor.blink(false);\n\n}\n", "file_path": "examples/Crossterm 0.3.0/cursor/mod.rs", "rank": 76, "score": 137296.35814801208 }, { "content": "/// Clear all lines in terminal | demonstration\n\npub fn clear_all_lines()\n\n{\n\n let term = Terminal::new();\n\n // Get terminal\n\n let mut terminal = terminal(&term);\n\n\n\n print_test_data();\n\n\n\n // Clear all lines in terminal;\n\n terminal.clear(ClearType::All);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/terminal/terminal.rs", "rank": 77, "score": 137296.35814801208 }, { "content": "/// Clear all lines in terminal | demonstration\n\npub fn clear_all_lines()\n\n{\n\n // Get terminal\n\n let mut terminal = get();\n\n \n\n print_test_data();\n\n \n\n // Clear all lines in terminal;\n\n terminal.clear(ClearType::All);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/terminal/mod.rs", "rank": 78, "score": 137296.35814801208 }, { "content": "/// print some red font | demonstration.\n\npub fn paint_foreground()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. \n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red font\");\n\n // Call the method `with()` on the object given by `paint()` and pass in any Color from the Color enum.\n\n styledobject = styledobject.with(Color::Red);\n\n // Print the object to the console and see the result. 
\n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red font\").with(Color::Red));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/color/mod.rs", "rank": 79, "score": 137296.35814801208 }, { "content": "/// Set the terminal size to width 10, height: 10.\n\npub fn set_terminal_size()\n\n{\n\n let mut terminal = get();\n\n\n\n terminal.set_size(10,10);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/terminal/mod.rs", "rank": 80, "score": 134845.79890142137 }, { "content": "#[cfg(unix)]\n\npub fn print_font_with_attributes()\n\n{\n\n println!(\"{}\", paint(\"Normal text\"));\n\n println!(\"{}\", paint(\"Bold text\").bold());\n\n println!(\"{}\", paint(\"Italic text\").italic());\n\n println!(\"{}\", paint(\"Slow blinking text\").slow_blink());\n\n println!(\"{}\", paint(\"Rapid blinking text\").rapid_blink());\n\n println!(\"{}\", paint(\"Hidden text\").hidden());\n\n println!(\"{}\", paint(\"Underlined text\").underlined());\n\n println!(\"{}\", paint(\"Reversed color\").reverse());\n\n println!(\"{}\", paint(\"Dim text color\").dim());\n\n println!(\"{}\", paint(\"Crossed out font\").crossed_out());\n\n}\n\n\n\n/// Print all supported rgb colors | demonstration.\n", "file_path": "examples/Crossterm 0.2.1/color/mod.rs", "rank": 81, "score": 134845.79890142137 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_current_line()\n\n{\n\n // Get terminal\n\n let mut terminal = get();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n crossterm_cursor::get().goto(4,4);\n\n\n\n // Clear current line cells.\n\n terminal.clear(ClearType::CurrentLine);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/terminal/mod.rs", "rank": 82, "score": 134845.79890142137 }, { "content": "/// print font with fore- background color | demonstration.\n\npub fn 
paint_foreground_and_background()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. \n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red font on blue background color\");\n\n /* Foreground color: \n\n Call the method `with()` on the object given by `paint()`\n\n Pass in an Color from the Color enum.\n\n */\n\n styledobject = styledobject.with(Color::Red);\n\n /* Background color: \n\n Call the method `on()` on the object given by `paint()`\n\n Pass in an Color from the Color enum.\n\n */\n\n styledobject = styledobject.on(Color::Blue);\n\n // Print the object to the console and see the result.\n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red font on blue background color\").with(Color::Red).on(Color::Blue));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/color/mod.rs", "rank": 83, "score": 134845.79890142137 }, { "content": "pub fn print_terminal_size()\n\n{\n\n // Get terminal \n\n let mut terminal = get();\n\n // Get terminal size\n\n let terminal_size = terminal.terminal_size().unwrap();\n\n // Print results\n\n print!(\"X: {}, y: {}\", terminal_size.0, terminal_size.1);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/terminal/mod.rs", "rank": 84, "score": 134845.79890142137 }, { "content": "/// Print all available foreground colors | demonstration.\n\npub fn print_all_foreground_colors()\n\n{\n\n println!(\"Black : \\t {}\", paint(\"■\").with(Color::Black));\n\n println!(\"Red : \\t\\t {}\", paint(\"■\").with(Color::Red));\n\n println!(\"Dark Red: \\t {}\", paint(\"■\").with(Color::DarkRed));\n\n println!(\"Green : \\t {}\", paint(\"■\").with(Color::Green));\n\n println!(\"Dark Green : \\t {}\", paint(\"■\").with(Color::DarkGreen));\n\n println!(\"Yellow : \\t {}\", paint(\"■\").with(Color::Yellow));\n\n println!(\"Dark Yellow : \\t {}\", 
paint(\"■\").with(Color::DarkYellow));\n\n println!(\"Blue : \\t\\t {}\", paint(\"■\").with(Color::Blue));\n\n println!(\"Dark Blue : \\t {}\", paint(\"■\").with(Color::DarkBlue));\n\n println!(\"Magenta : \\t {}\", paint(\"■\").with(Color::Magenta));\n\n println!(\"Dark Magenta : \\t {}\", paint(\"■\").with(Color::DarkMagenta));\n\n println!(\"Cyan : \\t\\t {}\", paint(\"■\").with(Color::Cyan));\n\n println!(\"Dark Cyan : \\t {}\", paint(\"■\").with(Color::DarkCyan));\n\n println!(\"Grey : \\t\\t {}\", paint(\"■\").with(Color::Grey));\n\n println!(\"White : \\t {}\", paint(\"■\").with(Color::White));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/color/mod.rs", "rank": 85, "score": 134845.79890142137 }, { "content": "#[cfg(unix)]\n\npub fn print_font_with_attributes()\n\n{\n\n println!(\"{}\", paint(\"Normal text\"));\n\n println!(\"{}\", paint(\"Bold text\").bold());\n\n println!(\"{}\", paint(\"Italic text\").italic());\n\n println!(\"{}\", paint(\"Slow blinking text\").slow_blink());\n\n println!(\"{}\", paint(\"Rapid blinking text\").rapid_blink());\n\n println!(\"{}\", paint(\"Hidden text\").hidden());\n\n println!(\"{}\", paint(\"Underlined text\").underlined());\n\n println!(\"{}\", paint(\"Reversed color\").reverse());\n\n println!(\"{}\", paint(\"Dim text color\").dim());\n\n println!(\"{}\", paint(\"Crossed out font\").crossed_out());\n\n}\n\n\n\n/// Print all supported rgb colors \n", "file_path": "examples/Crossterm 0.2.0/color/mod.rs", "rank": 86, "score": 134845.79890142137 }, { "content": "#[cfg(unix)]#[cfg(unix)]\n\npub fn print_supported_colors()\n\n{ \n\n let count = crossterm::crossterm_style::get().get_available_color_count().unwrap();\n\n\n\n for i in 0..count\n\n {\n\n println!(\"{}\", paint(format!(\"Color: {}\",i)).with(Color::AnsiValue(i as u8)));\n\n\n\n }\n\n}", "file_path": "examples/Crossterm 0.2.0/color/mod.rs", "rank": 87, "score": 134845.79890142137 }, { "content": "/// Print all available foreground colors | 
demonstration.\n\npub fn print_all_foreground_colors()\n\n{\n\n println!(\"Black : \\t {}\", paint(\"■\").with(Color::Black));\n\n println!(\"Red : \\t\\t {}\", paint(\"■\").with(Color::Red));\n\n println!(\"Dark Red: \\t {}\", paint(\"■\").with(Color::DarkRed));\n\n println!(\"Green : \\t {}\", paint(\"■\").with(Color::Green));\n\n println!(\"Dark Green : \\t {}\", paint(\"■\").with(Color::DarkGreen));\n\n println!(\"Yellow : \\t {}\", paint(\"■\").with(Color::Yellow));\n\n println!(\"Dark Yellow : \\t {}\", paint(\"■\").with(Color::DarkYellow));\n\n println!(\"Blue : \\t\\t {}\", paint(\"■\").with(Color::Blue));\n\n println!(\"Dark Blue : \\t {}\", paint(\"■\").with(Color::DarkBlue));\n\n println!(\"Magenta : \\t {}\", paint(\"■\").with(Color::Magenta));\n\n println!(\"Dark Magenta : \\t {}\", paint(\"■\").with(Color::DarkMagenta));\n\n println!(\"Cyan : \\t\\t {}\", paint(\"■\").with(Color::Cyan));\n\n println!(\"Dark Cyan : \\t {}\", paint(\"■\").with(Color::DarkCyan));\n\n println!(\"Grey : \\t\\t {}\", paint(\"■\").with(Color::Grey));\n\n println!(\"White : \\t {}\", paint(\"■\").with(Color::White));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.1.0/color/mod.rs", "rank": 88, "score": 134845.79890142137 }, { "content": "#[cfg(unix)]\n\npub fn print_supported_colors()\n\n{ \n\n let count = crossterm::style::color().get_available_color_count().unwrap();\n\n\n\n for i in 0..count\n\n {\n\n println!(\"{}\", paint(format!(\"Color: {}\",i)).with(Color::AnsiValue(i as u8)));\n\n }\n\n}", "file_path": "examples/Crossterm 0.2.1/color/mod.rs", "rank": 89, "score": 134845.79890142137 }, { "content": "/// Clear all lines from cursor position X:4, Y:4 up | demonstration\n\npub fn clear_current_line()\n\n{\n\n // Get terminal\n\n let mut terminal = get();\n\n\n\n print_test_data();\n\n\n\n // Set terminal cursor position (see example for more info).\n\n crossterm_cursor::get().goto(4,4);\n\n\n\n // Clear current line cells.\n\n 
terminal.clear(ClearType::CurrentLine);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/terminal/mod.rs", "rank": 90, "score": 134845.79890142137 }, { "content": "/// Save and reset cursor position.\n\npub fn safe_and_reset_position()\n\n{\n\n let mut cursor = get();\n\n \n\n // Goto X: 5 Y: 5\n\n cursor.goto(5,5);\n\n // Safe cursor position: X: 5 Y: 5\n\n cursor.safe_position();\n\n // Goto X: 5 Y: 20\n\n cursor.goto(5,20);\n\n // Print at X: 5 Y: 20.\n\n print!(\"Yea!\");\n\n // Reset back to X: 5 Y: 5.\n\n cursor.reset_position();\n\n // Print Back at X: 5 Y: 5.\n\n print!(\"Back\");\n\n\n\n println!()\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/cursor/mod.rs", "rank": 91, "score": 134845.79890142137 }, { "content": "/// Print all available foreground colors | demonstration.\n\npub fn print_all_background_colors()\n\n{\n\n println!(\"Black : \\t {}\", paint(\" \").on(Color::Black));\n\n println!(\"Red : \\t\\t {}\", paint(\" \").on(Color::Red));\n\n println!(\"Dark Red: \\t {}\", paint(\" \").on(Color::DarkRed));\n\n println!(\"Green : \\t {}\", paint(\" \").on(Color::Green));\n\n println!(\"Dark Green : \\t {}\", paint(\" \").on(Color::DarkGreen));\n\n println!(\"Yellow : \\t {}\", paint(\" \").on(Color::Yellow));\n\n println!(\"Dark Yellow : \\t {}\", paint(\" \").on(Color::DarkYellow));\n\n println!(\"Blue : \\t\\t {}\", paint(\" \").on(Color::Blue));\n\n println!(\"Dark Blue : \\t {}\", paint(\" \").on(Color::DarkBlue));\n\n println!(\"Magenta : \\t {}\", paint(\" \").on(Color::Magenta));\n\n println!(\"Dark Magenta : \\t {}\", paint(\" \").on(Color::DarkMagenta));\n\n println!(\"Cyan : \\t\\t {}\", paint(\" \").on(Color::Cyan));\n\n println!(\"Dark Cyan : \\t {}\", paint(\" \").on(Color::DarkCyan));\n\n println!(\"Grey : \\t\\t {}\", paint(\" \").on(Color::Grey));\n\n println!(\"White : \\t {}\", paint(\" \").on(Color::White));\n\n}", "file_path": "examples/Crossterm 0.1.0/color/mod.rs", "rank": 92, "score": 134845.79890142137 }, { "content": 
"/// print font with fore- background color | demonstration.\n\npub fn paint_foreground_and_background()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. \n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red font on blue background color\");\n\n /* Foreground color: \n\n Call the method `with()` on the object given by `paint()`\n\n Pass in an Color from the Color enum.\n\n */\n\n styledobject = styledobject.with(Color::Red);\n\n /* Background color: \n\n Call the method `on()` on the object given by `paint()`\n\n Pass in an Color from the Color enum.\n\n */\n\n styledobject = styledobject.on(Color::Blue);\n\n // Print the object to the console and see the result.\n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red font on blue background color\").with(Color::Red).on(Color::Blue));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/color/mod.rs", "rank": 93, "score": 134845.79890142137 }, { "content": "/// print font with fore- background color | demonstration.\n\npub fn paint_foreground_and_background()\n\n{ \n\n // Pass an string to the `paint()` method with you want to paint. 
\n\n // This will give you an object back wits can be styled and displayed.\n\n let mut styledobject = paint(\"Red font on blue background color\");\n\n /* Foreground color: \n\n Call the method `with()` on the object given by `paint()`\n\n Pass in an Color from the Color enum.\n\n */\n\n styledobject = styledobject.with(Color::Red);\n\n /* Background color: \n\n Call the method `on()` on the object given by `paint()`\n\n Pass in an Color from the Color enum.\n\n */\n\n styledobject = styledobject.on(Color::Blue);\n\n // Print the object to the console and see the result.\n\n println!(\"{}\", styledobject);\n\n\n\n // Crossterm provides method chaining so that the above points can be inlined.\n\n println!(\"{}\", paint(\"Red font on blue background color\").with(Color::Red).on(Color::Blue));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/color/mod.rs", "rank": 94, "score": 134845.79890142137 }, { "content": "pub fn print_terminal_size()\n\n{\n\n // Get terminal \n\n let mut terminal = get();\n\n // Get terminal size\n\n let terminal_size = terminal.terminal_size().unwrap();\n\n // Print results\n\n print!(\"X: {}, y: {}\", terminal_size.0, terminal_size.1);\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.0/terminal/mod.rs", "rank": 95, "score": 134845.79890142137 }, { "content": "/// Print all available foreground colors | demonstration.\n\npub fn print_all_background_colors()\n\n{\n\n println!(\"Black : \\t {}\", paint(\" \").on(Color::Black));\n\n println!(\"Red : \\t\\t {}\", paint(\" \").on(Color::Red));\n\n println!(\"Dark Red: \\t {}\", paint(\" \").on(Color::DarkRed));\n\n println!(\"Green : \\t {}\", paint(\" \").on(Color::Green));\n\n println!(\"Dark Green : \\t {}\", paint(\" \").on(Color::DarkGreen));\n\n println!(\"Yellow : \\t {}\", paint(\" \").on(Color::Yellow));\n\n println!(\"Dark Yellow : \\t {}\", paint(\" \").on(Color::DarkYellow));\n\n println!(\"Blue : \\t\\t {}\", paint(\" \").on(Color::Blue));\n\n println!(\"Dark Blue : \\t {}\", 
paint(\" \").on(Color::DarkBlue));\n\n println!(\"Magenta : \\t {}\", paint(\" \").on(Color::Magenta));\n\n println!(\"Dark Magenta : \\t {}\", paint(\" \").on(Color::DarkMagenta));\n\n println!(\"Cyan : \\t\\t {}\", paint(\" \").on(Color::Cyan));\n\n println!(\"Dark Cyan : \\t {}\", paint(\" \").on(Color::DarkCyan));\n\n println!(\"Grey : \\t\\t {}\", paint(\" \").on(Color::Grey));\n\n println!(\"White : \\t {}\", paint(\" \").on(Color::White));\n\n #[cfg(unix)]\n\n println!(\"RGB (10,10,10): \\t {}\", paint(\" \").on(Color::Rgb {r: 10, g: 10, b: 10}));\n\n #[cfg(unix)]\n\n println!(\"RGB (10,10,10): \\t {}\", paint(\" \").on(Color::AnsiValue(50)));\n\n}\n\n\n\n/// Print font with all available attributes. Note that this can only be used at unix systems and that some are not supported widely | demonstration..\n", "file_path": "examples/Crossterm 0.2.1/color/mod.rs", "rank": 96, "score": 134845.79890142137 }, { "content": "/// Print all available foreground colors | demonstration.\n\npub fn print_all_background_colors()\n\n{\n\n println!(\"Black : \\t {}\", paint(\" \").on(Color::Black));\n\n println!(\"Red : \\t\\t {}\", paint(\" \").on(Color::Red));\n\n println!(\"Dark Red: \\t {}\", paint(\" \").on(Color::DarkRed));\n\n println!(\"Green : \\t {}\", paint(\" \").on(Color::Green));\n\n println!(\"Dark Green : \\t {}\", paint(\" \").on(Color::DarkGreen));\n\n println!(\"Yellow : \\t {}\", paint(\" \").on(Color::Yellow));\n\n println!(\"Dark Yellow : \\t {}\", paint(\" \").on(Color::DarkYellow));\n\n println!(\"Blue : \\t\\t {}\", paint(\" \").on(Color::Blue));\n\n println!(\"Dark Blue : \\t {}\", paint(\" \").on(Color::DarkBlue));\n\n println!(\"Magenta : \\t {}\", paint(\" \").on(Color::Magenta));\n\n println!(\"Dark Magenta : \\t {}\", paint(\" \").on(Color::DarkMagenta));\n\n println!(\"Cyan : \\t\\t {}\", paint(\" \").on(Color::Cyan));\n\n println!(\"Dark Cyan : \\t {}\", paint(\" \").on(Color::DarkCyan));\n\n println!(\"Grey : \\t\\t {}\", paint(\" 
\").on(Color::Grey));\n\n println!(\"White : \\t {}\", paint(\" \").on(Color::White));\n\n #[cfg(unix)]\n\n println!(\"RGB (10,10,10): \\t {}\", paint(\" \").on(Color::Rgb {r: 10, g: 10, b: 10}));\n\n #[cfg(unix)]\n\n println!(\"RGB (10,10,10): \\t {}\", paint(\" \").on(Color::AnsiValue(50)));\n\n}\n\n\n\n/// Print font with all available attributes. Note that this can only be used at unix systems and that some are not supported widely.\n", "file_path": "examples/Crossterm 0.2.0/color/mod.rs", "rank": 97, "score": 134845.79890142137 }, { "content": "/// Print all available foreground colors | demonstration.\n\npub fn print_all_foreground_colors()\n\n{\n\n println!(\"Black : \\t {}\", paint(\"■\").with(Color::Black));\n\n println!(\"Red : \\t\\t {}\", paint(\"■\").with(Color::Red));\n\n println!(\"Dark Red: \\t {}\", paint(\"■\").with(Color::DarkRed));\n\n println!(\"Green : \\t {}\", paint(\"■\").with(Color::Green));\n\n println!(\"Dark Green : \\t {}\", paint(\"■\").with(Color::DarkGreen));\n\n println!(\"Yellow : \\t {}\", paint(\"■\").with(Color::Yellow));\n\n println!(\"Dark Yellow : \\t {}\", paint(\"■\").with(Color::DarkYellow));\n\n println!(\"Blue : \\t\\t {}\", paint(\"■\").with(Color::Blue));\n\n println!(\"Dark Blue : \\t {}\", paint(\"■\").with(Color::DarkBlue));\n\n println!(\"Magenta : \\t {}\", paint(\"■\").with(Color::Magenta));\n\n println!(\"Dark Magenta : \\t {}\", paint(\"■\").with(Color::DarkMagenta));\n\n println!(\"Cyan : \\t\\t {}\", paint(\"■\").with(Color::Cyan));\n\n println!(\"Dark Cyan : \\t {}\", paint(\"■\").with(Color::DarkCyan));\n\n println!(\"Grey : \\t\\t {}\", paint(\"■\").with(Color::Grey));\n\n println!(\"White : \\t {}\", paint(\"■\").with(Color::White));\n\n}\n\n\n", "file_path": "examples/Crossterm 0.2.1/color/mod.rs", "rank": 98, "score": 134845.79890142137 }, { "content": "/// Save and reset cursor position | demonstration..\n\npub fn safe_and_reset_position()\n\n{\n\n let context = Terminal::new();\n\n let mut cursor = 
cursor(&context);\n\n \n\n // Goto X: 5 Y: 5\n\n cursor.goto(5,5);\n\n // Safe cursor position: X: 5 Y: 5\n\n cursor.save_position();\n\n // Goto X: 5 Y: 20\n\n cursor.goto(5,20);\n\n // Print at X: 5 Y: 20.\n\n println!(\"Yea!\");\n\n // Reset back to X: 5 Y: 5.\n\n cursor.reset_position();\n\n // Print Back at X: 5 Y: 5.\n\n println!(\"Back\");\n\n\n\n println!()\n\n}\n", "file_path": "examples/Crossterm 0.2.1/cursor/mod.rs", "rank": 99, "score": 134845.79890142137 } ]
Rust
nrf-softdevice/src/flash.rs
timokroeger/nrf-softdevice
6a01c4ceb7d1f1d9fc06e74e9a264037ac1159c5
use core::future::Future; use core::marker::PhantomData; use core::sync::atomic::{AtomicBool, Ordering}; use embassy::traits::flash::Error as FlashError; use crate::raw; use crate::util::{DropBomb, Signal}; use crate::{RawError, Softdevice}; pub struct Flash { _private: PhantomData<*mut ()>, } static FLASH_TAKEN: AtomicBool = AtomicBool::new(false); impl Flash { const PAGE_SIZE: usize = 4096; pub fn take(_sd: &Softdevice) -> Flash { if FLASH_TAKEN .compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire) .is_err() { panic!("nrf_softdevice::Softdevice::take_flash() called multiple times.") } Flash { _private: PhantomData, } } } static SIGNAL: Signal<Result<(), FlashError>> = Signal::new(); pub(crate) fn on_flash_success() { SIGNAL.signal(Ok(())) } pub(crate) fn on_flash_error() { SIGNAL.signal(Err(FlashError::Failed)) } impl embassy::traits::flash::Flash for Flash { type ReadFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; type WriteFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; type ErasePageFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn read<'a>(&'a mut self, address: usize, data: &'a mut [u8]) -> Self::ReadFuture<'a> { async move { data.copy_from_slice(unsafe { core::slice::from_raw_parts(address as *const u8, data.len()) }); Ok(()) } } fn write<'a>(&'a mut self, address: usize, data: &'a [u8]) -> Self::WriteFuture<'a> { async move { let data_ptr = data.as_ptr(); let data_len = data.len() as u32; if address % 4 != 0 { return Err(FlashError::AddressMisaligned); } if (data_ptr as u32) % 4 != 0 || data_len % 4 != 0 { return Err(FlashError::BufferMisaligned); } let words_ptr = data_ptr as *const u32; let words_len = data_len / 4; let bomb = DropBomb::new(); let ret = unsafe { raw::sd_flash_write(address as _, words_ptr, words_len) }; let ret = match RawError::convert(ret) { Ok(()) => SIGNAL.wait().await, Err(_e) => { warn!("sd_flash_write err {:?}", _e); Err(FlashError::Failed) } }; bomb.defuse(); ret } } 
fn erase<'a>(&'a mut self, address: usize) -> Self::ErasePageFuture<'a> { async move { if address % Self::PAGE_SIZE != 0 { return Err(FlashError::AddressMisaligned); } let page_number = address / Self::PAGE_SIZE; let bomb = DropBomb::new(); let ret = unsafe { raw::sd_flash_page_erase(page_number as u32) }; let ret = match RawError::convert(ret) { Ok(()) => SIGNAL.wait().await, Err(_e) => { warn!("sd_flash_page_erase err {:?}", _e); Err(FlashError::Failed) } }; bomb.defuse(); ret } } fn size(&self) -> usize { 256 * 4096 } fn read_size(&self) -> usize { 1 } fn write_size(&self) -> usize { 4 } fn erase_size(&self) -> usize { 4096 } }
use core::future::Future; use core::marker::PhantomData; use core::sync::atomic::{AtomicBool, Ordering}; use embassy::traits::flash::Error as FlashError; use crate::raw; use crate::util::{DropBomb, Signal}; use crate::{RawError, Softdevice}; pub struct Flash { _private: PhantomData<*mut ()>, } static FLASH_TAKEN: AtomicBool = AtomicBool::new(false); impl Flash { const PAGE_SIZE: usize = 4096; pub fn take(_sd: &Softdevice) -> Flash { if FLASH_TAKEN .compare_exchange(false, true, Order
a.as_ptr(); let data_len = data.len() as u32; if address % 4 != 0 { return Err(FlashError::AddressMisaligned); } if (data_ptr as u32) % 4 != 0 || data_len % 4 != 0 { return Err(FlashError::BufferMisaligned); } let words_ptr = data_ptr as *const u32; let words_len = data_len / 4; let bomb = DropBomb::new(); let ret = unsafe { raw::sd_flash_write(address as _, words_ptr, words_len) }; let ret = match RawError::convert(ret) { Ok(()) => SIGNAL.wait().await, Err(_e) => { warn!("sd_flash_write err {:?}", _e); Err(FlashError::Failed) } }; bomb.defuse(); ret } } fn erase<'a>(&'a mut self, address: usize) -> Self::ErasePageFuture<'a> { async move { if address % Self::PAGE_SIZE != 0 { return Err(FlashError::AddressMisaligned); } let page_number = address / Self::PAGE_SIZE; let bomb = DropBomb::new(); let ret = unsafe { raw::sd_flash_page_erase(page_number as u32) }; let ret = match RawError::convert(ret) { Ok(()) => SIGNAL.wait().await, Err(_e) => { warn!("sd_flash_page_erase err {:?}", _e); Err(FlashError::Failed) } }; bomb.defuse(); ret } } fn size(&self) -> usize { 256 * 4096 } fn read_size(&self) -> usize { 1 } fn write_size(&self) -> usize { 4 } fn erase_size(&self) -> usize { 4096 } }
ing::AcqRel, Ordering::Acquire) .is_err() { panic!("nrf_softdevice::Softdevice::take_flash() called multiple times.") } Flash { _private: PhantomData, } } } static SIGNAL: Signal<Result<(), FlashError>> = Signal::new(); pub(crate) fn on_flash_success() { SIGNAL.signal(Ok(())) } pub(crate) fn on_flash_error() { SIGNAL.signal(Err(FlashError::Failed)) } impl embassy::traits::flash::Flash for Flash { type ReadFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; type WriteFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; type ErasePageFuture<'a> = impl Future<Output = Result<(), FlashError>> + 'a; fn read<'a>(&'a mut self, address: usize, data: &'a mut [u8]) -> Self::ReadFuture<'a> { async move { data.copy_from_slice(unsafe { core::slice::from_raw_parts(address as *const u8, data.len()) }); Ok(()) } } fn write<'a>(&'a mut self, address: usize, data: &'a [u8]) -> Self::WriteFuture<'a> { async move { let data_ptr = dat
random
[ { "content": "pub fn gen_bindings(\n\n tmp_dir: &PathBuf,\n\n src_dir: &PathBuf,\n\n dst: &PathBuf,\n\n mut f: impl FnMut(String) -> String,\n\n) {\n\n let mut wrapper = String::new();\n\n\n\n for entry in WalkDir::new(src_dir)\n\n .follow_links(true)\n\n .into_iter()\n\n .filter_map(|e| e.ok())\n\n {\n\n let _f_name = entry.path().to_string_lossy();\n\n if entry.file_type().is_file() {\n\n if entry.file_name().to_string_lossy() == \"nrf_nvic.h\" {\n\n continue;\n\n }\n\n\n\n let data = fs::read_to_string(entry.path()).unwrap();\n", "file_path": "nrf-softdevice-gen/src/main.rs", "rank": 0, "score": 170181.4076460192 }, { "content": "/// This will fail if an indication is already in progress\n\npub fn indicate_value(\n\n conn: &Connection,\n\n handle: u16,\n\n val: &[u8],\n\n) -> Result<(), IndicateValueError> {\n\n let conn_handle = conn.with_state(|state| state.check_connected())?;\n\n\n\n let mut len: u16 = val.len() as _;\n\n let params = raw::ble_gatts_hvx_params_t {\n\n handle,\n\n type_: raw::BLE_GATT_HVX_INDICATION as u8,\n\n offset: 0,\n\n p_data: val.as_ptr() as _,\n\n p_len: &mut len,\n\n };\n\n let ret = unsafe { raw::sd_ble_gatts_hvx(conn_handle, &params) };\n\n RawError::convert(ret)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "nrf-softdevice/src/ble/gatt_server.rs", "rank": 1, "score": 167974.44711971324 }, { "content": "pub fn try_write_without_response(\n\n conn: &Connection,\n\n handle: u16,\n\n buf: &[u8],\n\n) -> Result<(), TryWriteError> {\n\n let conn_handle = conn.with_state(|state| state.check_connected())?;\n\n\n\n assert!(buf.len() <= u16::MAX as usize);\n\n let params = raw::ble_gattc_write_params_t {\n\n write_op: raw::BLE_GATT_OP_WRITE_CMD as u8,\n\n flags: 0,\n\n handle,\n\n p_value: buf.as_ptr(),\n\n len: buf.len() as u16,\n\n offset: 0,\n\n };\n\n\n\n let ret = unsafe { raw::sd_ble_gattc_write(conn_handle, &params) };\n\n match RawError::convert(ret) {\n\n Err(RawError::Resources) => Err(TryWriteError::BufferFull),\n", "file_path": 
"nrf-softdevice/src/ble/gatt_client.rs", "rank": 2, "score": 163827.65160015525 }, { "content": "pub fn get_value(_sd: &Softdevice, handle: u16, buf: &mut [u8]) -> Result<usize, GetValueError> {\n\n let mut value = raw::ble_gatts_value_t {\n\n p_value: buf.as_mut_ptr(),\n\n len: buf.len() as _,\n\n offset: 0,\n\n };\n\n let ret = unsafe {\n\n raw::sd_ble_gatts_value_get(raw::BLE_CONN_HANDLE_INVALID as u16, handle, &mut value)\n\n };\n\n RawError::convert(ret)?;\n\n\n\n if value.len as usize > buf.len() {\n\n return Err(GetValueError::Truncated);\n\n }\n\n\n\n Ok(value.len as _)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n", "file_path": "nrf-softdevice/src/ble/gatt_server.rs", "rank": 3, "score": 149357.73479287088 }, { "content": "struct CriticalSection;\n\ncritical_section::custom_impl!(CriticalSection);\n\n\n\nunsafe impl critical_section::Impl for CriticalSection {\n\n unsafe fn acquire() -> u8 {\n\n let nvic = &*NVIC::ptr();\n\n let nested_cs = CS_FLAG.load(Ordering::SeqCst);\n\n\n\n if !nested_cs {\n\n raw_critical_section(|| {\n\n CS_FLAG.store(true, Ordering::Relaxed);\n\n\n\n // Store the state of irqs.\n\n CS_MASK = nvic.icer[0].read();\n\n\n\n // Disable only not-reserved irqs.\n\n nvic.icer[0].write(!RESERVED_IRQS);\n\n });\n\n }\n\n\n", "file_path": "nrf-softdevice/src/critical_section_impl.rs", "rank": 4, "score": 145195.97183402212 }, { "content": "pub fn get_address(_sd: &Softdevice) -> Address {\n\n unsafe {\n\n let mut addr: raw::ble_gap_addr_t = mem::zeroed();\n\n let ret = raw::sd_ble_gap_addr_get(&mut addr);\n\n unwrap!(RawError::convert(ret), \"sd_ble_gap_addr_get\");\n\n Address::from_raw(addr)\n\n }\n\n}\n\n\n", "file_path": "nrf-softdevice/src/ble/mod.rs", "rank": 5, "score": 143428.6816769865 }, { "content": "fn on_user_mem_release(_ble_evt: *const raw::ble_evt_t) {\n\n trace!(\"on_user_mem_release\");\n\n}\n", "file_path": "nrf-softdevice/src/ble/common.rs", "rank": 
6, "score": 137837.25508474058 }, { "content": "fn on_user_mem_request(_ble_evt: *const raw::ble_evt_t) {\n\n trace!(\"on_user_mem_request\");\n\n}\n", "file_path": "nrf-softdevice/src/ble/common.rs", "rank": 7, "score": 137837.25508474058 }, { "content": "pub fn set_address(_sd: &Softdevice, addr: &Address) {\n\n unsafe {\n\n let addr = addr.into_raw();\n\n let ret = raw::sd_ble_gap_addr_set(&addr);\n\n unwrap!(RawError::convert(ret), \"sd_ble_gap_addr_set\");\n\n }\n\n}\n", "file_path": "nrf-softdevice/src/ble/mod.rs", "rank": 8, "score": 137247.5029404512 }, { "content": "struct WaitFuture<'a, T> {\n\n signal: &'a Signal<T>,\n\n}\n\n\n\nimpl<'a, T: Send> Future for WaitFuture<'a, T> {\n\n type Output = T;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<T> {\n\n unsafe {\n\n cortex_m::interrupt::free(|_| {\n\n let state = &mut *self.signal.state.get();\n\n match state {\n\n State::None => {\n\n *state = State::Waiting(cx.waker().clone());\n\n Poll::Pending\n\n }\n\n State::Waiting(w) if w.will_wake(cx.waker()) => Poll::Pending,\n\n State::Waiting(_) => panic!(\"waker overflow\"),\n\n State::Signaled(_) => match mem::replace(state, State::None) {\n\n State::Signaled(res) => Poll::Ready(res),\n\n _ => unreachable!(),\n\n },\n\n }\n\n })\n\n }\n\n }\n\n}\n", "file_path": "nrf-softdevice/src/util/signal.rs", "rank": 9, "score": 134814.41362903753 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n 
&(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t>())).callback_action\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 10, "score": 130567.24995010582 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t>())).callback_action\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 11, "score": 130567.24995010582 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t>())).callback_action\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 12, "score": 130567.24995010582 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n 
::core::mem::align_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t>())).callback_action\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 13, "score": 130567.24995010582 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 8usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t>())).callback_action\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 14, "score": 130567.24995010582 }, { "content": "fn index_by_handle(conn_handle: u16) -> &'static Cell<Option<u8>> {\n\n unsafe { &INDEX_BY_HANDLE[conn_handle as usize] }\n\n}\n", "file_path": "nrf-softdevice/src/ble/connection.rs", "rank": 15, "score": 130104.29820291008 }, { "content": "/// Get temperature reading in Celsius\n\n///\n\n/// Note this blocks for ~50us\n\npub fn temperature_celsius(_sd: &Softdevice) -> Result<I30F2, TempError> {\n\n let mut temp: i32 = 0;\n\n let ret = unsafe { raw::sd_temp_get(&mut temp) };\n\n RawError::convert(ret)?;\n\n Ok(I30F2::from_bits(temp))\n\n}\n", "file_path": "nrf-softdevice/src/temperature.rs", "rank": 16, "score": 130068.74125258872 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 
4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>()))\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 17, "score": 127158.72000604431 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>()))\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 18, "score": 127158.72000604431 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n 
&(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>()))\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 19, "score": 127158.72000604431 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>()))\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 20, "score": 127158.72000604431 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>(),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1>()))\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 21, "score": 127158.72000604431 }, { "content": "#[proc_macro_attribute]\n\npub fn gatt_server(_args: TokenStream, item: TokenStream) -> TokenStream {\n\n let mut struc = syn::parse_macro_input!(item as syn::ItemStruct);\n\n\n\n let struct_fields = match &mut struc.fields 
{\n\n syn::Fields::Named(n) => n,\n\n _ => {\n\n struc\n\n .ident\n\n .span()\n\n .unwrap()\n\n .error(\"gatt_server structs must have named fields, not tuples.\")\n\n .emit();\n\n return TokenStream::new();\n\n }\n\n };\n\n let fields = struct_fields\n\n .named\n\n .iter()\n\n .cloned()\n\n .collect::<Vec<syn::Field>>();\n", "file_path": "nrf-softdevice-macro/src/lib.rs", "rank": 22, "score": 126660.14978410889 }, { "content": "#[proc_macro_attribute]\n\npub fn gatt_service(args: TokenStream, item: TokenStream) -> TokenStream {\n\n let args = syn::parse_macro_input!(args as syn::AttributeArgs);\n\n let mut struc = syn::parse_macro_input!(item as syn::ItemStruct);\n\n\n\n let args = match ServiceArgs::from_list(&args) {\n\n Ok(v) => v,\n\n Err(e) => {\n\n return e.write_errors().into();\n\n }\n\n };\n\n\n\n let mut chars = Vec::new();\n\n\n\n let struct_fields = match &mut struc.fields {\n\n syn::Fields::Named(n) => n,\n\n _ => {\n\n struc\n\n .ident\n\n .span()\n\n .unwrap()\n", "file_path": "nrf-softdevice-macro/src/lib.rs", "rank": 23, "score": 126660.14978410889 }, { "content": "#[proc_macro_attribute]\n\npub fn gatt_client(args: TokenStream, item: TokenStream) -> TokenStream {\n\n let args = syn::parse_macro_input!(args as syn::AttributeArgs);\n\n let mut struc = syn::parse_macro_input!(item as syn::ItemStruct);\n\n\n\n let args = match ServiceArgs::from_list(&args) {\n\n Ok(v) => v,\n\n Err(e) => {\n\n return e.write_errors().into();\n\n }\n\n };\n\n\n\n let mut chars = Vec::new();\n\n\n\n let struct_fields = match &mut struc.fields {\n\n syn::Fields::Named(n) => n,\n\n _ => {\n\n struc\n\n .ident\n\n .span()\n\n .unwrap()\n", "file_path": "nrf-softdevice-macro/src/lib.rs", "rank": 24, "score": 126660.14978410889 }, { "content": "fn allocate_index<T>(f: impl FnOnce(u8, &mut ConnectionState) -> T) -> Result<T, OutOfConnsError> {\n\n unsafe {\n\n for (i, s) in STATES.iter().enumerate() {\n\n let state = &mut *s.get();\n\n if state.refcount == 0 && 
state.conn_handle.is_none() {\n\n return Ok(f(i as u8, state));\n\n }\n\n }\n\n Err(OutOfConnsError)\n\n }\n\n}\n\n\n\n// conn_handle -> index mapping. Used to make stuff go faster\n\nconst INDEX_NONE: Cell<Option<u8>> = Cell::new(None);\n\nstatic mut INDEX_BY_HANDLE: [Cell<Option<u8>>; CONNS_MAX] = [INDEX_NONE; CONNS_MAX];\n\n\n", "file_path": "nrf-softdevice/src/ble/connection.rs", "rank": 25, "score": 125311.61893603088 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 26, "score": 123991.19772611704 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 27, "score": 123991.19772611704 }, { "content": "#[test]\n\nfn 
bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 28, "score": 123991.19772611704 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 29, "score": 123991.19772611704 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n 
::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 30, "score": 123991.19772611704 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 31, "score": 123991.19772611704 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 32, "score": 123991.19772611704 }, { "content": "#[test]\n\nfn 
bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 33, "score": 123991.19772611704 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_1)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 34, "score": 123991.19772611704 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Size of: \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n\n 
::core::mem::align_of::<nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2>(\n\n ),\n\n 4usize,\n\n concat!(\n\n \"Alignment of \",\n\n stringify!(nrf_radio_signal_callback_return_param_t__bindgen_ty_1__bindgen_ty_2)\n\n )\n\n );\n\n assert_eq!(\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 35, "score": 123991.19772611704 }, { "content": "/// Get cryptographically-securerandom bytes.\n\npub fn random_bytes(_sd: &Softdevice, buf: &mut [u8]) -> Result<(), RandomError> {\n\n if buf.len() > u8::MAX as usize {\n\n return Err(RandomError::BufferTooBig);\n\n }\n\n\n\n let ret = unsafe { raw::sd_rand_application_vector_get(buf[..].as_mut_ptr(), buf.len() as u8) };\n\n match RawError::convert(ret) {\n\n Ok(()) => Ok(()),\n\n Err(RawError::SocRandNotEnoughValues) => Err(RandomError::NotEnoughEntropy),\n\n Err(e) => Err(e.into()),\n\n }\n\n}\n", "file_path": "nrf-softdevice/src/random.rs", "rank": 36, "score": 122877.26364579838 }, { "content": "pub fn register<S: Server>(sd: &Softdevice) -> Result<S, RegisterError> {\n\n S::register(sd)\n\n}\n\n\n", "file_path": "nrf-softdevice/src/ble/gatt_server.rs", "rank": 37, "score": 121204.78150112252 }, { "content": "pub fn register_service<S: Service>(_sd: &Softdevice) -> Result<S, RegisterError> {\n\n let uuid = S::uuid();\n\n let mut service_handle: u16 = 0;\n\n let ret = unsafe {\n\n raw::sd_ble_gatts_service_add(\n\n raw::BLE_GATTS_SRVC_TYPE_PRIMARY as u8,\n\n uuid.as_raw_ptr(),\n\n &mut service_handle as _,\n\n )\n\n };\n\n RawError::convert(ret)?;\n\n\n\n S::register(service_handle, |char, initial_value| {\n\n let mut cccd_attr_md: raw::ble_gatts_attr_md_t = unsafe { mem::zeroed() };\n\n cccd_attr_md.read_perm = raw::ble_gap_conn_sec_mode_t {\n\n _bitfield_1: raw::ble_gap_conn_sec_mode_t::new_bitfield_1(1, 1),\n\n };\n\n cccd_attr_md.write_perm = raw::ble_gap_conn_sec_mode_t {\n\n _bitfield_1: raw::ble_gap_conn_sec_mode_t::new_bitfield_1(1, 1),\n\n };\n", "file_path": 
"nrf-softdevice/src/ble/gatt_server.rs", "rank": 38, "score": 119592.49164209625 }, { "content": "pub fn set_value(_sd: &Softdevice, handle: u16, val: &[u8]) -> Result<(), SetValueError> {\n\n let mut value = raw::ble_gatts_value_t {\n\n p_value: val.as_ptr() as _,\n\n len: val.len() as _,\n\n offset: 0,\n\n };\n\n let ret = unsafe {\n\n raw::sd_ble_gatts_value_set(raw::BLE_CONN_HANDLE_INVALID as u16, handle, &mut value)\n\n };\n\n RawError::convert(ret)?;\n\n\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub enum NotifyValueError {\n\n Disconnected,\n\n Raw(RawError),\n\n}\n", "file_path": "nrf-softdevice/src/ble/gatt_server.rs", "rank": 39, "score": 114369.21208366963 }, { "content": "/// Multiple notifications can be queued. Will fail when the queue is full.\n\npub fn notify_value(conn: &Connection, handle: u16, val: &[u8]) -> Result<(), NotifyValueError> {\n\n let conn_handle = conn.with_state(|state| state.check_connected())?;\n\n\n\n let mut len: u16 = val.len() as _;\n\n let params = raw::ble_gatts_hvx_params_t {\n\n handle,\n\n type_: raw::BLE_GATT_HVX_NOTIFICATION as u8,\n\n offset: 0,\n\n p_data: val.as_ptr() as _,\n\n p_len: &mut len,\n\n };\n\n let ret = unsafe { raw::sd_ble_gatts_hvx(conn_handle, &params) };\n\n RawError::convert(ret)?;\n\n\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\n#[cfg_attr(feature = \"defmt\", derive(defmt::Format))]\n\npub enum IndicateValueError {\n", "file_path": "nrf-softdevice/src/ble/gatt_server.rs", "rank": 40, "score": 114369.21208366963 }, { "content": "fn main() {\n\n // Put `memory.x` in our output directory and ensure it's\n\n // on the linker search path.\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"memory.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"memory.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n\n\n // By 
default, Cargo will re-run a build script whenever\n\n // any file in the project changes. By specifying `memory.x`\n\n // here, we ensure the build script is only re-run when\n\n // `memory.x` is changed.\n\n println!(\"cargo:rerun-if-changed=memory.x\");\n\n\n\n println!(\"cargo:rustc-link-arg-bins=--nmagic\");\n\n println!(\"cargo:rustc-link-arg-bins=-Tlink.x\");\n\n println!(\"cargo:rustc-link-arg-bins=-Tdefmt.x\");\n\n}\n", "file_path": "examples/build.rs", "rank": 41, "score": 95890.43423180413 }, { "content": "#[derive(Debug)]\n\nstruct Characteristic {\n\n name: String,\n\n ty: syn::Type,\n\n args: CharacteristicArgs,\n\n span: Span,\n\n vis: syn::Visibility,\n\n}\n\n\n", "file_path": "nrf-softdevice-macro/src/lib.rs", "rank": 42, "score": 94633.16399057483 }, { "content": "#[derive(Debug, FromMeta)]\n\nstruct ServiceArgs {\n\n uuid: Uuid,\n\n}\n", "file_path": "nrf-softdevice-macro/src/lib.rs", "rank": 43, "score": 93419.42663022527 }, { "content": "#[derive(Debug, FromMeta)]\n\nstruct CharacteristicArgs {\n\n uuid: Uuid,\n\n #[darling(default)]\n\n read: bool,\n\n #[darling(default)]\n\n write: bool,\n\n #[darling(default)]\n\n notify: bool,\n\n #[darling(default)]\n\n indicate: bool,\n\n}\n\n\n", "file_path": "nrf-softdevice-macro/src/lib.rs", "rank": 44, "score": 93419.42663022527 }, { "content": "#[defmt::global_logger]\n\nstruct Logger;\n\n\n\n/// Global logger lock.\n\nstatic TAKEN: AtomicBool = AtomicBool::new(false);\n\nstatic INTERRUPTS_TOKEN: AtomicU8 = AtomicU8::new(0);\n\nstatic mut ENCODER: defmt::Encoder = defmt::Encoder::new();\n\n\n\nunsafe impl defmt::Logger for Logger {\n\n fn acquire() {\n\n let token = unsafe { critical_section::acquire() };\n\n\n\n if !TAKEN.load(Ordering::Relaxed) {\n\n // no need for CAS because interrupts are disabled\n\n TAKEN.store(true, Ordering::Relaxed);\n\n\n\n INTERRUPTS_TOKEN.store(token, Ordering::Relaxed);\n\n\n\n // safety: accessing the `static mut` is OK because we have disabled interrupts.\n\n unsafe { 
ENCODER.start_frame(do_write) }\n\n } else {\n", "file_path": "nrf-softdevice-defmt-rtt/src/lib.rs", "rank": 45, "score": 93419.42663022527 }, { "content": "#[repr(C)]\n\nstruct Header {\n\n id: [u8; 16],\n\n max_up_channels: usize,\n\n max_down_channels: usize,\n\n up_channel: Channel,\n\n}\n\n\n\nconst MODE_MASK: usize = 0b11;\n\n/// Block the application if the RTT buffer is full, wait for the host to read data.\n\nconst MODE_BLOCK_IF_FULL: usize = 2;\n\n/// Don't block if the RTT buffer is full. Truncate data to output as much as fits.\n\nconst MODE_NON_BLOCKING_TRIM: usize = 1;\n\n\n\n// TODO make configurable\n\n// NOTE use a power of 2 for best performance\n\nconst SIZE: usize = 1024;\n\n\n\n// make sure we only get shared references to the header/channel (avoid UB)\n\n/// # Safety\n\n/// `Channel` API is not re-entrant; this handle should not be held from different execution\n", "file_path": "nrf-softdevice-defmt-rtt/src/lib.rs", "rank": 46, "score": 93419.42663022527 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n\n\n let src_dir = PathBuf::from(&args[1]);\n\n let dst_path = PathBuf::from(&args[2]);\n\n\n\n let tmp_dir = PathBuf::from(\"./tmp\");\n\n let tmp_bindings_path = tmp_dir.join(\"bindings.rs\");\n\n\n\n let _ = fs::remove_dir_all(&tmp_dir);\n\n fs::create_dir_all(&tmp_dir).unwrap();\n\n\n\n gen_bindings(&tmp_dir, &src_dir, &tmp_bindings_path, |data| {\n\n let re = Regex::new(r\"SVCALL\\((?P<svc>[A-Za-z0-9_]+),\\s*(?P<ret>[A-Za-z0-9_]+),\\s*(?P<name>[A-Za-z0-9_]+)\\((?P<args>.*)\\)\\);\").unwrap();\n\n re.replace_all(&data, \"uint32_t __svc_$name = $svc;\").into()\n\n });\n\n\n\n let mut svc_nums = HashMap::new();\n\n\n\n let data = fs::read_to_string(&tmp_bindings_path).unwrap();\n", "file_path": "nrf-softdevice-gen/src/main.rs", "rank": 47, "score": 91964.55743749501 }, { "content": "fn assert_thread_mode() {\n\n assert!(\n\n cortex_m::peripheral::SCB::vect_active()\n\n == 
cortex_m::peripheral::scb::VectActive::ThreadMode,\n\n \"portals are not usable from interrupts\"\n\n );\n\n}\n\n\n\nimpl<T> Portal<T> {\n\n pub const fn new() -> Self {\n\n Self {\n\n state: UnsafeCell::new(State::None),\n\n }\n\n }\n\n\n\n pub fn call(&self, val: T) -> bool {\n\n assert_thread_mode();\n\n\n\n // safety: this runs from thread mode\n\n unsafe {\n", "file_path": "nrf-softdevice/src/util/portal.rs", "rank": 48, "score": 89628.30017086875 }, { "content": "pub trait Try {\n\n type Ok;\n\n type Error;\n\n fn into_result(self) -> Result<Self::Ok, Self::Error>;\n\n}\n\n\n\nimpl<T> Try for Option<T> {\n\n type Ok = T;\n\n type Error = NoneError;\n\n\n\n #[inline]\n\n fn into_result(self) -> Result<T, NoneError> {\n\n self.ok_or(NoneError)\n\n }\n\n}\n\n\n\nimpl<T, E> Try for Result<T, E> {\n\n type Ok = T;\n\n type Error = E;\n\n\n\n #[inline]\n\n fn into_result(self) -> Self {\n\n self\n\n }\n\n}\n", "file_path": "nrf-softdevice/src/fmt.rs", "rank": 49, "score": 88629.852360945 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_evt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_evt_t>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(ble_evt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_evt_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_evt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_evt_t>())).header as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_evt_t),\n\n \"::\",\n\n stringify!(header)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 50, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_cfg_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_cfg_t>(),\n\n 12usize,\n\n concat!(\"Size of: \", stringify!(ble_cfg_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_cfg_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_cfg_t))\n\n );\n\n assert_eq!(\n\n unsafe { 
&(*(::core::ptr::null::<ble_cfg_t>())).conn_cfg as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_cfg_t),\n\n \"::\",\n\n stringify!(conn_cfg)\n\n )\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 51, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_version_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_version_t>(),\n\n 6usize,\n\n concat!(\"Size of: \", stringify!(ble_version_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_version_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_version_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_version_t>())).version_number as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_version_t),\n\n \"::\",\n\n stringify!(version_number)\n\n )\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 52, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_evt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_evt_t>(),\n\n 44usize,\n\n concat!(\"Size of: \", stringify!(ble_evt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_evt_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_evt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_evt_t>())).header as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_evt_t),\n\n \"::\",\n\n stringify!(header)\n\n )\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 53, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_uuid_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_uuid_t>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(ble_uuid_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_uuid_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_uuid_t))\n\n );\n\n assert_eq!(\n\n unsafe { 
&(*(::core::ptr::null::<ble_uuid_t>())).uuid as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_uuid_t),\n\n \"::\",\n\n stringify!(uuid)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 54, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_opt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_opt_t>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ble_opt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_opt_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_opt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_opt_t>())).common_opt as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_opt_t),\n\n \"::\",\n\n stringify!(common_opt)\n\n )\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 55, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_data_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_data_t>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ble_data_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_data_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_data_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_data_t>())).p_data as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_data_t),\n\n \"::\",\n\n stringify!(p_data)\n\n )\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 56, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_uuid128_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_uuid128_t>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(ble_uuid128_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_uuid128_t>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(ble_uuid128_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_uuid128_t>())).uuid128 as *const 
_ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_uuid128_t),\n\n \"::\",\n\n stringify!(uuid128)\n\n )\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 57, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_version_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_version_t>(),\n\n 6usize,\n\n concat!(\"Size of: \", stringify!(ble_version_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_version_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_version_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_version_t>())).version_number as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_version_t),\n\n \"::\",\n\n stringify!(version_number)\n\n )\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 58, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_opt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_opt_t>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ble_opt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_opt_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_opt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_opt_t>())).common_opt as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_opt_t),\n\n \"::\",\n\n stringify!(common_opt)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 59, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_version_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_version_t>(),\n\n 6usize,\n\n concat!(\"Size of: \", stringify!(ble_version_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_version_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_version_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_version_t>())).version_number as *const _ as 
usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_version_t),\n\n \"::\",\n\n stringify!(version_number)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 60, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_uuid_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_uuid_t>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(ble_uuid_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_uuid_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_uuid_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_uuid_t>())).uuid as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_uuid_t),\n\n \"::\",\n\n stringify!(uuid)\n\n )\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 61, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_data_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_data_t>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ble_data_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_data_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_data_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_data_t>())).p_data as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_data_t),\n\n \"::\",\n\n stringify!(p_data)\n\n )\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 62, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_data_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_data_t>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ble_data_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_data_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_data_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_data_t>())).p_data as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n 
stringify!(ble_data_t),\n\n \"::\",\n\n stringify!(p_data)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 63, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_opt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_opt_t>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ble_opt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_opt_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_opt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_opt_t>())).common_opt as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_opt_t),\n\n \"::\",\n\n stringify!(common_opt)\n\n )\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 64, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_cfg_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_cfg_t>(),\n\n 12usize,\n\n concat!(\"Size of: \", stringify!(ble_cfg_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_cfg_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_cfg_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_cfg_t>())).conn_cfg as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_cfg_t),\n\n \"::\",\n\n stringify!(conn_cfg)\n\n )\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 65, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_data_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_data_t>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ble_data_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_data_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_data_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_data_t>())).p_data as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_data_t),\n\n \"::\",\n\n stringify!(p_data)\n\n )\n", 
"file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 66, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_opt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_opt_t>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ble_opt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_opt_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_opt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_opt_t>())).common_opt as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_opt_t),\n\n \"::\",\n\n stringify!(common_opt)\n\n )\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 67, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_cfg_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_cfg_t>(),\n\n 12usize,\n\n concat!(\"Size of: \", stringify!(ble_cfg_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_cfg_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_cfg_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_cfg_t>())).conn_cfg as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_cfg_t),\n\n \"::\",\n\n stringify!(conn_cfg)\n\n )\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 68, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_opt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_opt_t>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ble_opt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_opt_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_opt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_opt_t>())).common_opt as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_opt_t),\n\n \"::\",\n\n stringify!(common_opt)\n\n )\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 69, "score": 
87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_uuid_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_uuid_t>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(ble_uuid_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_uuid_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_uuid_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_uuid_t>())).uuid as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_uuid_t),\n\n \"::\",\n\n stringify!(uuid)\n\n )\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 70, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_evt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_evt_t>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(ble_evt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_evt_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_evt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_evt_t>())).header as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_evt_t),\n\n \"::\",\n\n stringify!(header)\n\n )\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 71, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_version_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_version_t>(),\n\n 6usize,\n\n concat!(\"Size of: \", stringify!(ble_version_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_version_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_version_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_version_t>())).version_number as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_version_t),\n\n \"::\",\n\n stringify!(version_number)\n\n )\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 72, "score": 87484.4410230175 }, { "content": 
"#[test]\n\nfn bindgen_test_layout_ble_evt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_evt_t>(),\n\n 48usize,\n\n concat!(\"Size of: \", stringify!(ble_evt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_evt_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_evt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_evt_t>())).header as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_evt_t),\n\n \"::\",\n\n stringify!(header)\n\n )\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 73, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_data_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_data_t>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ble_data_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_data_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_data_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_data_t>())).p_data as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_data_t),\n\n \"::\",\n\n stringify!(p_data)\n\n )\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 74, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_cfg_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_cfg_t>(),\n\n 12usize,\n\n concat!(\"Size of: \", stringify!(ble_cfg_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_cfg_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_cfg_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_cfg_t>())).conn_cfg as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_cfg_t),\n\n \"::\",\n\n stringify!(conn_cfg)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 75, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_uuid128_t() {\n\n assert_eq!(\n\n 
::core::mem::size_of::<ble_uuid128_t>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(ble_uuid128_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_uuid128_t>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(ble_uuid128_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_uuid128_t>())).uuid128 as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_uuid128_t),\n\n \"::\",\n\n stringify!(uuid128)\n\n )\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 76, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_cfg_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_cfg_t>(),\n\n 12usize,\n\n concat!(\"Size of: \", stringify!(ble_cfg_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_cfg_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_cfg_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_cfg_t>())).conn_cfg as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_cfg_t),\n\n \"::\",\n\n stringify!(conn_cfg)\n\n )\n", "file_path": "nrf-softdevice-s122/src/bindings.rs", "rank": 77, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_uuid_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_uuid_t>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(ble_uuid_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_uuid_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_uuid_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_uuid_t>())).uuid as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_uuid_t),\n\n \"::\",\n\n stringify!(uuid)\n\n )\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 78, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_uuid128_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_uuid128_t>(),\n\n 
16usize,\n\n concat!(\"Size of: \", stringify!(ble_uuid128_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_uuid128_t>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(ble_uuid128_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_uuid128_t>())).uuid128 as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_uuid128_t),\n\n \"::\",\n\n stringify!(uuid128)\n\n )\n", "file_path": "nrf-softdevice-s113/src/bindings.rs", "rank": 79, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_evt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_evt_t>(),\n\n 44usize,\n\n concat!(\"Size of: \", stringify!(ble_evt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_evt_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_evt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_evt_t>())).header as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_evt_t),\n\n \"::\",\n\n stringify!(header)\n\n )\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 80, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_uuid_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_uuid_t>(),\n\n 4usize,\n\n concat!(\"Size of: \", stringify!(ble_uuid_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_uuid_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_uuid_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_uuid_t>())).uuid as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_uuid_t),\n\n \"::\",\n\n stringify!(uuid)\n\n )\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 81, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_uuid128_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_uuid128_t>(),\n\n 16usize,\n\n concat!(\"Size of: \", 
stringify!(ble_uuid128_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_uuid128_t>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(ble_uuid128_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_uuid128_t>())).uuid128 as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_uuid128_t),\n\n \"::\",\n\n stringify!(uuid128)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 82, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_version_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_version_t>(),\n\n 6usize,\n\n concat!(\"Size of: \", stringify!(ble_version_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_version_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_version_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_version_t>())).version_number as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_version_t),\n\n \"::\",\n\n stringify!(version_number)\n\n )\n", "file_path": "nrf-softdevice-s140/src/bindings.rs", "rank": 83, "score": 87484.4410230175 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_uuid128_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_uuid128_t>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(ble_uuid128_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_uuid128_t>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(ble_uuid128_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_uuid128_t>())).uuid128 as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_uuid128_t),\n\n \"::\",\n\n stringify!(uuid128)\n\n )\n", "file_path": "nrf-softdevice-s112/src/bindings.rs", "rank": 84, "score": 87484.4410230175 }, { "content": "struct RawAdvertisement<'a> {\n\n kind: u8,\n\n adv_data: Option<&'a [u8]>,\n\n scan_data: Option<&'a [u8]>,\n\n}\n\n\n\n/// Connectable 
advertisement types, which can accept connections from interested Central devices.\n\npub enum ConnectableAdvertisement<'a> {\n\n ScannableUndirected {\n\n adv_data: &'a [u8],\n\n scan_data: &'a [u8],\n\n },\n\n NonscannableDirected {\n\n scan_data: &'a [u8],\n\n },\n\n NonscannableDirectedHighDuty {\n\n scan_data: &'a [u8],\n\n },\n\n #[cfg(any(feature = \"s132\", feature = \"s140\"))]\n\n ExtendedNonscannableUndirected {\n", "file_path": "nrf-softdevice/src/ble/peripheral.rs", "rank": 85, "score": 87450.5347940551 }, { "content": "#[test]\n\nfn bindgen_test_layout_nrf_radio_request_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<nrf_radio_request_t>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(nrf_radio_request_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<nrf_radio_request_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(nrf_radio_request_t))\n\n );\n\n assert_eq!(\n\n unsafe {\n\n &(*(::core::ptr::null::<nrf_radio_request_t>())).request_type as *const _ as usize\n\n },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(nrf_radio_request_t),\n\n \"::\",\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 86, "score": 86477.38342016209 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_gattc_desc_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_gattc_desc_t>(),\n\n 6usize,\n\n concat!(\"Size of: \", stringify!(ble_gattc_desc_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gattc_desc_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_gattc_desc_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_gattc_desc_t>())).handle as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_gattc_desc_t),\n\n \"::\",\n\n stringify!(handle)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 87, "score": 86477.38342016209 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_gap_irk_t() {\n\n assert_eq!(\n\n 
::core::mem::size_of::<ble_gap_irk_t>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(ble_gap_irk_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gap_irk_t>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(ble_gap_irk_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_gap_irk_t>())).irk as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_gap_irk_t),\n\n \"::\",\n\n stringify!(irk)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 88, "score": 86477.38342016209 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_gattc_include_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_gattc_include_t>(),\n\n 10usize,\n\n concat!(\"Size of: \", stringify!(ble_gattc_include_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gattc_include_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_gattc_include_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_gattc_include_t>())).handle as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_gattc_include_t),\n\n \"::\",\n\n stringify!(handle)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 89, "score": 86477.38342016209 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_gattc_evt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_gattc_evt_t>(),\n\n 14usize,\n\n concat!(\"Size of: \", stringify!(ble_gattc_evt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gattc_evt_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_gattc_evt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_gattc_evt_t>())).conn_handle as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_gattc_evt_t),\n\n \"::\",\n\n stringify!(conn_handle)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 90, "score": 86477.38342016209 }, { "content": "#[test]\n\nfn 
bindgen_test_layout_ble_gap_addr_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_gap_addr_t>(),\n\n 7usize,\n\n concat!(\"Size of: \", stringify!(ble_gap_addr_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gap_addr_t>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(ble_gap_addr_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_gap_addr_t>())).addr as *const _ as usize },\n\n 1usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_gap_addr_t),\n\n \"::\",\n\n stringify!(addr)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 91, "score": 86477.38342016209 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_gap_cfg_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_gap_cfg_t>(),\n\n 12usize,\n\n concat!(\"Size of: \", stringify!(ble_gap_cfg_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gap_cfg_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_gap_cfg_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_gap_cfg_t>())).role_count_cfg as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_gap_cfg_t),\n\n \"::\",\n\n stringify!(role_count_cfg)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 92, "score": 86477.38342016209 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_gap_evt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_gap_evt_t>(),\n\n 44usize,\n\n concat!(\"Size of: \", stringify!(ble_gap_evt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gap_evt_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_gap_evt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_gap_evt_t>())).conn_handle as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_gap_evt_t),\n\n \"::\",\n\n stringify!(conn_handle)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 93, "score": 86477.38342016209 }, { 
"content": "#[test]\n\nfn bindgen_test_layout_ble_l2cap_evt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_l2cap_evt_t>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(ble_l2cap_evt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_l2cap_evt_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_l2cap_evt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_l2cap_evt_t>())).conn_handle as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_l2cap_evt_t),\n\n \"::\",\n\n stringify!(conn_handle)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 94, "score": 86477.38342016209 }, { "content": "#[test]\n\nfn bindgen_test_layout_sd_mbr_command_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<sd_mbr_command_t>(),\n\n 16usize,\n\n concat!(\"Size of: \", stringify!(sd_mbr_command_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<sd_mbr_command_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(sd_mbr_command_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<sd_mbr_command_t>())).command as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(sd_mbr_command_t),\n\n \"::\",\n\n stringify!(command)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 95, "score": 86477.38342016209 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_gap_phys_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_gap_phys_t>(),\n\n 2usize,\n\n concat!(\"Size of: \", stringify!(ble_gap_phys_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gap_phys_t>(),\n\n 1usize,\n\n concat!(\"Alignment of \", stringify!(ble_gap_phys_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_gap_phys_t>())).tx_phys as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_gap_phys_t),\n\n \"::\",\n\n stringify!(tx_phys)\n\n )\n", "file_path": 
"nrf-softdevice-s132/src/bindings.rs", "rank": 96, "score": 86477.38342016209 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_gattc_service_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_gattc_service_t>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ble_gattc_service_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gattc_service_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_gattc_service_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_gattc_service_t>())).uuid as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_gattc_service_t),\n\n \"::\",\n\n stringify!(uuid)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 97, "score": 86477.38342016209 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_gap_opt_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_gap_opt_t>(),\n\n 8usize,\n\n concat!(\"Size of: \", stringify!(ble_gap_opt_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gap_opt_t>(),\n\n 4usize,\n\n concat!(\"Alignment of \", stringify!(ble_gap_opt_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_gap_opt_t>())).ch_map as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_gap_opt_t),\n\n \"::\",\n\n stringify!(ch_map)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 98, "score": 86477.38342016209 }, { "content": "#[test]\n\nfn bindgen_test_layout_ble_gattc_char_t() {\n\n assert_eq!(\n\n ::core::mem::size_of::<ble_gattc_char_t>(),\n\n 10usize,\n\n concat!(\"Size of: \", stringify!(ble_gattc_char_t))\n\n );\n\n assert_eq!(\n\n ::core::mem::align_of::<ble_gattc_char_t>(),\n\n 2usize,\n\n concat!(\"Alignment of \", stringify!(ble_gattc_char_t))\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::core::ptr::null::<ble_gattc_char_t>())).uuid as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(ble_gattc_char_t),\n\n 
\"::\",\n\n stringify!(uuid)\n\n )\n", "file_path": "nrf-softdevice-s132/src/bindings.rs", "rank": 99, "score": 86477.38342016209 } ]